Dataset columns (viewer summary):

| Column | Type | Range |
| --- | --- | --- |
| id | string | lengths 9 to 104 |
| author | string | lengths 3 to 36 |
| task_category | string | 32 classes |
| tags | sequence | lengths 1 to 4.05k |
| created_time | date (unknown dtype) | 2022-03-02 23:29:04 to 2025-03-18 02:34:30 |
| last_modified | date (string) | 2021-02-13 00:06:56 to 2025-03-18 09:30:19 |
| downloads | int64 | 0 to 15.6M |
| likes | int64 | 0 to 4.86k |
| README | string | lengths 44 to 1.01M |
| matched_bigbio_names | sequence | lengths 1 to 8 |
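As a small illustration of working with a dataset that has this schema, here is a minimal sketch using the `datasets` library. The dataset identifier is a hypothetical placeholder, since the dump above does not name the actual repository.

```python
from datasets import load_dataset

# "org/model-card-dump" is a hypothetical placeholder; the dump above does not
# name the actual dataset repository.
ds = load_dataset("org/model-card-dump", split="train")
print(ds.column_names)  # id, author, task_category, tags, created_time, ...

# Example: keep feature-extraction models with at least one million downloads.
subset = ds.filter(
    lambda row: row["task_category"] == "feature-extraction"
    and row["downloads"] >= 1_000_000
)
print(len(subset))
```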
WhereIsAI/UAE-Large-V1
WhereIsAI
feature-extraction
[ "sentence-transformers", "onnx", "safetensors", "openvino", "bert", "feature-extraction", "mteb", "sentence_embedding", "feature_extraction", "transformers", "transformers.js", "en", "arxiv:2309.12871", "license:mit", "model-index", "autotrain_compatible", "text-embeddings-inference", "endpoints_compatible", "region:us" ]
"2023-12-04T02:03:27Z"
2024-12-31T08:00:51+00:00
15,561,625
220
--- language: - en license: mit tags: - mteb - sentence_embedding - feature_extraction - sentence-transformers - transformers - transformers.js model-index: - name: UAE-Large-V1 results: - task: type: Classification dataset: name: MTEB AmazonCounterfactualClassification (en) type: mteb/amazon_counterfactual config: en split: test revision: e8379541af4e31359cca9fbcf4b00f2671dba205 metrics: - type: accuracy value: 75.55223880597015 - type: ap value: 38.264070815317794 - type: f1 value: 69.40977934769845 - task: type: Classification dataset: name: MTEB AmazonPolarityClassification type: mteb/amazon_polarity config: default split: test revision: e2d317d38cd51312af73b3d32a06d1a08b442046 metrics: - type: accuracy value: 92.84267499999999 - type: ap value: 89.57568507997713 - type: f1 value: 92.82590734337774 - task: type: Classification dataset: name: MTEB AmazonReviewsClassification (en) type: mteb/amazon_reviews_multi config: en split: test revision: 1399c76144fd37290681b995c656ef9b2e06e26d metrics: - type: accuracy value: 48.292 - type: f1 value: 47.90257816032778 - task: type: Retrieval dataset: name: MTEB ArguAna type: arguana config: default split: test revision: None metrics: - type: map_at_1 value: 42.105 - type: map_at_10 value: 58.181000000000004 - type: map_at_100 value: 58.653999999999996 - type: map_at_1000 value: 58.657000000000004 - type: map_at_3 value: 54.386 - type: map_at_5 value: 56.757999999999996 - type: mrr_at_1 value: 42.745 - type: mrr_at_10 value: 58.437 - type: mrr_at_100 value: 58.894999999999996 - type: mrr_at_1000 value: 58.897999999999996 - type: mrr_at_3 value: 54.635 - type: mrr_at_5 value: 56.99999999999999 - type: ndcg_at_1 value: 42.105 - type: ndcg_at_10 value: 66.14999999999999 - type: ndcg_at_100 value: 68.048 - type: ndcg_at_1000 value: 68.11399999999999 - type: ndcg_at_3 value: 58.477000000000004 - type: ndcg_at_5 value: 62.768 - type: precision_at_1 value: 42.105 - type: precision_at_10 value: 9.110999999999999 - type: precision_at_100 value: 0.991 - type: precision_at_1000 value: 0.1 - type: precision_at_3 value: 23.447000000000003 - type: precision_at_5 value: 16.159000000000002 - type: recall_at_1 value: 42.105 - type: recall_at_10 value: 91.11 - type: recall_at_100 value: 99.14699999999999 - type: recall_at_1000 value: 99.644 - type: recall_at_3 value: 70.341 - type: recall_at_5 value: 80.797 - task: type: Clustering dataset: name: MTEB ArxivClusteringP2P type: mteb/arxiv-clustering-p2p config: default split: test revision: a122ad7f3f0291bf49cc6f4d32aa80929df69d5d metrics: - type: v_measure value: 49.02580759154173 - task: type: Clustering dataset: name: MTEB ArxivClusteringS2S type: mteb/arxiv-clustering-s2s config: default split: test revision: f910caf1a6075f7329cdf8c1a6135696f37dbd53 metrics: - type: v_measure value: 43.093601280163554 - task: type: Reranking dataset: name: MTEB AskUbuntuDupQuestions type: mteb/askubuntudupquestions-reranking config: default split: test revision: 2000358ca161889fa9c082cb41daa8dcfb161a54 metrics: - type: map value: 64.19590406875427 - type: mrr value: 77.09547992788991 - task: type: STS dataset: name: MTEB BIOSSES type: mteb/biosses-sts config: default split: test revision: d3fb88f8f02e40887cd149695127462bbcf29b4a metrics: - type: cos_sim_pearson value: 87.86678362843676 - type: cos_sim_spearman value: 86.1423242570783 - type: euclidean_pearson value: 85.98994198511751 - type: euclidean_spearman value: 86.48209103503942 - type: manhattan_pearson value: 85.6446436316182 - type: manhattan_spearman value: 
86.21039809734357 - task: type: Classification dataset: name: MTEB Banking77Classification type: mteb/banking77 config: default split: test revision: 0fd18e25b25c072e09e0d92ab615fda904d66300 metrics: - type: accuracy value: 87.69155844155844 - type: f1 value: 87.68109381943547 - task: type: Clustering dataset: name: MTEB BiorxivClusteringP2P type: mteb/biorxiv-clustering-p2p config: default split: test revision: 65b79d1d13f80053f67aca9498d9402c2d9f1f40 metrics: - type: v_measure value: 39.37501687500394 - task: type: Clustering dataset: name: MTEB BiorxivClusteringS2S type: mteb/biorxiv-clustering-s2s config: default split: test revision: 258694dd0231531bc1fd9de6ceb52a0853c6d908 metrics: - type: v_measure value: 37.23401405155885 - task: type: Retrieval dataset: name: MTEB CQADupstackAndroidRetrieval type: BeIR/cqadupstack config: default split: test revision: None metrics: - type: map_at_1 value: 30.232 - type: map_at_10 value: 41.404999999999994 - type: map_at_100 value: 42.896 - type: map_at_1000 value: 43.028 - type: map_at_3 value: 37.925 - type: map_at_5 value: 39.865 - type: mrr_at_1 value: 36.338 - type: mrr_at_10 value: 46.969 - type: mrr_at_100 value: 47.684 - type: mrr_at_1000 value: 47.731 - type: mrr_at_3 value: 44.063 - type: mrr_at_5 value: 45.908 - type: ndcg_at_1 value: 36.338 - type: ndcg_at_10 value: 47.887 - type: ndcg_at_100 value: 53.357 - type: ndcg_at_1000 value: 55.376999999999995 - type: ndcg_at_3 value: 42.588 - type: ndcg_at_5 value: 45.132 - type: precision_at_1 value: 36.338 - type: precision_at_10 value: 9.17 - type: precision_at_100 value: 1.4909999999999999 - type: precision_at_1000 value: 0.196 - type: precision_at_3 value: 20.315 - type: precision_at_5 value: 14.793000000000001 - type: recall_at_1 value: 30.232 - type: recall_at_10 value: 60.67399999999999 - type: recall_at_100 value: 83.628 - type: recall_at_1000 value: 96.209 - type: recall_at_3 value: 45.48 - type: recall_at_5 value: 52.354 - type: map_at_1 value: 32.237 - type: map_at_10 value: 42.829 - type: map_at_100 value: 44.065 - type: map_at_1000 value: 44.199 - type: map_at_3 value: 39.885999999999996 - type: map_at_5 value: 41.55 - type: mrr_at_1 value: 40.064 - type: mrr_at_10 value: 48.611 - type: mrr_at_100 value: 49.245 - type: mrr_at_1000 value: 49.29 - type: mrr_at_3 value: 46.561 - type: mrr_at_5 value: 47.771 - type: ndcg_at_1 value: 40.064 - type: ndcg_at_10 value: 48.388 - type: ndcg_at_100 value: 52.666999999999994 - type: ndcg_at_1000 value: 54.67100000000001 - type: ndcg_at_3 value: 44.504 - type: ndcg_at_5 value: 46.303 - type: precision_at_1 value: 40.064 - type: precision_at_10 value: 9.051 - type: precision_at_100 value: 1.4500000000000002 - type: precision_at_1000 value: 0.193 - type: precision_at_3 value: 21.444 - type: precision_at_5 value: 15.045 - type: recall_at_1 value: 32.237 - type: recall_at_10 value: 57.943999999999996 - type: recall_at_100 value: 75.98700000000001 - type: recall_at_1000 value: 88.453 - type: recall_at_3 value: 46.268 - type: recall_at_5 value: 51.459999999999994 - type: map_at_1 value: 38.797 - type: map_at_10 value: 51.263000000000005 - type: map_at_100 value: 52.333 - type: map_at_1000 value: 52.393 - type: map_at_3 value: 47.936 - type: map_at_5 value: 49.844 - type: mrr_at_1 value: 44.389 - type: mrr_at_10 value: 54.601 - type: mrr_at_100 value: 55.300000000000004 - type: mrr_at_1000 value: 55.333 - type: mrr_at_3 value: 52.068999999999996 - type: mrr_at_5 value: 53.627 - type: ndcg_at_1 value: 44.389 - type: ndcg_at_10 value: 
57.193000000000005 - type: ndcg_at_100 value: 61.307 - type: ndcg_at_1000 value: 62.529 - type: ndcg_at_3 value: 51.607 - type: ndcg_at_5 value: 54.409 - type: precision_at_1 value: 44.389 - type: precision_at_10 value: 9.26 - type: precision_at_100 value: 1.222 - type: precision_at_1000 value: 0.13699999999999998 - type: precision_at_3 value: 23.03 - type: precision_at_5 value: 15.887 - type: recall_at_1 value: 38.797 - type: recall_at_10 value: 71.449 - type: recall_at_100 value: 88.881 - type: recall_at_1000 value: 97.52 - type: recall_at_3 value: 56.503 - type: recall_at_5 value: 63.392 - type: map_at_1 value: 27.291999999999998 - type: map_at_10 value: 35.65 - type: map_at_100 value: 36.689 - type: map_at_1000 value: 36.753 - type: map_at_3 value: 32.995000000000005 - type: map_at_5 value: 34.409 - type: mrr_at_1 value: 29.04 - type: mrr_at_10 value: 37.486000000000004 - type: mrr_at_100 value: 38.394 - type: mrr_at_1000 value: 38.445 - type: mrr_at_3 value: 35.028 - type: mrr_at_5 value: 36.305 - type: ndcg_at_1 value: 29.04 - type: ndcg_at_10 value: 40.613 - type: ndcg_at_100 value: 45.733000000000004 - type: ndcg_at_1000 value: 47.447 - type: ndcg_at_3 value: 35.339999999999996 - type: ndcg_at_5 value: 37.706 - type: precision_at_1 value: 29.04 - type: precision_at_10 value: 6.192 - type: precision_at_100 value: 0.9249999999999999 - type: precision_at_1000 value: 0.11 - type: precision_at_3 value: 14.802000000000001 - type: precision_at_5 value: 10.305 - type: recall_at_1 value: 27.291999999999998 - type: recall_at_10 value: 54.25299999999999 - type: recall_at_100 value: 77.773 - type: recall_at_1000 value: 90.795 - type: recall_at_3 value: 39.731 - type: recall_at_5 value: 45.403999999999996 - type: map_at_1 value: 18.326 - type: map_at_10 value: 26.290999999999997 - type: map_at_100 value: 27.456999999999997 - type: map_at_1000 value: 27.583000000000002 - type: map_at_3 value: 23.578 - type: map_at_5 value: 25.113000000000003 - type: mrr_at_1 value: 22.637 - type: mrr_at_10 value: 31.139 - type: mrr_at_100 value: 32.074999999999996 - type: mrr_at_1000 value: 32.147 - type: mrr_at_3 value: 28.483000000000004 - type: mrr_at_5 value: 29.963 - type: ndcg_at_1 value: 22.637 - type: ndcg_at_10 value: 31.717000000000002 - type: ndcg_at_100 value: 37.201 - type: ndcg_at_1000 value: 40.088 - type: ndcg_at_3 value: 26.686 - type: ndcg_at_5 value: 29.076999999999998 - type: precision_at_1 value: 22.637 - type: precision_at_10 value: 5.7090000000000005 - type: precision_at_100 value: 0.979 - type: precision_at_1000 value: 0.13799999999999998 - type: precision_at_3 value: 12.894 - type: precision_at_5 value: 9.328 - type: recall_at_1 value: 18.326 - type: recall_at_10 value: 43.824999999999996 - type: recall_at_100 value: 67.316 - type: recall_at_1000 value: 87.481 - type: recall_at_3 value: 29.866999999999997 - type: recall_at_5 value: 35.961999999999996 - type: map_at_1 value: 29.875 - type: map_at_10 value: 40.458 - type: map_at_100 value: 41.772 - type: map_at_1000 value: 41.882999999999996 - type: map_at_3 value: 37.086999999999996 - type: map_at_5 value: 39.153 - type: mrr_at_1 value: 36.381 - type: mrr_at_10 value: 46.190999999999995 - type: mrr_at_100 value: 46.983999999999995 - type: mrr_at_1000 value: 47.032000000000004 - type: mrr_at_3 value: 43.486999999999995 - type: mrr_at_5 value: 45.249 - type: ndcg_at_1 value: 36.381 - type: ndcg_at_10 value: 46.602 - type: ndcg_at_100 value: 51.885999999999996 - type: ndcg_at_1000 value: 53.895 - type: ndcg_at_3 value: 41.155 - type: 
ndcg_at_5 value: 44.182 - type: precision_at_1 value: 36.381 - type: precision_at_10 value: 8.402 - type: precision_at_100 value: 1.278 - type: precision_at_1000 value: 0.16199999999999998 - type: precision_at_3 value: 19.346 - type: precision_at_5 value: 14.09 - type: recall_at_1 value: 29.875 - type: recall_at_10 value: 59.065999999999995 - type: recall_at_100 value: 80.923 - type: recall_at_1000 value: 93.927 - type: recall_at_3 value: 44.462 - type: recall_at_5 value: 51.89 - type: map_at_1 value: 24.94 - type: map_at_10 value: 35.125 - type: map_at_100 value: 36.476 - type: map_at_1000 value: 36.579 - type: map_at_3 value: 31.840000000000003 - type: map_at_5 value: 33.647 - type: mrr_at_1 value: 30.936000000000003 - type: mrr_at_10 value: 40.637 - type: mrr_at_100 value: 41.471000000000004 - type: mrr_at_1000 value: 41.525 - type: mrr_at_3 value: 38.013999999999996 - type: mrr_at_5 value: 39.469 - type: ndcg_at_1 value: 30.936000000000003 - type: ndcg_at_10 value: 41.295 - type: ndcg_at_100 value: 46.92 - type: ndcg_at_1000 value: 49.183 - type: ndcg_at_3 value: 35.811 - type: ndcg_at_5 value: 38.306000000000004 - type: precision_at_1 value: 30.936000000000003 - type: precision_at_10 value: 7.728 - type: precision_at_100 value: 1.226 - type: precision_at_1000 value: 0.158 - type: precision_at_3 value: 17.237 - type: precision_at_5 value: 12.42 - type: recall_at_1 value: 24.94 - type: recall_at_10 value: 54.235 - type: recall_at_100 value: 78.314 - type: recall_at_1000 value: 93.973 - type: recall_at_3 value: 38.925 - type: recall_at_5 value: 45.505 - type: map_at_1 value: 26.250833333333333 - type: map_at_10 value: 35.46875 - type: map_at_100 value: 36.667 - type: map_at_1000 value: 36.78025 - type: map_at_3 value: 32.56733333333334 - type: map_at_5 value: 34.20333333333333 - type: mrr_at_1 value: 30.8945 - type: mrr_at_10 value: 39.636833333333335 - type: mrr_at_100 value: 40.46508333333333 - type: mrr_at_1000 value: 40.521249999999995 - type: mrr_at_3 value: 37.140166666666666 - type: mrr_at_5 value: 38.60999999999999 - type: ndcg_at_1 value: 30.8945 - type: ndcg_at_10 value: 40.93441666666667 - type: ndcg_at_100 value: 46.062416666666664 - type: ndcg_at_1000 value: 48.28341666666667 - type: ndcg_at_3 value: 35.97575 - type: ndcg_at_5 value: 38.3785 - type: precision_at_1 value: 30.8945 - type: precision_at_10 value: 7.180250000000001 - type: precision_at_100 value: 1.1468333333333334 - type: precision_at_1000 value: 0.15283333333333332 - type: precision_at_3 value: 16.525583333333334 - type: precision_at_5 value: 11.798333333333332 - type: recall_at_1 value: 26.250833333333333 - type: recall_at_10 value: 52.96108333333333 - type: recall_at_100 value: 75.45908333333334 - type: recall_at_1000 value: 90.73924999999998 - type: recall_at_3 value: 39.25483333333333 - type: recall_at_5 value: 45.37950000000001 - type: map_at_1 value: 24.595 - type: map_at_10 value: 31.747999999999998 - type: map_at_100 value: 32.62 - type: map_at_1000 value: 32.713 - type: map_at_3 value: 29.48 - type: map_at_5 value: 30.635 - type: mrr_at_1 value: 27.607 - type: mrr_at_10 value: 34.449000000000005 - type: mrr_at_100 value: 35.182 - type: mrr_at_1000 value: 35.254000000000005 - type: mrr_at_3 value: 32.413 - type: mrr_at_5 value: 33.372 - type: ndcg_at_1 value: 27.607 - type: ndcg_at_10 value: 36.041000000000004 - type: ndcg_at_100 value: 40.514 - type: ndcg_at_1000 value: 42.851 - type: ndcg_at_3 value: 31.689 - type: ndcg_at_5 value: 33.479 - type: precision_at_1 value: 27.607 - type: precision_at_10 
value: 5.66 - type: precision_at_100 value: 0.868 - type: precision_at_1000 value: 0.11299999999999999 - type: precision_at_3 value: 13.446 - type: precision_at_5 value: 9.264 - type: recall_at_1 value: 24.595 - type: recall_at_10 value: 46.79 - type: recall_at_100 value: 67.413 - type: recall_at_1000 value: 84.753 - type: recall_at_3 value: 34.644999999999996 - type: recall_at_5 value: 39.09 - type: map_at_1 value: 17.333000000000002 - type: map_at_10 value: 24.427 - type: map_at_100 value: 25.576 - type: map_at_1000 value: 25.692999999999998 - type: map_at_3 value: 22.002 - type: map_at_5 value: 23.249 - type: mrr_at_1 value: 20.716 - type: mrr_at_10 value: 28.072000000000003 - type: mrr_at_100 value: 29.067 - type: mrr_at_1000 value: 29.137 - type: mrr_at_3 value: 25.832 - type: mrr_at_5 value: 27.045 - type: ndcg_at_1 value: 20.716 - type: ndcg_at_10 value: 29.109 - type: ndcg_at_100 value: 34.797 - type: ndcg_at_1000 value: 37.503 - type: ndcg_at_3 value: 24.668 - type: ndcg_at_5 value: 26.552999999999997 - type: precision_at_1 value: 20.716 - type: precision_at_10 value: 5.351 - type: precision_at_100 value: 0.955 - type: precision_at_1000 value: 0.136 - type: precision_at_3 value: 11.584999999999999 - type: precision_at_5 value: 8.362 - type: recall_at_1 value: 17.333000000000002 - type: recall_at_10 value: 39.604 - type: recall_at_100 value: 65.525 - type: recall_at_1000 value: 84.651 - type: recall_at_3 value: 27.199 - type: recall_at_5 value: 32.019 - type: map_at_1 value: 26.342 - type: map_at_10 value: 35.349000000000004 - type: map_at_100 value: 36.443 - type: map_at_1000 value: 36.548 - type: map_at_3 value: 32.307 - type: map_at_5 value: 34.164 - type: mrr_at_1 value: 31.063000000000002 - type: mrr_at_10 value: 39.703 - type: mrr_at_100 value: 40.555 - type: mrr_at_1000 value: 40.614 - type: mrr_at_3 value: 37.141999999999996 - type: mrr_at_5 value: 38.812000000000005 - type: ndcg_at_1 value: 31.063000000000002 - type: ndcg_at_10 value: 40.873 - type: ndcg_at_100 value: 45.896 - type: ndcg_at_1000 value: 48.205999999999996 - type: ndcg_at_3 value: 35.522 - type: ndcg_at_5 value: 38.419 - type: precision_at_1 value: 31.063000000000002 - type: precision_at_10 value: 6.866 - type: precision_at_100 value: 1.053 - type: precision_at_1000 value: 0.13699999999999998 - type: precision_at_3 value: 16.014 - type: precision_at_5 value: 11.604000000000001 - type: recall_at_1 value: 26.342 - type: recall_at_10 value: 53.40200000000001 - type: recall_at_100 value: 75.251 - type: recall_at_1000 value: 91.13799999999999 - type: recall_at_3 value: 39.103 - type: recall_at_5 value: 46.357 - type: map_at_1 value: 23.71 - type: map_at_10 value: 32.153999999999996 - type: map_at_100 value: 33.821 - type: map_at_1000 value: 34.034 - type: map_at_3 value: 29.376 - type: map_at_5 value: 30.878 - type: mrr_at_1 value: 28.458 - type: mrr_at_10 value: 36.775999999999996 - type: mrr_at_100 value: 37.804 - type: mrr_at_1000 value: 37.858999999999995 - type: mrr_at_3 value: 34.123999999999995 - type: mrr_at_5 value: 35.596 - type: ndcg_at_1 value: 28.458 - type: ndcg_at_10 value: 37.858999999999995 - type: ndcg_at_100 value: 44.194 - type: ndcg_at_1000 value: 46.744 - type: ndcg_at_3 value: 33.348 - type: ndcg_at_5 value: 35.448 - type: precision_at_1 value: 28.458 - type: precision_at_10 value: 7.4510000000000005 - type: precision_at_100 value: 1.5 - type: precision_at_1000 value: 0.23700000000000002 - type: precision_at_3 value: 15.809999999999999 - type: precision_at_5 value: 11.462 - type: 
recall_at_1 value: 23.71 - type: recall_at_10 value: 48.272999999999996 - type: recall_at_100 value: 77.134 - type: recall_at_1000 value: 93.001 - type: recall_at_3 value: 35.480000000000004 - type: recall_at_5 value: 41.19 - type: map_at_1 value: 21.331 - type: map_at_10 value: 28.926000000000002 - type: map_at_100 value: 29.855999999999998 - type: map_at_1000 value: 29.957 - type: map_at_3 value: 26.395999999999997 - type: map_at_5 value: 27.933000000000003 - type: mrr_at_1 value: 23.105 - type: mrr_at_10 value: 31.008000000000003 - type: mrr_at_100 value: 31.819999999999997 - type: mrr_at_1000 value: 31.887999999999998 - type: mrr_at_3 value: 28.466 - type: mrr_at_5 value: 30.203000000000003 - type: ndcg_at_1 value: 23.105 - type: ndcg_at_10 value: 33.635999999999996 - type: ndcg_at_100 value: 38.277 - type: ndcg_at_1000 value: 40.907 - type: ndcg_at_3 value: 28.791 - type: ndcg_at_5 value: 31.528 - type: precision_at_1 value: 23.105 - type: precision_at_10 value: 5.323 - type: precision_at_100 value: 0.815 - type: precision_at_1000 value: 0.117 - type: precision_at_3 value: 12.384 - type: precision_at_5 value: 9.02 - type: recall_at_1 value: 21.331 - type: recall_at_10 value: 46.018 - type: recall_at_100 value: 67.364 - type: recall_at_1000 value: 86.97 - type: recall_at_3 value: 33.395 - type: recall_at_5 value: 39.931 - task: type: Retrieval dataset: name: MTEB ClimateFEVER type: climate-fever config: default split: test revision: None metrics: - type: map_at_1 value: 17.011000000000003 - type: map_at_10 value: 28.816999999999997 - type: map_at_100 value: 30.761 - type: map_at_1000 value: 30.958000000000002 - type: map_at_3 value: 24.044999999999998 - type: map_at_5 value: 26.557 - type: mrr_at_1 value: 38.696999999999996 - type: mrr_at_10 value: 50.464 - type: mrr_at_100 value: 51.193999999999996 - type: mrr_at_1000 value: 51.219 - type: mrr_at_3 value: 47.339999999999996 - type: mrr_at_5 value: 49.346000000000004 - type: ndcg_at_1 value: 38.696999999999996 - type: ndcg_at_10 value: 38.53 - type: ndcg_at_100 value: 45.525 - type: ndcg_at_1000 value: 48.685 - type: ndcg_at_3 value: 32.282 - type: ndcg_at_5 value: 34.482 - type: precision_at_1 value: 38.696999999999996 - type: precision_at_10 value: 11.895999999999999 - type: precision_at_100 value: 1.95 - type: precision_at_1000 value: 0.254 - type: precision_at_3 value: 24.038999999999998 - type: precision_at_5 value: 18.332 - type: recall_at_1 value: 17.011000000000003 - type: recall_at_10 value: 44.452999999999996 - type: recall_at_100 value: 68.223 - type: recall_at_1000 value: 85.653 - type: recall_at_3 value: 28.784 - type: recall_at_5 value: 35.66 - task: type: Retrieval dataset: name: MTEB DBPedia type: dbpedia-entity config: default split: test revision: None metrics: - type: map_at_1 value: 9.516 - type: map_at_10 value: 21.439 - type: map_at_100 value: 31.517 - type: map_at_1000 value: 33.267 - type: map_at_3 value: 15.004999999999999 - type: map_at_5 value: 17.793999999999997 - type: mrr_at_1 value: 71.25 - type: mrr_at_10 value: 79.071 - type: mrr_at_100 value: 79.325 - type: mrr_at_1000 value: 79.33 - type: mrr_at_3 value: 77.708 - type: mrr_at_5 value: 78.546 - type: ndcg_at_1 value: 58.62500000000001 - type: ndcg_at_10 value: 44.889 - type: ndcg_at_100 value: 50.536 - type: ndcg_at_1000 value: 57.724 - type: ndcg_at_3 value: 49.32 - type: ndcg_at_5 value: 46.775 - type: precision_at_1 value: 71.25 - type: precision_at_10 value: 36.175000000000004 - type: precision_at_100 value: 11.940000000000001 - type: 
precision_at_1000 value: 2.178 - type: precision_at_3 value: 53.583000000000006 - type: precision_at_5 value: 45.550000000000004 - type: recall_at_1 value: 9.516 - type: recall_at_10 value: 27.028000000000002 - type: recall_at_100 value: 57.581 - type: recall_at_1000 value: 80.623 - type: recall_at_3 value: 16.313 - type: recall_at_5 value: 20.674 - task: type: Classification dataset: name: MTEB EmotionClassification type: mteb/emotion config: default split: test revision: 4f58c6b202a23cf9a4da393831edf4f9183cad37 metrics: - type: accuracy value: 51.74999999999999 - type: f1 value: 46.46706502669774 - task: type: Retrieval dataset: name: MTEB FEVER type: fever config: default split: test revision: None metrics: - type: map_at_1 value: 77.266 - type: map_at_10 value: 84.89999999999999 - type: map_at_100 value: 85.109 - type: map_at_1000 value: 85.123 - type: map_at_3 value: 83.898 - type: map_at_5 value: 84.541 - type: mrr_at_1 value: 83.138 - type: mrr_at_10 value: 89.37 - type: mrr_at_100 value: 89.432 - type: mrr_at_1000 value: 89.43299999999999 - type: mrr_at_3 value: 88.836 - type: mrr_at_5 value: 89.21 - type: ndcg_at_1 value: 83.138 - type: ndcg_at_10 value: 88.244 - type: ndcg_at_100 value: 88.98700000000001 - type: ndcg_at_1000 value: 89.21900000000001 - type: ndcg_at_3 value: 86.825 - type: ndcg_at_5 value: 87.636 - type: precision_at_1 value: 83.138 - type: precision_at_10 value: 10.47 - type: precision_at_100 value: 1.1079999999999999 - type: precision_at_1000 value: 0.11499999999999999 - type: precision_at_3 value: 32.933 - type: precision_at_5 value: 20.36 - type: recall_at_1 value: 77.266 - type: recall_at_10 value: 94.063 - type: recall_at_100 value: 96.993 - type: recall_at_1000 value: 98.414 - type: recall_at_3 value: 90.228 - type: recall_at_5 value: 92.328 - task: type: Retrieval dataset: name: MTEB FiQA2018 type: fiqa config: default split: test revision: None metrics: - type: map_at_1 value: 22.319 - type: map_at_10 value: 36.943 - type: map_at_100 value: 38.951 - type: map_at_1000 value: 39.114 - type: map_at_3 value: 32.82 - type: map_at_5 value: 34.945 - type: mrr_at_1 value: 44.135999999999996 - type: mrr_at_10 value: 53.071999999999996 - type: mrr_at_100 value: 53.87 - type: mrr_at_1000 value: 53.90200000000001 - type: mrr_at_3 value: 50.77199999999999 - type: mrr_at_5 value: 52.129999999999995 - type: ndcg_at_1 value: 44.135999999999996 - type: ndcg_at_10 value: 44.836 - type: ndcg_at_100 value: 51.754 - type: ndcg_at_1000 value: 54.36 - type: ndcg_at_3 value: 41.658 - type: ndcg_at_5 value: 42.354 - type: precision_at_1 value: 44.135999999999996 - type: precision_at_10 value: 12.284 - type: precision_at_100 value: 1.952 - type: precision_at_1000 value: 0.242 - type: precision_at_3 value: 27.828999999999997 - type: precision_at_5 value: 20.093 - type: recall_at_1 value: 22.319 - type: recall_at_10 value: 51.528 - type: recall_at_100 value: 76.70700000000001 - type: recall_at_1000 value: 92.143 - type: recall_at_3 value: 38.641 - type: recall_at_5 value: 43.653999999999996 - task: type: Retrieval dataset: name: MTEB HotpotQA type: hotpotqa config: default split: test revision: None metrics: - type: map_at_1 value: 40.182 - type: map_at_10 value: 65.146 - type: map_at_100 value: 66.023 - type: map_at_1000 value: 66.078 - type: map_at_3 value: 61.617999999999995 - type: map_at_5 value: 63.82299999999999 - type: mrr_at_1 value: 80.365 - type: mrr_at_10 value: 85.79 - type: mrr_at_100 value: 85.963 - type: mrr_at_1000 value: 85.968 - type: mrr_at_3 value: 84.952 - 
type: mrr_at_5 value: 85.503 - type: ndcg_at_1 value: 80.365 - type: ndcg_at_10 value: 73.13499999999999 - type: ndcg_at_100 value: 76.133 - type: ndcg_at_1000 value: 77.151 - type: ndcg_at_3 value: 68.255 - type: ndcg_at_5 value: 70.978 - type: precision_at_1 value: 80.365 - type: precision_at_10 value: 15.359 - type: precision_at_100 value: 1.7690000000000001 - type: precision_at_1000 value: 0.19 - type: precision_at_3 value: 44.024 - type: precision_at_5 value: 28.555999999999997 - type: recall_at_1 value: 40.182 - type: recall_at_10 value: 76.793 - type: recall_at_100 value: 88.474 - type: recall_at_1000 value: 95.159 - type: recall_at_3 value: 66.036 - type: recall_at_5 value: 71.391 - task: type: Classification dataset: name: MTEB ImdbClassification type: mteb/imdb config: default split: test revision: 3d86128a09e091d6018b6d26cad27f2739fc2db7 metrics: - type: accuracy value: 92.7796 - type: ap value: 89.24883716810874 - type: f1 value: 92.7706903433313 - task: type: Retrieval dataset: name: MTEB MSMARCO type: msmarco config: default split: dev revision: None metrics: - type: map_at_1 value: 22.016 - type: map_at_10 value: 34.408 - type: map_at_100 value: 35.592 - type: map_at_1000 value: 35.64 - type: map_at_3 value: 30.459999999999997 - type: map_at_5 value: 32.721000000000004 - type: mrr_at_1 value: 22.593 - type: mrr_at_10 value: 34.993 - type: mrr_at_100 value: 36.113 - type: mrr_at_1000 value: 36.156 - type: mrr_at_3 value: 31.101 - type: mrr_at_5 value: 33.364 - type: ndcg_at_1 value: 22.579 - type: ndcg_at_10 value: 41.404999999999994 - type: ndcg_at_100 value: 47.018 - type: ndcg_at_1000 value: 48.211999999999996 - type: ndcg_at_3 value: 33.389 - type: ndcg_at_5 value: 37.425000000000004 - type: precision_at_1 value: 22.579 - type: precision_at_10 value: 6.59 - type: precision_at_100 value: 0.938 - type: precision_at_1000 value: 0.104 - type: precision_at_3 value: 14.241000000000001 - type: precision_at_5 value: 10.59 - type: recall_at_1 value: 22.016 - type: recall_at_10 value: 62.927 - type: recall_at_100 value: 88.72 - type: recall_at_1000 value: 97.80799999999999 - type: recall_at_3 value: 41.229 - type: recall_at_5 value: 50.88 - task: type: Classification dataset: name: MTEB MTOPDomainClassification (en) type: mteb/mtop_domain config: en split: test revision: d80d48c1eb48d3562165c59d59d0034df9fff0bf metrics: - type: accuracy value: 94.01732786137711 - type: f1 value: 93.76353126402202 - task: type: Classification dataset: name: MTEB MTOPIntentClassification (en) type: mteb/mtop_intent config: en split: test revision: ae001d0e6b1228650b7bd1c2c65fb50ad11a8aba metrics: - type: accuracy value: 76.91746466028272 - type: f1 value: 57.715651682646765 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (en) type: mteb/amazon_massive_intent config: en split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 76.5030262273033 - type: f1 value: 74.6693629986121 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (en) type: mteb/amazon_massive_scenario config: en split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 79.74781439139207 - type: f1 value: 79.96684171018774 - task: type: Clustering dataset: name: MTEB MedrxivClusteringP2P type: mteb/medrxiv-clustering-p2p config: default split: test revision: e7a26af6f3ae46b30dde8737f02c07b1505bcc73 metrics: - type: v_measure value: 33.2156206892017 - task: type: Clustering dataset: name: MTEB 
MedrxivClusteringS2S type: mteb/medrxiv-clustering-s2s config: default split: test revision: 35191c8c0dca72d8ff3efcd72aa802307d469663 metrics: - type: v_measure value: 31.180539484816137 - task: type: Reranking dataset: name: MTEB MindSmallReranking type: mteb/mind_small config: default split: test revision: 3bdac13927fdc888b903db93b2ffdbd90b295a69 metrics: - type: map value: 32.51125957874274 - type: mrr value: 33.777037359249995 - task: type: Retrieval dataset: name: MTEB NFCorpus type: nfcorpus config: default split: test revision: None metrics: - type: map_at_1 value: 7.248 - type: map_at_10 value: 15.340000000000002 - type: map_at_100 value: 19.591 - type: map_at_1000 value: 21.187 - type: map_at_3 value: 11.329 - type: map_at_5 value: 13.209999999999999 - type: mrr_at_1 value: 47.678 - type: mrr_at_10 value: 57.493 - type: mrr_at_100 value: 58.038999999999994 - type: mrr_at_1000 value: 58.07 - type: mrr_at_3 value: 55.36600000000001 - type: mrr_at_5 value: 56.635999999999996 - type: ndcg_at_1 value: 46.129999999999995 - type: ndcg_at_10 value: 38.653999999999996 - type: ndcg_at_100 value: 36.288 - type: ndcg_at_1000 value: 44.765 - type: ndcg_at_3 value: 43.553 - type: ndcg_at_5 value: 41.317 - type: precision_at_1 value: 47.368 - type: precision_at_10 value: 28.669 - type: precision_at_100 value: 9.158 - type: precision_at_1000 value: 2.207 - type: precision_at_3 value: 40.97 - type: precision_at_5 value: 35.604 - type: recall_at_1 value: 7.248 - type: recall_at_10 value: 19.46 - type: recall_at_100 value: 37.214000000000006 - type: recall_at_1000 value: 67.64099999999999 - type: recall_at_3 value: 12.025 - type: recall_at_5 value: 15.443999999999999 - task: type: Retrieval dataset: name: MTEB NQ type: nq config: default split: test revision: None metrics: - type: map_at_1 value: 31.595000000000002 - type: map_at_10 value: 47.815999999999995 - type: map_at_100 value: 48.811 - type: map_at_1000 value: 48.835 - type: map_at_3 value: 43.225 - type: map_at_5 value: 46.017 - type: mrr_at_1 value: 35.689 - type: mrr_at_10 value: 50.341 - type: mrr_at_100 value: 51.044999999999995 - type: mrr_at_1000 value: 51.062 - type: mrr_at_3 value: 46.553 - type: mrr_at_5 value: 48.918 - type: ndcg_at_1 value: 35.66 - type: ndcg_at_10 value: 55.859 - type: ndcg_at_100 value: 59.864 - type: ndcg_at_1000 value: 60.419999999999995 - type: ndcg_at_3 value: 47.371 - type: ndcg_at_5 value: 51.995000000000005 - type: precision_at_1 value: 35.66 - type: precision_at_10 value: 9.27 - type: precision_at_100 value: 1.1520000000000001 - type: precision_at_1000 value: 0.12 - type: precision_at_3 value: 21.63 - type: precision_at_5 value: 15.655 - type: recall_at_1 value: 31.595000000000002 - type: recall_at_10 value: 77.704 - type: recall_at_100 value: 94.774 - type: recall_at_1000 value: 98.919 - type: recall_at_3 value: 56.052 - type: recall_at_5 value: 66.623 - task: type: Retrieval dataset: name: MTEB QuoraRetrieval type: quora config: default split: test revision: None metrics: - type: map_at_1 value: 71.489 - type: map_at_10 value: 85.411 - type: map_at_100 value: 86.048 - type: map_at_1000 value: 86.064 - type: map_at_3 value: 82.587 - type: map_at_5 value: 84.339 - type: mrr_at_1 value: 82.28 - type: mrr_at_10 value: 88.27199999999999 - type: mrr_at_100 value: 88.362 - type: mrr_at_1000 value: 88.362 - type: mrr_at_3 value: 87.372 - type: mrr_at_5 value: 87.995 - type: ndcg_at_1 value: 82.27 - type: ndcg_at_10 value: 89.023 - type: ndcg_at_100 value: 90.191 - type: ndcg_at_1000 value: 90.266 - type: 
ndcg_at_3 value: 86.37 - type: ndcg_at_5 value: 87.804 - type: precision_at_1 value: 82.27 - type: precision_at_10 value: 13.469000000000001 - type: precision_at_100 value: 1.533 - type: precision_at_1000 value: 0.157 - type: precision_at_3 value: 37.797 - type: precision_at_5 value: 24.734 - type: recall_at_1 value: 71.489 - type: recall_at_10 value: 95.824 - type: recall_at_100 value: 99.70599999999999 - type: recall_at_1000 value: 99.979 - type: recall_at_3 value: 88.099 - type: recall_at_5 value: 92.285 - task: type: Clustering dataset: name: MTEB RedditClustering type: mteb/reddit-clustering config: default split: test revision: 24640382cdbf8abc73003fb0fa6d111a705499eb metrics: - type: v_measure value: 60.52398807444541 - task: type: Clustering dataset: name: MTEB RedditClusteringP2P type: mteb/reddit-clustering-p2p config: default split: test revision: 282350215ef01743dc01b456c7f5241fa8937f16 metrics: - type: v_measure value: 65.34855891507871 - task: type: Retrieval dataset: name: MTEB SCIDOCS type: scidocs config: default split: test revision: None metrics: - type: map_at_1 value: 5.188000000000001 - type: map_at_10 value: 13.987 - type: map_at_100 value: 16.438 - type: map_at_1000 value: 16.829 - type: map_at_3 value: 9.767000000000001 - type: map_at_5 value: 11.912 - type: mrr_at_1 value: 25.6 - type: mrr_at_10 value: 37.744 - type: mrr_at_100 value: 38.847 - type: mrr_at_1000 value: 38.894 - type: mrr_at_3 value: 34.166999999999994 - type: mrr_at_5 value: 36.207 - type: ndcg_at_1 value: 25.6 - type: ndcg_at_10 value: 22.980999999999998 - type: ndcg_at_100 value: 32.039 - type: ndcg_at_1000 value: 38.157000000000004 - type: ndcg_at_3 value: 21.567 - type: ndcg_at_5 value: 19.070999999999998 - type: precision_at_1 value: 25.6 - type: precision_at_10 value: 12.02 - type: precision_at_100 value: 2.5100000000000002 - type: precision_at_1000 value: 0.396 - type: precision_at_3 value: 20.333000000000002 - type: precision_at_5 value: 16.98 - type: recall_at_1 value: 5.188000000000001 - type: recall_at_10 value: 24.372 - type: recall_at_100 value: 50.934999999999995 - type: recall_at_1000 value: 80.477 - type: recall_at_3 value: 12.363 - type: recall_at_5 value: 17.203 - task: type: STS dataset: name: MTEB SICK-R type: mteb/sickr-sts config: default split: test revision: a6ea5a8cab320b040a23452cc28066d9beae2cee metrics: - type: cos_sim_pearson value: 87.24286275535398 - type: cos_sim_spearman value: 82.62333770991818 - type: euclidean_pearson value: 84.60353717637284 - type: euclidean_spearman value: 82.32990108810047 - type: manhattan_pearson value: 84.6089049738196 - type: manhattan_spearman value: 82.33361785438936 - task: type: STS dataset: name: MTEB STS12 type: mteb/sts12-sts config: default split: test revision: a0d554a64d88156834ff5ae9920b964011b16384 metrics: - type: cos_sim_pearson value: 87.87428858503165 - type: cos_sim_spearman value: 79.09145886519929 - type: euclidean_pearson value: 86.42669231664036 - type: euclidean_spearman value: 80.03127375435449 - type: manhattan_pearson value: 86.41330338305022 - type: manhattan_spearman value: 80.02492538673368 - task: type: STS dataset: name: MTEB STS13 type: mteb/sts13-sts config: default split: test revision: 7e90230a92c190f1bf69ae9002b8cea547a64cca metrics: - type: cos_sim_pearson value: 88.67912277322645 - type: cos_sim_spearman value: 89.6171319711762 - type: euclidean_pearson value: 86.56571917398725 - type: euclidean_spearman value: 87.71216907898948 - type: manhattan_pearson value: 86.57459050182473 - type: 
manhattan_spearman value: 87.71916648349993 - task: type: STS dataset: name: MTEB STS14 type: mteb/sts14-sts config: default split: test revision: 6031580fec1f6af667f0bd2da0a551cf4f0b2375 metrics: - type: cos_sim_pearson value: 86.71957379085862 - type: cos_sim_spearman value: 85.01784075851465 - type: euclidean_pearson value: 84.7407848472801 - type: euclidean_spearman value: 84.61063091345538 - type: manhattan_pearson value: 84.71494352494403 - type: manhattan_spearman value: 84.58772077604254 - task: type: STS dataset: name: MTEB STS15 type: mteb/sts15-sts config: default split: test revision: ae752c7c21bf194d8b67fd573edf7ae58183cbe3 metrics: - type: cos_sim_pearson value: 88.40508326325175 - type: cos_sim_spearman value: 89.50912897763186 - type: euclidean_pearson value: 87.82349070086627 - type: euclidean_spearman value: 88.44179162727521 - type: manhattan_pearson value: 87.80181927025595 - type: manhattan_spearman value: 88.43205129636243 - task: type: STS dataset: name: MTEB STS16 type: mteb/sts16-sts config: default split: test revision: 4d8694f8f0e0100860b497b999b3dbed754a0513 metrics: - type: cos_sim_pearson value: 85.35846741715478 - type: cos_sim_spearman value: 86.61172476741842 - type: euclidean_pearson value: 84.60123125491637 - type: euclidean_spearman value: 85.3001948141827 - type: manhattan_pearson value: 84.56231142658329 - type: manhattan_spearman value: 85.23579900798813 - task: type: STS dataset: name: MTEB STS17 (en-en) type: mteb/sts17-crosslingual-sts config: en-en split: test revision: af5e6fb845001ecf41f4c1e033ce921939a2a68d metrics: - type: cos_sim_pearson value: 88.94539129818824 - type: cos_sim_spearman value: 88.99349064256742 - type: euclidean_pearson value: 88.7142444640351 - type: euclidean_spearman value: 88.34120813505011 - type: manhattan_pearson value: 88.70363008238084 - type: manhattan_spearman value: 88.31952816956954 - task: type: STS dataset: name: MTEB STS22 (en) type: mteb/sts22-crosslingual-sts config: en split: test revision: 6d1ba47164174a496b7fa5d3569dae26a6813b80 metrics: - type: cos_sim_pearson value: 68.29910260369893 - type: cos_sim_spearman value: 68.79263346213466 - type: euclidean_pearson value: 68.41627521422252 - type: euclidean_spearman value: 66.61602587398579 - type: manhattan_pearson value: 68.49402183447361 - type: manhattan_spearman value: 66.80157792354453 - task: type: STS dataset: name: MTEB STSBenchmark type: mteb/stsbenchmark-sts config: default split: test revision: b0fddb56ed78048fa8b90373c8a3cfc37b684831 metrics: - type: cos_sim_pearson value: 87.43703906343708 - type: cos_sim_spearman value: 89.06081805093662 - type: euclidean_pearson value: 87.48311456299662 - type: euclidean_spearman value: 88.07417597580013 - type: manhattan_pearson value: 87.48202249768894 - type: manhattan_spearman value: 88.04758031111642 - task: type: Reranking dataset: name: MTEB SciDocsRR type: mteb/scidocs-reranking config: default split: test revision: d3c5e1fc0b855ab6097bf1cda04dd73947d7caab metrics: - type: map value: 87.49080620485203 - type: mrr value: 96.19145378949301 - task: type: Retrieval dataset: name: MTEB SciFact type: scifact config: default split: test revision: None metrics: - type: map_at_1 value: 59.317 - type: map_at_10 value: 69.296 - type: map_at_100 value: 69.738 - type: map_at_1000 value: 69.759 - type: map_at_3 value: 66.12599999999999 - type: map_at_5 value: 67.532 - type: mrr_at_1 value: 62 - type: mrr_at_10 value: 70.176 - type: mrr_at_100 value: 70.565 - type: mrr_at_1000 value: 70.583 - type: mrr_at_3 value: 
67.833 - type: mrr_at_5 value: 68.93299999999999 - type: ndcg_at_1 value: 62 - type: ndcg_at_10 value: 74.069 - type: ndcg_at_100 value: 76.037 - type: ndcg_at_1000 value: 76.467 - type: ndcg_at_3 value: 68.628 - type: ndcg_at_5 value: 70.57600000000001 - type: precision_at_1 value: 62 - type: precision_at_10 value: 10 - type: precision_at_100 value: 1.097 - type: precision_at_1000 value: 0.11299999999999999 - type: precision_at_3 value: 26.667 - type: precision_at_5 value: 17.4 - type: recall_at_1 value: 59.317 - type: recall_at_10 value: 87.822 - type: recall_at_100 value: 96.833 - type: recall_at_1000 value: 100 - type: recall_at_3 value: 73.06099999999999 - type: recall_at_5 value: 77.928 - task: type: PairClassification dataset: name: MTEB SprintDuplicateQuestions type: mteb/sprintduplicatequestions-pairclassification config: default split: test revision: d66bd1f72af766a5cc4b0ca5e00c162f89e8cc46 metrics: - type: cos_sim_accuracy value: 99.88910891089108 - type: cos_sim_ap value: 97.236958456951 - type: cos_sim_f1 value: 94.39999999999999 - type: cos_sim_precision value: 94.39999999999999 - type: cos_sim_recall value: 94.39999999999999 - type: dot_accuracy value: 99.82574257425742 - type: dot_ap value: 94.94344759441888 - type: dot_f1 value: 91.17352056168507 - type: dot_precision value: 91.44869215291752 - type: dot_recall value: 90.9 - type: euclidean_accuracy value: 99.88415841584158 - type: euclidean_ap value: 97.2044250782305 - type: euclidean_f1 value: 94.210786739238 - type: euclidean_precision value: 93.24191968658178 - type: euclidean_recall value: 95.19999999999999 - type: manhattan_accuracy value: 99.88613861386139 - type: manhattan_ap value: 97.20683205497689 - type: manhattan_f1 value: 94.2643391521197 - type: manhattan_precision value: 94.02985074626866 - type: manhattan_recall value: 94.5 - type: max_accuracy value: 99.88910891089108 - type: max_ap value: 97.236958456951 - type: max_f1 value: 94.39999999999999 - task: type: Clustering dataset: name: MTEB StackExchangeClustering type: mteb/stackexchange-clustering config: default split: test revision: 6cbc1f7b2bc0622f2e39d2c77fa502909748c259 metrics: - type: v_measure value: 66.53940781726187 - task: type: Clustering dataset: name: MTEB StackExchangeClusteringP2P type: mteb/stackexchange-clustering-p2p config: default split: test revision: 815ca46b2622cec33ccafc3735d572c266efdb44 metrics: - type: v_measure value: 36.71865011295108 - task: type: Reranking dataset: name: MTEB StackOverflowDupQuestions type: mteb/stackoverflowdupquestions-reranking config: default split: test revision: e185fbe320c72810689fc5848eb6114e1ef5ec69 metrics: - type: map value: 55.3218674533331 - type: mrr value: 56.28279910449028 - task: type: Summarization dataset: name: MTEB SummEval type: mteb/summeval config: default split: test revision: cda12ad7615edc362dbf25a00fdd61d3b1eaf93c metrics: - type: cos_sim_pearson value: 30.723915667479673 - type: cos_sim_spearman value: 32.029070449745234 - type: dot_pearson value: 28.864944212481454 - type: dot_spearman value: 27.939266999596725 - task: type: Retrieval dataset: name: MTEB TRECCOVID type: trec-covid config: default split: test revision: None metrics: - type: map_at_1 value: 0.231 - type: map_at_10 value: 1.949 - type: map_at_100 value: 10.023 - type: map_at_1000 value: 23.485 - type: map_at_3 value: 0.652 - type: map_at_5 value: 1.054 - type: mrr_at_1 value: 86 - type: mrr_at_10 value: 92.067 - type: mrr_at_100 value: 92.067 - type: mrr_at_1000 value: 92.067 - type: mrr_at_3 value: 91.667 - type: 
mrr_at_5 value: 92.067 - type: ndcg_at_1 value: 83 - type: ndcg_at_10 value: 76.32900000000001 - type: ndcg_at_100 value: 54.662 - type: ndcg_at_1000 value: 48.062 - type: ndcg_at_3 value: 81.827 - type: ndcg_at_5 value: 80.664 - type: precision_at_1 value: 86 - type: precision_at_10 value: 80 - type: precision_at_100 value: 55.48 - type: precision_at_1000 value: 20.938000000000002 - type: precision_at_3 value: 85.333 - type: precision_at_5 value: 84.39999999999999 - type: recall_at_1 value: 0.231 - type: recall_at_10 value: 2.158 - type: recall_at_100 value: 13.344000000000001 - type: recall_at_1000 value: 44.31 - type: recall_at_3 value: 0.6779999999999999 - type: recall_at_5 value: 1.13 - task: type: Retrieval dataset: name: MTEB Touche2020 type: webis-touche2020 config: default split: test revision: None metrics: - type: map_at_1 value: 2.524 - type: map_at_10 value: 10.183 - type: map_at_100 value: 16.625 - type: map_at_1000 value: 18.017 - type: map_at_3 value: 5.169 - type: map_at_5 value: 6.772 - type: mrr_at_1 value: 32.653 - type: mrr_at_10 value: 47.128 - type: mrr_at_100 value: 48.458 - type: mrr_at_1000 value: 48.473 - type: mrr_at_3 value: 44.897999999999996 - type: mrr_at_5 value: 45.306000000000004 - type: ndcg_at_1 value: 30.612000000000002 - type: ndcg_at_10 value: 24.928 - type: ndcg_at_100 value: 37.613 - type: ndcg_at_1000 value: 48.528 - type: ndcg_at_3 value: 28.829 - type: ndcg_at_5 value: 25.237 - type: precision_at_1 value: 32.653 - type: precision_at_10 value: 22.448999999999998 - type: precision_at_100 value: 8.02 - type: precision_at_1000 value: 1.537 - type: precision_at_3 value: 30.612000000000002 - type: precision_at_5 value: 24.490000000000002 - type: recall_at_1 value: 2.524 - type: recall_at_10 value: 16.38 - type: recall_at_100 value: 49.529 - type: recall_at_1000 value: 83.598 - type: recall_at_3 value: 6.411 - type: recall_at_5 value: 8.932 - task: type: Classification dataset: name: MTEB ToxicConversationsClassification type: mteb/toxic_conversations_50k config: default split: test revision: d7c0de2777da35d6aae2200a62c6e0e5af397c4c metrics: - type: accuracy value: 71.09020000000001 - type: ap value: 14.451710060978993 - type: f1 value: 54.7874410609049 - task: type: Classification dataset: name: MTEB TweetSentimentExtractionClassification type: mteb/tweet_sentiment_extraction config: default split: test revision: d604517c81ca91fe16a244d1248fc021f9ecee7a metrics: - type: accuracy value: 59.745331069609506 - type: f1 value: 60.08387848592697 - task: type: Clustering dataset: name: MTEB TwentyNewsgroupsClustering type: mteb/twentynewsgroups-clustering config: default split: test revision: 6125ec4e24fa026cec8a478383ee943acfbd5449 metrics: - type: v_measure value: 51.71549485462037 - task: type: PairClassification dataset: name: MTEB TwitterSemEval2015 type: mteb/twittersemeval2015-pairclassification config: default split: test revision: 70970daeab8776df92f5ea462b6173c0b46fd2d1 metrics: - type: cos_sim_accuracy value: 87.39345532574357 - type: cos_sim_ap value: 78.16796549696478 - type: cos_sim_f1 value: 71.27713276123171 - type: cos_sim_precision value: 68.3115626511853 - type: cos_sim_recall value: 74.51187335092348 - type: dot_accuracy value: 85.12248912201228 - type: dot_ap value: 69.26039256107077 - type: dot_f1 value: 65.04294321240867 - type: dot_precision value: 63.251059586138126 - type: dot_recall value: 66.93931398416886 - type: euclidean_accuracy value: 87.07754664123503 - type: euclidean_ap value: 77.7872176038945 - type: euclidean_f1 value: 
70.85587801278899 - type: euclidean_precision value: 66.3519115614924 - type: euclidean_recall value: 76.01583113456465 - type: manhattan_accuracy value: 87.07754664123503 - type: manhattan_ap value: 77.7341400185556 - type: manhattan_f1 value: 70.80310880829015 - type: manhattan_precision value: 69.54198473282443 - type: manhattan_recall value: 72.1108179419525 - type: max_accuracy value: 87.39345532574357 - type: max_ap value: 78.16796549696478 - type: max_f1 value: 71.27713276123171 - task: type: PairClassification dataset: name: MTEB TwitterURLCorpus type: mteb/twitterurlcorpus-pairclassification config: default split: test revision: 8b6510b0b1fa4e4c4f879467980e9be563ec1cdf metrics: - type: cos_sim_accuracy value: 89.09457833663213 - type: cos_sim_ap value: 86.33024314706873 - type: cos_sim_f1 value: 78.59623733719248 - type: cos_sim_precision value: 74.13322413322413 - type: cos_sim_recall value: 83.63104404065291 - type: dot_accuracy value: 88.3086894089339 - type: dot_ap value: 83.92225241805097 - type: dot_f1 value: 76.8721826377781 - type: dot_precision value: 72.8168044077135 - type: dot_recall value: 81.40591315060055 - type: euclidean_accuracy value: 88.77052043311213 - type: euclidean_ap value: 85.7410710218755 - type: euclidean_f1 value: 77.97705489398781 - type: euclidean_precision value: 73.77713657598241 - type: euclidean_recall value: 82.68401601478288 - type: manhattan_accuracy value: 88.73753250281368 - type: manhattan_ap value: 85.72867199072802 - type: manhattan_f1 value: 77.89774182922812 - type: manhattan_precision value: 74.23787931635857 - type: manhattan_recall value: 81.93717277486911 - type: max_accuracy value: 89.09457833663213 - type: max_ap value: 86.33024314706873 - type: max_f1 value: 78.59623733719248 --- # [Universal AnglE Embedding](https://github.com/SeanLee97/AnglE) 📢 `WhereIsAI/UAE-Large-V1` **is licensed under MIT. Feel free to use it in any scenario.** **If you use it for academic papers, you could cite us via 👉 [citation info](#citation).** **🤝 Follow us on:** - GitHub: https://github.com/SeanLee97/AnglE. - Preprint Paper: [AnglE-optimized Text Embeddings](https://arxiv.org/abs/2309.12871) - Conference Paper: [AoE: Angle-optimized Embeddings for Semantic Textual Similarity](https://aclanthology.org/2024.acl-long.101/) (ACL24) - **📘 Documentation**: https://angle.readthedocs.io/en/latest/index.html Welcome to using AnglE to train and infer powerful sentence embeddings. **🏆 Achievements** - 📅 May 16, 2024 | AnglE's paper is accepted by ACL 2024 Main Conference - 📅 Dec 4, 2024 | 🔥 Our universal English sentence embedding `WhereIsAI/UAE-Large-V1` achieves **SOTA** on the [MTEB Leaderboard](https://huggingface.co/spaces/mteb/leaderboard) with an average score of 64.64! ![image/jpeg](https://cdn-uploads.huggingface.co/production/uploads/635cc29de7aef2358a9b03ee/jY3tr0DCMdyJXOihSqJFr.jpeg) **🧑‍🤝‍🧑 Siblings:** - [WhereIsAI/UAE-Code-Large-V1](https://huggingface.co/WhereIsAI/UAE-Code-Large-V1): This model can be used for code or GitHub issue similarity measurement. # Usage ## 1. angle_emb ```bash python -m pip install -U angle-emb ``` 1) Non-Retrieval Tasks There is no need to specify any prompts. 
```python
from angle_emb import AnglE
from angle_emb.utils import cosine_similarity

angle = AnglE.from_pretrained('WhereIsAI/UAE-Large-V1', pooling_strategy='cls').cuda()
doc_vecs = angle.encode([
    'The weather is great!',
    'The weather is very good!',
    'i am going to bed'
], normalize_embedding=True)

for i, dv1 in enumerate(doc_vecs):
    for dv2 in doc_vecs[i+1:]:
        print(cosine_similarity(dv1, dv2))
```

2) Retrieval Tasks

For retrieval purposes, please use the prompt `Prompts.C` for queries (not for documents).

```python
from angle_emb import AnglE, Prompts
from angle_emb.utils import cosine_similarity

angle = AnglE.from_pretrained('WhereIsAI/UAE-Large-V1', pooling_strategy='cls').cuda()
qv = angle.encode(Prompts.C.format(text='what is the weather?'))
doc_vecs = angle.encode([
    'The weather is great!',
    'it is rainy today.',
    'i am going to bed'
])

for dv in doc_vecs:
    print(cosine_similarity(qv[0], dv))
```

## 2. sentence transformer

```python
from scipy import spatial
from angle_emb import Prompts
from sentence_transformers import SentenceTransformer

model = SentenceTransformer("WhereIsAI/UAE-Large-V1").cuda()
qv = model.encode(Prompts.C.format(text='what is the weather?'))
doc_vecs = model.encode([
    'The weather is great!',
    'it is rainy today.',
    'i am going to bed'
])

for dv in doc_vecs:
    print(1 - spatial.distance.cosine(qv, dv))
```

## 3. Infinity

[Infinity](https://github.com/michaelfeil/infinity) is an MIT-licensed server for OpenAI-compatible deployment.

```
docker run --gpus all -v $PWD/data:/app/.cache -p "7997":"7997" \
michaelf34/infinity:latest \
v2 --model-id WhereIsAI/UAE-Large-V1 --revision "369c368f70f16a613f19f5598d4f12d9f44235d4" --dtype float16 --batch-size 32 --device cuda --engine torch --port 7997
```

# Citation

If you use our pre-trained models, you are welcome to support us by citing our work:

```
@article{li2023angle,
  title={AnglE-optimized Text Embeddings},
  author={Li, Xianming and Li, Jing},
  journal={arXiv preprint arXiv:2309.12871},
  year={2023}
}
```
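As a usage note for the Infinity deployment in section 3 above, here is a minimal sketch of querying the server from Python. The `/embeddings` route and the response shape are assumptions about the OpenAI-compatible interface, not details taken from the Infinity documentation; adjust them to your deployment.

```python
import requests

# Assumed OpenAI-compatible embeddings route on the port exposed by the docker
# command above; adjust the path if your Infinity version serves it elsewhere.
resp = requests.post(
    "http://localhost:7997/embeddings",
    json={
        "model": "WhereIsAI/UAE-Large-V1",
        "input": ["The weather is great!", "it is rainy today."],
    },
    timeout=30,
)
resp.raise_for_status()
embeddings = [item["embedding"] for item in resp.json()["data"]]
print(len(embeddings), len(embeddings[0]))  # number of inputs, embedding dimension
```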
[ "BIOSSES", "SCIFACT" ]
patrickjohncyh/fashion-clip
patrickjohncyh
zero-shot-image-classification
[ "transformers", "pytorch", "onnx", "safetensors", "clip", "zero-shot-image-classification", "vision", "language", "fashion", "ecommerce", "en", "license:mit", "endpoints_compatible", "region:us" ]
"2023-02-21T19:51:47Z"
2024-09-17T15:19:43+00:00
4,122,169
211
---
language:
- en
library_name: transformers
license: mit
tags:
- vision
- language
- fashion
- ecommerce
widget:
- src: https://cdn-images.farfetch-contents.com/19/76/05/56/19760556_44221665_1000.jpg
  candidate_labels: black shoe, red shoe, a cat
  example_title: Black Shoe
---

[![Youtube Video](https://img.shields.io/badge/youtube-video-red)](https://www.youtube.com/watch?v=uqRSc-KSA1Y)
[![HuggingFace Model](https://img.shields.io/badge/HF%20Model-Weights-yellow)](https://huggingface.co/patrickjohncyh/fashion-clip)
[![Open In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/drive/1Z1hAxBnWjF76bEi9KQ6CMBBEmI_FVDrW?usp=sharing)
[![Medium Blog Post](https://raw.githubusercontent.com/aleen42/badges/master/src/medium.svg)](https://towardsdatascience.com/teaching-clip-some-fashion-3005ac3fdcc3)
[![Open in Streamlit](https://static.streamlit.io/badges/streamlit_badge_black_white.svg)](https://huggingface.co/spaces/vinid/fashion-clip-app)

# Model Card: Fashion CLIP

Disclaimer: The model card adapts the model card from [here](https://huggingface.co/openai/clip-vit-base-patch32).

## Model Details

UPDATE (10/03/23): We have updated the model! We found that the [laion/CLIP-ViT-B-32-laion2B-s34B-b79K](https://huggingface.co/laion/CLIP-ViT-B-32-laion2B-s34B-b79K) checkpoint (thanks [Bin](https://www.linkedin.com/in/bin-duan-56205310/)!) worked better than the original OpenAI CLIP on fashion. We thus fine-tuned a newer (and better!) version of FashionCLIP (henceforth FashionCLIP 2.0), while keeping the architecture the same. We postulate that the performance gains afforded by `laion/CLIP-ViT-B-32-laion2B-s34B-b79K` are due to the increased training data (5x the OpenAI CLIP data). Our [thesis](https://www.nature.com/articles/s41598-022-23052-9), however, remains the same -- fine-tuning `laion/CLIP` on our fashion dataset improved zero-shot performance across our benchmarks. See the table below comparing weighted macro F1 scores across models.

| Model | FMNIST | KAGL | DEEP |
| ------------- | ------------- | ------------- | ------------- |
| OpenAI CLIP | 0.66 | 0.63 | 0.45 |
| FashionCLIP | 0.74 | 0.67 | 0.48 |
| Laion CLIP | 0.78 | 0.71 | 0.58 |
| FashionCLIP 2.0 | __0.83__ | __0.73__ | __0.62__ |

---

FashionCLIP is a CLIP-based model developed to produce general product representations for fashion concepts. Leveraging the pre-trained checkpoint (ViT-B/32) released by [OpenAI](https://github.com/openai/CLIP), we train FashionCLIP on a large, high-quality novel fashion dataset to study whether domain-specific fine-tuning of CLIP-like models is sufficient to produce product representations that are zero-shot transferable to entirely new datasets and tasks. FashionCLIP was not developed for model deployment; to deploy it, researchers will first need to carefully study its capabilities in relation to the specific context it is being deployed within.

### Model Date

March 2023

### Model Type

The model uses a ViT-B/32 Transformer architecture as an image encoder and a masked self-attention Transformer as a text encoder. These encoders are trained, starting from a pre-trained checkpoint, to maximize the similarity of (image, text) pairs via a contrastive loss on a fashion dataset containing 800K products.
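Since the card itself does not include a code snippet, here is a minimal sketch of zero-shot classification with the standard `transformers` CLIP API; the image URL and candidate labels mirror the widget example in the metadata above, and the snippet is an illustration rather than an official usage example from the authors.

```python
from PIL import Image
import requests
from transformers import CLIPModel, CLIPProcessor

model = CLIPModel.from_pretrained("patrickjohncyh/fashion-clip")
processor = CLIPProcessor.from_pretrained("patrickjohncyh/fashion-clip")

# Image and candidate labels taken from the widget example in the metadata above.
url = "https://cdn-images.farfetch-contents.com/19/76/05/56/19760556_44221665_1000.jpg"
image = Image.open(requests.get(url, stream=True).raw)
labels = ["black shoe", "red shoe", "a cat"]

inputs = processor(text=labels, images=image, return_tensors="pt", padding=True)
outputs = model(**inputs)
probs = outputs.logits_per_image.softmax(dim=1)  # image-to-text similarity scores
for label, p in zip(labels, probs[0].tolist()):
    print(f"{label}: {p:.3f}")
```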
### Documents

- [FashionCLIP Github Repo](https://github.com/patrickjohncyh/fashion-clip)
- [FashionCLIP Paper](https://www.nature.com/articles/s41598-022-23052-9)

## Data

The model was trained on (image, text) pairs obtained from the Farfetch dataset[^1], an English dataset comprising over 800K fashion products, with more than 3K brands across dozens of object types. The image used for encoding is the standard product image, which is a picture of the item over a white background, with no humans. The text used is a concatenation of the _highlight_ (e.g., “stripes”, “long sleeves”, “Armani”) and _short description_ (“80s styled t-shirt”) available in the Farfetch dataset.

[^1]: Awaiting official release.

## Limitations, Bias and Fairness

We acknowledge certain limitations of FashionCLIP and expect that it inherits certain limitations and biases present in the original CLIP model. We do not expect our fine-tuning to significantly augment these limitations: we acknowledge that the fashion data we use makes explicit assumptions about the notion of gender, as in "blue shoes for a woman", that inevitably associate aspects of clothing with specific people.

Our investigations also suggest that the data used introduces certain limitations in FashionCLIP. From the textual modality, given that most captions derived from the Farfetch dataset are long, we observe that FashionCLIP may be more performant on longer queries than shorter ones. From the image modality, FashionCLIP is also biased towards standard product images (centered, white background).

Model selection, i.e. selecting an appropriate stopping criterion during fine-tuning, remains an open challenge. We observed that using loss on an in-domain (i.e. same distribution as the test set) validation dataset is a poor selection criterion when out-of-domain generalization (i.e. across different datasets) is desired, even when the dataset used is relatively diverse and large.

## Citation

```
@Article{Chia2022,
  title="Contrastive language and vision learning of general fashion concepts",
  author="Chia, Patrick John and Attanasio, Giuseppe and Bianchi, Federico and Terragni, Silvia and Magalh{\~a}es, Ana Rita and Goncalves, Diogo and Greco, Ciro and Tagliabue, Jacopo",
  journal="Scientific Reports",
  year="2022",
  month="Nov",
  day="08",
  volume="12",
  number="1",
  abstract="The steady rise of online shopping goes hand in hand with the development of increasingly complex ML and NLP models. While most use cases are cast as specialized supervised learning problems, we argue that practitioners would greatly benefit from general and transferable representations of products. In this work, we build on recent developments in contrastive learning to train FashionCLIP, a CLIP-like model adapted for the fashion industry. We demonstrate the effectiveness of the representations learned by FashionCLIP with extensive tests across a variety of tasks, datasets and generalization probes. We argue that adaptations of large pre-trained models such as CLIP offer new perspectives in terms of scalability and sustainability for certain types of players in the industry. Finally, we detail the costs and environmental impact of training, and release the model weights and code as open source contribution to the community.",
  issn="2045-2322",
  doi="10.1038/s41598-022-23052-9",
  url="https://doi.org/10.1038/s41598-022-23052-9"
}
```
[ "CHIA" ]
thenlper/gte-small
thenlper
sentence-similarity
[ "sentence-transformers", "pytorch", "tf", "coreml", "onnx", "safetensors", "openvino", "bert", "mteb", "sentence-similarity", "Sentence Transformers", "en", "arxiv:2308.03281", "license:mit", "model-index", "autotrain_compatible", "text-embeddings-inference", "endpoints_compatible", "region:us" ]
"2023-07-27T10:14:55Z"
2024-11-16T08:17:33+00:00
3,841,887
152
--- language: - en license: mit tags: - mteb - sentence-similarity - sentence-transformers - Sentence Transformers model-index: - name: gte-small results: - task: type: Classification dataset: name: MTEB AmazonCounterfactualClassification (en) type: mteb/amazon_counterfactual config: en split: test revision: e8379541af4e31359cca9fbcf4b00f2671dba205 metrics: - type: accuracy value: 73.22388059701493 - type: ap value: 36.09895941426988 - type: f1 value: 67.3205651539195 - task: type: Classification dataset: name: MTEB AmazonPolarityClassification type: mteb/amazon_polarity config: default split: test revision: e2d317d38cd51312af73b3d32a06d1a08b442046 metrics: - type: accuracy value: 91.81894999999999 - type: ap value: 88.5240138417305 - type: f1 value: 91.80367382706962 - task: type: Classification dataset: name: MTEB AmazonReviewsClassification (en) type: mteb/amazon_reviews_multi config: en split: test revision: 1399c76144fd37290681b995c656ef9b2e06e26d metrics: - type: accuracy value: 48.032 - type: f1 value: 47.4490665674719 - task: type: Retrieval dataset: name: MTEB ArguAna type: arguana config: default split: test revision: None metrics: - type: map_at_1 value: 30.725 - type: map_at_10 value: 46.604 - type: map_at_100 value: 47.535 - type: map_at_1000 value: 47.538000000000004 - type: map_at_3 value: 41.833 - type: map_at_5 value: 44.61 - type: mrr_at_1 value: 31.223 - type: mrr_at_10 value: 46.794000000000004 - type: mrr_at_100 value: 47.725 - type: mrr_at_1000 value: 47.727000000000004 - type: mrr_at_3 value: 42.07 - type: mrr_at_5 value: 44.812000000000005 - type: ndcg_at_1 value: 30.725 - type: ndcg_at_10 value: 55.440999999999995 - type: ndcg_at_100 value: 59.134 - type: ndcg_at_1000 value: 59.199 - type: ndcg_at_3 value: 45.599000000000004 - type: ndcg_at_5 value: 50.637 - type: precision_at_1 value: 30.725 - type: precision_at_10 value: 8.364 - type: precision_at_100 value: 0.991 - type: precision_at_1000 value: 0.1 - type: precision_at_3 value: 18.848000000000003 - type: precision_at_5 value: 13.77 - type: recall_at_1 value: 30.725 - type: recall_at_10 value: 83.64200000000001 - type: recall_at_100 value: 99.14699999999999 - type: recall_at_1000 value: 99.644 - type: recall_at_3 value: 56.543 - type: recall_at_5 value: 68.848 - task: type: Clustering dataset: name: MTEB ArxivClusteringP2P type: mteb/arxiv-clustering-p2p config: default split: test revision: a122ad7f3f0291bf49cc6f4d32aa80929df69d5d metrics: - type: v_measure value: 47.90178078197678 - task: type: Clustering dataset: name: MTEB ArxivClusteringS2S type: mteb/arxiv-clustering-s2s config: default split: test revision: f910caf1a6075f7329cdf8c1a6135696f37dbd53 metrics: - type: v_measure value: 40.25728393431922 - task: type: Reranking dataset: name: MTEB AskUbuntuDupQuestions type: mteb/askubuntudupquestions-reranking config: default split: test revision: 2000358ca161889fa9c082cb41daa8dcfb161a54 metrics: - type: map value: 61.720297062897764 - type: mrr value: 75.24139295607439 - task: type: STS dataset: name: MTEB BIOSSES type: mteb/biosses-sts config: default split: test revision: d3fb88f8f02e40887cd149695127462bbcf29b4a metrics: - type: cos_sim_pearson value: 89.43527309184616 - type: cos_sim_spearman value: 88.17128615100206 - type: euclidean_pearson value: 87.89922623089282 - type: euclidean_spearman value: 87.96104039655451 - type: manhattan_pearson value: 87.9818290932077 - type: manhattan_spearman value: 88.00923426576885 - task: type: Classification dataset: name: MTEB Banking77Classification type: 
mteb/banking77 config: default split: test revision: 0fd18e25b25c072e09e0d92ab615fda904d66300 metrics: - type: accuracy value: 84.0844155844156 - type: f1 value: 84.01485017302213 - task: type: Clustering dataset: name: MTEB BiorxivClusteringP2P type: mteb/biorxiv-clustering-p2p config: default split: test revision: 65b79d1d13f80053f67aca9498d9402c2d9f1f40 metrics: - type: v_measure value: 38.36574769259432 - task: type: Clustering dataset: name: MTEB BiorxivClusteringS2S type: mteb/biorxiv-clustering-s2s config: default split: test revision: 258694dd0231531bc1fd9de6ceb52a0853c6d908 metrics: - type: v_measure value: 35.4857033165287 - task: type: Retrieval dataset: name: MTEB CQADupstackAndroidRetrieval type: BeIR/cqadupstack config: default split: test revision: None metrics: - type: map_at_1 value: 30.261 - type: map_at_10 value: 42.419000000000004 - type: map_at_100 value: 43.927 - type: map_at_1000 value: 44.055 - type: map_at_3 value: 38.597 - type: map_at_5 value: 40.701 - type: mrr_at_1 value: 36.91 - type: mrr_at_10 value: 48.02 - type: mrr_at_100 value: 48.658 - type: mrr_at_1000 value: 48.708 - type: mrr_at_3 value: 44.945 - type: mrr_at_5 value: 46.705000000000005 - type: ndcg_at_1 value: 36.91 - type: ndcg_at_10 value: 49.353 - type: ndcg_at_100 value: 54.456 - type: ndcg_at_1000 value: 56.363 - type: ndcg_at_3 value: 43.483 - type: ndcg_at_5 value: 46.150999999999996 - type: precision_at_1 value: 36.91 - type: precision_at_10 value: 9.700000000000001 - type: precision_at_100 value: 1.557 - type: precision_at_1000 value: 0.202 - type: precision_at_3 value: 21.078 - type: precision_at_5 value: 15.421999999999999 - type: recall_at_1 value: 30.261 - type: recall_at_10 value: 63.242 - type: recall_at_100 value: 84.09100000000001 - type: recall_at_1000 value: 96.143 - type: recall_at_3 value: 46.478 - type: recall_at_5 value: 53.708 - type: map_at_1 value: 31.145 - type: map_at_10 value: 40.996 - type: map_at_100 value: 42.266999999999996 - type: map_at_1000 value: 42.397 - type: map_at_3 value: 38.005 - type: map_at_5 value: 39.628 - type: mrr_at_1 value: 38.344 - type: mrr_at_10 value: 46.827000000000005 - type: mrr_at_100 value: 47.446 - type: mrr_at_1000 value: 47.489 - type: mrr_at_3 value: 44.448 - type: mrr_at_5 value: 45.747 - type: ndcg_at_1 value: 38.344 - type: ndcg_at_10 value: 46.733000000000004 - type: ndcg_at_100 value: 51.103 - type: ndcg_at_1000 value: 53.075 - type: ndcg_at_3 value: 42.366 - type: ndcg_at_5 value: 44.242 - type: precision_at_1 value: 38.344 - type: precision_at_10 value: 8.822000000000001 - type: precision_at_100 value: 1.417 - type: precision_at_1000 value: 0.187 - type: precision_at_3 value: 20.403 - type: precision_at_5 value: 14.306 - type: recall_at_1 value: 31.145 - type: recall_at_10 value: 56.909 - type: recall_at_100 value: 75.274 - type: recall_at_1000 value: 87.629 - type: recall_at_3 value: 43.784 - type: recall_at_5 value: 49.338 - type: map_at_1 value: 38.83 - type: map_at_10 value: 51.553000000000004 - type: map_at_100 value: 52.581 - type: map_at_1000 value: 52.638 - type: map_at_3 value: 48.112 - type: map_at_5 value: 50.095 - type: mrr_at_1 value: 44.513999999999996 - type: mrr_at_10 value: 54.998000000000005 - type: mrr_at_100 value: 55.650999999999996 - type: mrr_at_1000 value: 55.679 - type: mrr_at_3 value: 52.602000000000004 - type: mrr_at_5 value: 53.931 - type: ndcg_at_1 value: 44.513999999999996 - type: ndcg_at_10 value: 57.67400000000001 - type: ndcg_at_100 value: 61.663999999999994 - type: ndcg_at_1000 value: 62.743 - 
type: ndcg_at_3 value: 51.964 - type: ndcg_at_5 value: 54.773 - type: precision_at_1 value: 44.513999999999996 - type: precision_at_10 value: 9.423 - type: precision_at_100 value: 1.2309999999999999 - type: precision_at_1000 value: 0.13699999999999998 - type: precision_at_3 value: 23.323 - type: precision_at_5 value: 16.163 - type: recall_at_1 value: 38.83 - type: recall_at_10 value: 72.327 - type: recall_at_100 value: 89.519 - type: recall_at_1000 value: 97.041 - type: recall_at_3 value: 57.206 - type: recall_at_5 value: 63.88399999999999 - type: map_at_1 value: 25.484 - type: map_at_10 value: 34.527 - type: map_at_100 value: 35.661 - type: map_at_1000 value: 35.739 - type: map_at_3 value: 32.199 - type: map_at_5 value: 33.632 - type: mrr_at_1 value: 27.458 - type: mrr_at_10 value: 36.543 - type: mrr_at_100 value: 37.482 - type: mrr_at_1000 value: 37.543 - type: mrr_at_3 value: 34.256 - type: mrr_at_5 value: 35.618 - type: ndcg_at_1 value: 27.458 - type: ndcg_at_10 value: 39.396 - type: ndcg_at_100 value: 44.742 - type: ndcg_at_1000 value: 46.708 - type: ndcg_at_3 value: 34.817 - type: ndcg_at_5 value: 37.247 - type: precision_at_1 value: 27.458 - type: precision_at_10 value: 5.976999999999999 - type: precision_at_100 value: 0.907 - type: precision_at_1000 value: 0.11100000000000002 - type: precision_at_3 value: 14.878 - type: precision_at_5 value: 10.35 - type: recall_at_1 value: 25.484 - type: recall_at_10 value: 52.317 - type: recall_at_100 value: 76.701 - type: recall_at_1000 value: 91.408 - type: recall_at_3 value: 40.043 - type: recall_at_5 value: 45.879 - type: map_at_1 value: 16.719 - type: map_at_10 value: 25.269000000000002 - type: map_at_100 value: 26.442 - type: map_at_1000 value: 26.557 - type: map_at_3 value: 22.56 - type: map_at_5 value: 24.082 - type: mrr_at_1 value: 20.896 - type: mrr_at_10 value: 29.982999999999997 - type: mrr_at_100 value: 30.895 - type: mrr_at_1000 value: 30.961 - type: mrr_at_3 value: 27.239 - type: mrr_at_5 value: 28.787000000000003 - type: ndcg_at_1 value: 20.896 - type: ndcg_at_10 value: 30.814000000000004 - type: ndcg_at_100 value: 36.418 - type: ndcg_at_1000 value: 39.182 - type: ndcg_at_3 value: 25.807999999999996 - type: ndcg_at_5 value: 28.143 - type: precision_at_1 value: 20.896 - type: precision_at_10 value: 5.821 - type: precision_at_100 value: 0.991 - type: precision_at_1000 value: 0.136 - type: precision_at_3 value: 12.562000000000001 - type: precision_at_5 value: 9.254 - type: recall_at_1 value: 16.719 - type: recall_at_10 value: 43.155 - type: recall_at_100 value: 67.831 - type: recall_at_1000 value: 87.617 - type: recall_at_3 value: 29.259 - type: recall_at_5 value: 35.260999999999996 - type: map_at_1 value: 29.398999999999997 - type: map_at_10 value: 39.876 - type: map_at_100 value: 41.205999999999996 - type: map_at_1000 value: 41.321999999999996 - type: map_at_3 value: 36.588 - type: map_at_5 value: 38.538 - type: mrr_at_1 value: 35.9 - type: mrr_at_10 value: 45.528 - type: mrr_at_100 value: 46.343 - type: mrr_at_1000 value: 46.388 - type: mrr_at_3 value: 42.862 - type: mrr_at_5 value: 44.440000000000005 - type: ndcg_at_1 value: 35.9 - type: ndcg_at_10 value: 45.987 - type: ndcg_at_100 value: 51.370000000000005 - type: ndcg_at_1000 value: 53.400000000000006 - type: ndcg_at_3 value: 40.841 - type: ndcg_at_5 value: 43.447 - type: precision_at_1 value: 35.9 - type: precision_at_10 value: 8.393 - type: precision_at_100 value: 1.283 - type: precision_at_1000 value: 0.166 - type: precision_at_3 value: 19.538 - type: precision_at_5 value: 
13.975000000000001 - type: recall_at_1 value: 29.398999999999997 - type: recall_at_10 value: 58.361 - type: recall_at_100 value: 81.081 - type: recall_at_1000 value: 94.004 - type: recall_at_3 value: 43.657000000000004 - type: recall_at_5 value: 50.519999999999996 - type: map_at_1 value: 21.589 - type: map_at_10 value: 31.608999999999998 - type: map_at_100 value: 33.128 - type: map_at_1000 value: 33.247 - type: map_at_3 value: 28.671999999999997 - type: map_at_5 value: 30.233999999999998 - type: mrr_at_1 value: 26.712000000000003 - type: mrr_at_10 value: 36.713 - type: mrr_at_100 value: 37.713 - type: mrr_at_1000 value: 37.771 - type: mrr_at_3 value: 34.075 - type: mrr_at_5 value: 35.451 - type: ndcg_at_1 value: 26.712000000000003 - type: ndcg_at_10 value: 37.519999999999996 - type: ndcg_at_100 value: 43.946000000000005 - type: ndcg_at_1000 value: 46.297 - type: ndcg_at_3 value: 32.551 - type: ndcg_at_5 value: 34.660999999999994 - type: precision_at_1 value: 26.712000000000003 - type: precision_at_10 value: 7.066 - type: precision_at_100 value: 1.216 - type: precision_at_1000 value: 0.157 - type: precision_at_3 value: 15.906 - type: precision_at_5 value: 11.437999999999999 - type: recall_at_1 value: 21.589 - type: recall_at_10 value: 50.090999999999994 - type: recall_at_100 value: 77.43900000000001 - type: recall_at_1000 value: 93.35900000000001 - type: recall_at_3 value: 36.028999999999996 - type: recall_at_5 value: 41.698 - type: map_at_1 value: 25.121666666666663 - type: map_at_10 value: 34.46258333333334 - type: map_at_100 value: 35.710499999999996 - type: map_at_1000 value: 35.82691666666666 - type: map_at_3 value: 31.563249999999996 - type: map_at_5 value: 33.189750000000004 - type: mrr_at_1 value: 29.66441666666667 - type: mrr_at_10 value: 38.5455 - type: mrr_at_100 value: 39.39566666666667 - type: mrr_at_1000 value: 39.45325 - type: mrr_at_3 value: 36.003333333333345 - type: mrr_at_5 value: 37.440916666666666 - type: ndcg_at_1 value: 29.66441666666667 - type: ndcg_at_10 value: 39.978416666666675 - type: ndcg_at_100 value: 45.278666666666666 - type: ndcg_at_1000 value: 47.52275 - type: ndcg_at_3 value: 35.00058333333334 - type: ndcg_at_5 value: 37.34908333333333 - type: precision_at_1 value: 29.66441666666667 - type: precision_at_10 value: 7.094500000000001 - type: precision_at_100 value: 1.1523333333333332 - type: precision_at_1000 value: 0.15358333333333332 - type: precision_at_3 value: 16.184166666666663 - type: precision_at_5 value: 11.6005 - type: recall_at_1 value: 25.121666666666663 - type: recall_at_10 value: 52.23975000000001 - type: recall_at_100 value: 75.48408333333333 - type: recall_at_1000 value: 90.95316666666668 - type: recall_at_3 value: 38.38458333333333 - type: recall_at_5 value: 44.39933333333333 - type: map_at_1 value: 23.569000000000003 - type: map_at_10 value: 30.389 - type: map_at_100 value: 31.396 - type: map_at_1000 value: 31.493 - type: map_at_3 value: 28.276 - type: map_at_5 value: 29.459000000000003 - type: mrr_at_1 value: 26.534000000000002 - type: mrr_at_10 value: 33.217999999999996 - type: mrr_at_100 value: 34.054 - type: mrr_at_1000 value: 34.12 - type: mrr_at_3 value: 31.058000000000003 - type: mrr_at_5 value: 32.330999999999996 - type: ndcg_at_1 value: 26.534000000000002 - type: ndcg_at_10 value: 34.608 - type: ndcg_at_100 value: 39.391999999999996 - type: ndcg_at_1000 value: 41.837999999999994 - type: ndcg_at_3 value: 30.564999999999998 - type: ndcg_at_5 value: 32.509 - type: precision_at_1 value: 26.534000000000002 - type: precision_at_10 value: 
5.414 - type: precision_at_100 value: 0.847 - type: precision_at_1000 value: 0.11399999999999999 - type: precision_at_3 value: 12.986 - type: precision_at_5 value: 9.202 - type: recall_at_1 value: 23.569000000000003 - type: recall_at_10 value: 44.896 - type: recall_at_100 value: 66.476 - type: recall_at_1000 value: 84.548 - type: recall_at_3 value: 33.79 - type: recall_at_5 value: 38.512 - type: map_at_1 value: 16.36 - type: map_at_10 value: 23.57 - type: map_at_100 value: 24.698999999999998 - type: map_at_1000 value: 24.834999999999997 - type: map_at_3 value: 21.093 - type: map_at_5 value: 22.418 - type: mrr_at_1 value: 19.718 - type: mrr_at_10 value: 27.139999999999997 - type: mrr_at_100 value: 28.097 - type: mrr_at_1000 value: 28.177999999999997 - type: mrr_at_3 value: 24.805 - type: mrr_at_5 value: 26.121 - type: ndcg_at_1 value: 19.718 - type: ndcg_at_10 value: 28.238999999999997 - type: ndcg_at_100 value: 33.663 - type: ndcg_at_1000 value: 36.763 - type: ndcg_at_3 value: 23.747 - type: ndcg_at_5 value: 25.796000000000003 - type: precision_at_1 value: 19.718 - type: precision_at_10 value: 5.282 - type: precision_at_100 value: 0.9390000000000001 - type: precision_at_1000 value: 0.13899999999999998 - type: precision_at_3 value: 11.264000000000001 - type: precision_at_5 value: 8.341 - type: recall_at_1 value: 16.36 - type: recall_at_10 value: 38.669 - type: recall_at_100 value: 63.184 - type: recall_at_1000 value: 85.33800000000001 - type: recall_at_3 value: 26.214 - type: recall_at_5 value: 31.423000000000002 - type: map_at_1 value: 25.618999999999996 - type: map_at_10 value: 34.361999999999995 - type: map_at_100 value: 35.534 - type: map_at_1000 value: 35.634 - type: map_at_3 value: 31.402 - type: map_at_5 value: 32.815 - type: mrr_at_1 value: 30.037000000000003 - type: mrr_at_10 value: 38.284 - type: mrr_at_100 value: 39.141999999999996 - type: mrr_at_1000 value: 39.2 - type: mrr_at_3 value: 35.603 - type: mrr_at_5 value: 36.867 - type: ndcg_at_1 value: 30.037000000000003 - type: ndcg_at_10 value: 39.87 - type: ndcg_at_100 value: 45.243 - type: ndcg_at_1000 value: 47.507 - type: ndcg_at_3 value: 34.371 - type: ndcg_at_5 value: 36.521 - type: precision_at_1 value: 30.037000000000003 - type: precision_at_10 value: 6.819 - type: precision_at_100 value: 1.0699999999999998 - type: precision_at_1000 value: 0.13699999999999998 - type: precision_at_3 value: 15.392 - type: precision_at_5 value: 10.821 - type: recall_at_1 value: 25.618999999999996 - type: recall_at_10 value: 52.869 - type: recall_at_100 value: 76.395 - type: recall_at_1000 value: 92.19500000000001 - type: recall_at_3 value: 37.943 - type: recall_at_5 value: 43.342999999999996 - type: map_at_1 value: 23.283 - type: map_at_10 value: 32.155 - type: map_at_100 value: 33.724 - type: map_at_1000 value: 33.939 - type: map_at_3 value: 29.018 - type: map_at_5 value: 30.864000000000004 - type: mrr_at_1 value: 28.063 - type: mrr_at_10 value: 36.632 - type: mrr_at_100 value: 37.606 - type: mrr_at_1000 value: 37.671 - type: mrr_at_3 value: 33.992 - type: mrr_at_5 value: 35.613 - type: ndcg_at_1 value: 28.063 - type: ndcg_at_10 value: 38.024 - type: ndcg_at_100 value: 44.292 - type: ndcg_at_1000 value: 46.818 - type: ndcg_at_3 value: 32.965 - type: ndcg_at_5 value: 35.562 - type: precision_at_1 value: 28.063 - type: precision_at_10 value: 7.352 - type: precision_at_100 value: 1.514 - type: precision_at_1000 value: 0.23800000000000002 - type: precision_at_3 value: 15.481 - type: precision_at_5 value: 11.542 - type: recall_at_1 value: 23.283 - 
type: recall_at_10 value: 49.756 - type: recall_at_100 value: 78.05 - type: recall_at_1000 value: 93.854 - type: recall_at_3 value: 35.408 - type: recall_at_5 value: 42.187000000000005 - type: map_at_1 value: 19.201999999999998 - type: map_at_10 value: 26.826 - type: map_at_100 value: 27.961000000000002 - type: map_at_1000 value: 28.066999999999997 - type: map_at_3 value: 24.237000000000002 - type: map_at_5 value: 25.811 - type: mrr_at_1 value: 20.887 - type: mrr_at_10 value: 28.660000000000004 - type: mrr_at_100 value: 29.660999999999998 - type: mrr_at_1000 value: 29.731 - type: mrr_at_3 value: 26.155 - type: mrr_at_5 value: 27.68 - type: ndcg_at_1 value: 20.887 - type: ndcg_at_10 value: 31.523 - type: ndcg_at_100 value: 37.055 - type: ndcg_at_1000 value: 39.579 - type: ndcg_at_3 value: 26.529000000000003 - type: ndcg_at_5 value: 29.137 - type: precision_at_1 value: 20.887 - type: precision_at_10 value: 5.065 - type: precision_at_100 value: 0.856 - type: precision_at_1000 value: 0.11900000000000001 - type: precision_at_3 value: 11.399 - type: precision_at_5 value: 8.392 - type: recall_at_1 value: 19.201999999999998 - type: recall_at_10 value: 44.285000000000004 - type: recall_at_100 value: 69.768 - type: recall_at_1000 value: 88.302 - type: recall_at_3 value: 30.804 - type: recall_at_5 value: 37.039 - task: type: Retrieval dataset: name: MTEB ClimateFEVER type: climate-fever config: default split: test revision: None metrics: - type: map_at_1 value: 11.244 - type: map_at_10 value: 18.956 - type: map_at_100 value: 20.674 - type: map_at_1000 value: 20.863 - type: map_at_3 value: 15.923000000000002 - type: map_at_5 value: 17.518 - type: mrr_at_1 value: 25.080999999999996 - type: mrr_at_10 value: 35.94 - type: mrr_at_100 value: 36.969 - type: mrr_at_1000 value: 37.013 - type: mrr_at_3 value: 32.617000000000004 - type: mrr_at_5 value: 34.682 - type: ndcg_at_1 value: 25.080999999999996 - type: ndcg_at_10 value: 26.539 - type: ndcg_at_100 value: 33.601 - type: ndcg_at_1000 value: 37.203 - type: ndcg_at_3 value: 21.695999999999998 - type: ndcg_at_5 value: 23.567 - type: precision_at_1 value: 25.080999999999996 - type: precision_at_10 value: 8.143 - type: precision_at_100 value: 1.5650000000000002 - type: precision_at_1000 value: 0.22300000000000003 - type: precision_at_3 value: 15.983 - type: precision_at_5 value: 12.417 - type: recall_at_1 value: 11.244 - type: recall_at_10 value: 31.457 - type: recall_at_100 value: 55.92 - type: recall_at_1000 value: 76.372 - type: recall_at_3 value: 19.784 - type: recall_at_5 value: 24.857000000000003 - task: type: Retrieval dataset: name: MTEB DBPedia type: dbpedia-entity config: default split: test revision: None metrics: - type: map_at_1 value: 8.595 - type: map_at_10 value: 18.75 - type: map_at_100 value: 26.354 - type: map_at_1000 value: 27.912 - type: map_at_3 value: 13.794 - type: map_at_5 value: 16.021 - type: mrr_at_1 value: 65.75 - type: mrr_at_10 value: 73.837 - type: mrr_at_100 value: 74.22800000000001 - type: mrr_at_1000 value: 74.234 - type: mrr_at_3 value: 72.5 - type: mrr_at_5 value: 73.387 - type: ndcg_at_1 value: 52.625 - type: ndcg_at_10 value: 39.101 - type: ndcg_at_100 value: 43.836000000000006 - type: ndcg_at_1000 value: 51.086 - type: ndcg_at_3 value: 44.229 - type: ndcg_at_5 value: 41.555 - type: precision_at_1 value: 65.75 - type: precision_at_10 value: 30.45 - type: precision_at_100 value: 9.81 - type: precision_at_1000 value: 2.045 - type: precision_at_3 value: 48.667 - type: precision_at_5 value: 40.8 - type: recall_at_1 value: 
8.595 - type: recall_at_10 value: 24.201 - type: recall_at_100 value: 50.096 - type: recall_at_1000 value: 72.677 - type: recall_at_3 value: 15.212 - type: recall_at_5 value: 18.745 - task: type: Classification dataset: name: MTEB EmotionClassification type: mteb/emotion config: default split: test revision: 4f58c6b202a23cf9a4da393831edf4f9183cad37 metrics: - type: accuracy value: 46.565 - type: f1 value: 41.49914329345582 - task: type: Retrieval dataset: name: MTEB FEVER type: fever config: default split: test revision: None metrics: - type: map_at_1 value: 66.60000000000001 - type: map_at_10 value: 76.838 - type: map_at_100 value: 77.076 - type: map_at_1000 value: 77.09 - type: map_at_3 value: 75.545 - type: map_at_5 value: 76.39 - type: mrr_at_1 value: 71.707 - type: mrr_at_10 value: 81.514 - type: mrr_at_100 value: 81.64099999999999 - type: mrr_at_1000 value: 81.645 - type: mrr_at_3 value: 80.428 - type: mrr_at_5 value: 81.159 - type: ndcg_at_1 value: 71.707 - type: ndcg_at_10 value: 81.545 - type: ndcg_at_100 value: 82.477 - type: ndcg_at_1000 value: 82.73899999999999 - type: ndcg_at_3 value: 79.292 - type: ndcg_at_5 value: 80.599 - type: precision_at_1 value: 71.707 - type: precision_at_10 value: 10.035 - type: precision_at_100 value: 1.068 - type: precision_at_1000 value: 0.11100000000000002 - type: precision_at_3 value: 30.918 - type: precision_at_5 value: 19.328 - type: recall_at_1 value: 66.60000000000001 - type: recall_at_10 value: 91.353 - type: recall_at_100 value: 95.21 - type: recall_at_1000 value: 96.89999999999999 - type: recall_at_3 value: 85.188 - type: recall_at_5 value: 88.52 - task: type: Retrieval dataset: name: MTEB FiQA2018 type: fiqa config: default split: test revision: None metrics: - type: map_at_1 value: 19.338 - type: map_at_10 value: 31.752000000000002 - type: map_at_100 value: 33.516 - type: map_at_1000 value: 33.694 - type: map_at_3 value: 27.716 - type: map_at_5 value: 29.67 - type: mrr_at_1 value: 38.117000000000004 - type: mrr_at_10 value: 47.323 - type: mrr_at_100 value: 48.13 - type: mrr_at_1000 value: 48.161 - type: mrr_at_3 value: 45.062000000000005 - type: mrr_at_5 value: 46.358 - type: ndcg_at_1 value: 38.117000000000004 - type: ndcg_at_10 value: 39.353 - type: ndcg_at_100 value: 46.044000000000004 - type: ndcg_at_1000 value: 49.083 - type: ndcg_at_3 value: 35.891 - type: ndcg_at_5 value: 36.661 - type: precision_at_1 value: 38.117000000000004 - type: precision_at_10 value: 11.187999999999999 - type: precision_at_100 value: 1.802 - type: precision_at_1000 value: 0.234 - type: precision_at_3 value: 24.126 - type: precision_at_5 value: 17.562 - type: recall_at_1 value: 19.338 - type: recall_at_10 value: 45.735 - type: recall_at_100 value: 71.281 - type: recall_at_1000 value: 89.537 - type: recall_at_3 value: 32.525 - type: recall_at_5 value: 37.671 - task: type: Retrieval dataset: name: MTEB HotpotQA type: hotpotqa config: default split: test revision: None metrics: - type: map_at_1 value: 36.995 - type: map_at_10 value: 55.032000000000004 - type: map_at_100 value: 55.86 - type: map_at_1000 value: 55.932 - type: map_at_3 value: 52.125 - type: map_at_5 value: 53.884 - type: mrr_at_1 value: 73.991 - type: mrr_at_10 value: 80.096 - type: mrr_at_100 value: 80.32000000000001 - type: mrr_at_1000 value: 80.331 - type: mrr_at_3 value: 79.037 - type: mrr_at_5 value: 79.719 - type: ndcg_at_1 value: 73.991 - type: ndcg_at_10 value: 63.786 - type: ndcg_at_100 value: 66.78 - type: ndcg_at_1000 value: 68.255 - type: ndcg_at_3 value: 59.501000000000005 - type: 
ndcg_at_5 value: 61.82299999999999 - type: precision_at_1 value: 73.991 - type: precision_at_10 value: 13.157 - type: precision_at_100 value: 1.552 - type: precision_at_1000 value: 0.17500000000000002 - type: precision_at_3 value: 37.519999999999996 - type: precision_at_5 value: 24.351 - type: recall_at_1 value: 36.995 - type: recall_at_10 value: 65.78699999999999 - type: recall_at_100 value: 77.583 - type: recall_at_1000 value: 87.421 - type: recall_at_3 value: 56.279999999999994 - type: recall_at_5 value: 60.878 - task: type: Classification dataset: name: MTEB ImdbClassification type: mteb/imdb config: default split: test revision: 3d86128a09e091d6018b6d26cad27f2739fc2db7 metrics: - type: accuracy value: 86.80239999999999 - type: ap value: 81.97305141128378 - type: f1 value: 86.76976305549273 - task: type: Retrieval dataset: name: MTEB MSMARCO type: msmarco config: default split: dev revision: None metrics: - type: map_at_1 value: 21.166 - type: map_at_10 value: 33.396 - type: map_at_100 value: 34.588 - type: map_at_1000 value: 34.637 - type: map_at_3 value: 29.509999999999998 - type: map_at_5 value: 31.719 - type: mrr_at_1 value: 21.762 - type: mrr_at_10 value: 33.969 - type: mrr_at_100 value: 35.099000000000004 - type: mrr_at_1000 value: 35.141 - type: mrr_at_3 value: 30.148000000000003 - type: mrr_at_5 value: 32.324000000000005 - type: ndcg_at_1 value: 21.776999999999997 - type: ndcg_at_10 value: 40.306999999999995 - type: ndcg_at_100 value: 46.068 - type: ndcg_at_1000 value: 47.3 - type: ndcg_at_3 value: 32.416 - type: ndcg_at_5 value: 36.345 - type: precision_at_1 value: 21.776999999999997 - type: precision_at_10 value: 6.433 - type: precision_at_100 value: 0.932 - type: precision_at_1000 value: 0.104 - type: precision_at_3 value: 13.897 - type: precision_at_5 value: 10.324 - type: recall_at_1 value: 21.166 - type: recall_at_10 value: 61.587 - type: recall_at_100 value: 88.251 - type: recall_at_1000 value: 97.727 - type: recall_at_3 value: 40.196 - type: recall_at_5 value: 49.611 - task: type: Classification dataset: name: MTEB MTOPDomainClassification (en) type: mteb/mtop_domain config: en split: test revision: d80d48c1eb48d3562165c59d59d0034df9fff0bf metrics: - type: accuracy value: 93.04605563155496 - type: f1 value: 92.78007303978372 - task: type: Classification dataset: name: MTEB MTOPIntentClassification (en) type: mteb/mtop_intent config: en split: test revision: ae001d0e6b1228650b7bd1c2c65fb50ad11a8aba metrics: - type: accuracy value: 69.65116279069767 - type: f1 value: 52.75775172527262 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (en) type: mteb/amazon_massive_intent config: en split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 70.34633490248822 - type: f1 value: 68.15345065392562 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (en) type: mteb/amazon_massive_scenario config: en split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 75.63887020847343 - type: f1 value: 76.08074680233685 - task: type: Clustering dataset: name: MTEB MedrxivClusteringP2P type: mteb/medrxiv-clustering-p2p config: default split: test revision: e7a26af6f3ae46b30dde8737f02c07b1505bcc73 metrics: - type: v_measure value: 33.77933406071333 - task: type: Clustering dataset: name: MTEB MedrxivClusteringS2S type: mteb/medrxiv-clustering-s2s config: default split: test revision: 35191c8c0dca72d8ff3efcd72aa802307d469663 metrics: - type: v_measure value: 
32.06504927238196 - task: type: Reranking dataset: name: MTEB MindSmallReranking type: mteb/mind_small config: default split: test revision: 3bdac13927fdc888b903db93b2ffdbd90b295a69 metrics: - type: map value: 32.20682480490871 - type: mrr value: 33.41462721527003 - task: type: Retrieval dataset: name: MTEB NFCorpus type: nfcorpus config: default split: test revision: None metrics: - type: map_at_1 value: 5.548 - type: map_at_10 value: 13.086999999999998 - type: map_at_100 value: 16.698 - type: map_at_1000 value: 18.151999999999997 - type: map_at_3 value: 9.576 - type: map_at_5 value: 11.175 - type: mrr_at_1 value: 44.272 - type: mrr_at_10 value: 53.635999999999996 - type: mrr_at_100 value: 54.228 - type: mrr_at_1000 value: 54.26499999999999 - type: mrr_at_3 value: 51.754 - type: mrr_at_5 value: 53.086 - type: ndcg_at_1 value: 42.724000000000004 - type: ndcg_at_10 value: 34.769 - type: ndcg_at_100 value: 32.283 - type: ndcg_at_1000 value: 40.843 - type: ndcg_at_3 value: 39.852 - type: ndcg_at_5 value: 37.858999999999995 - type: precision_at_1 value: 44.272 - type: precision_at_10 value: 26.068 - type: precision_at_100 value: 8.328000000000001 - type: precision_at_1000 value: 2.1 - type: precision_at_3 value: 37.874 - type: precision_at_5 value: 33.065 - type: recall_at_1 value: 5.548 - type: recall_at_10 value: 16.936999999999998 - type: recall_at_100 value: 33.72 - type: recall_at_1000 value: 64.348 - type: recall_at_3 value: 10.764999999999999 - type: recall_at_5 value: 13.361 - task: type: Retrieval dataset: name: MTEB NQ type: nq config: default split: test revision: None metrics: - type: map_at_1 value: 28.008 - type: map_at_10 value: 42.675000000000004 - type: map_at_100 value: 43.85 - type: map_at_1000 value: 43.884 - type: map_at_3 value: 38.286 - type: map_at_5 value: 40.78 - type: mrr_at_1 value: 31.518 - type: mrr_at_10 value: 45.015 - type: mrr_at_100 value: 45.924 - type: mrr_at_1000 value: 45.946999999999996 - type: mrr_at_3 value: 41.348 - type: mrr_at_5 value: 43.428 - type: ndcg_at_1 value: 31.489 - type: ndcg_at_10 value: 50.285999999999994 - type: ndcg_at_100 value: 55.291999999999994 - type: ndcg_at_1000 value: 56.05 - type: ndcg_at_3 value: 41.976 - type: ndcg_at_5 value: 46.103 - type: precision_at_1 value: 31.489 - type: precision_at_10 value: 8.456 - type: precision_at_100 value: 1.125 - type: precision_at_1000 value: 0.12 - type: precision_at_3 value: 19.09 - type: precision_at_5 value: 13.841000000000001 - type: recall_at_1 value: 28.008 - type: recall_at_10 value: 71.21499999999999 - type: recall_at_100 value: 92.99 - type: recall_at_1000 value: 98.578 - type: recall_at_3 value: 49.604 - type: recall_at_5 value: 59.094 - task: type: Retrieval dataset: name: MTEB QuoraRetrieval type: quora config: default split: test revision: None metrics: - type: map_at_1 value: 70.351 - type: map_at_10 value: 84.163 - type: map_at_100 value: 84.785 - type: map_at_1000 value: 84.801 - type: map_at_3 value: 81.16 - type: map_at_5 value: 83.031 - type: mrr_at_1 value: 80.96 - type: mrr_at_10 value: 87.241 - type: mrr_at_100 value: 87.346 - type: mrr_at_1000 value: 87.347 - type: mrr_at_3 value: 86.25699999999999 - type: mrr_at_5 value: 86.907 - type: ndcg_at_1 value: 80.97 - type: ndcg_at_10 value: 88.017 - type: ndcg_at_100 value: 89.241 - type: ndcg_at_1000 value: 89.34299999999999 - type: ndcg_at_3 value: 85.053 - type: ndcg_at_5 value: 86.663 - type: precision_at_1 value: 80.97 - type: precision_at_10 value: 13.358 - type: precision_at_100 value: 1.525 - type: 
precision_at_1000 value: 0.157 - type: precision_at_3 value: 37.143 - type: precision_at_5 value: 24.451999999999998 - type: recall_at_1 value: 70.351 - type: recall_at_10 value: 95.39800000000001 - type: recall_at_100 value: 99.55199999999999 - type: recall_at_1000 value: 99.978 - type: recall_at_3 value: 86.913 - type: recall_at_5 value: 91.448 - task: type: Clustering dataset: name: MTEB RedditClustering type: mteb/reddit-clustering config: default split: test revision: 24640382cdbf8abc73003fb0fa6d111a705499eb metrics: - type: v_measure value: 55.62406719814139 - task: type: Clustering dataset: name: MTEB RedditClusteringP2P type: mteb/reddit-clustering-p2p config: default split: test revision: 282350215ef01743dc01b456c7f5241fa8937f16 metrics: - type: v_measure value: 61.386700035141736 - task: type: Retrieval dataset: name: MTEB SCIDOCS type: scidocs config: default split: test revision: None metrics: - type: map_at_1 value: 4.618 - type: map_at_10 value: 12.920000000000002 - type: map_at_100 value: 15.304 - type: map_at_1000 value: 15.656999999999998 - type: map_at_3 value: 9.187 - type: map_at_5 value: 10.937 - type: mrr_at_1 value: 22.8 - type: mrr_at_10 value: 35.13 - type: mrr_at_100 value: 36.239 - type: mrr_at_1000 value: 36.291000000000004 - type: mrr_at_3 value: 31.917 - type: mrr_at_5 value: 33.787 - type: ndcg_at_1 value: 22.8 - type: ndcg_at_10 value: 21.382 - type: ndcg_at_100 value: 30.257 - type: ndcg_at_1000 value: 36.001 - type: ndcg_at_3 value: 20.43 - type: ndcg_at_5 value: 17.622 - type: precision_at_1 value: 22.8 - type: precision_at_10 value: 11.26 - type: precision_at_100 value: 2.405 - type: precision_at_1000 value: 0.377 - type: precision_at_3 value: 19.633 - type: precision_at_5 value: 15.68 - type: recall_at_1 value: 4.618 - type: recall_at_10 value: 22.811999999999998 - type: recall_at_100 value: 48.787000000000006 - type: recall_at_1000 value: 76.63799999999999 - type: recall_at_3 value: 11.952 - type: recall_at_5 value: 15.892000000000001 - task: type: STS dataset: name: MTEB SICK-R type: mteb/sickr-sts config: default split: test revision: a6ea5a8cab320b040a23452cc28066d9beae2cee metrics: - type: cos_sim_pearson value: 84.01529458252244 - type: cos_sim_spearman value: 77.92985224770254 - type: euclidean_pearson value: 81.04251429422487 - type: euclidean_spearman value: 77.92838490549133 - type: manhattan_pearson value: 80.95892251458979 - type: manhattan_spearman value: 77.81028089705941 - task: type: STS dataset: name: MTEB STS12 type: mteb/sts12-sts config: default split: test revision: a0d554a64d88156834ff5ae9920b964011b16384 metrics: - type: cos_sim_pearson value: 83.97885282534388 - type: cos_sim_spearman value: 75.1221970851712 - type: euclidean_pearson value: 80.34455956720097 - type: euclidean_spearman value: 74.5894274239938 - type: manhattan_pearson value: 80.38999766325465 - type: manhattan_spearman value: 74.68524557166975 - task: type: STS dataset: name: MTEB STS13 type: mteb/sts13-sts config: default split: test revision: 7e90230a92c190f1bf69ae9002b8cea547a64cca metrics: - type: cos_sim_pearson value: 82.95746064915672 - type: cos_sim_spearman value: 85.08683458043946 - type: euclidean_pearson value: 84.56699492836385 - type: euclidean_spearman value: 85.66089116133713 - type: manhattan_pearson value: 84.47553323458541 - type: manhattan_spearman value: 85.56142206781472 - task: type: STS dataset: name: MTEB STS14 type: mteb/sts14-sts config: default split: test revision: 6031580fec1f6af667f0bd2da0a551cf4f0b2375 metrics: - type: 
cos_sim_pearson value: 82.71377893595067 - type: cos_sim_spearman value: 81.03453291428589 - type: euclidean_pearson value: 82.57136298308613 - type: euclidean_spearman value: 81.15839961890875 - type: manhattan_pearson value: 82.55157879373837 - type: manhattan_spearman value: 81.1540163767054 - task: type: STS dataset: name: MTEB STS15 type: mteb/sts15-sts config: default split: test revision: ae752c7c21bf194d8b67fd573edf7ae58183cbe3 metrics: - type: cos_sim_pearson value: 86.64197832372373 - type: cos_sim_spearman value: 88.31966852492485 - type: euclidean_pearson value: 87.98692129976983 - type: euclidean_spearman value: 88.6247340837856 - type: manhattan_pearson value: 87.90437827826412 - type: manhattan_spearman value: 88.56278787131457 - task: type: STS dataset: name: MTEB STS16 type: mteb/sts16-sts config: default split: test revision: 4d8694f8f0e0100860b497b999b3dbed754a0513 metrics: - type: cos_sim_pearson value: 81.84159950146693 - type: cos_sim_spearman value: 83.90678384140168 - type: euclidean_pearson value: 83.19005018860221 - type: euclidean_spearman value: 84.16260415876295 - type: manhattan_pearson value: 83.05030612994494 - type: manhattan_spearman value: 83.99605629718336 - task: type: STS dataset: name: MTEB STS17 (en-en) type: mteb/sts17-crosslingual-sts config: en-en split: test revision: af5e6fb845001ecf41f4c1e033ce921939a2a68d metrics: - type: cos_sim_pearson value: 87.49935350176666 - type: cos_sim_spearman value: 87.59086606735383 - type: euclidean_pearson value: 88.06537181129983 - type: euclidean_spearman value: 87.6687448086014 - type: manhattan_pearson value: 87.96599131972935 - type: manhattan_spearman value: 87.63295748969642 - task: type: STS dataset: name: MTEB STS22 (en) type: mteb/sts22-crosslingual-sts config: en split: test revision: 6d1ba47164174a496b7fa5d3569dae26a6813b80 metrics: - type: cos_sim_pearson value: 67.68232799482763 - type: cos_sim_spearman value: 67.99930378085793 - type: euclidean_pearson value: 68.50275360001696 - type: euclidean_spearman value: 67.81588179309259 - type: manhattan_pearson value: 68.5892154749763 - type: manhattan_spearman value: 67.84357259640682 - task: type: STS dataset: name: MTEB STSBenchmark type: mteb/stsbenchmark-sts config: default split: test revision: b0fddb56ed78048fa8b90373c8a3cfc37b684831 metrics: - type: cos_sim_pearson value: 84.37049618406554 - type: cos_sim_spearman value: 85.57014313159492 - type: euclidean_pearson value: 85.57469513908282 - type: euclidean_spearman value: 85.661948135258 - type: manhattan_pearson value: 85.36866831229028 - type: manhattan_spearman value: 85.5043455368843 - task: type: Reranking dataset: name: MTEB SciDocsRR type: mteb/scidocs-reranking config: default split: test revision: d3c5e1fc0b855ab6097bf1cda04dd73947d7caab metrics: - type: map value: 84.83259065376154 - type: mrr value: 95.58455433455433 - task: type: Retrieval dataset: name: MTEB SciFact type: scifact config: default split: test revision: None metrics: - type: map_at_1 value: 58.817 - type: map_at_10 value: 68.459 - type: map_at_100 value: 68.951 - type: map_at_1000 value: 68.979 - type: map_at_3 value: 65.791 - type: map_at_5 value: 67.583 - type: mrr_at_1 value: 61.667 - type: mrr_at_10 value: 69.368 - type: mrr_at_100 value: 69.721 - type: mrr_at_1000 value: 69.744 - type: mrr_at_3 value: 67.278 - type: mrr_at_5 value: 68.611 - type: ndcg_at_1 value: 61.667 - type: ndcg_at_10 value: 72.70100000000001 - type: ndcg_at_100 value: 74.928 - type: ndcg_at_1000 value: 75.553 - type: ndcg_at_3 value: 68.203 - 
type: ndcg_at_5 value: 70.804 - type: precision_at_1 value: 61.667 - type: precision_at_10 value: 9.533 - type: precision_at_100 value: 1.077 - type: precision_at_1000 value: 0.11299999999999999 - type: precision_at_3 value: 26.444000000000003 - type: precision_at_5 value: 17.599999999999998 - type: recall_at_1 value: 58.817 - type: recall_at_10 value: 84.789 - type: recall_at_100 value: 95.0 - type: recall_at_1000 value: 99.667 - type: recall_at_3 value: 72.8 - type: recall_at_5 value: 79.294 - task: type: PairClassification dataset: name: MTEB SprintDuplicateQuestions type: mteb/sprintduplicatequestions-pairclassification config: default split: test revision: d66bd1f72af766a5cc4b0ca5e00c162f89e8cc46 metrics: - type: cos_sim_accuracy value: 99.8108910891089 - type: cos_sim_ap value: 95.5743678558349 - type: cos_sim_f1 value: 90.43133366385722 - type: cos_sim_precision value: 89.67551622418878 - type: cos_sim_recall value: 91.2 - type: dot_accuracy value: 99.75841584158415 - type: dot_ap value: 94.00786363627253 - type: dot_f1 value: 87.51910341314316 - type: dot_precision value: 89.20041536863967 - type: dot_recall value: 85.9 - type: euclidean_accuracy value: 99.81485148514851 - type: euclidean_ap value: 95.4752113136905 - type: euclidean_f1 value: 90.44334975369456 - type: euclidean_precision value: 89.126213592233 - type: euclidean_recall value: 91.8 - type: manhattan_accuracy value: 99.81584158415842 - type: manhattan_ap value: 95.5163172682464 - type: manhattan_f1 value: 90.51987767584097 - type: manhattan_precision value: 92.3076923076923 - type: manhattan_recall value: 88.8 - type: max_accuracy value: 99.81584158415842 - type: max_ap value: 95.5743678558349 - type: max_f1 value: 90.51987767584097 - task: type: Clustering dataset: name: MTEB StackExchangeClustering type: mteb/stackexchange-clustering config: default split: test revision: 6cbc1f7b2bc0622f2e39d2c77fa502909748c259 metrics: - type: v_measure value: 62.63235986949449 - task: type: Clustering dataset: name: MTEB StackExchangeClusteringP2P type: mteb/stackexchange-clustering-p2p config: default split: test revision: 815ca46b2622cec33ccafc3735d572c266efdb44 metrics: - type: v_measure value: 36.334795589585575 - task: type: Reranking dataset: name: MTEB StackOverflowDupQuestions type: mteb/stackoverflowdupquestions-reranking config: default split: test revision: e185fbe320c72810689fc5848eb6114e1ef5ec69 metrics: - type: map value: 52.02955214518782 - type: mrr value: 52.8004838298956 - task: type: Summarization dataset: name: MTEB SummEval type: mteb/summeval config: default split: test revision: cda12ad7615edc362dbf25a00fdd61d3b1eaf93c metrics: - type: cos_sim_pearson value: 30.63769566275453 - type: cos_sim_spearman value: 30.422379185989335 - type: dot_pearson value: 26.88493071882256 - type: dot_spearman value: 26.505249740971305 - task: type: Retrieval dataset: name: MTEB TRECCOVID type: trec-covid config: default split: test revision: None metrics: - type: map_at_1 value: 0.21 - type: map_at_10 value: 1.654 - type: map_at_100 value: 10.095 - type: map_at_1000 value: 25.808999999999997 - type: map_at_3 value: 0.594 - type: map_at_5 value: 0.9289999999999999 - type: mrr_at_1 value: 78.0 - type: mrr_at_10 value: 87.019 - type: mrr_at_100 value: 87.019 - type: mrr_at_1000 value: 87.019 - type: mrr_at_3 value: 86.333 - type: mrr_at_5 value: 86.733 - type: ndcg_at_1 value: 73.0 - type: ndcg_at_10 value: 66.52900000000001 - type: ndcg_at_100 value: 53.433 - type: ndcg_at_1000 value: 51.324000000000005 - type: ndcg_at_3 value: 
72.02199999999999 - type: ndcg_at_5 value: 69.696 - type: precision_at_1 value: 78.0 - type: precision_at_10 value: 70.39999999999999 - type: precision_at_100 value: 55.46 - type: precision_at_1000 value: 22.758 - type: precision_at_3 value: 76.667 - type: precision_at_5 value: 74.0 - type: recall_at_1 value: 0.21 - type: recall_at_10 value: 1.8849999999999998 - type: recall_at_100 value: 13.801 - type: recall_at_1000 value: 49.649 - type: recall_at_3 value: 0.632 - type: recall_at_5 value: 1.009 - task: type: Retrieval dataset: name: MTEB Touche2020 type: webis-touche2020 config: default split: test revision: None metrics: - type: map_at_1 value: 1.797 - type: map_at_10 value: 9.01 - type: map_at_100 value: 14.682 - type: map_at_1000 value: 16.336000000000002 - type: map_at_3 value: 4.546 - type: map_at_5 value: 5.9270000000000005 - type: mrr_at_1 value: 24.490000000000002 - type: mrr_at_10 value: 41.156 - type: mrr_at_100 value: 42.392 - type: mrr_at_1000 value: 42.408 - type: mrr_at_3 value: 38.775999999999996 - type: mrr_at_5 value: 40.102 - type: ndcg_at_1 value: 21.429000000000002 - type: ndcg_at_10 value: 22.222 - type: ndcg_at_100 value: 34.405 - type: ndcg_at_1000 value: 46.599000000000004 - type: ndcg_at_3 value: 25.261 - type: ndcg_at_5 value: 22.695999999999998 - type: precision_at_1 value: 24.490000000000002 - type: precision_at_10 value: 19.796 - type: precision_at_100 value: 7.306 - type: precision_at_1000 value: 1.5350000000000001 - type: precision_at_3 value: 27.211000000000002 - type: precision_at_5 value: 22.857 - type: recall_at_1 value: 1.797 - type: recall_at_10 value: 15.706000000000001 - type: recall_at_100 value: 46.412 - type: recall_at_1000 value: 83.159 - type: recall_at_3 value: 6.1370000000000005 - type: recall_at_5 value: 8.599 - task: type: Classification dataset: name: MTEB ToxicConversationsClassification type: mteb/toxic_conversations_50k config: default split: test revision: d7c0de2777da35d6aae2200a62c6e0e5af397c4c metrics: - type: accuracy value: 70.3302 - type: ap value: 14.169121204575601 - type: f1 value: 54.229345975274235 - task: type: Classification dataset: name: MTEB TweetSentimentExtractionClassification type: mteb/tweet_sentiment_extraction config: default split: test revision: d604517c81ca91fe16a244d1248fc021f9ecee7a metrics: - type: accuracy value: 58.22297679683077 - type: f1 value: 58.62984908377875 - task: type: Clustering dataset: name: MTEB TwentyNewsgroupsClustering type: mteb/twentynewsgroups-clustering config: default split: test revision: 6125ec4e24fa026cec8a478383ee943acfbd5449 metrics: - type: v_measure value: 49.952922428464255 - task: type: PairClassification dataset: name: MTEB TwitterSemEval2015 type: mteb/twittersemeval2015-pairclassification config: default split: test revision: 70970daeab8776df92f5ea462b6173c0b46fd2d1 metrics: - type: cos_sim_accuracy value: 84.68140907194373 - type: cos_sim_ap value: 70.12180123666836 - type: cos_sim_f1 value: 65.77501791258658 - type: cos_sim_precision value: 60.07853403141361 - type: cos_sim_recall value: 72.66490765171504 - type: dot_accuracy value: 81.92167848840674 - type: dot_ap value: 60.49837581423469 - type: dot_f1 value: 58.44186046511628 - type: dot_precision value: 52.24532224532224 - type: dot_recall value: 66.3060686015831 - type: euclidean_accuracy value: 84.73505394289802 - type: euclidean_ap value: 70.3278904593286 - type: euclidean_f1 value: 65.98851124940161 - type: euclidean_precision value: 60.38107752956636 - type: euclidean_recall value: 72.74406332453826 - type: 
manhattan_accuracy value: 84.73505394289802 - type: manhattan_ap value: 70.00737738537337 - type: manhattan_f1 value: 65.80150784822642 - type: manhattan_precision value: 61.892583120204606 - type: manhattan_recall value: 70.23746701846966 - type: max_accuracy value: 84.73505394289802 - type: max_ap value: 70.3278904593286 - type: max_f1 value: 65.98851124940161 - task: type: PairClassification dataset: name: MTEB TwitterURLCorpus type: mteb/twitterurlcorpus-pairclassification config: default split: test revision: 8b6510b0b1fa4e4c4f879467980e9be563ec1cdf metrics: - type: cos_sim_accuracy value: 88.44258159661582 - type: cos_sim_ap value: 84.91926704880888 - type: cos_sim_f1 value: 77.07651086632926 - type: cos_sim_precision value: 74.5894554883319 - type: cos_sim_recall value: 79.73514012935017 - type: dot_accuracy value: 85.88116583226608 - type: dot_ap value: 78.9753854779923 - type: dot_f1 value: 72.17757637979255 - type: dot_precision value: 66.80647486729143 - type: dot_recall value: 78.48783492454572 - type: euclidean_accuracy value: 88.5299025885823 - type: euclidean_ap value: 85.08006075642194 - type: euclidean_f1 value: 77.29637336504163 - type: euclidean_precision value: 74.69836253950014 - type: euclidean_recall value: 80.08161379735141 - type: manhattan_accuracy value: 88.55124771995187 - type: manhattan_ap value: 85.00941529932851 - type: manhattan_f1 value: 77.33100233100232 - type: manhattan_precision value: 73.37572573956317 - type: manhattan_recall value: 81.73698798891284 - type: max_accuracy value: 88.55124771995187 - type: max_ap value: 85.08006075642194 - type: max_f1 value: 77.33100233100232 --- # gte-small General Text Embeddings (GTE) model. [Towards General Text Embeddings with Multi-stage Contrastive Learning](https://arxiv.org/abs/2308.03281) The GTE models are trained by Alibaba DAMO Academy. They are mainly based on the BERT framework and currently offer three different sizes of models, including [GTE-large](https://huggingface.co/thenlper/gte-large), [GTE-base](https://huggingface.co/thenlper/gte-base), and [GTE-small](https://huggingface.co/thenlper/gte-small). The GTE models are trained on a large-scale corpus of relevance text pairs, covering a wide range of domains and scenarios. This enables the GTE models to be applied to various downstream tasks of text embeddings, including **information retrieval**, **semantic textual similarity**, **text reranking**, etc. ## Metrics We compared the performance of the GTE models with other popular text embedding models on the MTEB benchmark. For more detailed comparison results, please refer to the [MTEB leaderboard](https://huggingface.co/spaces/mteb/leaderboard). 
| Model Name | Model Size (GB) | Dimension | Sequence Length | Average (56) | Clustering (11) | Pair Classification (3) | Reranking (4) | Retrieval (15) | STS (10) | Summarization (1) | Classification (12) | |:----:|:---:|:---:|:---:|:---:|:---:|:---:|:---:|:---:|:---:|:---:|:---:| | [**gte-large**](https://huggingface.co/thenlper/gte-large) | 0.67 | 1024 | 512 | **63.13** | 46.84 | 85.00 | 59.13 | 52.22 | 83.35 | 31.66 | 73.33 | | [**gte-base**](https://huggingface.co/thenlper/gte-base) | 0.22 | 768 | 512 | **62.39** | 46.2 | 84.57 | 58.61 | 51.14 | 82.3 | 31.17 | 73.01 | | [e5-large-v2](https://huggingface.co/intfloat/e5-large-v2) | 1.34 | 1024| 512 | 62.25 | 44.49 | 86.03 | 56.61 | 50.56 | 82.05 | 30.19 | 75.24 | | [e5-base-v2](https://huggingface.co/intfloat/e5-base-v2) | 0.44 | 768 | 512 | 61.5 | 43.80 | 85.73 | 55.91 | 50.29 | 81.05 | 30.28 | 73.84 | | [**gte-small**](https://huggingface.co/thenlper/gte-small) | 0.07 | 384 | 512 | **61.36** | 44.89 | 83.54 | 57.7 | 49.46 | 82.07 | 30.42 | 72.31 | | [text-embedding-ada-002](https://platform.openai.com/docs/guides/embeddings) | - | 1536 | 8192 | 60.99 | 45.9 | 84.89 | 56.32 | 49.25 | 80.97 | 30.8 | 70.93 | | [e5-small-v2](https://huggingface.co/intfloat/e5-base-v2) | 0.13 | 384 | 512 | 59.93 | 39.92 | 84.67 | 54.32 | 49.04 | 80.39 | 31.16 | 72.94 | | [sentence-t5-xxl](https://huggingface.co/sentence-transformers/sentence-t5-xxl) | 9.73 | 768 | 512 | 59.51 | 43.72 | 85.06 | 56.42 | 42.24 | 82.63 | 30.08 | 73.42 | | [all-mpnet-base-v2](https://huggingface.co/sentence-transformers/all-mpnet-base-v2) | 0.44 | 768 | 514 | 57.78 | 43.69 | 83.04 | 59.36 | 43.81 | 80.28 | 27.49 | 65.07 | | [sgpt-bloom-7b1-msmarco](https://huggingface.co/bigscience/sgpt-bloom-7b1-msmarco) | 28.27 | 4096 | 2048 | 57.59 | 38.93 | 81.9 | 55.65 | 48.22 | 77.74 | 33.6 | 66.19 | | [all-MiniLM-L12-v2](https://huggingface.co/sentence-transformers/all-MiniLM-L12-v2) | 0.13 | 384 | 512 | 56.53 | 41.81 | 82.41 | 58.44 | 42.69 | 79.8 | 27.9 | 63.21 | | [all-MiniLM-L6-v2](https://huggingface.co/sentence-transformers/all-MiniLM-L6-v2) | 0.09 | 384 | 512 | 56.26 | 42.35 | 82.37 | 58.04 | 41.95 | 78.9 | 30.81 | 63.05 | | [contriever-base-msmarco](https://huggingface.co/nthakur/contriever-base-msmarco) | 0.44 | 768 | 512 | 56.00 | 41.1 | 82.54 | 53.14 | 41.88 | 76.51 | 30.36 | 66.68 | | [sentence-t5-base](https://huggingface.co/sentence-transformers/sentence-t5-base) | 0.22 | 768 | 512 | 55.27 | 40.21 | 85.18 | 53.09 | 33.63 | 81.14 | 31.39 | 69.81 | ## Usage Code example ```python import torch.nn.functional as F from torch import Tensor from transformers import AutoTokenizer, AutoModel def average_pool(last_hidden_states: Tensor, attention_mask: Tensor) -> Tensor: last_hidden = last_hidden_states.masked_fill(~attention_mask[..., None].bool(), 0.0) return last_hidden.sum(dim=1) / attention_mask.sum(dim=1)[..., None] input_texts = [ "what is the capital of China?", "how to implement quick sort in python?", "Beijing", "sorting algorithms" ] tokenizer = AutoTokenizer.from_pretrained("thenlper/gte-small") model = AutoModel.from_pretrained("thenlper/gte-small") # Tokenize the input texts batch_dict = tokenizer(input_texts, max_length=512, padding=True, truncation=True, return_tensors='pt') outputs = model(**batch_dict) embeddings = average_pool(outputs.last_hidden_state, batch_dict['attention_mask']) # (Optionally) normalize embeddings embeddings = F.normalize(embeddings, p=2, dim=1) scores = (embeddings[:1] @ embeddings[1:].T) * 100 print(scores.tolist()) ``` Use with 
sentence-transformers: ```python from sentence_transformers import SentenceTransformer from sentence_transformers.util import cos_sim sentences = ['That is a happy person', 'That is a very happy person'] model = SentenceTransformer('thenlper/gte-small') embeddings = model.encode(sentences) print(cos_sim(embeddings[0], embeddings[1])) ``` ### Limitation This model caters exclusively to English texts, and any lengthy texts will be truncated to a maximum of 512 tokens. ### Citation If you find our paper or models helpful, please consider citing them as follows: ``` @article{li2023towards, title={Towards general text embeddings with multi-stage contrastive learning}, author={Li, Zehan and Zhang, Xin and Zhang, Yanzhao and Long, Dingkun and Xie, Pengjun and Zhang, Meishan}, journal={arXiv preprint arXiv:2308.03281}, year={2023} } ```
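As a small illustration of the 512-token limit noted in the Limitation section above (a sketch reusing the tokenizer settings from the usage example, not an official snippet from the authors):

```python
from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("thenlper/gte-small")

# An input far longer than the model's 512-token window.
long_text = "word " * 2000
encoded = tokenizer(long_text, max_length=512, truncation=True)

# Everything past the window is silently discarded: the encoded length is capped at 512.
print(len(encoded["input_ids"]))
```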
[ "BIOSSES", "SCIFACT" ]
Alibaba-NLP/gte-large-en-v1.5
Alibaba-NLP
sentence-similarity
[ "transformers", "onnx", "safetensors", "new", "feature-extraction", "sentence-transformers", "gte", "mteb", "transformers.js", "sentence-similarity", "custom_code", "en", "dataset:allenai/c4", "arxiv:2407.19669", "arxiv:2308.03281", "license:apache-2.0", "model-index", "text-embeddings-inference", "endpoints_compatible", "region:us" ]
"2024-04-20T02:54:30Z"
2024-08-09T03:32:05+00:00
3,819,623
204
--- datasets: - allenai/c4 language: - en library_name: transformers license: apache-2.0 tags: - sentence-transformers - gte - mteb - transformers.js - sentence-similarity model-index: - name: gte-large-en-v1.5 results: - task: type: Classification dataset: name: MTEB AmazonCounterfactualClassification (en) type: mteb/amazon_counterfactual config: en split: test revision: e8379541af4e31359cca9fbcf4b00f2671dba205 metrics: - type: accuracy value: 73.01492537313432 - type: ap value: 35.05341696659522 - type: f1 value: 66.71270310883853 - task: type: Classification dataset: name: MTEB AmazonPolarityClassification type: mteb/amazon_polarity config: default split: test revision: e2d317d38cd51312af73b3d32a06d1a08b442046 metrics: - type: accuracy value: 93.97189999999999 - type: ap value: 90.5952493948908 - type: f1 value: 93.95848137716877 - task: type: Classification dataset: name: MTEB AmazonReviewsClassification (en) type: mteb/amazon_reviews_multi config: en split: test revision: 1399c76144fd37290681b995c656ef9b2e06e26d metrics: - type: accuracy value: 54.196 - type: f1 value: 53.80122334012787 - task: type: Retrieval dataset: name: MTEB ArguAna type: mteb/arguana config: default split: test revision: c22ab2a51041ffd869aaddef7af8d8215647e41a metrics: - type: map_at_1 value: 47.297 - type: map_at_10 value: 64.303 - type: map_at_100 value: 64.541 - type: map_at_1000 value: 64.541 - type: map_at_3 value: 60.728 - type: map_at_5 value: 63.114000000000004 - type: mrr_at_1 value: 48.435 - type: mrr_at_10 value: 64.657 - type: mrr_at_100 value: 64.901 - type: mrr_at_1000 value: 64.901 - type: mrr_at_3 value: 61.06 - type: mrr_at_5 value: 63.514 - type: ndcg_at_1 value: 47.297 - type: ndcg_at_10 value: 72.107 - type: ndcg_at_100 value: 72.963 - type: ndcg_at_1000 value: 72.963 - type: ndcg_at_3 value: 65.063 - type: ndcg_at_5 value: 69.352 - type: precision_at_1 value: 47.297 - type: precision_at_10 value: 9.623 - type: precision_at_100 value: 0.996 - type: precision_at_1000 value: 0.1 - type: precision_at_3 value: 25.865 - type: precision_at_5 value: 17.596 - type: recall_at_1 value: 47.297 - type: recall_at_10 value: 96.23 - type: recall_at_100 value: 99.644 - type: recall_at_1000 value: 99.644 - type: recall_at_3 value: 77.596 - type: recall_at_5 value: 87.98 - task: type: Clustering dataset: name: MTEB ArxivClusteringP2P type: mteb/arxiv-clustering-p2p config: default split: test revision: a122ad7f3f0291bf49cc6f4d32aa80929df69d5d metrics: - type: v_measure value: 48.467787861077475 - task: type: Clustering dataset: name: MTEB ArxivClusteringS2S type: mteb/arxiv-clustering-s2s config: default split: test revision: f910caf1a6075f7329cdf8c1a6135696f37dbd53 metrics: - type: v_measure value: 43.39198391914257 - task: type: Reranking dataset: name: MTEB AskUbuntuDupQuestions type: mteb/askubuntudupquestions-reranking config: default split: test revision: 2000358ca161889fa9c082cb41daa8dcfb161a54 metrics: - type: map value: 63.12794820591384 - type: mrr value: 75.9331442641692 - task: type: STS dataset: name: MTEB BIOSSES type: mteb/biosses-sts config: default split: test revision: d3fb88f8f02e40887cd149695127462bbcf29b4a metrics: - type: cos_sim_pearson value: 87.85062993863319 - type: cos_sim_spearman value: 85.39049989733459 - type: euclidean_pearson value: 86.00222680278333 - type: euclidean_spearman value: 85.45556162077396 - type: manhattan_pearson value: 85.88769871785621 - type: manhattan_spearman value: 85.11760211290839 - task: type: Classification dataset: name: MTEB Banking77Classification 
type: mteb/banking77 config: default split: test revision: 0fd18e25b25c072e09e0d92ab615fda904d66300 metrics: - type: accuracy value: 87.32792207792208 - type: f1 value: 87.29132945999555 - task: type: Clustering dataset: name: MTEB BiorxivClusteringP2P type: mteb/biorxiv-clustering-p2p config: default split: test revision: 65b79d1d13f80053f67aca9498d9402c2d9f1f40 metrics: - type: v_measure value: 40.5779328301945 - task: type: Clustering dataset: name: MTEB BiorxivClusteringS2S type: mteb/biorxiv-clustering-s2s config: default split: test revision: 258694dd0231531bc1fd9de6ceb52a0853c6d908 metrics: - type: v_measure value: 37.94425623865118 - task: type: Retrieval dataset: name: MTEB CQADupstackAndroidRetrieval type: mteb/cqadupstack-android config: default split: test revision: f46a197baaae43b4f621051089b82a364682dfeb metrics: - type: map_at_1 value: 32.978 - type: map_at_10 value: 44.45 - type: map_at_100 value: 46.19 - type: map_at_1000 value: 46.303 - type: map_at_3 value: 40.849000000000004 - type: map_at_5 value: 42.55 - type: mrr_at_1 value: 40.629 - type: mrr_at_10 value: 50.848000000000006 - type: mrr_at_100 value: 51.669 - type: mrr_at_1000 value: 51.705 - type: mrr_at_3 value: 47.997 - type: mrr_at_5 value: 49.506 - type: ndcg_at_1 value: 40.629 - type: ndcg_at_10 value: 51.102000000000004 - type: ndcg_at_100 value: 57.159000000000006 - type: ndcg_at_1000 value: 58.669000000000004 - type: ndcg_at_3 value: 45.738 - type: ndcg_at_5 value: 47.632999999999996 - type: precision_at_1 value: 40.629 - type: precision_at_10 value: 9.700000000000001 - type: precision_at_100 value: 1.5970000000000002 - type: precision_at_1000 value: 0.202 - type: precision_at_3 value: 21.698 - type: precision_at_5 value: 15.393 - type: recall_at_1 value: 32.978 - type: recall_at_10 value: 63.711 - type: recall_at_100 value: 88.39399999999999 - type: recall_at_1000 value: 97.513 - type: recall_at_3 value: 48.025 - type: recall_at_5 value: 53.52 - task: type: Retrieval dataset: name: MTEB CQADupstackEnglishRetrieval type: mteb/cqadupstack-english config: default split: test revision: ad9991cb51e31e31e430383c75ffb2885547b5f0 metrics: - type: map_at_1 value: 30.767 - type: map_at_10 value: 42.195 - type: map_at_100 value: 43.541999999999994 - type: map_at_1000 value: 43.673 - type: map_at_3 value: 38.561 - type: map_at_5 value: 40.532000000000004 - type: mrr_at_1 value: 38.79 - type: mrr_at_10 value: 48.021 - type: mrr_at_100 value: 48.735 - type: mrr_at_1000 value: 48.776 - type: mrr_at_3 value: 45.594 - type: mrr_at_5 value: 46.986 - type: ndcg_at_1 value: 38.79 - type: ndcg_at_10 value: 48.468 - type: ndcg_at_100 value: 53.037 - type: ndcg_at_1000 value: 55.001999999999995 - type: ndcg_at_3 value: 43.409 - type: ndcg_at_5 value: 45.654 - type: precision_at_1 value: 38.79 - type: precision_at_10 value: 9.452 - type: precision_at_100 value: 1.518 - type: precision_at_1000 value: 0.201 - type: precision_at_3 value: 21.21 - type: precision_at_5 value: 15.171999999999999 - type: recall_at_1 value: 30.767 - type: recall_at_10 value: 60.118 - type: recall_at_100 value: 79.271 - type: recall_at_1000 value: 91.43299999999999 - type: recall_at_3 value: 45.36 - type: recall_at_5 value: 51.705 - task: type: Retrieval dataset: name: MTEB CQADupstackGamingRetrieval type: mteb/cqadupstack-gaming config: default split: test revision: 4885aa143210c98657558c04aaf3dc47cfb54340 metrics: - type: map_at_1 value: 40.007 - type: map_at_10 value: 53.529 - type: map_at_100 value: 54.602 - type: map_at_1000 value: 54.647 - type: 
map_at_3 value: 49.951 - type: map_at_5 value: 52.066 - type: mrr_at_1 value: 45.705 - type: mrr_at_10 value: 56.745000000000005 - type: mrr_at_100 value: 57.43899999999999 - type: mrr_at_1000 value: 57.462999999999994 - type: mrr_at_3 value: 54.25299999999999 - type: mrr_at_5 value: 55.842000000000006 - type: ndcg_at_1 value: 45.705 - type: ndcg_at_10 value: 59.809 - type: ndcg_at_100 value: 63.837999999999994 - type: ndcg_at_1000 value: 64.729 - type: ndcg_at_3 value: 53.994 - type: ndcg_at_5 value: 57.028 - type: precision_at_1 value: 45.705 - type: precision_at_10 value: 9.762 - type: precision_at_100 value: 1.275 - type: precision_at_1000 value: 0.13899999999999998 - type: precision_at_3 value: 24.368000000000002 - type: precision_at_5 value: 16.84 - type: recall_at_1 value: 40.007 - type: recall_at_10 value: 75.017 - type: recall_at_100 value: 91.99000000000001 - type: recall_at_1000 value: 98.265 - type: recall_at_3 value: 59.704 - type: recall_at_5 value: 67.109 - task: type: Retrieval dataset: name: MTEB CQADupstackGisRetrieval type: mteb/cqadupstack-gis config: default split: test revision: 5003b3064772da1887988e05400cf3806fe491f2 metrics: - type: map_at_1 value: 26.639000000000003 - type: map_at_10 value: 35.926 - type: map_at_100 value: 37.126999999999995 - type: map_at_1000 value: 37.202 - type: map_at_3 value: 32.989000000000004 - type: map_at_5 value: 34.465 - type: mrr_at_1 value: 28.475 - type: mrr_at_10 value: 37.7 - type: mrr_at_100 value: 38.753 - type: mrr_at_1000 value: 38.807 - type: mrr_at_3 value: 35.066 - type: mrr_at_5 value: 36.512 - type: ndcg_at_1 value: 28.475 - type: ndcg_at_10 value: 41.245 - type: ndcg_at_100 value: 46.814 - type: ndcg_at_1000 value: 48.571 - type: ndcg_at_3 value: 35.528999999999996 - type: ndcg_at_5 value: 38.066 - type: precision_at_1 value: 28.475 - type: precision_at_10 value: 6.497 - type: precision_at_100 value: 0.9650000000000001 - type: precision_at_1000 value: 0.11499999999999999 - type: precision_at_3 value: 15.065999999999999 - type: precision_at_5 value: 10.599 - type: recall_at_1 value: 26.639000000000003 - type: recall_at_10 value: 55.759 - type: recall_at_100 value: 80.913 - type: recall_at_1000 value: 93.929 - type: recall_at_3 value: 40.454 - type: recall_at_5 value: 46.439 - task: type: Retrieval dataset: name: MTEB CQADupstackMathematicaRetrieval type: mteb/cqadupstack-mathematica config: default split: test revision: 90fceea13679c63fe563ded68f3b6f06e50061de metrics: - type: map_at_1 value: 15.767999999999999 - type: map_at_10 value: 24.811 - type: map_at_100 value: 26.064999999999998 - type: map_at_1000 value: 26.186999999999998 - type: map_at_3 value: 21.736 - type: map_at_5 value: 23.283 - type: mrr_at_1 value: 19.527 - type: mrr_at_10 value: 29.179 - type: mrr_at_100 value: 30.153999999999996 - type: mrr_at_1000 value: 30.215999999999998 - type: mrr_at_3 value: 26.223000000000003 - type: mrr_at_5 value: 27.733999999999998 - type: ndcg_at_1 value: 19.527 - type: ndcg_at_10 value: 30.786 - type: ndcg_at_100 value: 36.644 - type: ndcg_at_1000 value: 39.440999999999995 - type: ndcg_at_3 value: 24.958 - type: ndcg_at_5 value: 27.392 - type: precision_at_1 value: 19.527 - type: precision_at_10 value: 5.995 - type: precision_at_100 value: 1.03 - type: precision_at_1000 value: 0.14100000000000001 - type: precision_at_3 value: 12.520999999999999 - type: precision_at_5 value: 9.129 - type: recall_at_1 value: 15.767999999999999 - type: recall_at_10 value: 44.824000000000005 - type: recall_at_100 value: 70.186 - type: 
recall_at_1000 value: 89.934 - type: recall_at_3 value: 28.607 - type: recall_at_5 value: 34.836 - task: type: Retrieval dataset: name: MTEB CQADupstackPhysicsRetrieval type: mteb/cqadupstack-physics config: default split: test revision: 79531abbd1fb92d06c6d6315a0cbbbf5bb247ea4 metrics: - type: map_at_1 value: 31.952 - type: map_at_10 value: 44.438 - type: map_at_100 value: 45.778 - type: map_at_1000 value: 45.883 - type: map_at_3 value: 41.044000000000004 - type: map_at_5 value: 42.986000000000004 - type: mrr_at_1 value: 39.172000000000004 - type: mrr_at_10 value: 49.76 - type: mrr_at_100 value: 50.583999999999996 - type: mrr_at_1000 value: 50.621 - type: mrr_at_3 value: 47.353 - type: mrr_at_5 value: 48.739 - type: ndcg_at_1 value: 39.172000000000004 - type: ndcg_at_10 value: 50.760000000000005 - type: ndcg_at_100 value: 56.084 - type: ndcg_at_1000 value: 57.865 - type: ndcg_at_3 value: 45.663 - type: ndcg_at_5 value: 48.178 - type: precision_at_1 value: 39.172000000000004 - type: precision_at_10 value: 9.22 - type: precision_at_100 value: 1.387 - type: precision_at_1000 value: 0.17099999999999999 - type: precision_at_3 value: 21.976000000000003 - type: precision_at_5 value: 15.457 - type: recall_at_1 value: 31.952 - type: recall_at_10 value: 63.900999999999996 - type: recall_at_100 value: 85.676 - type: recall_at_1000 value: 97.03699999999999 - type: recall_at_3 value: 49.781 - type: recall_at_5 value: 56.330000000000005 - task: type: Retrieval dataset: name: MTEB CQADupstackProgrammersRetrieval type: mteb/cqadupstack-programmers config: default split: test revision: 6184bc1440d2dbc7612be22b50686b8826d22b32 metrics: - type: map_at_1 value: 25.332 - type: map_at_10 value: 36.874 - type: map_at_100 value: 38.340999999999994 - type: map_at_1000 value: 38.452 - type: map_at_3 value: 33.068 - type: map_at_5 value: 35.324 - type: mrr_at_1 value: 30.822 - type: mrr_at_10 value: 41.641 - type: mrr_at_100 value: 42.519 - type: mrr_at_1000 value: 42.573 - type: mrr_at_3 value: 38.413000000000004 - type: mrr_at_5 value: 40.542 - type: ndcg_at_1 value: 30.822 - type: ndcg_at_10 value: 43.414 - type: ndcg_at_100 value: 49.196 - type: ndcg_at_1000 value: 51.237 - type: ndcg_at_3 value: 37.230000000000004 - type: ndcg_at_5 value: 40.405 - type: precision_at_1 value: 30.822 - type: precision_at_10 value: 8.379 - type: precision_at_100 value: 1.315 - type: precision_at_1000 value: 0.168 - type: precision_at_3 value: 18.417 - type: precision_at_5 value: 13.744 - type: recall_at_1 value: 25.332 - type: recall_at_10 value: 57.774 - type: recall_at_100 value: 82.071 - type: recall_at_1000 value: 95.60600000000001 - type: recall_at_3 value: 40.722 - type: recall_at_5 value: 48.754999999999995 - task: type: Retrieval dataset: name: MTEB CQADupstackRetrieval type: mteb/cqadupstack config: default split: test revision: 4ffe81d471b1924886b33c7567bfb200e9eec5c4 metrics: - type: map_at_1 value: 25.91033333333334 - type: map_at_10 value: 36.23225000000001 - type: map_at_100 value: 37.55766666666667 - type: map_at_1000 value: 37.672583333333336 - type: map_at_3 value: 32.95666666666667 - type: map_at_5 value: 34.73375 - type: mrr_at_1 value: 30.634 - type: mrr_at_10 value: 40.19449999999999 - type: mrr_at_100 value: 41.099250000000005 - type: mrr_at_1000 value: 41.15091666666667 - type: mrr_at_3 value: 37.4615 - type: mrr_at_5 value: 39.00216666666667 - type: ndcg_at_1 value: 30.634 - type: ndcg_at_10 value: 42.162166666666664 - type: ndcg_at_100 value: 47.60708333333333 - type: ndcg_at_1000 value: 49.68616666666666 
- type: ndcg_at_3 value: 36.60316666666666 - type: ndcg_at_5 value: 39.15616666666668 - type: precision_at_1 value: 30.634 - type: precision_at_10 value: 7.6193333333333335 - type: precision_at_100 value: 1.2198333333333333 - type: precision_at_1000 value: 0.15975000000000003 - type: precision_at_3 value: 17.087 - type: precision_at_5 value: 12.298333333333334 - type: recall_at_1 value: 25.91033333333334 - type: recall_at_10 value: 55.67300000000001 - type: recall_at_100 value: 79.20608333333334 - type: recall_at_1000 value: 93.34866666666667 - type: recall_at_3 value: 40.34858333333333 - type: recall_at_5 value: 46.834083333333325 - task: type: Retrieval dataset: name: MTEB CQADupstackStatsRetrieval type: mteb/cqadupstack-stats config: default split: test revision: 65ac3a16b8e91f9cee4c9828cc7c335575432a2a metrics: - type: map_at_1 value: 25.006 - type: map_at_10 value: 32.177 - type: map_at_100 value: 33.324999999999996 - type: map_at_1000 value: 33.419 - type: map_at_3 value: 29.952 - type: map_at_5 value: 31.095 - type: mrr_at_1 value: 28.066999999999997 - type: mrr_at_10 value: 34.995 - type: mrr_at_100 value: 35.978 - type: mrr_at_1000 value: 36.042 - type: mrr_at_3 value: 33.103 - type: mrr_at_5 value: 34.001 - type: ndcg_at_1 value: 28.066999999999997 - type: ndcg_at_10 value: 36.481 - type: ndcg_at_100 value: 42.022999999999996 - type: ndcg_at_1000 value: 44.377 - type: ndcg_at_3 value: 32.394 - type: ndcg_at_5 value: 34.108 - type: precision_at_1 value: 28.066999999999997 - type: precision_at_10 value: 5.736 - type: precision_at_100 value: 0.9259999999999999 - type: precision_at_1000 value: 0.12 - type: precision_at_3 value: 13.804 - type: precision_at_5 value: 9.508999999999999 - type: recall_at_1 value: 25.006 - type: recall_at_10 value: 46.972 - type: recall_at_100 value: 72.138 - type: recall_at_1000 value: 89.479 - type: recall_at_3 value: 35.793 - type: recall_at_5 value: 39.947 - task: type: Retrieval dataset: name: MTEB CQADupstackTexRetrieval type: mteb/cqadupstack-tex config: default split: test revision: 46989137a86843e03a6195de44b09deda022eec7 metrics: - type: map_at_1 value: 16.07 - type: map_at_10 value: 24.447 - type: map_at_100 value: 25.685999999999996 - type: map_at_1000 value: 25.813999999999997 - type: map_at_3 value: 21.634 - type: map_at_5 value: 23.133 - type: mrr_at_1 value: 19.580000000000002 - type: mrr_at_10 value: 28.127999999999997 - type: mrr_at_100 value: 29.119 - type: mrr_at_1000 value: 29.192 - type: mrr_at_3 value: 25.509999999999998 - type: mrr_at_5 value: 26.878 - type: ndcg_at_1 value: 19.580000000000002 - type: ndcg_at_10 value: 29.804000000000002 - type: ndcg_at_100 value: 35.555 - type: ndcg_at_1000 value: 38.421 - type: ndcg_at_3 value: 24.654999999999998 - type: ndcg_at_5 value: 26.881 - type: precision_at_1 value: 19.580000000000002 - type: precision_at_10 value: 5.736 - type: precision_at_100 value: 1.005 - type: precision_at_1000 value: 0.145 - type: precision_at_3 value: 12.033000000000001 - type: precision_at_5 value: 8.871 - type: recall_at_1 value: 16.07 - type: recall_at_10 value: 42.364000000000004 - type: recall_at_100 value: 68.01899999999999 - type: recall_at_1000 value: 88.122 - type: recall_at_3 value: 27.846 - type: recall_at_5 value: 33.638 - task: type: Retrieval dataset: name: MTEB CQADupstackUnixRetrieval type: mteb/cqadupstack-unix config: default split: test revision: 6c6430d3a6d36f8d2a829195bc5dc94d7e063e53 metrics: - type: map_at_1 value: 26.365 - type: map_at_10 value: 36.591 - type: map_at_100 value: 
37.730000000000004 - type: map_at_1000 value: 37.84 - type: map_at_3 value: 33.403 - type: map_at_5 value: 35.272999999999996 - type: mrr_at_1 value: 30.503999999999998 - type: mrr_at_10 value: 39.940999999999995 - type: mrr_at_100 value: 40.818 - type: mrr_at_1000 value: 40.876000000000005 - type: mrr_at_3 value: 37.065 - type: mrr_at_5 value: 38.814 - type: ndcg_at_1 value: 30.503999999999998 - type: ndcg_at_10 value: 42.185 - type: ndcg_at_100 value: 47.416000000000004 - type: ndcg_at_1000 value: 49.705 - type: ndcg_at_3 value: 36.568 - type: ndcg_at_5 value: 39.416000000000004 - type: precision_at_1 value: 30.503999999999998 - type: precision_at_10 value: 7.276000000000001 - type: precision_at_100 value: 1.118 - type: precision_at_1000 value: 0.14300000000000002 - type: precision_at_3 value: 16.729 - type: precision_at_5 value: 12.107999999999999 - type: recall_at_1 value: 26.365 - type: recall_at_10 value: 55.616 - type: recall_at_100 value: 78.129 - type: recall_at_1000 value: 93.95599999999999 - type: recall_at_3 value: 40.686 - type: recall_at_5 value: 47.668 - task: type: Retrieval dataset: name: MTEB CQADupstackWebmastersRetrieval type: mteb/cqadupstack-webmasters config: default split: test revision: 160c094312a0e1facb97e55eeddb698c0abe3571 metrics: - type: map_at_1 value: 22.750999999999998 - type: map_at_10 value: 33.446 - type: map_at_100 value: 35.235 - type: map_at_1000 value: 35.478 - type: map_at_3 value: 29.358 - type: map_at_5 value: 31.525 - type: mrr_at_1 value: 27.668 - type: mrr_at_10 value: 37.694 - type: mrr_at_100 value: 38.732 - type: mrr_at_1000 value: 38.779 - type: mrr_at_3 value: 34.223 - type: mrr_at_5 value: 36.08 - type: ndcg_at_1 value: 27.668 - type: ndcg_at_10 value: 40.557 - type: ndcg_at_100 value: 46.605999999999995 - type: ndcg_at_1000 value: 48.917 - type: ndcg_at_3 value: 33.677 - type: ndcg_at_5 value: 36.85 - type: precision_at_1 value: 27.668 - type: precision_at_10 value: 8.3 - type: precision_at_100 value: 1.6260000000000001 - type: precision_at_1000 value: 0.253 - type: precision_at_3 value: 16.008 - type: precision_at_5 value: 12.292 - type: recall_at_1 value: 22.750999999999998 - type: recall_at_10 value: 55.643 - type: recall_at_100 value: 82.151 - type: recall_at_1000 value: 95.963 - type: recall_at_3 value: 36.623 - type: recall_at_5 value: 44.708 - task: type: Retrieval dataset: name: MTEB CQADupstackWordpressRetrieval type: mteb/cqadupstack-wordpress config: default split: test revision: 4ffe81d471b1924886b33c7567bfb200e9eec5c4 metrics: - type: map_at_1 value: 17.288999999999998 - type: map_at_10 value: 25.903 - type: map_at_100 value: 27.071 - type: map_at_1000 value: 27.173000000000002 - type: map_at_3 value: 22.935 - type: map_at_5 value: 24.573 - type: mrr_at_1 value: 18.669 - type: mrr_at_10 value: 27.682000000000002 - type: mrr_at_100 value: 28.691 - type: mrr_at_1000 value: 28.761 - type: mrr_at_3 value: 24.738 - type: mrr_at_5 value: 26.392 - type: ndcg_at_1 value: 18.669 - type: ndcg_at_10 value: 31.335 - type: ndcg_at_100 value: 36.913000000000004 - type: ndcg_at_1000 value: 39.300000000000004 - type: ndcg_at_3 value: 25.423000000000002 - type: ndcg_at_5 value: 28.262999999999998 - type: precision_at_1 value: 18.669 - type: precision_at_10 value: 5.379 - type: precision_at_100 value: 0.876 - type: precision_at_1000 value: 0.11900000000000001 - type: precision_at_3 value: 11.214 - type: precision_at_5 value: 8.466 - type: recall_at_1 value: 17.288999999999998 - type: recall_at_10 value: 46.377 - type: recall_at_100 value: 
71.53500000000001 - type: recall_at_1000 value: 88.947 - type: recall_at_3 value: 30.581999999999997 - type: recall_at_5 value: 37.354 - task: type: Retrieval dataset: name: MTEB ClimateFEVER type: mteb/climate-fever config: default split: test revision: 47f2ac6acb640fc46020b02a5b59fdda04d39380 metrics: - type: map_at_1 value: 21.795 - type: map_at_10 value: 37.614999999999995 - type: map_at_100 value: 40.037 - type: map_at_1000 value: 40.184999999999995 - type: map_at_3 value: 32.221 - type: map_at_5 value: 35.154999999999994 - type: mrr_at_1 value: 50.358000000000004 - type: mrr_at_10 value: 62.129 - type: mrr_at_100 value: 62.613 - type: mrr_at_1000 value: 62.62 - type: mrr_at_3 value: 59.272999999999996 - type: mrr_at_5 value: 61.138999999999996 - type: ndcg_at_1 value: 50.358000000000004 - type: ndcg_at_10 value: 48.362 - type: ndcg_at_100 value: 55.932 - type: ndcg_at_1000 value: 58.062999999999995 - type: ndcg_at_3 value: 42.111 - type: ndcg_at_5 value: 44.063 - type: precision_at_1 value: 50.358000000000004 - type: precision_at_10 value: 14.677999999999999 - type: precision_at_100 value: 2.2950000000000004 - type: precision_at_1000 value: 0.271 - type: precision_at_3 value: 31.77 - type: precision_at_5 value: 23.375 - type: recall_at_1 value: 21.795 - type: recall_at_10 value: 53.846000000000004 - type: recall_at_100 value: 78.952 - type: recall_at_1000 value: 90.41900000000001 - type: recall_at_3 value: 37.257 - type: recall_at_5 value: 44.661 - task: type: Retrieval dataset: name: MTEB DBPedia type: mteb/dbpedia config: default split: test revision: c0f706b76e590d620bd6618b3ca8efdd34e2d659 metrics: - type: map_at_1 value: 9.728 - type: map_at_10 value: 22.691 - type: map_at_100 value: 31.734 - type: map_at_1000 value: 33.464 - type: map_at_3 value: 16.273 - type: map_at_5 value: 19.016 - type: mrr_at_1 value: 73.25 - type: mrr_at_10 value: 80.782 - type: mrr_at_100 value: 81.01899999999999 - type: mrr_at_1000 value: 81.021 - type: mrr_at_3 value: 79.583 - type: mrr_at_5 value: 80.146 - type: ndcg_at_1 value: 59.62499999999999 - type: ndcg_at_10 value: 46.304 - type: ndcg_at_100 value: 51.23 - type: ndcg_at_1000 value: 58.048 - type: ndcg_at_3 value: 51.541000000000004 - type: ndcg_at_5 value: 48.635 - type: precision_at_1 value: 73.25 - type: precision_at_10 value: 36.375 - type: precision_at_100 value: 11.53 - type: precision_at_1000 value: 2.23 - type: precision_at_3 value: 55.583000000000006 - type: precision_at_5 value: 47.15 - type: recall_at_1 value: 9.728 - type: recall_at_10 value: 28.793999999999997 - type: recall_at_100 value: 57.885 - type: recall_at_1000 value: 78.759 - type: recall_at_3 value: 17.79 - type: recall_at_5 value: 21.733 - task: type: Classification dataset: name: MTEB EmotionClassification type: mteb/emotion config: default split: test revision: 4f58c6b202a23cf9a4da393831edf4f9183cad37 metrics: - type: accuracy value: 46.775 - type: f1 value: 41.89794273264891 - task: type: Retrieval dataset: name: MTEB FEVER type: mteb/fever config: default split: test revision: bea83ef9e8fb933d90a2f1d5515737465d613e12 metrics: - type: map_at_1 value: 85.378 - type: map_at_10 value: 91.51 - type: map_at_100 value: 91.666 - type: map_at_1000 value: 91.676 - type: map_at_3 value: 90.757 - type: map_at_5 value: 91.277 - type: mrr_at_1 value: 91.839 - type: mrr_at_10 value: 95.49 - type: mrr_at_100 value: 95.493 - type: mrr_at_1000 value: 95.493 - type: mrr_at_3 value: 95.345 - type: mrr_at_5 value: 95.47200000000001 - type: ndcg_at_1 value: 91.839 - type: ndcg_at_10 value: 
93.806 - type: ndcg_at_100 value: 94.255 - type: ndcg_at_1000 value: 94.399 - type: ndcg_at_3 value: 93.027 - type: ndcg_at_5 value: 93.51 - type: precision_at_1 value: 91.839 - type: precision_at_10 value: 10.93 - type: precision_at_100 value: 1.1400000000000001 - type: precision_at_1000 value: 0.117 - type: precision_at_3 value: 34.873 - type: precision_at_5 value: 21.44 - type: recall_at_1 value: 85.378 - type: recall_at_10 value: 96.814 - type: recall_at_100 value: 98.386 - type: recall_at_1000 value: 99.21600000000001 - type: recall_at_3 value: 94.643 - type: recall_at_5 value: 95.976 - task: type: Retrieval dataset: name: MTEB FiQA2018 type: mteb/fiqa config: default split: test revision: 27a168819829fe9bcd655c2df245fb19452e8e06 metrics: - type: map_at_1 value: 32.190000000000005 - type: map_at_10 value: 53.605000000000004 - type: map_at_100 value: 55.550999999999995 - type: map_at_1000 value: 55.665 - type: map_at_3 value: 46.62 - type: map_at_5 value: 50.517999999999994 - type: mrr_at_1 value: 60.34 - type: mrr_at_10 value: 70.775 - type: mrr_at_100 value: 71.238 - type: mrr_at_1000 value: 71.244 - type: mrr_at_3 value: 68.72399999999999 - type: mrr_at_5 value: 69.959 - type: ndcg_at_1 value: 60.34 - type: ndcg_at_10 value: 63.226000000000006 - type: ndcg_at_100 value: 68.60300000000001 - type: ndcg_at_1000 value: 69.901 - type: ndcg_at_3 value: 58.048 - type: ndcg_at_5 value: 59.789 - type: precision_at_1 value: 60.34 - type: precision_at_10 value: 17.130000000000003 - type: precision_at_100 value: 2.29 - type: precision_at_1000 value: 0.256 - type: precision_at_3 value: 38.323 - type: precision_at_5 value: 27.87 - type: recall_at_1 value: 32.190000000000005 - type: recall_at_10 value: 73.041 - type: recall_at_100 value: 91.31 - type: recall_at_1000 value: 98.104 - type: recall_at_3 value: 53.70399999999999 - type: recall_at_5 value: 62.358999999999995 - task: type: Retrieval dataset: name: MTEB HotpotQA type: mteb/hotpotqa config: default split: test revision: ab518f4d6fcca38d87c25209f94beba119d02014 metrics: - type: map_at_1 value: 43.511 - type: map_at_10 value: 58.15 - type: map_at_100 value: 58.95399999999999 - type: map_at_1000 value: 59.018 - type: map_at_3 value: 55.31700000000001 - type: map_at_5 value: 57.04900000000001 - type: mrr_at_1 value: 87.022 - type: mrr_at_10 value: 91.32000000000001 - type: mrr_at_100 value: 91.401 - type: mrr_at_1000 value: 91.403 - type: mrr_at_3 value: 90.77 - type: mrr_at_5 value: 91.156 - type: ndcg_at_1 value: 87.022 - type: ndcg_at_10 value: 68.183 - type: ndcg_at_100 value: 70.781 - type: ndcg_at_1000 value: 72.009 - type: ndcg_at_3 value: 64.334 - type: ndcg_at_5 value: 66.449 - type: precision_at_1 value: 87.022 - type: precision_at_10 value: 13.406 - type: precision_at_100 value: 1.542 - type: precision_at_1000 value: 0.17099999999999999 - type: precision_at_3 value: 39.023 - type: precision_at_5 value: 25.080000000000002 - type: recall_at_1 value: 43.511 - type: recall_at_10 value: 67.02900000000001 - type: recall_at_100 value: 77.11 - type: recall_at_1000 value: 85.294 - type: recall_at_3 value: 58.535000000000004 - type: recall_at_5 value: 62.70099999999999 - task: type: Classification dataset: name: MTEB ImdbClassification type: mteb/imdb config: default split: test revision: 3d86128a09e091d6018b6d26cad27f2739fc2db7 metrics: - type: accuracy value: 92.0996 - type: ap value: 87.86206089096373 - type: f1 value: 92.07554547510763 - task: type: Retrieval dataset: name: MTEB MSMARCO type: mteb/msmarco config: default split: dev 
revision: c5a29a104738b98a9e76336939199e264163d4a0 metrics: - type: map_at_1 value: 23.179 - type: map_at_10 value: 35.86 - type: map_at_100 value: 37.025999999999996 - type: map_at_1000 value: 37.068 - type: map_at_3 value: 31.921 - type: map_at_5 value: 34.172000000000004 - type: mrr_at_1 value: 23.926 - type: mrr_at_10 value: 36.525999999999996 - type: mrr_at_100 value: 37.627 - type: mrr_at_1000 value: 37.665 - type: mrr_at_3 value: 32.653 - type: mrr_at_5 value: 34.897 - type: ndcg_at_1 value: 23.910999999999998 - type: ndcg_at_10 value: 42.927 - type: ndcg_at_100 value: 48.464 - type: ndcg_at_1000 value: 49.533 - type: ndcg_at_3 value: 34.910000000000004 - type: ndcg_at_5 value: 38.937 - type: precision_at_1 value: 23.910999999999998 - type: precision_at_10 value: 6.758 - type: precision_at_100 value: 0.9520000000000001 - type: precision_at_1000 value: 0.104 - type: precision_at_3 value: 14.838000000000001 - type: precision_at_5 value: 10.934000000000001 - type: recall_at_1 value: 23.179 - type: recall_at_10 value: 64.622 - type: recall_at_100 value: 90.135 - type: recall_at_1000 value: 98.301 - type: recall_at_3 value: 42.836999999999996 - type: recall_at_5 value: 52.512 - task: type: Classification dataset: name: MTEB MTOPDomainClassification (en) type: mteb/mtop_domain config: en split: test revision: d80d48c1eb48d3562165c59d59d0034df9fff0bf metrics: - type: accuracy value: 96.59598723210215 - type: f1 value: 96.41913500001952 - task: type: Classification dataset: name: MTEB MTOPIntentClassification (en) type: mteb/mtop_intent config: en split: test revision: ae001d0e6b1228650b7bd1c2c65fb50ad11a8aba metrics: - type: accuracy value: 82.89557683538533 - type: f1 value: 63.379319722356264 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (en) type: mteb/amazon_massive_intent config: en split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 78.93745796906524 - type: f1 value: 75.71616541785902 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (en) type: mteb/amazon_massive_scenario config: en split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 81.41223940820443 - type: f1 value: 81.2877893719078 - task: type: Clustering dataset: name: MTEB MedrxivClusteringP2P type: mteb/medrxiv-clustering-p2p config: default split: test revision: e7a26af6f3ae46b30dde8737f02c07b1505bcc73 metrics: - type: v_measure value: 35.03682528325662 - task: type: Clustering dataset: name: MTEB MedrxivClusteringS2S type: mteb/medrxiv-clustering-s2s config: default split: test revision: 35191c8c0dca72d8ff3efcd72aa802307d469663 metrics: - type: v_measure value: 32.942529406124 - task: type: Reranking dataset: name: MTEB MindSmallReranking type: mteb/mind_small config: default split: test revision: 3bdac13927fdc888b903db93b2ffdbd90b295a69 metrics: - type: map value: 31.459949660460317 - type: mrr value: 32.70509582031616 - task: type: Retrieval dataset: name: MTEB NFCorpus type: mteb/nfcorpus config: default split: test revision: ec0fa4fe99da2ff19ca1214b7966684033a58814 metrics: - type: map_at_1 value: 6.497 - type: map_at_10 value: 13.843 - type: map_at_100 value: 17.713 - type: map_at_1000 value: 19.241 - type: map_at_3 value: 10.096 - type: map_at_5 value: 11.85 - type: mrr_at_1 value: 48.916 - type: mrr_at_10 value: 57.764 - type: mrr_at_100 value: 58.251 - type: mrr_at_1000 value: 58.282999999999994 - type: mrr_at_3 value: 55.623999999999995 - type: mrr_at_5 value: 57.018 
- type: ndcg_at_1 value: 46.594 - type: ndcg_at_10 value: 36.945 - type: ndcg_at_100 value: 34.06 - type: ndcg_at_1000 value: 43.05 - type: ndcg_at_3 value: 41.738 - type: ndcg_at_5 value: 39.330999999999996 - type: precision_at_1 value: 48.916 - type: precision_at_10 value: 27.43 - type: precision_at_100 value: 8.616 - type: precision_at_1000 value: 2.155 - type: precision_at_3 value: 39.112 - type: precision_at_5 value: 33.808 - type: recall_at_1 value: 6.497 - type: recall_at_10 value: 18.163 - type: recall_at_100 value: 34.566 - type: recall_at_1000 value: 67.15 - type: recall_at_3 value: 11.100999999999999 - type: recall_at_5 value: 14.205000000000002 - task: type: Retrieval dataset: name: MTEB NQ type: mteb/nq config: default split: test revision: b774495ed302d8c44a3a7ea25c90dbce03968f31 metrics: - type: map_at_1 value: 31.916 - type: map_at_10 value: 48.123 - type: map_at_100 value: 49.103 - type: map_at_1000 value: 49.131 - type: map_at_3 value: 43.711 - type: map_at_5 value: 46.323 - type: mrr_at_1 value: 36.181999999999995 - type: mrr_at_10 value: 50.617999999999995 - type: mrr_at_100 value: 51.329 - type: mrr_at_1000 value: 51.348000000000006 - type: mrr_at_3 value: 47.010999999999996 - type: mrr_at_5 value: 49.175000000000004 - type: ndcg_at_1 value: 36.181999999999995 - type: ndcg_at_10 value: 56.077999999999996 - type: ndcg_at_100 value: 60.037 - type: ndcg_at_1000 value: 60.63499999999999 - type: ndcg_at_3 value: 47.859 - type: ndcg_at_5 value: 52.178999999999995 - type: precision_at_1 value: 36.181999999999995 - type: precision_at_10 value: 9.284 - type: precision_at_100 value: 1.149 - type: precision_at_1000 value: 0.121 - type: precision_at_3 value: 22.006999999999998 - type: precision_at_5 value: 15.695 - type: recall_at_1 value: 31.916 - type: recall_at_10 value: 77.771 - type: recall_at_100 value: 94.602 - type: recall_at_1000 value: 98.967 - type: recall_at_3 value: 56.528 - type: recall_at_5 value: 66.527 - task: type: Retrieval dataset: name: MTEB QuoraRetrieval type: mteb/quora config: default split: test revision: None metrics: - type: map_at_1 value: 71.486 - type: map_at_10 value: 85.978 - type: map_at_100 value: 86.587 - type: map_at_1000 value: 86.598 - type: map_at_3 value: 83.04899999999999 - type: map_at_5 value: 84.857 - type: mrr_at_1 value: 82.32000000000001 - type: mrr_at_10 value: 88.64 - type: mrr_at_100 value: 88.702 - type: mrr_at_1000 value: 88.702 - type: mrr_at_3 value: 87.735 - type: mrr_at_5 value: 88.36 - type: ndcg_at_1 value: 82.34 - type: ndcg_at_10 value: 89.67 - type: ndcg_at_100 value: 90.642 - type: ndcg_at_1000 value: 90.688 - type: ndcg_at_3 value: 86.932 - type: ndcg_at_5 value: 88.408 - type: precision_at_1 value: 82.34 - type: precision_at_10 value: 13.675999999999998 - type: precision_at_100 value: 1.544 - type: precision_at_1000 value: 0.157 - type: precision_at_3 value: 38.24 - type: precision_at_5 value: 25.068 - type: recall_at_1 value: 71.486 - type: recall_at_10 value: 96.844 - type: recall_at_100 value: 99.843 - type: recall_at_1000 value: 99.996 - type: recall_at_3 value: 88.92099999999999 - type: recall_at_5 value: 93.215 - task: type: Clustering dataset: name: MTEB RedditClustering type: mteb/reddit-clustering config: default split: test revision: 24640382cdbf8abc73003fb0fa6d111a705499eb metrics: - type: v_measure value: 59.75758437908334 - task: type: Clustering dataset: name: MTEB RedditClusteringP2P type: mteb/reddit-clustering-p2p config: default split: test revision: 282350215ef01743dc01b456c7f5241fa8937f16 metrics: 
- type: v_measure value: 68.03497914092789 - task: type: Retrieval dataset: name: MTEB SCIDOCS type: mteb/scidocs config: default split: test revision: None metrics: - type: map_at_1 value: 5.808 - type: map_at_10 value: 16.059 - type: map_at_100 value: 19.048000000000002 - type: map_at_1000 value: 19.43 - type: map_at_3 value: 10.953 - type: map_at_5 value: 13.363 - type: mrr_at_1 value: 28.7 - type: mrr_at_10 value: 42.436 - type: mrr_at_100 value: 43.599 - type: mrr_at_1000 value: 43.62 - type: mrr_at_3 value: 38.45 - type: mrr_at_5 value: 40.89 - type: ndcg_at_1 value: 28.7 - type: ndcg_at_10 value: 26.346000000000004 - type: ndcg_at_100 value: 36.758 - type: ndcg_at_1000 value: 42.113 - type: ndcg_at_3 value: 24.254 - type: ndcg_at_5 value: 21.506 - type: precision_at_1 value: 28.7 - type: precision_at_10 value: 13.969999999999999 - type: precision_at_100 value: 2.881 - type: precision_at_1000 value: 0.414 - type: precision_at_3 value: 22.933 - type: precision_at_5 value: 19.220000000000002 - type: recall_at_1 value: 5.808 - type: recall_at_10 value: 28.310000000000002 - type: recall_at_100 value: 58.475 - type: recall_at_1000 value: 84.072 - type: recall_at_3 value: 13.957 - type: recall_at_5 value: 19.515 - task: type: STS dataset: name: MTEB SICK-R type: mteb/sickr-sts config: default split: test revision: a6ea5a8cab320b040a23452cc28066d9beae2cee metrics: - type: cos_sim_pearson value: 82.39274129958557 - type: cos_sim_spearman value: 79.78021235170053 - type: euclidean_pearson value: 79.35335401300166 - type: euclidean_spearman value: 79.7271870968275 - type: manhattan_pearson value: 79.35256263340601 - type: manhattan_spearman value: 79.76036386976321 - task: type: STS dataset: name: MTEB STS12 type: mteb/sts12-sts config: default split: test revision: a0d554a64d88156834ff5ae9920b964011b16384 metrics: - type: cos_sim_pearson value: 83.99130429246708 - type: cos_sim_spearman value: 73.88322811171203 - type: euclidean_pearson value: 80.7569419170376 - type: euclidean_spearman value: 73.82542155409597 - type: manhattan_pearson value: 80.79468183847625 - type: manhattan_spearman value: 73.87027144047784 - task: type: STS dataset: name: MTEB STS13 type: mteb/sts13-sts config: default split: test revision: 7e90230a92c190f1bf69ae9002b8cea547a64cca metrics: - type: cos_sim_pearson value: 84.88548789489907 - type: cos_sim_spearman value: 85.07535893847255 - type: euclidean_pearson value: 84.6637222061494 - type: euclidean_spearman value: 85.14200626702456 - type: manhattan_pearson value: 84.75327892344734 - type: manhattan_spearman value: 85.24406181838596 - task: type: STS dataset: name: MTEB STS14 type: mteb/sts14-sts config: default split: test revision: 6031580fec1f6af667f0bd2da0a551cf4f0b2375 metrics: - type: cos_sim_pearson value: 82.88140039325008 - type: cos_sim_spearman value: 79.61211268112362 - type: euclidean_pearson value: 81.29639728816458 - type: euclidean_spearman value: 79.51284578041442 - type: manhattan_pearson value: 81.3381797137111 - type: manhattan_spearman value: 79.55683684039808 - task: type: STS dataset: name: MTEB STS15 type: mteb/sts15-sts config: default split: test revision: ae752c7c21bf194d8b67fd573edf7ae58183cbe3 metrics: - type: cos_sim_pearson value: 85.16716737270485 - type: cos_sim_spearman value: 86.14823841857738 - type: euclidean_pearson value: 85.36325733440725 - type: euclidean_spearman value: 86.04919691402029 - type: manhattan_pearson value: 85.3147511385052 - type: manhattan_spearman value: 86.00676205857764 - task: type: STS dataset: name: 
MTEB STS16 type: mteb/sts16-sts config: default split: test revision: 4d8694f8f0e0100860b497b999b3dbed754a0513 metrics: - type: cos_sim_pearson value: 80.34266645861588 - type: cos_sim_spearman value: 81.59914035005882 - type: euclidean_pearson value: 81.15053076245988 - type: euclidean_spearman value: 81.52776915798489 - type: manhattan_pearson value: 81.1819647418673 - type: manhattan_spearman value: 81.57479527353556 - task: type: STS dataset: name: MTEB STS17 (en-en) type: mteb/sts17-crosslingual-sts config: en-en split: test revision: af5e6fb845001ecf41f4c1e033ce921939a2a68d metrics: - type: cos_sim_pearson value: 89.38263326821439 - type: cos_sim_spearman value: 89.10946308202642 - type: euclidean_pearson value: 88.87831312540068 - type: euclidean_spearman value: 89.03615865973664 - type: manhattan_pearson value: 88.79835539970384 - type: manhattan_spearman value: 88.9766156339753 - task: type: STS dataset: name: MTEB STS22 (en) type: mteb/sts22-crosslingual-sts config: en split: test revision: eea2b4fe26a775864c896887d910b76a8098ad3f metrics: - type: cos_sim_pearson value: 70.1574915581685 - type: cos_sim_spearman value: 70.59144980004054 - type: euclidean_pearson value: 71.43246306918755 - type: euclidean_spearman value: 70.5544189562984 - type: manhattan_pearson value: 71.4071414609503 - type: manhattan_spearman value: 70.31799126163712 - task: type: STS dataset: name: MTEB STSBenchmark type: mteb/stsbenchmark-sts config: default split: test revision: b0fddb56ed78048fa8b90373c8a3cfc37b684831 metrics: - type: cos_sim_pearson value: 83.36215796635351 - type: cos_sim_spearman value: 83.07276756467208 - type: euclidean_pearson value: 83.06690453635584 - type: euclidean_spearman value: 82.9635366303289 - type: manhattan_pearson value: 83.04994049700815 - type: manhattan_spearman value: 82.98120125356036 - task: type: Reranking dataset: name: MTEB SciDocsRR type: mteb/scidocs-reranking config: default split: test revision: d3c5e1fc0b855ab6097bf1cda04dd73947d7caab metrics: - type: map value: 86.92530011616722 - type: mrr value: 96.21826793395421 - task: type: Retrieval dataset: name: MTEB SciFact type: mteb/scifact config: default split: test revision: 0228b52cf27578f30900b9e5271d331663a030d7 metrics: - type: map_at_1 value: 65.75 - type: map_at_10 value: 77.701 - type: map_at_100 value: 78.005 - type: map_at_1000 value: 78.006 - type: map_at_3 value: 75.48 - type: map_at_5 value: 76.927 - type: mrr_at_1 value: 68.333 - type: mrr_at_10 value: 78.511 - type: mrr_at_100 value: 78.704 - type: mrr_at_1000 value: 78.704 - type: mrr_at_3 value: 77 - type: mrr_at_5 value: 78.083 - type: ndcg_at_1 value: 68.333 - type: ndcg_at_10 value: 82.42699999999999 - type: ndcg_at_100 value: 83.486 - type: ndcg_at_1000 value: 83.511 - type: ndcg_at_3 value: 78.96300000000001 - type: ndcg_at_5 value: 81.028 - type: precision_at_1 value: 68.333 - type: precision_at_10 value: 10.667 - type: precision_at_100 value: 1.127 - type: precision_at_1000 value: 0.11299999999999999 - type: precision_at_3 value: 31.333 - type: precision_at_5 value: 20.133000000000003 - type: recall_at_1 value: 65.75 - type: recall_at_10 value: 95.578 - type: recall_at_100 value: 99.833 - type: recall_at_1000 value: 100 - type: recall_at_3 value: 86.506 - type: recall_at_5 value: 91.75 - task: type: PairClassification dataset: name: MTEB SprintDuplicateQuestions type: mteb/sprintduplicatequestions-pairclassification config: default split: test revision: d66bd1f72af766a5cc4b0ca5e00c162f89e8cc46 metrics: - type: cos_sim_accuracy value: 
99.75247524752476 - type: cos_sim_ap value: 94.16065078045173 - type: cos_sim_f1 value: 87.22986247544205 - type: cos_sim_precision value: 85.71428571428571 - type: cos_sim_recall value: 88.8 - type: dot_accuracy value: 99.74554455445545 - type: dot_ap value: 93.90633887037264 - type: dot_f1 value: 86.9873417721519 - type: dot_precision value: 88.1025641025641 - type: dot_recall value: 85.9 - type: euclidean_accuracy value: 99.75247524752476 - type: euclidean_ap value: 94.17466319018055 - type: euclidean_f1 value: 87.3405299313052 - type: euclidean_precision value: 85.74181117533719 - type: euclidean_recall value: 89 - type: manhattan_accuracy value: 99.75445544554455 - type: manhattan_ap value: 94.27688371923577 - type: manhattan_f1 value: 87.74002954209749 - type: manhattan_precision value: 86.42095053346266 - type: manhattan_recall value: 89.1 - type: max_accuracy value: 99.75445544554455 - type: max_ap value: 94.27688371923577 - type: max_f1 value: 87.74002954209749 - task: type: Clustering dataset: name: MTEB StackExchangeClustering type: mteb/stackexchange-clustering config: default split: test revision: 6cbc1f7b2bc0622f2e39d2c77fa502909748c259 metrics: - type: v_measure value: 71.26500637517056 - task: type: Clustering dataset: name: MTEB StackExchangeClusteringP2P type: mteb/stackexchange-clustering-p2p config: default split: test revision: 815ca46b2622cec33ccafc3735d572c266efdb44 metrics: - type: v_measure value: 39.17507906280528 - task: type: Reranking dataset: name: MTEB StackOverflowDupQuestions type: mteb/stackoverflowdupquestions-reranking config: default split: test revision: e185fbe320c72810689fc5848eb6114e1ef5ec69 metrics: - type: map value: 52.4848744828509 - type: mrr value: 53.33678168236992 - task: type: Summarization dataset: name: MTEB SummEval type: mteb/summeval config: default split: test revision: cda12ad7615edc362dbf25a00fdd61d3b1eaf93c metrics: - type: cos_sim_pearson value: 30.599864323827887 - type: cos_sim_spearman value: 30.91116204665598 - type: dot_pearson value: 30.82637894269936 - type: dot_spearman value: 30.957573868416066 - task: type: Retrieval dataset: name: MTEB TRECCOVID type: mteb/trec-covid config: default split: test revision: None metrics: - type: map_at_1 value: 0.23600000000000002 - type: map_at_10 value: 1.892 - type: map_at_100 value: 11.586 - type: map_at_1000 value: 27.761999999999997 - type: map_at_3 value: 0.653 - type: map_at_5 value: 1.028 - type: mrr_at_1 value: 88 - type: mrr_at_10 value: 94 - type: mrr_at_100 value: 94 - type: mrr_at_1000 value: 94 - type: mrr_at_3 value: 94 - type: mrr_at_5 value: 94 - type: ndcg_at_1 value: 82 - type: ndcg_at_10 value: 77.48899999999999 - type: ndcg_at_100 value: 60.141 - type: ndcg_at_1000 value: 54.228 - type: ndcg_at_3 value: 82.358 - type: ndcg_at_5 value: 80.449 - type: precision_at_1 value: 88 - type: precision_at_10 value: 82.19999999999999 - type: precision_at_100 value: 61.760000000000005 - type: precision_at_1000 value: 23.684 - type: precision_at_3 value: 88 - type: precision_at_5 value: 85.6 - type: recall_at_1 value: 0.23600000000000002 - type: recall_at_10 value: 2.117 - type: recall_at_100 value: 14.985000000000001 - type: recall_at_1000 value: 51.107 - type: recall_at_3 value: 0.688 - type: recall_at_5 value: 1.1039999999999999 - task: type: Retrieval dataset: name: MTEB Touche2020 type: mteb/touche2020 config: default split: test revision: a34f9a33db75fa0cbb21bb5cfc3dae8dc8bec93f metrics: - type: map_at_1 value: 2.3040000000000003 - type: map_at_10 value: 9.025 - type: 
map_at_100 value: 15.312999999999999 - type: map_at_1000 value: 16.954 - type: map_at_3 value: 4.981 - type: map_at_5 value: 6.32 - type: mrr_at_1 value: 24.490000000000002 - type: mrr_at_10 value: 39.835 - type: mrr_at_100 value: 40.8 - type: mrr_at_1000 value: 40.8 - type: mrr_at_3 value: 35.034 - type: mrr_at_5 value: 37.687 - type: ndcg_at_1 value: 22.448999999999998 - type: ndcg_at_10 value: 22.545 - type: ndcg_at_100 value: 35.931999999999995 - type: ndcg_at_1000 value: 47.665 - type: ndcg_at_3 value: 23.311 - type: ndcg_at_5 value: 22.421 - type: precision_at_1 value: 24.490000000000002 - type: precision_at_10 value: 20.408 - type: precision_at_100 value: 7.815999999999999 - type: precision_at_1000 value: 1.553 - type: precision_at_3 value: 25.169999999999998 - type: precision_at_5 value: 23.265 - type: recall_at_1 value: 2.3040000000000003 - type: recall_at_10 value: 15.693999999999999 - type: recall_at_100 value: 48.917 - type: recall_at_1000 value: 84.964 - type: recall_at_3 value: 6.026 - type: recall_at_5 value: 9.066 - task: type: Classification dataset: name: MTEB ToxicConversationsClassification type: mteb/toxic_conversations_50k config: default split: test revision: d7c0de2777da35d6aae2200a62c6e0e5af397c4c metrics: - type: accuracy value: 82.6074 - type: ap value: 23.187467098602013 - type: f1 value: 65.36829506379657 - task: type: Classification dataset: name: MTEB TweetSentimentExtractionClassification type: mteb/tweet_sentiment_extraction config: default split: test revision: d604517c81ca91fe16a244d1248fc021f9ecee7a metrics: - type: accuracy value: 63.16355404640635 - type: f1 value: 63.534725639863346 - task: type: Clustering dataset: name: MTEB TwentyNewsgroupsClustering type: mteb/twentynewsgroups-clustering config: default split: test revision: 6125ec4e24fa026cec8a478383ee943acfbd5449 metrics: - type: v_measure value: 50.91004094411276 - task: type: PairClassification dataset: name: MTEB TwitterSemEval2015 type: mteb/twittersemeval2015-pairclassification config: default split: test revision: 70970daeab8776df92f5ea462b6173c0b46fd2d1 metrics: - type: cos_sim_accuracy value: 86.55301901412649 - type: cos_sim_ap value: 75.25312618556728 - type: cos_sim_f1 value: 68.76561719140429 - type: cos_sim_precision value: 65.3061224489796 - type: cos_sim_recall value: 72.61213720316623 - type: dot_accuracy value: 86.29671574178936 - type: dot_ap value: 75.11910195501207 - type: dot_f1 value: 68.44048376830045 - type: dot_precision value: 66.12546125461255 - type: dot_recall value: 70.92348284960423 - type: euclidean_accuracy value: 86.5828217202122 - type: euclidean_ap value: 75.22986344900924 - type: euclidean_f1 value: 68.81267797449549 - type: euclidean_precision value: 64.8238861674831 - type: euclidean_recall value: 73.3245382585752 - type: manhattan_accuracy value: 86.61262442629791 - type: manhattan_ap value: 75.24401608557328 - type: manhattan_f1 value: 68.80473982483257 - type: manhattan_precision value: 67.21187720181177 - type: manhattan_recall value: 70.47493403693932 - type: max_accuracy value: 86.61262442629791 - type: max_ap value: 75.25312618556728 - type: max_f1 value: 68.81267797449549 - task: type: PairClassification dataset: name: MTEB TwitterURLCorpus type: mteb/twitterurlcorpus-pairclassification config: default split: test revision: 8b6510b0b1fa4e4c4f879467980e9be563ec1cdf metrics: - type: cos_sim_accuracy value: 88.10688089416696 - type: cos_sim_ap value: 84.17862178779863 - type: cos_sim_f1 value: 76.17305208781748 - type: cos_sim_precision value: 
71.31246641590543 - type: cos_sim_recall value: 81.74468740375731 - type: dot_accuracy value: 88.1844995536927 - type: dot_ap value: 84.33816725235876 - type: dot_f1 value: 76.43554032918746 - type: dot_precision value: 74.01557767200346 - type: dot_recall value: 79.0190945488143 - type: euclidean_accuracy value: 88.07001203089223 - type: euclidean_ap value: 84.12267000814985 - type: euclidean_f1 value: 76.12232600180778 - type: euclidean_precision value: 74.50604541433205 - type: euclidean_recall value: 77.81028641823221 - type: manhattan_accuracy value: 88.06419063142779 - type: manhattan_ap value: 84.11648917164187 - type: manhattan_f1 value: 76.20579953925474 - type: manhattan_precision value: 72.56772755762935 - type: manhattan_recall value: 80.22790267939637 - type: max_accuracy value: 88.1844995536927 - type: max_ap value: 84.33816725235876 - type: max_f1 value: 76.43554032918746 ---

<!-- **English** | [中文](./README_zh.md) -->

# gte-large-en-v1.5

We introduce the `gte-v1.5` series, upgraded `gte` embeddings that support a context length of up to **8192** while further enhancing model performance. The models are built upon the `transformer++` encoder [backbone](https://huggingface.co/Alibaba-NLP/new-impl) (BERT + RoPE + GLU).

The `gte-v1.5` series achieves state-of-the-art scores on the MTEB benchmark within the same model size category and performs competitively on the LoCo long-context retrieval tests (refer to [Evaluation](#evaluation)).

We also present the [`gte-Qwen1.5-7B-instruct`](https://huggingface.co/Alibaba-NLP/gte-Qwen1.5-7B-instruct), a SOTA instruction-tuned multi-lingual embedding model that ranked 2nd in MTEB and 1st in C-MTEB.

<!-- Provide a longer summary of what this model is. -->

- **Developed by:** Institute for Intelligent Computing, Alibaba Group
- **Model type:** Text Embeddings
- **Paper:** [mGTE: Generalized Long-Context Text Representation and Reranking Models for Multilingual Text Retrieval](https://arxiv.org/pdf/2407.19669)

<!-- - **Demo [optional]:** [More Information Needed] -->

### Model list

| Models | Language | Model Size (M) | Max Seq. Length | Dimension | MTEB-en | LoCo |
|:-----: | :-----: |:-----: |:-----: |:-----: | :-----: | :-----: |
|[`gte-Qwen1.5-7B-instruct`](https://huggingface.co/Alibaba-NLP/gte-Qwen1.5-7B-instruct)| Multiple | 7720 | 32768 | 4096 | 67.34 | 87.57 |
|[`gte-large-en-v1.5`](https://huggingface.co/Alibaba-NLP/gte-large-en-v1.5) | English | 434 | 8192 | 1024 | 65.39 | 86.71 |
|[`gte-base-en-v1.5`](https://huggingface.co/Alibaba-NLP/gte-base-en-v1.5) | English | 137 | 8192 | 768 | 64.11 | 87.44 |

## How to Get Started with the Model

Use the code below to get started with the model.
```python
# Requires transformers>=4.36.0
import torch.nn.functional as F
from transformers import AutoModel, AutoTokenizer

input_texts = [
    "what is the capital of China?",
    "how to implement quick sort in python?",
    "Beijing",
    "sorting algorithms"
]

model_path = 'Alibaba-NLP/gte-large-en-v1.5'
tokenizer = AutoTokenizer.from_pretrained(model_path)
model = AutoModel.from_pretrained(model_path, trust_remote_code=True)

# Tokenize the input texts
batch_dict = tokenizer(input_texts, max_length=8192, padding=True, truncation=True, return_tensors='pt')

outputs = model(**batch_dict)
embeddings = outputs.last_hidden_state[:, 0]

# (Optionally) normalize embeddings
embeddings = F.normalize(embeddings, p=2, dim=1)
scores = (embeddings[:1] @ embeddings[1:].T) * 100
print(scores.tolist())
```

**It is recommended to install xformers and enable unpadding for acceleration; refer to [enable-unpadding-and-xformers](https://huggingface.co/Alibaba-NLP/new-impl#recommendation-enable-unpadding-and-acceleration-with-xformers).**

Use with sentence-transformers:

```python
# Requires sentence_transformers>=2.7.0
from sentence_transformers import SentenceTransformer
from sentence_transformers.util import cos_sim

sentences = ['That is a happy person', 'That is a very happy person']

model = SentenceTransformer('Alibaba-NLP/gte-large-en-v1.5', trust_remote_code=True)
embeddings = model.encode(sentences)
print(cos_sim(embeddings[0], embeddings[1]))
```

Use with `transformers.js`:

```js
// npm i @xenova/transformers
import { pipeline, dot } from '@xenova/transformers';

// Create feature extraction pipeline
const extractor = await pipeline('feature-extraction', 'Alibaba-NLP/gte-large-en-v1.5', {
    quantized: false, // Comment out this line to use the quantized version
});

// Generate sentence embeddings
const sentences = [
    "what is the capital of China?",
    "how to implement quick sort in python?",
    "Beijing",
    "sorting algorithms"
]
const output = await extractor(sentences, { normalize: true, pooling: 'cls' });

// Compute similarity scores
const [source_embeddings, ...document_embeddings] = output.tolist();
const similarities = document_embeddings.map(x => 100 * dot(source_embeddings, x));
console.log(similarities); // [41.86354093370361, 77.07076371259589, 37.02981979677899]
```

## Training Details

### Training Data

- Masked language modeling (MLM): `c4-en`
- Weakly-supervised contrastive pre-training (CPT): [GTE](https://arxiv.org/pdf/2308.03281.pdf) pre-training data
- Supervised contrastive fine-tuning: [GTE](https://arxiv.org/pdf/2308.03281.pdf) fine-tuning data

### Training Procedure

To enable the backbone model to support a context length of 8192, we adopted a multi-stage training strategy. The model first undergoes preliminary MLM pre-training on shorter lengths. Then we resample the data, reducing the proportion of short texts, and continue MLM pre-training. The entire training process is as follows:

- MLM-512: lr 2e-4, mlm_probability 0.3, batch_size 4096, num_steps 300000, rope_base 10000
- MLM-2048: lr 5e-5, mlm_probability 0.3, batch_size 4096, num_steps 30000, rope_base 10000
- [MLM-8192](https://huggingface.co/Alibaba-NLP/gte-en-mlm-large): lr 5e-5, mlm_probability 0.3, batch_size 1024, num_steps 30000, rope_base 160000
- CPT: max_len 512, lr 5e-5, batch_size 28672, num_steps 100000
- Fine-tuning: TODO

## Evaluation

### MTEB

The results of other models are retrieved from the [MTEB leaderboard](https://huggingface.co/spaces/mteb/leaderboard).
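For reference, below is a minimal sketch of how such an evaluation could be run with the `mteb` package against this checkpoint. The task selection and output folder are illustrative only, not the exact configuration behind the reported scores.

```python
# A minimal sketch of an MTEB run (illustrative task and output path, not the exact setup used here).
from mteb import MTEB
from sentence_transformers import SentenceTransformer

# Load the model through sentence-transformers so mteb can call .encode() on it.
model = SentenceTransformer("Alibaba-NLP/gte-large-en-v1.5", trust_remote_code=True)

# Evaluate on a single small task; the full English benchmark uses a longer task list.
evaluation = MTEB(tasks=["Banking77Classification"])
results = evaluation.run(model, output_folder="results/gte-large-en-v1.5")
print(results)
```

Running the full benchmark follows the same pattern with a longer task list.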
The gte evaluation setting: `mteb==1.2.0`, fp16 auto mixed precision, `max_length=8192`, and the NTK scaling factor set to 2 (equivalent to `rope_base * 2`).

| Model Name | Param Size (M) | Dimension | Sequence Length | Average (56) | Class. (12) | Clust. (11) | Pair Class. (3) | Reran. (4) | Retr. (15) | STS (10) | Summ. (1) |
|:----:|:---:|:---:|:---:|:---:|:---:|:---:|:---:|:---:|:---:|:---:|:---:|
| [**gte-large-en-v1.5**](https://huggingface.co/Alibaba-NLP/gte-large-en-v1.5) | 409 | 1024 | 8192 | **65.39** | 77.75 | 47.95 | 84.63 | 58.50 | 57.91 | 81.43 | 30.91 |
| [mxbai-embed-large-v1](https://huggingface.co/mixedbread-ai/mxbai-embed-large-v1) | 335 | 1024 | 512 | 64.68 | 75.64 | 46.71 | 87.2 | 60.11 | 54.39 | 85 | 32.71 |
| [multilingual-e5-large-instruct](https://huggingface.co/intfloat/multilingual-e5-large-instruct) | 560 | 1024 | 514 | 64.41 | 77.56 | 47.1 | 86.19 | 58.58 | 52.47 | 84.78 | 30.39 |
| [bge-large-en-v1.5](https://huggingface.co/BAAI/bge-large-en-v1.5) | 335 | 1024 | 512 | 64.23 | 75.97 | 46.08 | 87.12 | 60.03 | 54.29 | 83.11 | 31.61 |
| [**gte-base-en-v1.5**](https://huggingface.co/Alibaba-NLP/gte-base-en-v1.5) | 137 | 768 | 8192 | **64.11** | 77.17 | 46.82 | 85.33 | 57.66 | 54.09 | 81.97 | 31.17 |
| [bge-base-en-v1.5](https://huggingface.co/BAAI/bge-base-en-v1.5) | 109 | 768 | 512 | 63.55 | 75.53 | 45.77 | 86.55 | 58.86 | 53.25 | 82.4 | 31.07 |

### LoCo

| Model Name | Dimension | Sequence Length | Average (5) | QsmsumRetrieval | SummScreenRetrieval | QasperAbstractRetrieval | QasperTitleRetrieval | GovReportRetrieval |
|:----:|:---:|:---:|:---:|:---:|:---:|:---:|:---:|:---:|
| [gte-qwen1.5-7b](https://huggingface.co/Alibaba-NLP/gte-qwen1.5-7b) | 4096 | 32768 | 87.57 | 49.37 | 93.10 | 99.67 | 97.54 | 98.21 |
| [gte-large-v1.5](https://huggingface.co/Alibaba-NLP/gte-large-v1.5) | 1024 | 8192 | 86.71 | 44.55 | 92.61 | 99.82 | 97.81 | 98.74 |
| [gte-base-v1.5](https://huggingface.co/Alibaba-NLP/gte-base-v1.5) | 768 | 8192 | 87.44 | 49.91 | 91.78 | 99.82 | 97.13 | 98.58 |

## Citation

If you find our paper or models helpful, please consider citing them as follows:

```
@article{zhang2024mgte,
  title={mGTE: Generalized Long-Context Text Representation and Reranking Models for Multilingual Text Retrieval},
  author={Zhang, Xin and Zhang, Yanzhao and Long, Dingkun and Xie, Wen and Dai, Ziqi and Tang, Jialong and Lin, Huan and Yang, Baosong and Xie, Pengjun and Huang, Fei and others},
  journal={arXiv preprint arXiv:2407.19669},
  year={2024}
}

@article{li2023towards,
  title={Towards general text embeddings with multi-stage contrastive learning},
  author={Li, Zehan and Zhang, Xin and Zhang, Yanzhao and Long, Dingkun and Xie, Pengjun and Zhang, Meishan},
  journal={arXiv preprint arXiv:2308.03281},
  year={2023}
}
```
[ "BIOSSES", "SCIFACT" ]
BAAI/bge-small-en-v1.5
BAAI
feature-extraction
[ "sentence-transformers", "pytorch", "onnx", "safetensors", "bert", "feature-extraction", "sentence-similarity", "transformers", "mteb", "en", "arxiv:2401.03462", "arxiv:2312.15503", "arxiv:2311.13534", "arxiv:2310.07554", "arxiv:2309.07597", "license:mit", "model-index", "autotrain_compatible", "text-embeddings-inference", "endpoints_compatible", "region:us" ]
"2023-09-12T05:20:55Z"
2024-02-22T03:36:23+00:00
3,522,362
297
--- language: - en license: mit tags: - sentence-transformers - feature-extraction - sentence-similarity - transformers - mteb model-index: - name: bge-small-en-v1.5 results: - task: type: Classification dataset: name: MTEB AmazonCounterfactualClassification (en) type: mteb/amazon_counterfactual config: en split: test revision: e8379541af4e31359cca9fbcf4b00f2671dba205 metrics: - type: accuracy value: 73.79104477611939 - type: ap value: 37.21923821573361 - type: f1 value: 68.0914945617093 - task: type: Classification dataset: name: MTEB AmazonPolarityClassification type: mteb/amazon_polarity config: default split: test revision: e2d317d38cd51312af73b3d32a06d1a08b442046 metrics: - type: accuracy value: 92.75377499999999 - type: ap value: 89.46766124546022 - type: f1 value: 92.73884001331487 - task: type: Classification dataset: name: MTEB AmazonReviewsClassification (en) type: mteb/amazon_reviews_multi config: en split: test revision: 1399c76144fd37290681b995c656ef9b2e06e26d metrics: - type: accuracy value: 46.986 - type: f1 value: 46.55936786727896 - task: type: Retrieval dataset: name: MTEB ArguAna type: arguana config: default split: test revision: None metrics: - type: map_at_1 value: 35.846000000000004 - type: map_at_10 value: 51.388 - type: map_at_100 value: 52.132999999999996 - type: map_at_1000 value: 52.141000000000005 - type: map_at_3 value: 47.037 - type: map_at_5 value: 49.579 - type: mrr_at_1 value: 36.558 - type: mrr_at_10 value: 51.658 - type: mrr_at_100 value: 52.402 - type: mrr_at_1000 value: 52.410000000000004 - type: mrr_at_3 value: 47.345 - type: mrr_at_5 value: 49.797999999999995 - type: ndcg_at_1 value: 35.846000000000004 - type: ndcg_at_10 value: 59.550000000000004 - type: ndcg_at_100 value: 62.596 - type: ndcg_at_1000 value: 62.759 - type: ndcg_at_3 value: 50.666999999999994 - type: ndcg_at_5 value: 55.228 - type: precision_at_1 value: 35.846000000000004 - type: precision_at_10 value: 8.542 - type: precision_at_100 value: 0.984 - type: precision_at_1000 value: 0.1 - type: precision_at_3 value: 20.389 - type: precision_at_5 value: 14.438 - type: recall_at_1 value: 35.846000000000004 - type: recall_at_10 value: 85.42 - type: recall_at_100 value: 98.43499999999999 - type: recall_at_1000 value: 99.644 - type: recall_at_3 value: 61.166 - type: recall_at_5 value: 72.191 - task: type: Clustering dataset: name: MTEB ArxivClusteringP2P type: mteb/arxiv-clustering-p2p config: default split: test revision: a122ad7f3f0291bf49cc6f4d32aa80929df69d5d metrics: - type: v_measure value: 47.402770198163594 - task: type: Clustering dataset: name: MTEB ArxivClusteringS2S type: mteb/arxiv-clustering-s2s config: default split: test revision: f910caf1a6075f7329cdf8c1a6135696f37dbd53 metrics: - type: v_measure value: 40.01545436974177 - task: type: Reranking dataset: name: MTEB AskUbuntuDupQuestions type: mteb/askubuntudupquestions-reranking config: default split: test revision: 2000358ca161889fa9c082cb41daa8dcfb161a54 metrics: - type: map value: 62.586465273207196 - type: mrr value: 74.42169019038825 - task: type: STS dataset: name: MTEB BIOSSES type: mteb/biosses-sts config: default split: test revision: d3fb88f8f02e40887cd149695127462bbcf29b4a metrics: - type: cos_sim_pearson value: 85.1891186537969 - type: cos_sim_spearman value: 83.75492046087288 - type: euclidean_pearson value: 84.11766204805357 - type: euclidean_spearman value: 84.01456493126516 - type: manhattan_pearson value: 84.2132950502772 - type: manhattan_spearman value: 83.89227298813377 - task: type: Classification dataset: 
name: MTEB Banking77Classification type: mteb/banking77 config: default split: test revision: 0fd18e25b25c072e09e0d92ab615fda904d66300 metrics: - type: accuracy value: 85.74025974025975 - type: f1 value: 85.71493566466381 - task: type: Clustering dataset: name: MTEB BiorxivClusteringP2P type: mteb/biorxiv-clustering-p2p config: default split: test revision: 65b79d1d13f80053f67aca9498d9402c2d9f1f40 metrics: - type: v_measure value: 38.467181385006434 - task: type: Clustering dataset: name: MTEB BiorxivClusteringS2S type: mteb/biorxiv-clustering-s2s config: default split: test revision: 258694dd0231531bc1fd9de6ceb52a0853c6d908 metrics: - type: v_measure value: 34.719496037339056 - task: type: Retrieval dataset: name: MTEB CQADupstackAndroidRetrieval type: BeIR/cqadupstack config: default split: test revision: None metrics: - type: map_at_1 value: 29.587000000000003 - type: map_at_10 value: 41.114 - type: map_at_100 value: 42.532 - type: map_at_1000 value: 42.661 - type: map_at_3 value: 37.483 - type: map_at_5 value: 39.652 - type: mrr_at_1 value: 36.338 - type: mrr_at_10 value: 46.763 - type: mrr_at_100 value: 47.393 - type: mrr_at_1000 value: 47.445 - type: mrr_at_3 value: 43.538 - type: mrr_at_5 value: 45.556000000000004 - type: ndcg_at_1 value: 36.338 - type: ndcg_at_10 value: 47.658 - type: ndcg_at_100 value: 52.824000000000005 - type: ndcg_at_1000 value: 54.913999999999994 - type: ndcg_at_3 value: 41.989 - type: ndcg_at_5 value: 44.944 - type: precision_at_1 value: 36.338 - type: precision_at_10 value: 9.156 - type: precision_at_100 value: 1.4789999999999999 - type: precision_at_1000 value: 0.196 - type: precision_at_3 value: 20.076 - type: precision_at_5 value: 14.85 - type: recall_at_1 value: 29.587000000000003 - type: recall_at_10 value: 60.746 - type: recall_at_100 value: 82.157 - type: recall_at_1000 value: 95.645 - type: recall_at_3 value: 44.821 - type: recall_at_5 value: 52.819 - type: map_at_1 value: 30.239 - type: map_at_10 value: 39.989000000000004 - type: map_at_100 value: 41.196 - type: map_at_1000 value: 41.325 - type: map_at_3 value: 37.261 - type: map_at_5 value: 38.833 - type: mrr_at_1 value: 37.516 - type: mrr_at_10 value: 46.177 - type: mrr_at_100 value: 46.806 - type: mrr_at_1000 value: 46.849000000000004 - type: mrr_at_3 value: 44.002 - type: mrr_at_5 value: 45.34 - type: ndcg_at_1 value: 37.516 - type: ndcg_at_10 value: 45.586 - type: ndcg_at_100 value: 49.897000000000006 - type: ndcg_at_1000 value: 51.955 - type: ndcg_at_3 value: 41.684 - type: ndcg_at_5 value: 43.617 - type: precision_at_1 value: 37.516 - type: precision_at_10 value: 8.522 - type: precision_at_100 value: 1.374 - type: precision_at_1000 value: 0.184 - type: precision_at_3 value: 20.105999999999998 - type: precision_at_5 value: 14.152999999999999 - type: recall_at_1 value: 30.239 - type: recall_at_10 value: 55.03 - type: recall_at_100 value: 73.375 - type: recall_at_1000 value: 86.29599999999999 - type: recall_at_3 value: 43.269000000000005 - type: recall_at_5 value: 48.878 - type: map_at_1 value: 38.338 - type: map_at_10 value: 50.468999999999994 - type: map_at_100 value: 51.553000000000004 - type: map_at_1000 value: 51.608 - type: map_at_3 value: 47.107 - type: map_at_5 value: 49.101 - type: mrr_at_1 value: 44.201 - type: mrr_at_10 value: 54.057 - type: mrr_at_100 value: 54.764 - type: mrr_at_1000 value: 54.791000000000004 - type: mrr_at_3 value: 51.56699999999999 - type: mrr_at_5 value: 53.05 - type: ndcg_at_1 value: 44.201 - type: ndcg_at_10 value: 56.379000000000005 - type: ndcg_at_100 value: 
60.645 - type: ndcg_at_1000 value: 61.73499999999999 - type: ndcg_at_3 value: 50.726000000000006 - type: ndcg_at_5 value: 53.58500000000001 - type: precision_at_1 value: 44.201 - type: precision_at_10 value: 9.141 - type: precision_at_100 value: 1.216 - type: precision_at_1000 value: 0.135 - type: precision_at_3 value: 22.654 - type: precision_at_5 value: 15.723999999999998 - type: recall_at_1 value: 38.338 - type: recall_at_10 value: 70.30499999999999 - type: recall_at_100 value: 88.77199999999999 - type: recall_at_1000 value: 96.49799999999999 - type: recall_at_3 value: 55.218 - type: recall_at_5 value: 62.104000000000006 - type: map_at_1 value: 25.682 - type: map_at_10 value: 33.498 - type: map_at_100 value: 34.461000000000006 - type: map_at_1000 value: 34.544000000000004 - type: map_at_3 value: 30.503999999999998 - type: map_at_5 value: 32.216 - type: mrr_at_1 value: 27.683999999999997 - type: mrr_at_10 value: 35.467999999999996 - type: mrr_at_100 value: 36.32 - type: mrr_at_1000 value: 36.386 - type: mrr_at_3 value: 32.618 - type: mrr_at_5 value: 34.262 - type: ndcg_at_1 value: 27.683999999999997 - type: ndcg_at_10 value: 38.378 - type: ndcg_at_100 value: 43.288 - type: ndcg_at_1000 value: 45.413 - type: ndcg_at_3 value: 32.586 - type: ndcg_at_5 value: 35.499 - type: precision_at_1 value: 27.683999999999997 - type: precision_at_10 value: 5.864 - type: precision_at_100 value: 0.882 - type: precision_at_1000 value: 0.11 - type: precision_at_3 value: 13.446 - type: precision_at_5 value: 9.718 - type: recall_at_1 value: 25.682 - type: recall_at_10 value: 51.712 - type: recall_at_100 value: 74.446 - type: recall_at_1000 value: 90.472 - type: recall_at_3 value: 36.236000000000004 - type: recall_at_5 value: 43.234 - type: map_at_1 value: 16.073999999999998 - type: map_at_10 value: 24.352999999999998 - type: map_at_100 value: 25.438 - type: map_at_1000 value: 25.545 - type: map_at_3 value: 21.614 - type: map_at_5 value: 23.104 - type: mrr_at_1 value: 19.776 - type: mrr_at_10 value: 28.837000000000003 - type: mrr_at_100 value: 29.755 - type: mrr_at_1000 value: 29.817 - type: mrr_at_3 value: 26.201999999999998 - type: mrr_at_5 value: 27.714 - type: ndcg_at_1 value: 19.776 - type: ndcg_at_10 value: 29.701 - type: ndcg_at_100 value: 35.307 - type: ndcg_at_1000 value: 37.942 - type: ndcg_at_3 value: 24.764 - type: ndcg_at_5 value: 27.025 - type: precision_at_1 value: 19.776 - type: precision_at_10 value: 5.659 - type: precision_at_100 value: 0.971 - type: precision_at_1000 value: 0.133 - type: precision_at_3 value: 12.065 - type: precision_at_5 value: 8.905000000000001 - type: recall_at_1 value: 16.073999999999998 - type: recall_at_10 value: 41.647 - type: recall_at_100 value: 66.884 - type: recall_at_1000 value: 85.91499999999999 - type: recall_at_3 value: 27.916 - type: recall_at_5 value: 33.729 - type: map_at_1 value: 28.444999999999997 - type: map_at_10 value: 38.218999999999994 - type: map_at_100 value: 39.595 - type: map_at_1000 value: 39.709 - type: map_at_3 value: 35.586 - type: map_at_5 value: 36.895 - type: mrr_at_1 value: 34.841 - type: mrr_at_10 value: 44.106 - type: mrr_at_100 value: 44.98 - type: mrr_at_1000 value: 45.03 - type: mrr_at_3 value: 41.979 - type: mrr_at_5 value: 43.047999999999995 - type: ndcg_at_1 value: 34.841 - type: ndcg_at_10 value: 43.922 - type: ndcg_at_100 value: 49.504999999999995 - type: ndcg_at_1000 value: 51.675000000000004 - type: ndcg_at_3 value: 39.858 - type: ndcg_at_5 value: 41.408 - type: precision_at_1 value: 34.841 - type: precision_at_10 value: 
7.872999999999999 - type: precision_at_100 value: 1.2449999999999999 - type: precision_at_1000 value: 0.161 - type: precision_at_3 value: 18.993 - type: precision_at_5 value: 13.032 - type: recall_at_1 value: 28.444999999999997 - type: recall_at_10 value: 54.984 - type: recall_at_100 value: 78.342 - type: recall_at_1000 value: 92.77 - type: recall_at_3 value: 42.842999999999996 - type: recall_at_5 value: 47.247 - type: map_at_1 value: 23.072 - type: map_at_10 value: 32.354 - type: map_at_100 value: 33.800000000000004 - type: map_at_1000 value: 33.908 - type: map_at_3 value: 29.232000000000003 - type: map_at_5 value: 31.049 - type: mrr_at_1 value: 29.110000000000003 - type: mrr_at_10 value: 38.03 - type: mrr_at_100 value: 39.032 - type: mrr_at_1000 value: 39.086999999999996 - type: mrr_at_3 value: 35.407 - type: mrr_at_5 value: 36.76 - type: ndcg_at_1 value: 29.110000000000003 - type: ndcg_at_10 value: 38.231 - type: ndcg_at_100 value: 44.425 - type: ndcg_at_1000 value: 46.771 - type: ndcg_at_3 value: 33.095 - type: ndcg_at_5 value: 35.459 - type: precision_at_1 value: 29.110000000000003 - type: precision_at_10 value: 7.215000000000001 - type: precision_at_100 value: 1.2109999999999999 - type: precision_at_1000 value: 0.157 - type: precision_at_3 value: 16.058 - type: precision_at_5 value: 11.644 - type: recall_at_1 value: 23.072 - type: recall_at_10 value: 50.285999999999994 - type: recall_at_100 value: 76.596 - type: recall_at_1000 value: 92.861 - type: recall_at_3 value: 35.702 - type: recall_at_5 value: 42.152 - type: map_at_1 value: 24.937916666666666 - type: map_at_10 value: 33.755250000000004 - type: map_at_100 value: 34.955999999999996 - type: map_at_1000 value: 35.070499999999996 - type: map_at_3 value: 30.98708333333333 - type: map_at_5 value: 32.51491666666666 - type: mrr_at_1 value: 29.48708333333333 - type: mrr_at_10 value: 37.92183333333334 - type: mrr_at_100 value: 38.76583333333333 - type: mrr_at_1000 value: 38.82466666666667 - type: mrr_at_3 value: 35.45125 - type: mrr_at_5 value: 36.827000000000005 - type: ndcg_at_1 value: 29.48708333333333 - type: ndcg_at_10 value: 39.05225 - type: ndcg_at_100 value: 44.25983333333334 - type: ndcg_at_1000 value: 46.568333333333335 - type: ndcg_at_3 value: 34.271583333333325 - type: ndcg_at_5 value: 36.483916666666666 - type: precision_at_1 value: 29.48708333333333 - type: precision_at_10 value: 6.865749999999999 - type: precision_at_100 value: 1.1195833333333332 - type: precision_at_1000 value: 0.15058333333333335 - type: precision_at_3 value: 15.742083333333333 - type: precision_at_5 value: 11.221916666666667 - type: recall_at_1 value: 24.937916666666666 - type: recall_at_10 value: 50.650416666666665 - type: recall_at_100 value: 73.55383333333334 - type: recall_at_1000 value: 89.61691666666667 - type: recall_at_3 value: 37.27808333333334 - type: recall_at_5 value: 42.99475 - type: map_at_1 value: 23.947 - type: map_at_10 value: 30.575000000000003 - type: map_at_100 value: 31.465 - type: map_at_1000 value: 31.558000000000003 - type: map_at_3 value: 28.814 - type: map_at_5 value: 29.738999999999997 - type: mrr_at_1 value: 26.994 - type: mrr_at_10 value: 33.415 - type: mrr_at_100 value: 34.18 - type: mrr_at_1000 value: 34.245 - type: mrr_at_3 value: 31.621 - type: mrr_at_5 value: 32.549 - type: ndcg_at_1 value: 26.994 - type: ndcg_at_10 value: 34.482 - type: ndcg_at_100 value: 38.915 - type: ndcg_at_1000 value: 41.355 - type: ndcg_at_3 value: 31.139 - type: ndcg_at_5 value: 32.589 - type: precision_at_1 value: 26.994 - type: precision_at_10 
value: 5.322 - type: precision_at_100 value: 0.8160000000000001 - type: precision_at_1000 value: 0.11100000000000002 - type: precision_at_3 value: 13.344000000000001 - type: precision_at_5 value: 8.988 - type: recall_at_1 value: 23.947 - type: recall_at_10 value: 43.647999999999996 - type: recall_at_100 value: 63.851 - type: recall_at_1000 value: 82.0 - type: recall_at_3 value: 34.288000000000004 - type: recall_at_5 value: 38.117000000000004 - type: map_at_1 value: 16.197 - type: map_at_10 value: 22.968 - type: map_at_100 value: 24.095 - type: map_at_1000 value: 24.217 - type: map_at_3 value: 20.771 - type: map_at_5 value: 21.995 - type: mrr_at_1 value: 19.511 - type: mrr_at_10 value: 26.55 - type: mrr_at_100 value: 27.500999999999998 - type: mrr_at_1000 value: 27.578999999999997 - type: mrr_at_3 value: 24.421 - type: mrr_at_5 value: 25.604 - type: ndcg_at_1 value: 19.511 - type: ndcg_at_10 value: 27.386 - type: ndcg_at_100 value: 32.828 - type: ndcg_at_1000 value: 35.739 - type: ndcg_at_3 value: 23.405 - type: ndcg_at_5 value: 25.255 - type: precision_at_1 value: 19.511 - type: precision_at_10 value: 5.017 - type: precision_at_100 value: 0.91 - type: precision_at_1000 value: 0.133 - type: precision_at_3 value: 11.023 - type: precision_at_5 value: 8.025 - type: recall_at_1 value: 16.197 - type: recall_at_10 value: 37.09 - type: recall_at_100 value: 61.778 - type: recall_at_1000 value: 82.56599999999999 - type: recall_at_3 value: 26.034000000000002 - type: recall_at_5 value: 30.762 - type: map_at_1 value: 25.41 - type: map_at_10 value: 33.655 - type: map_at_100 value: 34.892 - type: map_at_1000 value: 34.995 - type: map_at_3 value: 30.94 - type: map_at_5 value: 32.303 - type: mrr_at_1 value: 29.477999999999998 - type: mrr_at_10 value: 37.443 - type: mrr_at_100 value: 38.383 - type: mrr_at_1000 value: 38.440000000000005 - type: mrr_at_3 value: 34.949999999999996 - type: mrr_at_5 value: 36.228 - type: ndcg_at_1 value: 29.477999999999998 - type: ndcg_at_10 value: 38.769 - type: ndcg_at_100 value: 44.245000000000005 - type: ndcg_at_1000 value: 46.593 - type: ndcg_at_3 value: 33.623 - type: ndcg_at_5 value: 35.766 - type: precision_at_1 value: 29.477999999999998 - type: precision_at_10 value: 6.455 - type: precision_at_100 value: 1.032 - type: precision_at_1000 value: 0.135 - type: precision_at_3 value: 14.893999999999998 - type: precision_at_5 value: 10.485 - type: recall_at_1 value: 25.41 - type: recall_at_10 value: 50.669 - type: recall_at_100 value: 74.084 - type: recall_at_1000 value: 90.435 - type: recall_at_3 value: 36.679 - type: recall_at_5 value: 41.94 - type: map_at_1 value: 23.339 - type: map_at_10 value: 31.852000000000004 - type: map_at_100 value: 33.411 - type: map_at_1000 value: 33.62 - type: map_at_3 value: 28.929 - type: map_at_5 value: 30.542 - type: mrr_at_1 value: 28.063 - type: mrr_at_10 value: 36.301 - type: mrr_at_100 value: 37.288 - type: mrr_at_1000 value: 37.349 - type: mrr_at_3 value: 33.663 - type: mrr_at_5 value: 35.165 - type: ndcg_at_1 value: 28.063 - type: ndcg_at_10 value: 37.462 - type: ndcg_at_100 value: 43.620999999999995 - type: ndcg_at_1000 value: 46.211 - type: ndcg_at_3 value: 32.68 - type: ndcg_at_5 value: 34.981 - type: precision_at_1 value: 28.063 - type: precision_at_10 value: 7.1739999999999995 - type: precision_at_100 value: 1.486 - type: precision_at_1000 value: 0.23500000000000001 - type: precision_at_3 value: 15.217 - type: precision_at_5 value: 11.265 - type: recall_at_1 value: 23.339 - type: recall_at_10 value: 48.376999999999995 - type: 
recall_at_100 value: 76.053 - type: recall_at_1000 value: 92.455 - type: recall_at_3 value: 34.735 - type: recall_at_5 value: 40.71 - type: map_at_1 value: 18.925 - type: map_at_10 value: 26.017000000000003 - type: map_at_100 value: 27.034000000000002 - type: map_at_1000 value: 27.156000000000002 - type: map_at_3 value: 23.604 - type: map_at_5 value: 24.75 - type: mrr_at_1 value: 20.333000000000002 - type: mrr_at_10 value: 27.915 - type: mrr_at_100 value: 28.788000000000004 - type: mrr_at_1000 value: 28.877999999999997 - type: mrr_at_3 value: 25.446999999999996 - type: mrr_at_5 value: 26.648 - type: ndcg_at_1 value: 20.333000000000002 - type: ndcg_at_10 value: 30.673000000000002 - type: ndcg_at_100 value: 35.618 - type: ndcg_at_1000 value: 38.517 - type: ndcg_at_3 value: 25.71 - type: ndcg_at_5 value: 27.679 - type: precision_at_1 value: 20.333000000000002 - type: precision_at_10 value: 4.9910000000000005 - type: precision_at_100 value: 0.8130000000000001 - type: precision_at_1000 value: 0.117 - type: precision_at_3 value: 11.029 - type: precision_at_5 value: 7.8740000000000006 - type: recall_at_1 value: 18.925 - type: recall_at_10 value: 43.311 - type: recall_at_100 value: 66.308 - type: recall_at_1000 value: 87.49 - type: recall_at_3 value: 29.596 - type: recall_at_5 value: 34.245 - task: type: Retrieval dataset: name: MTEB ClimateFEVER type: climate-fever config: default split: test revision: None metrics: - type: map_at_1 value: 13.714 - type: map_at_10 value: 23.194 - type: map_at_100 value: 24.976000000000003 - type: map_at_1000 value: 25.166 - type: map_at_3 value: 19.709 - type: map_at_5 value: 21.523999999999997 - type: mrr_at_1 value: 30.619000000000003 - type: mrr_at_10 value: 42.563 - type: mrr_at_100 value: 43.386 - type: mrr_at_1000 value: 43.423 - type: mrr_at_3 value: 39.555 - type: mrr_at_5 value: 41.268 - type: ndcg_at_1 value: 30.619000000000003 - type: ndcg_at_10 value: 31.836 - type: ndcg_at_100 value: 38.652 - type: ndcg_at_1000 value: 42.088 - type: ndcg_at_3 value: 26.733 - type: ndcg_at_5 value: 28.435 - type: precision_at_1 value: 30.619000000000003 - type: precision_at_10 value: 9.751999999999999 - type: precision_at_100 value: 1.71 - type: precision_at_1000 value: 0.23500000000000001 - type: precision_at_3 value: 19.935 - type: precision_at_5 value: 14.984 - type: recall_at_1 value: 13.714 - type: recall_at_10 value: 37.26 - type: recall_at_100 value: 60.546 - type: recall_at_1000 value: 79.899 - type: recall_at_3 value: 24.325 - type: recall_at_5 value: 29.725 - task: type: Retrieval dataset: name: MTEB DBPedia type: dbpedia-entity config: default split: test revision: None metrics: - type: map_at_1 value: 8.462 - type: map_at_10 value: 18.637 - type: map_at_100 value: 26.131999999999998 - type: map_at_1000 value: 27.607 - type: map_at_3 value: 13.333 - type: map_at_5 value: 15.654000000000002 - type: mrr_at_1 value: 66.25 - type: mrr_at_10 value: 74.32600000000001 - type: mrr_at_100 value: 74.60900000000001 - type: mrr_at_1000 value: 74.62 - type: mrr_at_3 value: 72.667 - type: mrr_at_5 value: 73.817 - type: ndcg_at_1 value: 53.87499999999999 - type: ndcg_at_10 value: 40.028999999999996 - type: ndcg_at_100 value: 44.199 - type: ndcg_at_1000 value: 51.629999999999995 - type: ndcg_at_3 value: 44.113 - type: ndcg_at_5 value: 41.731 - type: precision_at_1 value: 66.25 - type: precision_at_10 value: 31.900000000000002 - type: precision_at_100 value: 10.043000000000001 - type: precision_at_1000 value: 1.926 - type: precision_at_3 value: 47.417 - type: precision_at_5 
value: 40.65 - type: recall_at_1 value: 8.462 - type: recall_at_10 value: 24.293 - type: recall_at_100 value: 50.146 - type: recall_at_1000 value: 74.034 - type: recall_at_3 value: 14.967 - type: recall_at_5 value: 18.682000000000002 - task: type: Classification dataset: name: MTEB EmotionClassification type: mteb/emotion config: default split: test revision: 4f58c6b202a23cf9a4da393831edf4f9183cad37 metrics: - type: accuracy value: 47.84499999999999 - type: f1 value: 42.48106691979349 - task: type: Retrieval dataset: name: MTEB FEVER type: fever config: default split: test revision: None metrics: - type: map_at_1 value: 74.034 - type: map_at_10 value: 82.76 - type: map_at_100 value: 82.968 - type: map_at_1000 value: 82.98299999999999 - type: map_at_3 value: 81.768 - type: map_at_5 value: 82.418 - type: mrr_at_1 value: 80.048 - type: mrr_at_10 value: 87.64999999999999 - type: mrr_at_100 value: 87.712 - type: mrr_at_1000 value: 87.713 - type: mrr_at_3 value: 87.01100000000001 - type: mrr_at_5 value: 87.466 - type: ndcg_at_1 value: 80.048 - type: ndcg_at_10 value: 86.643 - type: ndcg_at_100 value: 87.361 - type: ndcg_at_1000 value: 87.606 - type: ndcg_at_3 value: 85.137 - type: ndcg_at_5 value: 86.016 - type: precision_at_1 value: 80.048 - type: precision_at_10 value: 10.372 - type: precision_at_100 value: 1.093 - type: precision_at_1000 value: 0.11299999999999999 - type: precision_at_3 value: 32.638 - type: precision_at_5 value: 20.177 - type: recall_at_1 value: 74.034 - type: recall_at_10 value: 93.769 - type: recall_at_100 value: 96.569 - type: recall_at_1000 value: 98.039 - type: recall_at_3 value: 89.581 - type: recall_at_5 value: 91.906 - task: type: Retrieval dataset: name: MTEB FiQA2018 type: fiqa config: default split: test revision: None metrics: - type: map_at_1 value: 20.5 - type: map_at_10 value: 32.857 - type: map_at_100 value: 34.589 - type: map_at_1000 value: 34.778 - type: map_at_3 value: 29.160999999999998 - type: map_at_5 value: 31.033 - type: mrr_at_1 value: 40.123 - type: mrr_at_10 value: 48.776 - type: mrr_at_100 value: 49.495 - type: mrr_at_1000 value: 49.539 - type: mrr_at_3 value: 46.605000000000004 - type: mrr_at_5 value: 47.654 - type: ndcg_at_1 value: 40.123 - type: ndcg_at_10 value: 40.343 - type: ndcg_at_100 value: 46.56 - type: ndcg_at_1000 value: 49.777 - type: ndcg_at_3 value: 37.322 - type: ndcg_at_5 value: 37.791000000000004 - type: precision_at_1 value: 40.123 - type: precision_at_10 value: 11.08 - type: precision_at_100 value: 1.752 - type: precision_at_1000 value: 0.232 - type: precision_at_3 value: 24.897 - type: precision_at_5 value: 17.809 - type: recall_at_1 value: 20.5 - type: recall_at_10 value: 46.388 - type: recall_at_100 value: 69.552 - type: recall_at_1000 value: 89.011 - type: recall_at_3 value: 33.617999999999995 - type: recall_at_5 value: 38.211 - task: type: Retrieval dataset: name: MTEB HotpotQA type: hotpotqa config: default split: test revision: None metrics: - type: map_at_1 value: 39.135999999999996 - type: map_at_10 value: 61.673 - type: map_at_100 value: 62.562 - type: map_at_1000 value: 62.62 - type: map_at_3 value: 58.467999999999996 - type: map_at_5 value: 60.463 - type: mrr_at_1 value: 78.271 - type: mrr_at_10 value: 84.119 - type: mrr_at_100 value: 84.29299999999999 - type: mrr_at_1000 value: 84.299 - type: mrr_at_3 value: 83.18900000000001 - type: mrr_at_5 value: 83.786 - type: ndcg_at_1 value: 78.271 - type: ndcg_at_10 value: 69.935 - type: ndcg_at_100 value: 73.01299999999999 - type: ndcg_at_1000 value: 74.126 - type: 
ndcg_at_3 value: 65.388 - type: ndcg_at_5 value: 67.906 - type: precision_at_1 value: 78.271 - type: precision_at_10 value: 14.562 - type: precision_at_100 value: 1.6969999999999998 - type: precision_at_1000 value: 0.184 - type: precision_at_3 value: 41.841 - type: precision_at_5 value: 27.087 - type: recall_at_1 value: 39.135999999999996 - type: recall_at_10 value: 72.809 - type: recall_at_100 value: 84.86200000000001 - type: recall_at_1000 value: 92.208 - type: recall_at_3 value: 62.76199999999999 - type: recall_at_5 value: 67.718 - task: type: Classification dataset: name: MTEB ImdbClassification type: mteb/imdb config: default split: test revision: 3d86128a09e091d6018b6d26cad27f2739fc2db7 metrics: - type: accuracy value: 90.60600000000001 - type: ap value: 86.6579587804335 - type: f1 value: 90.5938853929307 - task: type: Retrieval dataset: name: MTEB MSMARCO type: msmarco config: default split: dev revision: None metrics: - type: map_at_1 value: 21.852 - type: map_at_10 value: 33.982 - type: map_at_100 value: 35.116 - type: map_at_1000 value: 35.167 - type: map_at_3 value: 30.134 - type: map_at_5 value: 32.340999999999994 - type: mrr_at_1 value: 22.479 - type: mrr_at_10 value: 34.594 - type: mrr_at_100 value: 35.672 - type: mrr_at_1000 value: 35.716 - type: mrr_at_3 value: 30.84 - type: mrr_at_5 value: 32.998 - type: ndcg_at_1 value: 22.493 - type: ndcg_at_10 value: 40.833000000000006 - type: ndcg_at_100 value: 46.357 - type: ndcg_at_1000 value: 47.637 - type: ndcg_at_3 value: 32.995999999999995 - type: ndcg_at_5 value: 36.919000000000004 - type: precision_at_1 value: 22.493 - type: precision_at_10 value: 6.465999999999999 - type: precision_at_100 value: 0.9249999999999999 - type: precision_at_1000 value: 0.104 - type: precision_at_3 value: 14.030999999999999 - type: precision_at_5 value: 10.413 - type: recall_at_1 value: 21.852 - type: recall_at_10 value: 61.934999999999995 - type: recall_at_100 value: 87.611 - type: recall_at_1000 value: 97.441 - type: recall_at_3 value: 40.583999999999996 - type: recall_at_5 value: 49.992999999999995 - task: type: Classification dataset: name: MTEB MTOPDomainClassification (en) type: mteb/mtop_domain config: en split: test revision: d80d48c1eb48d3562165c59d59d0034df9fff0bf metrics: - type: accuracy value: 93.36069311445507 - type: f1 value: 93.16456330371453 - task: type: Classification dataset: name: MTEB MTOPIntentClassification (en) type: mteb/mtop_intent config: en split: test revision: ae001d0e6b1228650b7bd1c2c65fb50ad11a8aba metrics: - type: accuracy value: 74.74692202462381 - type: f1 value: 58.17903579421599 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (en) type: mteb/amazon_massive_intent config: en split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 74.80833893745796 - type: f1 value: 72.70786592684664 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (en) type: mteb/amazon_massive_scenario config: en split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 78.69872225958305 - type: f1 value: 78.61626934504731 - task: type: Clustering dataset: name: MTEB MedrxivClusteringP2P type: mteb/medrxiv-clustering-p2p config: default split: test revision: e7a26af6f3ae46b30dde8737f02c07b1505bcc73 metrics: - type: v_measure value: 33.058658628717694 - task: type: Clustering dataset: name: MTEB MedrxivClusteringS2S type: mteb/medrxiv-clustering-s2s config: default split: test revision: 
35191c8c0dca72d8ff3efcd72aa802307d469663 metrics: - type: v_measure value: 30.85561739360599 - task: type: Reranking dataset: name: MTEB MindSmallReranking type: mteb/mind_small config: default split: test revision: 3bdac13927fdc888b903db93b2ffdbd90b295a69 metrics: - type: map value: 31.290259910144385 - type: mrr value: 32.44223046102856 - task: type: Retrieval dataset: name: MTEB NFCorpus type: nfcorpus config: default split: test revision: None metrics: - type: map_at_1 value: 5.288 - type: map_at_10 value: 12.267999999999999 - type: map_at_100 value: 15.557000000000002 - type: map_at_1000 value: 16.98 - type: map_at_3 value: 8.866 - type: map_at_5 value: 10.418 - type: mrr_at_1 value: 43.653 - type: mrr_at_10 value: 52.681 - type: mrr_at_100 value: 53.315999999999995 - type: mrr_at_1000 value: 53.357 - type: mrr_at_3 value: 51.393 - type: mrr_at_5 value: 51.903999999999996 - type: ndcg_at_1 value: 42.415000000000006 - type: ndcg_at_10 value: 34.305 - type: ndcg_at_100 value: 30.825999999999997 - type: ndcg_at_1000 value: 39.393 - type: ndcg_at_3 value: 39.931 - type: ndcg_at_5 value: 37.519999999999996 - type: precision_at_1 value: 43.653 - type: precision_at_10 value: 25.728 - type: precision_at_100 value: 7.932 - type: precision_at_1000 value: 2.07 - type: precision_at_3 value: 38.184000000000005 - type: precision_at_5 value: 32.879000000000005 - type: recall_at_1 value: 5.288 - type: recall_at_10 value: 16.195 - type: recall_at_100 value: 31.135 - type: recall_at_1000 value: 61.531000000000006 - type: recall_at_3 value: 10.313 - type: recall_at_5 value: 12.754999999999999 - task: type: Retrieval dataset: name: MTEB NQ type: nq config: default split: test revision: None metrics: - type: map_at_1 value: 28.216 - type: map_at_10 value: 42.588 - type: map_at_100 value: 43.702999999999996 - type: map_at_1000 value: 43.739 - type: map_at_3 value: 38.177 - type: map_at_5 value: 40.754000000000005 - type: mrr_at_1 value: 31.866 - type: mrr_at_10 value: 45.189 - type: mrr_at_100 value: 46.056000000000004 - type: mrr_at_1000 value: 46.081 - type: mrr_at_3 value: 41.526999999999994 - type: mrr_at_5 value: 43.704 - type: ndcg_at_1 value: 31.837 - type: ndcg_at_10 value: 50.178 - type: ndcg_at_100 value: 54.98800000000001 - type: ndcg_at_1000 value: 55.812 - type: ndcg_at_3 value: 41.853 - type: ndcg_at_5 value: 46.153 - type: precision_at_1 value: 31.837 - type: precision_at_10 value: 8.43 - type: precision_at_100 value: 1.1119999999999999 - type: precision_at_1000 value: 0.11900000000000001 - type: precision_at_3 value: 19.023 - type: precision_at_5 value: 13.911000000000001 - type: recall_at_1 value: 28.216 - type: recall_at_10 value: 70.8 - type: recall_at_100 value: 91.857 - type: recall_at_1000 value: 97.941 - type: recall_at_3 value: 49.196 - type: recall_at_5 value: 59.072 - task: type: Retrieval dataset: name: MTEB QuoraRetrieval type: quora config: default split: test revision: None metrics: - type: map_at_1 value: 71.22800000000001 - type: map_at_10 value: 85.115 - type: map_at_100 value: 85.72 - type: map_at_1000 value: 85.737 - type: map_at_3 value: 82.149 - type: map_at_5 value: 84.029 - type: mrr_at_1 value: 81.96 - type: mrr_at_10 value: 88.00200000000001 - type: mrr_at_100 value: 88.088 - type: mrr_at_1000 value: 88.089 - type: mrr_at_3 value: 87.055 - type: mrr_at_5 value: 87.715 - type: ndcg_at_1 value: 82.01 - type: ndcg_at_10 value: 88.78 - type: ndcg_at_100 value: 89.91 - type: ndcg_at_1000 value: 90.013 - type: ndcg_at_3 value: 85.957 - type: ndcg_at_5 value: 87.56 - type: 
precision_at_1 value: 82.01 - type: precision_at_10 value: 13.462 - type: precision_at_100 value: 1.528 - type: precision_at_1000 value: 0.157 - type: precision_at_3 value: 37.553 - type: precision_at_5 value: 24.732000000000003 - type: recall_at_1 value: 71.22800000000001 - type: recall_at_10 value: 95.69 - type: recall_at_100 value: 99.531 - type: recall_at_1000 value: 99.98 - type: recall_at_3 value: 87.632 - type: recall_at_5 value: 92.117 - task: type: Clustering dataset: name: MTEB RedditClustering type: mteb/reddit-clustering config: default split: test revision: 24640382cdbf8abc73003fb0fa6d111a705499eb metrics: - type: v_measure value: 52.31768034366916 - task: type: Clustering dataset: name: MTEB RedditClusteringP2P type: mteb/reddit-clustering-p2p config: default split: test revision: 282350215ef01743dc01b456c7f5241fa8937f16 metrics: - type: v_measure value: 60.640266772723606 - task: type: Retrieval dataset: name: MTEB SCIDOCS type: scidocs config: default split: test revision: None metrics: - type: map_at_1 value: 4.7780000000000005 - type: map_at_10 value: 12.299 - type: map_at_100 value: 14.363000000000001 - type: map_at_1000 value: 14.71 - type: map_at_3 value: 8.738999999999999 - type: map_at_5 value: 10.397 - type: mrr_at_1 value: 23.599999999999998 - type: mrr_at_10 value: 34.845 - type: mrr_at_100 value: 35.916 - type: mrr_at_1000 value: 35.973 - type: mrr_at_3 value: 31.7 - type: mrr_at_5 value: 33.535 - type: ndcg_at_1 value: 23.599999999999998 - type: ndcg_at_10 value: 20.522000000000002 - type: ndcg_at_100 value: 28.737000000000002 - type: ndcg_at_1000 value: 34.596 - type: ndcg_at_3 value: 19.542 - type: ndcg_at_5 value: 16.958000000000002 - type: precision_at_1 value: 23.599999999999998 - type: precision_at_10 value: 10.67 - type: precision_at_100 value: 2.259 - type: precision_at_1000 value: 0.367 - type: precision_at_3 value: 18.333 - type: precision_at_5 value: 14.879999999999999 - type: recall_at_1 value: 4.7780000000000005 - type: recall_at_10 value: 21.617 - type: recall_at_100 value: 45.905 - type: recall_at_1000 value: 74.42 - type: recall_at_3 value: 11.148 - type: recall_at_5 value: 15.082999999999998 - task: type: STS dataset: name: MTEB SICK-R type: mteb/sickr-sts config: default split: test revision: a6ea5a8cab320b040a23452cc28066d9beae2cee metrics: - type: cos_sim_pearson value: 83.22372750297885 - type: cos_sim_spearman value: 79.40972617119405 - type: euclidean_pearson value: 80.6101072020434 - type: euclidean_spearman value: 79.53844217225202 - type: manhattan_pearson value: 80.57265975286111 - type: manhattan_spearman value: 79.46335611792958 - task: type: STS dataset: name: MTEB STS12 type: mteb/sts12-sts config: default split: test revision: a0d554a64d88156834ff5ae9920b964011b16384 metrics: - type: cos_sim_pearson value: 85.43713315520749 - type: cos_sim_spearman value: 77.44128693329532 - type: euclidean_pearson value: 81.63869928101123 - type: euclidean_spearman value: 77.29512977961515 - type: manhattan_pearson value: 81.63704185566183 - type: manhattan_spearman value: 77.29909412738657 - task: type: STS dataset: name: MTEB STS13 type: mteb/sts13-sts config: default split: test revision: 7e90230a92c190f1bf69ae9002b8cea547a64cca metrics: - type: cos_sim_pearson value: 81.59451537860527 - type: cos_sim_spearman value: 82.97994638856723 - type: euclidean_pearson value: 82.89478688288412 - type: euclidean_spearman value: 83.58740751053104 - type: manhattan_pearson value: 82.69140840941608 - type: manhattan_spearman value: 83.33665956040555 - 
task: type: STS dataset: name: MTEB STS14 type: mteb/sts14-sts config: default split: test revision: 6031580fec1f6af667f0bd2da0a551cf4f0b2375 metrics: - type: cos_sim_pearson value: 82.00756527711764 - type: cos_sim_spearman value: 81.83560996841379 - type: euclidean_pearson value: 82.07684151976518 - type: euclidean_spearman value: 82.00913052060511 - type: manhattan_pearson value: 82.05690778488794 - type: manhattan_spearman value: 82.02260252019525 - task: type: STS dataset: name: MTEB STS15 type: mteb/sts15-sts config: default split: test revision: ae752c7c21bf194d8b67fd573edf7ae58183cbe3 metrics: - type: cos_sim_pearson value: 86.13710262895447 - type: cos_sim_spearman value: 87.26412811156248 - type: euclidean_pearson value: 86.94151453230228 - type: euclidean_spearman value: 87.5363796699571 - type: manhattan_pearson value: 86.86989424083748 - type: manhattan_spearman value: 87.47315940781353 - task: type: STS dataset: name: MTEB STS16 type: mteb/sts16-sts config: default split: test revision: 4d8694f8f0e0100860b497b999b3dbed754a0513 metrics: - type: cos_sim_pearson value: 83.0230597603627 - type: cos_sim_spearman value: 84.93344499318864 - type: euclidean_pearson value: 84.23754743431141 - type: euclidean_spearman value: 85.09707376597099 - type: manhattan_pearson value: 84.04325160987763 - type: manhattan_spearman value: 84.89353071339909 - task: type: STS dataset: name: MTEB STS17 (en-en) type: mteb/sts17-crosslingual-sts config: en-en split: test revision: af5e6fb845001ecf41f4c1e033ce921939a2a68d metrics: - type: cos_sim_pearson value: 86.75620824563921 - type: cos_sim_spearman value: 87.15065513706398 - type: euclidean_pearson value: 88.26281533633521 - type: euclidean_spearman value: 87.51963738643983 - type: manhattan_pearson value: 88.25599267618065 - type: manhattan_spearman value: 87.58048736047483 - task: type: STS dataset: name: MTEB STS22 (en) type: mteb/sts22-crosslingual-sts config: en split: test revision: 6d1ba47164174a496b7fa5d3569dae26a6813b80 metrics: - type: cos_sim_pearson value: 64.74645319195137 - type: cos_sim_spearman value: 65.29996325037214 - type: euclidean_pearson value: 67.04297794086443 - type: euclidean_spearman value: 65.43841726694343 - type: manhattan_pearson value: 67.39459955690904 - type: manhattan_spearman value: 65.92864704413651 - task: type: STS dataset: name: MTEB STSBenchmark type: mteb/stsbenchmark-sts config: default split: test revision: b0fddb56ed78048fa8b90373c8a3cfc37b684831 metrics: - type: cos_sim_pearson value: 84.31291020270801 - type: cos_sim_spearman value: 85.86473738688068 - type: euclidean_pearson value: 85.65537275064152 - type: euclidean_spearman value: 86.13087454209642 - type: manhattan_pearson value: 85.43946955047609 - type: manhattan_spearman value: 85.91568175344916 - task: type: Reranking dataset: name: MTEB SciDocsRR type: mteb/scidocs-reranking config: default split: test revision: d3c5e1fc0b855ab6097bf1cda04dd73947d7caab metrics: - type: map value: 85.93798118350695 - type: mrr value: 95.93536274908824 - task: type: Retrieval dataset: name: MTEB SciFact type: scifact config: default split: test revision: None metrics: - type: map_at_1 value: 57.594 - type: map_at_10 value: 66.81899999999999 - type: map_at_100 value: 67.368 - type: map_at_1000 value: 67.4 - type: map_at_3 value: 64.061 - type: map_at_5 value: 65.47 - type: mrr_at_1 value: 60.667 - type: mrr_at_10 value: 68.219 - type: mrr_at_100 value: 68.655 - type: mrr_at_1000 value: 68.684 - type: mrr_at_3 value: 66.22200000000001 - type: mrr_at_5 value: 
67.289 - type: ndcg_at_1 value: 60.667 - type: ndcg_at_10 value: 71.275 - type: ndcg_at_100 value: 73.642 - type: ndcg_at_1000 value: 74.373 - type: ndcg_at_3 value: 66.521 - type: ndcg_at_5 value: 68.581 - type: precision_at_1 value: 60.667 - type: precision_at_10 value: 9.433 - type: precision_at_100 value: 1.0699999999999998 - type: precision_at_1000 value: 0.11299999999999999 - type: precision_at_3 value: 25.556 - type: precision_at_5 value: 16.8 - type: recall_at_1 value: 57.594 - type: recall_at_10 value: 83.622 - type: recall_at_100 value: 94.167 - type: recall_at_1000 value: 99.667 - type: recall_at_3 value: 70.64399999999999 - type: recall_at_5 value: 75.983 - task: type: PairClassification dataset: name: MTEB SprintDuplicateQuestions type: mteb/sprintduplicatequestions-pairclassification config: default split: test revision: d66bd1f72af766a5cc4b0ca5e00c162f89e8cc46 metrics: - type: cos_sim_accuracy value: 99.85841584158416 - type: cos_sim_ap value: 96.66996142314342 - type: cos_sim_f1 value: 92.83208020050125 - type: cos_sim_precision value: 93.06532663316584 - type: cos_sim_recall value: 92.60000000000001 - type: dot_accuracy value: 99.85841584158416 - type: dot_ap value: 96.6775307676576 - type: dot_f1 value: 92.69289729177312 - type: dot_precision value: 94.77533960292581 - type: dot_recall value: 90.7 - type: euclidean_accuracy value: 99.86138613861387 - type: euclidean_ap value: 96.6338454403108 - type: euclidean_f1 value: 92.92214357937311 - type: euclidean_precision value: 93.96728016359918 - type: euclidean_recall value: 91.9 - type: manhattan_accuracy value: 99.86237623762376 - type: manhattan_ap value: 96.60370449645053 - type: manhattan_f1 value: 92.91177970423253 - type: manhattan_precision value: 94.7970863683663 - type: manhattan_recall value: 91.10000000000001 - type: max_accuracy value: 99.86237623762376 - type: max_ap value: 96.6775307676576 - type: max_f1 value: 92.92214357937311 - task: type: Clustering dataset: name: MTEB StackExchangeClustering type: mteb/stackexchange-clustering config: default split: test revision: 6cbc1f7b2bc0622f2e39d2c77fa502909748c259 metrics: - type: v_measure value: 60.77977058695198 - task: type: Clustering dataset: name: MTEB StackExchangeClusteringP2P type: mteb/stackexchange-clustering-p2p config: default split: test revision: 815ca46b2622cec33ccafc3735d572c266efdb44 metrics: - type: v_measure value: 35.2725272535638 - task: type: Reranking dataset: name: MTEB StackOverflowDupQuestions type: mteb/stackoverflowdupquestions-reranking config: default split: test revision: e185fbe320c72810689fc5848eb6114e1ef5ec69 metrics: - type: map value: 53.64052466362125 - type: mrr value: 54.533067014684654 - task: type: Summarization dataset: name: MTEB SummEval type: mteb/summeval config: default split: test revision: cda12ad7615edc362dbf25a00fdd61d3b1eaf93c metrics: - type: cos_sim_pearson value: 30.677624219206578 - type: cos_sim_spearman value: 30.121368518123447 - type: dot_pearson value: 30.69870088041608 - type: dot_spearman value: 29.61284927093751 - task: type: Retrieval dataset: name: MTEB TRECCOVID type: trec-covid config: default split: test revision: None metrics: - type: map_at_1 value: 0.22 - type: map_at_10 value: 1.855 - type: map_at_100 value: 9.885 - type: map_at_1000 value: 23.416999999999998 - type: map_at_3 value: 0.637 - type: map_at_5 value: 1.024 - type: mrr_at_1 value: 88.0 - type: mrr_at_10 value: 93.067 - type: mrr_at_100 value: 93.067 - type: mrr_at_1000 value: 93.067 - type: mrr_at_3 value: 92.667 - type: mrr_at_5 
value: 93.067 - type: ndcg_at_1 value: 82.0 - type: ndcg_at_10 value: 75.899 - type: ndcg_at_100 value: 55.115 - type: ndcg_at_1000 value: 48.368 - type: ndcg_at_3 value: 79.704 - type: ndcg_at_5 value: 78.39699999999999 - type: precision_at_1 value: 88.0 - type: precision_at_10 value: 79.60000000000001 - type: precision_at_100 value: 56.06 - type: precision_at_1000 value: 21.206 - type: precision_at_3 value: 84.667 - type: precision_at_5 value: 83.2 - type: recall_at_1 value: 0.22 - type: recall_at_10 value: 2.078 - type: recall_at_100 value: 13.297 - type: recall_at_1000 value: 44.979 - type: recall_at_3 value: 0.6689999999999999 - type: recall_at_5 value: 1.106 - task: type: Retrieval dataset: name: MTEB Touche2020 type: webis-touche2020 config: default split: test revision: None metrics: - type: map_at_1 value: 2.258 - type: map_at_10 value: 10.439 - type: map_at_100 value: 16.89 - type: map_at_1000 value: 18.407999999999998 - type: map_at_3 value: 5.668 - type: map_at_5 value: 7.718 - type: mrr_at_1 value: 32.653 - type: mrr_at_10 value: 51.159 - type: mrr_at_100 value: 51.714000000000006 - type: mrr_at_1000 value: 51.714000000000006 - type: mrr_at_3 value: 47.959 - type: mrr_at_5 value: 50.407999999999994 - type: ndcg_at_1 value: 29.592000000000002 - type: ndcg_at_10 value: 26.037 - type: ndcg_at_100 value: 37.924 - type: ndcg_at_1000 value: 49.126999999999995 - type: ndcg_at_3 value: 30.631999999999998 - type: ndcg_at_5 value: 28.571 - type: precision_at_1 value: 32.653 - type: precision_at_10 value: 22.857 - type: precision_at_100 value: 7.754999999999999 - type: precision_at_1000 value: 1.529 - type: precision_at_3 value: 34.014 - type: precision_at_5 value: 29.796 - type: recall_at_1 value: 2.258 - type: recall_at_10 value: 16.554 - type: recall_at_100 value: 48.439 - type: recall_at_1000 value: 82.80499999999999 - type: recall_at_3 value: 7.283 - type: recall_at_5 value: 10.732 - task: type: Classification dataset: name: MTEB ToxicConversationsClassification type: mteb/toxic_conversations_50k config: default split: test revision: d7c0de2777da35d6aae2200a62c6e0e5af397c4c metrics: - type: accuracy value: 69.8858 - type: ap value: 13.835684144362109 - type: f1 value: 53.803351693244586 - task: type: Classification dataset: name: MTEB TweetSentimentExtractionClassification type: mteb/tweet_sentiment_extraction config: default split: test revision: d604517c81ca91fe16a244d1248fc021f9ecee7a metrics: - type: accuracy value: 60.50650820599886 - type: f1 value: 60.84357825979259 - task: type: Clustering dataset: name: MTEB TwentyNewsgroupsClustering type: mteb/twentynewsgroups-clustering config: default split: test revision: 6125ec4e24fa026cec8a478383ee943acfbd5449 metrics: - type: v_measure value: 48.52131044852134 - task: type: PairClassification dataset: name: MTEB TwitterSemEval2015 type: mteb/twittersemeval2015-pairclassification config: default split: test revision: 70970daeab8776df92f5ea462b6173c0b46fd2d1 metrics: - type: cos_sim_accuracy value: 85.59337187816654 - type: cos_sim_ap value: 73.23925826533437 - type: cos_sim_f1 value: 67.34693877551021 - type: cos_sim_precision value: 62.40432237730752 - type: cos_sim_recall value: 73.13984168865434 - type: dot_accuracy value: 85.31322644096085 - type: dot_ap value: 72.30723963807422 - type: dot_f1 value: 66.47051612112296 - type: dot_precision value: 62.0792305930845 - type: dot_recall value: 71.53034300791556 - type: euclidean_accuracy value: 85.61125350181797 - type: euclidean_ap value: 73.32843720487845 - type: euclidean_f1 value: 
67.36549633745895 - type: euclidean_precision value: 64.60755813953489 - type: euclidean_recall value: 70.36939313984169 - type: manhattan_accuracy value: 85.63509566668654 - type: manhattan_ap value: 73.16658488311325 - type: manhattan_f1 value: 67.20597386434349 - type: manhattan_precision value: 63.60424028268551 - type: manhattan_recall value: 71.2401055408971 - type: max_accuracy value: 85.63509566668654 - type: max_ap value: 73.32843720487845 - type: max_f1 value: 67.36549633745895 - task: type: PairClassification dataset: name: MTEB TwitterURLCorpus type: mteb/twitterurlcorpus-pairclassification config: default split: test revision: 8b6510b0b1fa4e4c4f879467980e9be563ec1cdf metrics: - type: cos_sim_accuracy value: 88.33779640625606 - type: cos_sim_ap value: 84.83868375898157 - type: cos_sim_f1 value: 77.16506154017773 - type: cos_sim_precision value: 74.62064005753327 - type: cos_sim_recall value: 79.88912842623961 - type: dot_accuracy value: 88.02732176815307 - type: dot_ap value: 83.95089283763002 - type: dot_f1 value: 76.29635101196631 - type: dot_precision value: 73.31771720613288 - type: dot_recall value: 79.52725592854944 - type: euclidean_accuracy value: 88.44452206310397 - type: euclidean_ap value: 84.98384576824827 - type: euclidean_f1 value: 77.29311047696697 - type: euclidean_precision value: 74.51232583065381 - type: euclidean_recall value: 80.28949799815214 - type: manhattan_accuracy value: 88.47362906042613 - type: manhattan_ap value: 84.91421462218432 - type: manhattan_f1 value: 77.05107637204792 - type: manhattan_precision value: 74.74484256243214 - type: manhattan_recall value: 79.50415768401602 - type: max_accuracy value: 88.47362906042613 - type: max_ap value: 84.98384576824827 - type: max_f1 value: 77.29311047696697 --- <h1 align="center">FlagEmbedding</h1> <h4 align="center"> <p> <a href=#model-list>Model List</a> | <a href=#frequently-asked-questions>FAQ</a> | <a href=#usage>Usage</a> | <a href="#evaluation">Evaluation</a> | <a href="#train">Train</a> | <a href="#contact">Contact</a> | <a href="#citation">Citation</a> | <a href="#license">License</a> <p> </h4> More details please refer to our Github: [FlagEmbedding](https://github.com/FlagOpen/FlagEmbedding). If you are looking for a model that supports more languages, longer texts, and other retrieval methods, you can try using [bge-m3](https://huggingface.co/BAAI/bge-m3). [English](README.md) | [中文](https://github.com/FlagOpen/FlagEmbedding/blob/master/README_zh.md) FlagEmbedding focuses on retrieval-augmented LLMs, consisting of the following projects currently: - **Long-Context LLM**: [Activation Beacon](https://github.com/FlagOpen/FlagEmbedding/tree/master/Long_LLM/activation_beacon) - **Fine-tuning of LM** : [LM-Cocktail](https://github.com/FlagOpen/FlagEmbedding/tree/master/LM_Cocktail) - **Dense Retrieval**: [BGE-M3](https://github.com/FlagOpen/FlagEmbedding/tree/master/FlagEmbedding/BGE_M3), [LLM Embedder](https://github.com/FlagOpen/FlagEmbedding/tree/master/FlagEmbedding/llm_embedder), [BGE Embedding](https://github.com/FlagOpen/FlagEmbedding/tree/master/FlagEmbedding/baai_general_embedding) - **Reranker Model**: [BGE Reranker](https://github.com/FlagOpen/FlagEmbedding/tree/master/FlagEmbedding/reranker) - **Benchmark**: [C-MTEB](https://github.com/FlagOpen/FlagEmbedding/tree/master/C_MTEB) ## News - 1/30/2024: Release **BGE-M3**, a new member to BGE model series! 
M3 stands for **M**ulti-linguality (100+ languages), **M**ulti-granularity (input length up to 8192), and **M**ulti-functionality (unified dense, lexical, and multi-vector/ColBERT retrieval). It is the first embedding model that supports all three retrieval methods, achieving new SOTA results on the multi-lingual (MIRACL) and cross-lingual (MKQA) benchmarks. [Technical Report](https://github.com/FlagOpen/FlagEmbedding/blob/master/FlagEmbedding/BGE_M3/BGE_M3.pdf) and [Code](https://github.com/FlagOpen/FlagEmbedding/tree/master/FlagEmbedding/BGE_M3). :fire:
- 1/9/2024: Release [Activation-Beacon](https://github.com/FlagOpen/FlagEmbedding/tree/master/Long_LLM/activation_beacon), an effective, efficient, compatible, and low-cost (training) method to extend the context length of LLMs. [Technical Report](https://arxiv.org/abs/2401.03462) :fire:
- 12/24/2023: Release **LLaRA**, a LLaMA-7B-based dense retriever that achieves state-of-the-art performance on MS MARCO and BEIR. The model and code will be open-sourced; please stay tuned. [Technical Report](https://arxiv.org/abs/2312.15503) :fire:
- 11/23/2023: Release [LM-Cocktail](https://github.com/FlagOpen/FlagEmbedding/tree/master/LM_Cocktail), a method to maintain general capabilities during fine-tuning by merging multiple language models. [Technical Report](https://arxiv.org/abs/2311.13534) :fire:
- 10/12/2023: Release [LLM-Embedder](https://github.com/FlagOpen/FlagEmbedding/tree/master/FlagEmbedding/llm_embedder), a unified embedding model that supports the diverse retrieval-augmentation needs of LLMs. [Technical Report](https://arxiv.org/pdf/2310.07554.pdf)
- 09/15/2023: The [technical report](https://arxiv.org/pdf/2309.07597.pdf) of BGE has been released.
- 09/15/2023: The [massive training data](https://data.baai.ac.cn/details/BAAI-MTP) of BGE has been released.
- 09/12/2023: New models:
    - **New reranker models**: released the cross-encoder models `BAAI/bge-reranker-base` and `BAAI/bge-reranker-large`, which are more powerful than the embedding models. We recommend using/fine-tuning them to re-rank the top-k documents returned by embedding models.
    - **Updated embedding models**: released the `bge-*-v1.5` embedding models to alleviate the issue of the similarity distribution and to enhance retrieval ability without an instruction.

<details>
<summary>More</summary>
<!-- ### More -->

- 09/07/2023: Update the [fine-tune code](https://github.com/FlagOpen/FlagEmbedding/blob/master/FlagEmbedding/baai_general_embedding/README.md): add a script to mine hard negatives and support adding an instruction during fine-tuning.
- 08/09/2023: BGE models are integrated into **Langchain**; you can use them like [this](#using-langchain). The C-MTEB **leaderboard** is [available](https://huggingface.co/spaces/mteb/leaderboard).
- 08/05/2023: Release base-scale and small-scale models, the **best performance among models of the same size 🤗**
- 08/02/2023: Release the `bge-large-*` (short for BAAI General Embedding) models, which **rank 1st on the MTEB and C-MTEB benchmarks!** :tada: :tada:
- 08/01/2023: We release the [Chinese Massive Text Embedding Benchmark](https://github.com/FlagOpen/FlagEmbedding/blob/master/C_MTEB) (**C-MTEB**), consisting of 31 test datasets.

</details>


## Model List

`bge` is short for `BAAI general embedding`.
| Model | Language | Inference / Fine-tune | Description | Query instruction for retrieval [1] |
|:-------------------------------|:--------:|:--------:|:--------:|:--------:|
| [BAAI/bge-m3](https://huggingface.co/BAAI/bge-m3) | Multilingual | [Inference](https://github.com/FlagOpen/FlagEmbedding/tree/master/FlagEmbedding/BGE_M3#usage) [Fine-tune](https://github.com/FlagOpen/FlagEmbedding/tree/master/FlagEmbedding/BGE_M3) | Multi-Functionality (dense retrieval, sparse retrieval, multi-vector/ColBERT), Multi-Linguality, and Multi-Granularity (8192 tokens) | |
| [BAAI/llm-embedder](https://huggingface.co/BAAI/llm-embedder) | English | [Inference](./FlagEmbedding/llm_embedder/README.md) [Fine-tune](./FlagEmbedding/llm_embedder/README.md) | a unified embedding model to support diverse retrieval augmentation needs for LLMs | See [README](./FlagEmbedding/llm_embedder/README.md) |
| [BAAI/bge-reranker-large](https://huggingface.co/BAAI/bge-reranker-large) | Chinese and English | [Inference](#usage-for-reranker) [Fine-tune](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/reranker) | a cross-encoder model which is more accurate but less efficient [2] | |
| [BAAI/bge-reranker-base](https://huggingface.co/BAAI/bge-reranker-base) | Chinese and English | [Inference](#usage-for-reranker) [Fine-tune](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/reranker) | a cross-encoder model which is more accurate but less efficient [2] | |
| [BAAI/bge-large-en-v1.5](https://huggingface.co/BAAI/bge-large-en-v1.5) | English | [Inference](#usage-for-embedding-model) [Fine-tune](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/finetune) | version 1.5 with a more reasonable similarity distribution | `Represent this sentence for searching relevant passages: ` |
| [BAAI/bge-base-en-v1.5](https://huggingface.co/BAAI/bge-base-en-v1.5) | English | [Inference](#usage-for-embedding-model) [Fine-tune](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/finetune) | version 1.5 with a more reasonable similarity distribution | `Represent this sentence for searching relevant passages: ` |
| [BAAI/bge-small-en-v1.5](https://huggingface.co/BAAI/bge-small-en-v1.5) | English | [Inference](#usage-for-embedding-model) [Fine-tune](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/finetune) | version 1.5 with a more reasonable similarity distribution | `Represent this sentence for searching relevant passages: ` |
| [BAAI/bge-large-zh-v1.5](https://huggingface.co/BAAI/bge-large-zh-v1.5) | Chinese | [Inference](#usage-for-embedding-model) [Fine-tune](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/finetune) | version 1.5 with a more reasonable similarity distribution | `为这个句子生成表示以用于检索相关文章:` |
| [BAAI/bge-base-zh-v1.5](https://huggingface.co/BAAI/bge-base-zh-v1.5) | Chinese | [Inference](#usage-for-embedding-model) [Fine-tune](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/finetune) | version 1.5 with a more reasonable similarity distribution | `为这个句子生成表示以用于检索相关文章:` |
| [BAAI/bge-small-zh-v1.5](https://huggingface.co/BAAI/bge-small-zh-v1.5) | Chinese | [Inference](#usage-for-embedding-model) [Fine-tune](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/finetune) | version 1.5 with a more reasonable similarity distribution | `为这个句子生成表示以用于检索相关文章:` |
| [BAAI/bge-large-en](https://huggingface.co/BAAI/bge-large-en) | English | [Inference](#usage-for-embedding-model) [Fine-tune](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/finetune) | :trophy: ranks **1st** on the [MTEB](https://huggingface.co/spaces/mteb/leaderboard) leaderboard | `Represent this sentence for searching relevant passages: ` |
| [BAAI/bge-base-en](https://huggingface.co/BAAI/bge-base-en) | English | [Inference](#usage-for-embedding-model) [Fine-tune](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/finetune) | a base-scale model with ability similar to `bge-large-en` | `Represent this sentence for searching relevant passages: ` |
| [BAAI/bge-small-en](https://huggingface.co/BAAI/bge-small-en) | English | [Inference](#usage-for-embedding-model) [Fine-tune](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/finetune) | a small-scale model with competitive performance | `Represent this sentence for searching relevant passages: ` |
| [BAAI/bge-large-zh](https://huggingface.co/BAAI/bge-large-zh) | Chinese | [Inference](#usage-for-embedding-model) [Fine-tune](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/finetune) | :trophy: ranks **1st** on the [C-MTEB](https://github.com/FlagOpen/FlagEmbedding/tree/master/C_MTEB) benchmark | `为这个句子生成表示以用于检索相关文章:` |
| [BAAI/bge-base-zh](https://huggingface.co/BAAI/bge-base-zh) | Chinese | [Inference](#usage-for-embedding-model) [Fine-tune](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/finetune) | a base-scale model with ability similar to `bge-large-zh` | `为这个句子生成表示以用于检索相关文章:` |
| [BAAI/bge-small-zh](https://huggingface.co/BAAI/bge-small-zh) | Chinese | [Inference](#usage-for-embedding-model) [Fine-tune](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/finetune) | a small-scale model with competitive performance | `为这个句子生成表示以用于检索相关文章:` |

[1\]: If you need to search for passages relevant to a query, we suggest adding the instruction to the query; in other cases, no instruction is needed, just use the original query directly. In all cases, **no instruction** needs to be added to passages.

[2\]: Unlike an embedding model, a reranker takes a question and a document as input and directly outputs a similarity score instead of an embedding. To balance accuracy and time cost, a cross-encoder is widely used to re-rank the top-k documents retrieved by simpler models. For example, use a bge embedding model to retrieve the top 100 relevant documents, and then use a bge reranker to re-rank those 100 documents to obtain the final top-3 results; a minimal sketch of this flow follows below.
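The sketch below is illustrative only and combines the `FlagModel` and `FlagReranker` classes documented in the Usage section; the corpus, query, and cut-off values are placeholders, not part of the original card.

```python
# Minimal retrieve-then-rerank sketch; corpus, query, and top-k values are illustrative.
import numpy as np
from FlagEmbedding import FlagModel, FlagReranker

corpus = [
    "The giant panda is a bear species endemic to China.",
    "Paris is the capital of France.",
    "Pandas feed almost exclusively on bamboo.",
]
query = "what is a panda?"

# Stage 1: dense retrieval with a bge embedding model.
embedder = FlagModel(
    "BAAI/bge-large-en-v1.5",
    query_instruction_for_retrieval="Represent this sentence for searching relevant passages: ",
    use_fp16=True,
)
q_emb = embedder.encode_queries([query])  # the instruction is added automatically
p_emb = embedder.encode(corpus)           # passages never get the instruction
dense_scores = (q_emb @ p_emb.T)[0]
candidates = np.argsort(-dense_scores)[:2]  # keep the top candidates (e.g. top-100 in practice)

# Stage 2: re-rank the candidates with the cross-encoder reranker.
reranker = FlagReranker("BAAI/bge-reranker-large", use_fp16=True)
rerank_scores = reranker.compute_score([[query, corpus[i]] for i in candidates])
best = candidates[int(np.argmax(rerank_scores))]
print(corpus[best])
```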
All models have been uploaded to the Huggingface Hub, and you can find them at https://huggingface.co/BAAI. If you cannot access the Huggingface Hub, you can also download the models at https://model.baai.ac.cn/models .


## Frequently asked questions

<details>
<summary>1. How to fine-tune the bge embedding model?</summary>

<!-- ### How to fine-tune bge embedding model? -->
Follow this [example](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/finetune) to prepare data and fine-tune your model.
Some suggestions:
- Mine hard negatives following this [example](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/finetune#hard-negatives), which can improve retrieval performance.
- If you pre-train bge on your own data, the pre-trained model cannot be used to calculate similarity directly; it must be fine-tuned with contrastive learning before computing similarity.
- If the accuracy of the fine-tuned model is still not high enough, it is recommended to use/fine-tune the cross-encoder model (bge-reranker) to re-rank the top-k results. Hard negatives are also needed to fine-tune the reranker.

</details>

<details>
<summary>2. The similarity score between two dissimilar sentences is higher than 0.5</summary>

<!-- ### The similarity score between two dissimilar sentences is higher than 0.5 -->
**We suggest using bge v1.5, which alleviates the issue of the similarity distribution.**

Since we fine-tune the models with contrastive learning at a temperature of 0.01, the similarity scores of the current BGE models lie roughly in the interval \[0.6, 1\]. A similarity score greater than 0.5 therefore does not indicate that the two sentences are similar.

For downstream tasks such as passage retrieval or semantic similarity, **what matters is the relative order of the scores, not their absolute values.**
If you need to filter similar sentences by a similarity threshold, choose an appropriate threshold based on the similarity distribution of your own data (such as 0.8, 0.85, or even 0.9).

</details>

<details>
<summary>3. When does the query instruction need to be used?</summary>

<!-- ### When does the query instruction need to be used -->

For `bge-*-v1.5`, we improved its retrieval ability when no instruction is used; omitting the instruction causes only a slight degradation in retrieval performance compared with using it, so for convenience you can generate embeddings without an instruction in all cases.

For a retrieval task that uses short queries to find long related documents, it is recommended to add the instruction to these short queries.
**The best way to decide whether to add instructions to queries is to choose the setting that achieves better performance on your task.**
In all cases, the instruction is never added to documents/passages.

</details>


## Usage

### Usage for Embedding Model

Here are some examples of using the `bge` models with
[FlagEmbedding](#using-flagembedding), [Sentence-Transformers](#using-sentence-transformers), [Langchain](#using-langchain), or [Huggingface Transformers](#using-huggingface-transformers).

#### Using FlagEmbedding
```
pip install -U FlagEmbedding
```
If this does not work for you, see [FlagEmbedding](https://github.com/FlagOpen/FlagEmbedding/blob/master/FlagEmbedding/baai_general_embedding/README.md) for more ways to install FlagEmbedding.

```python
from FlagEmbedding import FlagModel

sentences_1 = ["样例数据-1", "样例数据-2"]
sentences_2 = ["样例数据-3", "样例数据-4"]
model = FlagModel('BAAI/bge-large-zh-v1.5',
                  query_instruction_for_retrieval="为这个句子生成表示以用于检索相关文章:",
                  use_fp16=True)  # setting use_fp16 to True speeds up computation with a slight performance degradation
embeddings_1 = model.encode(sentences_1)
embeddings_2 = model.encode(sentences_2)
similarity = embeddings_1 @ embeddings_2.T
print(similarity)

# For an s2p (short query to long passage) retrieval task, use encode_queries(),
# which automatically adds the instruction to each query.
# The corpus can still be encoded with encode() or encode_corpus(), since passages do not need the instruction.
queries = ['query_1', 'query_2']
passages = ["样例文档-1", "样例文档-2"]
q_embeddings = model.encode_queries(queries)
p_embeddings = model.encode(passages)
scores = q_embeddings @ p_embeddings.T
```
For the value of the argument `query_instruction_for_retrieval`, see the [Model List](https://github.com/FlagOpen/FlagEmbedding/tree/master#model-list).

By default, FlagModel uses all available GPUs when encoding. Set `os.environ["CUDA_VISIBLE_DEVICES"]` to select specific GPUs, or set `os.environ["CUDA_VISIBLE_DEVICES"]=""` to make all GPUs unavailable, as in the short snippet below.
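For instance (a minimal sketch; the GPU IDs are placeholders), the environment variable generally needs to be set before any CUDA initialization, i.e. before the model is created:

```python
import os
os.environ["CUDA_VISIBLE_DEVICES"] = "0,1"  # only expose GPUs 0 and 1; use "" to force CPU

from FlagEmbedding import FlagModel
model = FlagModel('BAAI/bge-large-zh-v1.5', use_fp16=True)  # encoding now runs only on the selected GPUs
```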
#### Using Sentence-Transformers

You can also use the `bge` models with [sentence-transformers](https://www.SBERT.net):
```
pip install -U sentence-transformers
```
```python
from sentence_transformers import SentenceTransformer

sentences_1 = ["样例数据-1", "样例数据-2"]
sentences_2 = ["样例数据-3", "样例数据-4"]
model = SentenceTransformer('BAAI/bge-large-zh-v1.5')
embeddings_1 = model.encode(sentences_1, normalize_embeddings=True)
embeddings_2 = model.encode(sentences_2, normalize_embeddings=True)
similarity = embeddings_1 @ embeddings_2.T
print(similarity)
```
For an s2p (short query to long passage) retrieval task, each short query should start with an instruction (for the instructions, see the [Model List](https://github.com/FlagOpen/FlagEmbedding/tree/master#model-list)). The instruction is not needed for passages.
```python
from sentence_transformers import SentenceTransformer

queries = ['query_1', 'query_2']
passages = ["样例文档-1", "样例文档-2"]
instruction = "为这个句子生成表示以用于检索相关文章:"

model = SentenceTransformer('BAAI/bge-large-zh-v1.5')
q_embeddings = model.encode([instruction + q for q in queries], normalize_embeddings=True)
p_embeddings = model.encode(passages, normalize_embeddings=True)
scores = q_embeddings @ p_embeddings.T
```

#### Using Langchain

You can use `bge` in langchain like this:
```python
from langchain.embeddings import HuggingFaceBgeEmbeddings

model_name = "BAAI/bge-large-en-v1.5"
model_kwargs = {'device': 'cuda'}
encode_kwargs = {'normalize_embeddings': True}  # set True to compute cosine similarity
model = HuggingFaceBgeEmbeddings(
    model_name=model_name,
    model_kwargs=model_kwargs,
    encode_kwargs=encode_kwargs,
    query_instruction="为这个句子生成表示以用于检索相关文章:"
)
model.query_instruction = "为这个句子生成表示以用于检索相关文章:"
```

#### Using HuggingFace Transformers

With the transformers package, you can use the model like this: first, pass your input through the transformer model, then select the last hidden state of the first token (i.e., [CLS]) as the sentence embedding.
```python
from transformers import AutoTokenizer, AutoModel
import torch

# Sentences we want sentence embeddings for
sentences = ["样例数据-1", "样例数据-2"]

# Load model from HuggingFace Hub
tokenizer = AutoTokenizer.from_pretrained('BAAI/bge-large-zh-v1.5')
model = AutoModel.from_pretrained('BAAI/bge-large-zh-v1.5')
model.eval()

# Tokenize sentences
encoded_input = tokenizer(sentences, padding=True, truncation=True, return_tensors='pt')
# For an s2p (short query to long passage) retrieval task, add an instruction to each query (but not to the passages):
# encoded_input = tokenizer([instruction + q for q in queries], padding=True, truncation=True, return_tensors='pt')

# Compute token embeddings
with torch.no_grad():
    model_output = model(**encoded_input)
    # Perform pooling. In this case, cls pooling.
    sentence_embeddings = model_output[0][:, 0]
# normalize embeddings
sentence_embeddings = torch.nn.functional.normalize(sentence_embeddings, p=2, dim=1)
print("Sentence embeddings:", sentence_embeddings)
```

### Usage for Reranker

Different from the embedding model, the reranker uses a question and a document as input and directly outputs a similarity score instead of an embedding. You can get a relevance score by feeding a query and a passage to the reranker. The reranker is optimized with cross-entropy loss, so the relevance score is not bounded to a specific range.
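As a concrete illustration of the retrieve-then-rerank pattern described above (and in note [2] of the model list), here is a minimal sketch that chains `FlagModel` and `FlagReranker`. The corpus, query, and top-k value are illustrative, and the final sigmoid is just one common convention for mapping the unbounded reranker logits to \[0, 1\], not part of the FlagEmbedding API:

```python
import numpy as np
from FlagEmbedding import FlagModel, FlagReranker

corpus = ["The giant panda is a bear species endemic to China.",
          "Paris is the capital of France.",
          "Pandas feed almost exclusively on bamboo."]
query = "what is panda?"

# Stage 1: bi-encoder retrieval (fast, approximate).
embedder = FlagModel('BAAI/bge-large-en-v1.5',
                     query_instruction_for_retrieval="Represent this sentence for searching relevant passages: ")
q_emb = embedder.encode_queries([query])
p_emb = embedder.encode(corpus)
retrieval_scores = (q_emb @ p_emb.T)[0]
top_k = retrieval_scores.argsort()[::-1][:2]  # keep the 2 best candidates

# Stage 2: cross-encoder re-ranking (slower, more accurate).
reranker = FlagReranker('BAAI/bge-reranker-large', use_fp16=True)
rerank_scores = reranker.compute_score([[query, corpus[i]] for i in top_k])

# The reranker outputs unbounded logits; a sigmoid gives a [0, 1] score if one is needed.
probs = 1 / (1 + np.exp(-np.array(rerank_scores)))
for i, p in sorted(zip(top_k, probs), key=lambda x: -x[1]):
    print(f"{p:.3f}  {corpus[i]}")
```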
#### Using FlagEmbedding
```
pip install -U FlagEmbedding
```

Get relevance scores (higher scores indicate more relevance):
```python
from FlagEmbedding import FlagReranker

reranker = FlagReranker('BAAI/bge-reranker-large', use_fp16=True)  # Setting use_fp16 to True speeds up computation with a slight performance degradation

score = reranker.compute_score(['query', 'passage'])
print(score)

scores = reranker.compute_score([['what is panda?', 'hi'], ['what is panda?', 'The giant panda (Ailuropoda melanoleuca), sometimes called a panda bear or simply panda, is a bear species endemic to China.']])
print(scores)
```

#### Using Huggingface transformers
```python
import torch
from transformers import AutoModelForSequenceClassification, AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained('BAAI/bge-reranker-large')
model = AutoModelForSequenceClassification.from_pretrained('BAAI/bge-reranker-large')
model.eval()

pairs = [['what is panda?', 'hi'], ['what is panda?', 'The giant panda (Ailuropoda melanoleuca), sometimes called a panda bear or simply panda, is a bear species endemic to China.']]
with torch.no_grad():
    inputs = tokenizer(pairs, padding=True, truncation=True, return_tensors='pt', max_length=512)
    scores = model(**inputs, return_dict=True).logits.view(-1, ).float()
    print(scores)
```

#### Usage of the ONNX files
```python
from optimum.onnxruntime import ORTModelForFeatureExtraction  # type: ignore
import torch
from transformers import AutoModel, AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained('BAAI/bge-small-en-v1.5')
model = AutoModel.from_pretrained('BAAI/bge-small-en-v1.5')
model_ort = ORTModelForFeatureExtraction.from_pretrained('BAAI/bge-small-en-v1.5', file_name="onnx/model.onnx")

# Sentences we want sentence embeddings for
sentences = ["样例数据-1", "样例数据-2"]

# Tokenize sentences
encoded_input = tokenizer(sentences, padding=True, truncation=True, return_tensors='pt')
# For an s2p (short query to long passage) retrieval task, add an instruction to each query (but not to the passages):
# encoded_input = tokenizer([instruction + q for q in queries], padding=True, truncation=True, return_tensors='pt')

model_output_ort = model_ort(**encoded_input)
# Compute token embeddings
with torch.no_grad():
    model_output = model(**encoded_input)

# model_output and model_output_ort are identical
```

#### Usage via infinity

It's also possible to deploy the ONNX files with the [infinity_emb](https://github.com/michaelfeil/infinity) pip package. We recommend `device="cuda", engine="torch"` with flash attention on GPU, and `device="cpu", engine="optimum"` for ONNX inference.

```python
import asyncio
from infinity_emb import AsyncEmbeddingEngine, EngineArgs

sentences = ["Embed this sentence via Infinity.", "Paris is in France."]
engine = AsyncEmbeddingEngine.from_args(
    EngineArgs(model_name_or_path="BAAI/bge-small-en-v1.5",
               device="cpu",
               engine="optimum"  # or engine="torch"
))

async def main():
    async with engine:
        embeddings, usage = await engine.embed(sentences=sentences)

asyncio.run(main())
```

## Evaluation

`baai-general-embedding` models achieve **state-of-the-art performance on both the MTEB and C-MTEB leaderboards!** For more details and evaluation tools, see our [scripts](https://github.com/FlagOpen/FlagEmbedding/blob/master/C_MTEB/README.md).
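Leaderboard numbers like those below can be reproduced with the open-source [mteb](https://github.com/embeddings-benchmark/mteb) package. A minimal, illustrative sketch follows; the task selection and output folder are arbitrary, and the exact API may differ across `mteb` versions:

```python
from mteb import MTEB
from sentence_transformers import SentenceTransformer

# Any sentence-transformers-compatible checkpoint can be plugged in here.
model = SentenceTransformer("BAAI/bge-base-en-v1.5")

# Run a single (illustrative) English task; pass more task names to cover the full benchmark.
evaluation = MTEB(tasks=["Banking77Classification"])
results = evaluation.run(model, output_folder="results/bge-base-en-v1.5")
print(results)
```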
- **MTEB**: | Model Name | Dimension | Sequence Length | Average (56) | Retrieval (15) |Clustering (11) | Pair Classification (3) | Reranking (4) | STS (10) | Summarization (1) | Classification (12) | |:----:|:---:|:---:|:---:|:---:|:---:|:---:|:---:|:---:|:---:|:---:| | [BAAI/bge-large-en-v1.5](https://huggingface.co/BAAI/bge-large-en-v1.5) | 1024 | 512 | **64.23** | **54.29** | 46.08 | 87.12 | 60.03 | 83.11 | 31.61 | 75.97 | | [BAAI/bge-base-en-v1.5](https://huggingface.co/BAAI/bge-base-en-v1.5) | 768 | 512 | 63.55 | 53.25 | 45.77 | 86.55 | 58.86 | 82.4 | 31.07 | 75.53 | | [BAAI/bge-small-en-v1.5](https://huggingface.co/BAAI/bge-small-en-v1.5) | 384 | 512 | 62.17 |51.68 | 43.82 | 84.92 | 58.36 | 81.59 | 30.12 | 74.14 | | [bge-large-en](https://huggingface.co/BAAI/bge-large-en) | 1024 | 512 | 63.98 | 53.9 | 46.98 | 85.8 | 59.48 | 81.56 | 32.06 | 76.21 | | [bge-base-en](https://huggingface.co/BAAI/bge-base-en) | 768 | 512 | 63.36 | 53.0 | 46.32 | 85.86 | 58.7 | 81.84 | 29.27 | 75.27 | | [gte-large](https://huggingface.co/thenlper/gte-large) | 1024 | 512 | 63.13 | 52.22 | 46.84 | 85.00 | 59.13 | 83.35 | 31.66 | 73.33 | | [gte-base](https://huggingface.co/thenlper/gte-base) | 768 | 512 | 62.39 | 51.14 | 46.2 | 84.57 | 58.61 | 82.3 | 31.17 | 73.01 | | [e5-large-v2](https://huggingface.co/intfloat/e5-large-v2) | 1024| 512 | 62.25 | 50.56 | 44.49 | 86.03 | 56.61 | 82.05 | 30.19 | 75.24 | | [bge-small-en](https://huggingface.co/BAAI/bge-small-en) | 384 | 512 | 62.11 | 51.82 | 44.31 | 83.78 | 57.97 | 80.72 | 30.53 | 74.37 | | [instructor-xl](https://huggingface.co/hkunlp/instructor-xl) | 768 | 512 | 61.79 | 49.26 | 44.74 | 86.62 | 57.29 | 83.06 | 32.32 | 61.79 | | [e5-base-v2](https://huggingface.co/intfloat/e5-base-v2) | 768 | 512 | 61.5 | 50.29 | 43.80 | 85.73 | 55.91 | 81.05 | 30.28 | 73.84 | | [gte-small](https://huggingface.co/thenlper/gte-small) | 384 | 512 | 61.36 | 49.46 | 44.89 | 83.54 | 57.7 | 82.07 | 30.42 | 72.31 | | [text-embedding-ada-002](https://platform.openai.com/docs/guides/embeddings) | 1536 | 8192 | 60.99 | 49.25 | 45.9 | 84.89 | 56.32 | 80.97 | 30.8 | 70.93 | | [e5-small-v2](https://huggingface.co/intfloat/e5-base-v2) | 384 | 512 | 59.93 | 49.04 | 39.92 | 84.67 | 54.32 | 80.39 | 31.16 | 72.94 | | [sentence-t5-xxl](https://huggingface.co/sentence-transformers/sentence-t5-xxl) | 768 | 512 | 59.51 | 42.24 | 43.72 | 85.06 | 56.42 | 82.63 | 30.08 | 73.42 | | [all-mpnet-base-v2](https://huggingface.co/sentence-transformers/all-mpnet-base-v2) | 768 | 514 | 57.78 | 43.81 | 43.69 | 83.04 | 59.36 | 80.28 | 27.49 | 65.07 | | [sgpt-bloom-7b1-msmarco](https://huggingface.co/bigscience/sgpt-bloom-7b1-msmarco) | 4096 | 2048 | 57.59 | 48.22 | 38.93 | 81.9 | 55.65 | 77.74 | 33.6 | 66.19 | - **C-MTEB**: We create the benchmark C-MTEB for Chinese text embedding which consists of 31 datasets from 6 tasks. Please refer to [C_MTEB](https://github.com/FlagOpen/FlagEmbedding/blob/master/C_MTEB/README.md) for a detailed introduction. 
| Model | Embedding dimension | Avg | Retrieval | STS | PairClassification | Classification | Reranking | Clustering | |:-------------------------------|:--------:|:--------:|:--------:|:--------:|:--------:|:--------:|:--------:|:--------:| | [**BAAI/bge-large-zh-v1.5**](https://huggingface.co/BAAI/bge-large-zh-v1.5) | 1024 | **64.53** | 70.46 | 56.25 | 81.6 | 69.13 | 65.84 | 48.99 | | [BAAI/bge-base-zh-v1.5](https://huggingface.co/BAAI/bge-base-zh-v1.5) | 768 | 63.13 | 69.49 | 53.72 | 79.75 | 68.07 | 65.39 | 47.53 | | [BAAI/bge-small-zh-v1.5](https://huggingface.co/BAAI/bge-small-zh-v1.5) | 512 | 57.82 | 61.77 | 49.11 | 70.41 | 63.96 | 60.92 | 44.18 | | [BAAI/bge-large-zh](https://huggingface.co/BAAI/bge-large-zh) | 1024 | 64.20 | 71.53 | 54.98 | 78.94 | 68.32 | 65.11 | 48.39 | | [bge-large-zh-noinstruct](https://huggingface.co/BAAI/bge-large-zh-noinstruct) | 1024 | 63.53 | 70.55 | 53 | 76.77 | 68.58 | 64.91 | 50.01 | | [BAAI/bge-base-zh](https://huggingface.co/BAAI/bge-base-zh) | 768 | 62.96 | 69.53 | 54.12 | 77.5 | 67.07 | 64.91 | 47.63 | | [multilingual-e5-large](https://huggingface.co/intfloat/multilingual-e5-large) | 1024 | 58.79 | 63.66 | 48.44 | 69.89 | 67.34 | 56.00 | 48.23 | | [BAAI/bge-small-zh](https://huggingface.co/BAAI/bge-small-zh) | 512 | 58.27 | 63.07 | 49.45 | 70.35 | 63.64 | 61.48 | 45.09 | | [m3e-base](https://huggingface.co/moka-ai/m3e-base) | 768 | 57.10 | 56.91 | 50.47 | 63.99 | 67.52 | 59.34 | 47.68 | | [m3e-large](https://huggingface.co/moka-ai/m3e-large) | 1024 | 57.05 | 54.75 | 50.42 | 64.3 | 68.2 | 59.66 | 48.88 | | [multilingual-e5-base](https://huggingface.co/intfloat/multilingual-e5-base) | 768 | 55.48 | 61.63 | 46.49 | 67.07 | 65.35 | 54.35 | 40.68 | | [multilingual-e5-small](https://huggingface.co/intfloat/multilingual-e5-small) | 384 | 55.38 | 59.95 | 45.27 | 66.45 | 65.85 | 53.86 | 45.26 | | [text-embedding-ada-002(OpenAI)](https://platform.openai.com/docs/guides/embeddings/what-are-embeddings) | 1536 | 53.02 | 52.0 | 43.35 | 69.56 | 64.31 | 54.28 | 45.68 | | [luotuo](https://huggingface.co/silk-road/luotuo-bert-medium) | 1024 | 49.37 | 44.4 | 42.78 | 66.62 | 61 | 49.25 | 44.39 | | [text2vec-base](https://huggingface.co/shibing624/text2vec-base-chinese) | 768 | 47.63 | 38.79 | 43.41 | 67.41 | 62.19 | 49.45 | 37.66 | | [text2vec-large](https://huggingface.co/GanymedeNil/text2vec-large-chinese) | 1024 | 47.36 | 41.94 | 44.97 | 70.86 | 60.66 | 49.16 | 30.02 | - **Reranking**: See [C_MTEB](https://github.com/FlagOpen/FlagEmbedding/blob/master/C_MTEB/) for evaluation script. 
| Model | T2Reranking | T2RerankingZh2En\* | T2RerankingEn2Zh\* | MMarcoReranking | CMedQAv1 | CMedQAv2 | Avg |
|:-------------------------------|:--------:|:--------:|:--------:|:--------:|:--------:|:--------:|:--------:|
| text2vec-base-multilingual | 64.66 | 62.94 | 62.51 | 14.37 | 48.46 | 48.6 | 50.26 |
| multilingual-e5-small | 65.62 | 60.94 | 56.41 | 29.91 | 67.26 | 66.54 | 57.78 |
| multilingual-e5-large | 64.55 | 61.61 | 54.28 | 28.6 | 67.42 | 67.92 | 57.4 |
| multilingual-e5-base | 64.21 | 62.13 | 54.68 | 29.5 | 66.23 | 66.98 | 57.29 |
| m3e-base | 66.03 | 62.74 | 56.07 | 17.51 | 77.05 | 76.76 | 59.36 |
| m3e-large | 66.13 | 62.72 | 56.1 | 16.46 | 77.76 | 78.27 | 59.57 |
| bge-base-zh-v1.5 | 66.49 | 63.25 | 57.02 | 29.74 | 80.47 | 84.88 | 63.64 |
| bge-large-zh-v1.5 | 65.74 | 63.39 | 57.03 | 28.74 | 83.45 | 85.44 | 63.97 |
| [BAAI/bge-reranker-base](https://huggingface.co/BAAI/bge-reranker-base) | 67.28 | 63.95 | 60.45 | 35.46 | 81.26 | 84.1 | 65.42 |
| [BAAI/bge-reranker-large](https://huggingface.co/BAAI/bge-reranker-large) | 67.6 | 64.03 | 61.44 | 37.16 | 82.15 | 84.18 | 66.09 |

\* : T2RerankingZh2En and T2RerankingEn2Zh are cross-language retrieval tasks.

## Train

### BAAI Embedding

We pre-train the models using [RetroMAE](https://github.com/staoxiao/RetroMAE) and train them on large-scale pair data using contrastive learning. **You can fine-tune the embedding model on your data following our [examples](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/finetune).** We also provide a [pre-train example](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/pretrain). Note that the goal of pre-training is to reconstruct the text; the pre-trained model cannot be used for similarity calculation directly and needs to be fine-tuned. For more training details for bge, see [baai_general_embedding](https://github.com/FlagOpen/FlagEmbedding/blob/master/FlagEmbedding/baai_general_embedding/README.md).

### BGE Reranker

The cross-encoder performs full attention over the input pair, which is more accurate than the embedding model (i.e., bi-encoder) but also more time-consuming. It can therefore be used to re-rank the top-k documents returned by the embedding model. We train the cross-encoder on multilingual pair data; the data format is the same as for the embedding model, so you can fine-tune it easily following our [example](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/reranker). For more details, please refer to [./FlagEmbedding/reranker/README.md](https://github.com/FlagOpen/FlagEmbedding/tree/master/FlagEmbedding/reranker).

## Contact

If you have any questions or suggestions related to this project, feel free to open an issue or pull request. You can also email Shitao Xiao([email protected]) and Zheng Liu([email protected]).

## Citation

If you find this repository useful, please consider giving it a star :star: and a citation

```
@misc{bge_embedding,
      title={C-Pack: Packaged Resources To Advance General Chinese Embedding},
      author={Shitao Xiao and Zheng Liu and Peitian Zhang and Niklas Muennighoff},
      year={2023},
      eprint={2309.07597},
      archivePrefix={arXiv},
      primaryClass={cs.CL}
}
```

## License

FlagEmbedding is licensed under the [MIT License](https://github.com/FlagOpen/FlagEmbedding/blob/master/LICENSE). The released models can be used for commercial purposes free of charge.
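For readers who want a concrete picture of the contrastive objective mentioned in the Train section (with the 0.01 temperature noted in the FAQ), here is a minimal, illustrative in-batch InfoNCE sketch in PyTorch; it is a simplification for exposition, not the actual FlagEmbedding training code:

```python
import torch
import torch.nn.functional as F

def in_batch_contrastive_loss(q_emb: torch.Tensor, p_emb: torch.Tensor, temperature: float = 0.01) -> torch.Tensor:
    """InfoNCE over a batch: each query's positive passage sits at the same row index;
    every other passage in the batch serves as an in-batch negative."""
    q = F.normalize(q_emb, p=2, dim=1)
    p = F.normalize(p_emb, p=2, dim=1)
    logits = q @ p.T / temperature                     # cosine similarities scaled by the temperature
    labels = torch.arange(q.size(0), device=q.device)  # the diagonal entries are the positives
    return F.cross_entropy(logits, labels)

# Illustrative usage with random "embeddings" of batch size 8 and dimension 768.
q_emb = torch.randn(8, 768)
p_emb = torch.randn(8, 768)
print(in_batch_contrastive_loss(q_emb, p_emb))
```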
[ "BEAR", "BIOSSES", "SCIFACT" ]
avsolatorio/GIST-small-Embedding-v0
avsolatorio
sentence-similarity
[ "sentence-transformers", "pytorch", "safetensors", "bert", "feature-extraction", "mteb", "sentence-similarity", "en", "arxiv:2402.16829", "arxiv:2212.09741", "license:mit", "model-index", "autotrain_compatible", "text-embeddings-inference", "endpoints_compatible", "region:us" ]
"2024-02-03T06:14:01Z"
2024-02-28T00:36:01+00:00
3,475,309
25
--- language: - en library_name: sentence-transformers license: mit pipeline_tag: sentence-similarity tags: - feature-extraction - mteb - sentence-similarity - sentence-transformers model-index: - name: GIST-small-Embedding-v0 results: - task: type: Classification dataset: name: MTEB AmazonCounterfactualClassification (en) type: mteb/amazon_counterfactual config: en split: test revision: e8379541af4e31359cca9fbcf4b00f2671dba205 metrics: - type: accuracy value: 75.26865671641791 - type: ap value: 38.25623793370476 - type: f1 value: 69.26434651320257 - task: type: Classification dataset: name: MTEB AmazonPolarityClassification type: mteb/amazon_polarity config: default split: test revision: e2d317d38cd51312af73b3d32a06d1a08b442046 metrics: - type: accuracy value: 93.232225 - type: ap value: 89.97936072879344 - type: f1 value: 93.22122653806187 - task: type: Classification dataset: name: MTEB AmazonReviewsClassification (en) type: mteb/amazon_reviews_multi config: en split: test revision: 1399c76144fd37290681b995c656ef9b2e06e26d metrics: - type: accuracy value: 49.715999999999994 - type: f1 value: 49.169789920136076 - task: type: Retrieval dataset: name: MTEB ArguAna type: arguana config: default split: test revision: None metrics: - type: map_at_1 value: 34.922 - type: map_at_10 value: 50.524 - type: map_at_100 value: 51.247 - type: map_at_1000 value: 51.249 - type: map_at_3 value: 45.887 - type: map_at_5 value: 48.592999999999996 - type: mrr_at_1 value: 34.922 - type: mrr_at_10 value: 50.382000000000005 - type: mrr_at_100 value: 51.104000000000006 - type: mrr_at_1000 value: 51.105999999999995 - type: mrr_at_3 value: 45.733000000000004 - type: mrr_at_5 value: 48.428 - type: ndcg_at_1 value: 34.922 - type: ndcg_at_10 value: 59.12 - type: ndcg_at_100 value: 62.083999999999996 - type: ndcg_at_1000 value: 62.137 - type: ndcg_at_3 value: 49.616 - type: ndcg_at_5 value: 54.501 - type: precision_at_1 value: 34.922 - type: precision_at_10 value: 8.649 - type: precision_at_100 value: 0.991 - type: precision_at_1000 value: 0.1 - type: precision_at_3 value: 20.152 - type: precision_at_5 value: 14.466999999999999 - type: recall_at_1 value: 34.922 - type: recall_at_10 value: 86.48599999999999 - type: recall_at_100 value: 99.14699999999999 - type: recall_at_1000 value: 99.57300000000001 - type: recall_at_3 value: 60.455000000000005 - type: recall_at_5 value: 72.333 - task: type: Clustering dataset: name: MTEB ArxivClusteringP2P type: mteb/arxiv-clustering-p2p config: default split: test revision: a122ad7f3f0291bf49cc6f4d32aa80929df69d5d metrics: - type: v_measure value: 47.623282347623714 - task: type: Clustering dataset: name: MTEB ArxivClusteringS2S type: mteb/arxiv-clustering-s2s config: default split: test revision: f910caf1a6075f7329cdf8c1a6135696f37dbd53 metrics: - type: v_measure value: 39.86487843524932 - task: type: Reranking dataset: name: MTEB AskUbuntuDupQuestions type: mteb/askubuntudupquestions-reranking config: default split: test revision: 2000358ca161889fa9c082cb41daa8dcfb161a54 metrics: - type: map value: 62.3290291318171 - type: mrr value: 75.2379853141626 - task: type: STS dataset: name: MTEB BIOSSES type: mteb/biosses-sts config: default split: test revision: d3fb88f8f02e40887cd149695127462bbcf29b4a metrics: - type: cos_sim_pearson value: 88.52002953574285 - type: cos_sim_spearman value: 86.98752423842483 - type: euclidean_pearson value: 86.89442688314197 - type: euclidean_spearman value: 86.88631711307471 - type: manhattan_pearson value: 87.03723618507175 - type: manhattan_spearman 
value: 86.76041062975224 - task: type: Classification dataset: name: MTEB Banking77Classification type: mteb/banking77 config: default split: test revision: 0fd18e25b25c072e09e0d92ab615fda904d66300 metrics: - type: accuracy value: 86.64935064935065 - type: f1 value: 86.61903824934998 - task: type: Clustering dataset: name: MTEB BiorxivClusteringP2P type: mteb/biorxiv-clustering-p2p config: default split: test revision: 65b79d1d13f80053f67aca9498d9402c2d9f1f40 metrics: - type: v_measure value: 39.21904455377494 - task: type: Clustering dataset: name: MTEB BiorxivClusteringS2S type: mteb/biorxiv-clustering-s2s config: default split: test revision: 258694dd0231531bc1fd9de6ceb52a0853c6d908 metrics: - type: v_measure value: 35.43342755570654 - task: type: Retrieval dataset: name: MTEB CQADupstackAndroidRetrieval type: BeIR/cqadupstack config: default split: test revision: None metrics: - type: map_at_1 value: 31.843 - type: map_at_10 value: 43.379 - type: map_at_100 value: 44.946999999999996 - type: map_at_1000 value: 45.078 - type: map_at_3 value: 39.598 - type: map_at_5 value: 41.746 - type: mrr_at_1 value: 39.199 - type: mrr_at_10 value: 49.672 - type: mrr_at_100 value: 50.321000000000005 - type: mrr_at_1000 value: 50.365 - type: mrr_at_3 value: 46.805 - type: mrr_at_5 value: 48.579 - type: ndcg_at_1 value: 39.199 - type: ndcg_at_10 value: 50.163999999999994 - type: ndcg_at_100 value: 55.418 - type: ndcg_at_1000 value: 57.353 - type: ndcg_at_3 value: 44.716 - type: ndcg_at_5 value: 47.268 - type: precision_at_1 value: 39.199 - type: precision_at_10 value: 9.757 - type: precision_at_100 value: 1.552 - type: precision_at_1000 value: 0.20500000000000002 - type: precision_at_3 value: 21.602 - type: precision_at_5 value: 15.479000000000001 - type: recall_at_1 value: 31.843 - type: recall_at_10 value: 62.743 - type: recall_at_100 value: 84.78099999999999 - type: recall_at_1000 value: 96.86099999999999 - type: recall_at_3 value: 46.927 - type: recall_at_5 value: 54.355 - type: map_at_1 value: 29.321 - type: map_at_10 value: 39.062999999999995 - type: map_at_100 value: 40.403 - type: map_at_1000 value: 40.534 - type: map_at_3 value: 36.367 - type: map_at_5 value: 37.756 - type: mrr_at_1 value: 35.987 - type: mrr_at_10 value: 44.708999999999996 - type: mrr_at_100 value: 45.394 - type: mrr_at_1000 value: 45.436 - type: mrr_at_3 value: 42.463 - type: mrr_at_5 value: 43.663000000000004 - type: ndcg_at_1 value: 35.987 - type: ndcg_at_10 value: 44.585 - type: ndcg_at_100 value: 49.297999999999995 - type: ndcg_at_1000 value: 51.315 - type: ndcg_at_3 value: 40.569 - type: ndcg_at_5 value: 42.197 - type: precision_at_1 value: 35.987 - type: precision_at_10 value: 8.369 - type: precision_at_100 value: 1.366 - type: precision_at_1000 value: 0.184 - type: precision_at_3 value: 19.427 - type: precision_at_5 value: 13.58 - type: recall_at_1 value: 29.321 - type: recall_at_10 value: 54.333 - type: recall_at_100 value: 74.178 - type: recall_at_1000 value: 86.732 - type: recall_at_3 value: 42.46 - type: recall_at_5 value: 47.089999999999996 - type: map_at_1 value: 38.811 - type: map_at_10 value: 51.114000000000004 - type: map_at_100 value: 52.22 - type: map_at_1000 value: 52.275000000000006 - type: map_at_3 value: 47.644999999999996 - type: map_at_5 value: 49.675000000000004 - type: mrr_at_1 value: 44.389 - type: mrr_at_10 value: 54.459 - type: mrr_at_100 value: 55.208999999999996 - type: mrr_at_1000 value: 55.239000000000004 - type: mrr_at_3 value: 51.954 - type: mrr_at_5 value: 53.571999999999996 - type: ndcg_at_1 
value: 44.389 - type: ndcg_at_10 value: 56.979 - type: ndcg_at_100 value: 61.266 - type: ndcg_at_1000 value: 62.315 - type: ndcg_at_3 value: 51.342 - type: ndcg_at_5 value: 54.33 - type: precision_at_1 value: 44.389 - type: precision_at_10 value: 9.26 - type: precision_at_100 value: 1.226 - type: precision_at_1000 value: 0.136 - type: precision_at_3 value: 22.926 - type: precision_at_5 value: 15.987000000000002 - type: recall_at_1 value: 38.811 - type: recall_at_10 value: 70.841 - type: recall_at_100 value: 89.218 - type: recall_at_1000 value: 96.482 - type: recall_at_3 value: 56.123999999999995 - type: recall_at_5 value: 63.322 - type: map_at_1 value: 25.378 - type: map_at_10 value: 34.311 - type: map_at_100 value: 35.399 - type: map_at_1000 value: 35.482 - type: map_at_3 value: 31.917 - type: map_at_5 value: 33.275 - type: mrr_at_1 value: 27.683999999999997 - type: mrr_at_10 value: 36.575 - type: mrr_at_100 value: 37.492 - type: mrr_at_1000 value: 37.556 - type: mrr_at_3 value: 34.35 - type: mrr_at_5 value: 35.525 - type: ndcg_at_1 value: 27.683999999999997 - type: ndcg_at_10 value: 39.247 - type: ndcg_at_100 value: 44.424 - type: ndcg_at_1000 value: 46.478 - type: ndcg_at_3 value: 34.684 - type: ndcg_at_5 value: 36.886 - type: precision_at_1 value: 27.683999999999997 - type: precision_at_10 value: 5.989 - type: precision_at_100 value: 0.899 - type: precision_at_1000 value: 0.11199999999999999 - type: precision_at_3 value: 14.84 - type: precision_at_5 value: 10.215 - type: recall_at_1 value: 25.378 - type: recall_at_10 value: 52.195 - type: recall_at_100 value: 75.764 - type: recall_at_1000 value: 91.012 - type: recall_at_3 value: 39.885999999999996 - type: recall_at_5 value: 45.279 - type: map_at_1 value: 17.326 - type: map_at_10 value: 25.247000000000003 - type: map_at_100 value: 26.473000000000003 - type: map_at_1000 value: 26.579000000000004 - type: map_at_3 value: 22.466 - type: map_at_5 value: 24.113 - type: mrr_at_1 value: 21.393 - type: mrr_at_10 value: 30.187 - type: mrr_at_100 value: 31.089 - type: mrr_at_1000 value: 31.15 - type: mrr_at_3 value: 27.279999999999998 - type: mrr_at_5 value: 29.127 - type: ndcg_at_1 value: 21.393 - type: ndcg_at_10 value: 30.668 - type: ndcg_at_100 value: 36.543 - type: ndcg_at_1000 value: 39.181 - type: ndcg_at_3 value: 25.552000000000003 - type: ndcg_at_5 value: 28.176000000000002 - type: precision_at_1 value: 21.393 - type: precision_at_10 value: 5.784000000000001 - type: precision_at_100 value: 1.001 - type: precision_at_1000 value: 0.136 - type: precision_at_3 value: 12.231 - type: precision_at_5 value: 9.179 - type: recall_at_1 value: 17.326 - type: recall_at_10 value: 42.415000000000006 - type: recall_at_100 value: 68.605 - type: recall_at_1000 value: 87.694 - type: recall_at_3 value: 28.343 - type: recall_at_5 value: 35.086 - type: map_at_1 value: 29.069 - type: map_at_10 value: 40.027 - type: map_at_100 value: 41.308 - type: map_at_1000 value: 41.412 - type: map_at_3 value: 36.864000000000004 - type: map_at_5 value: 38.641999999999996 - type: mrr_at_1 value: 35.707 - type: mrr_at_10 value: 45.527 - type: mrr_at_100 value: 46.348 - type: mrr_at_1000 value: 46.392 - type: mrr_at_3 value: 43.086 - type: mrr_at_5 value: 44.645 - type: ndcg_at_1 value: 35.707 - type: ndcg_at_10 value: 46.117000000000004 - type: ndcg_at_100 value: 51.468 - type: ndcg_at_1000 value: 53.412000000000006 - type: ndcg_at_3 value: 41.224 - type: ndcg_at_5 value: 43.637 - type: precision_at_1 value: 35.707 - type: precision_at_10 value: 8.459999999999999 - type: 
precision_at_100 value: 1.2970000000000002 - type: precision_at_1000 value: 0.165 - type: precision_at_3 value: 19.731 - type: precision_at_5 value: 14.013 - type: recall_at_1 value: 29.069 - type: recall_at_10 value: 58.343999999999994 - type: recall_at_100 value: 81.296 - type: recall_at_1000 value: 93.974 - type: recall_at_3 value: 44.7 - type: recall_at_5 value: 50.88700000000001 - type: map_at_1 value: 23.905 - type: map_at_10 value: 33.983000000000004 - type: map_at_100 value: 35.372 - type: map_at_1000 value: 35.487 - type: map_at_3 value: 30.902 - type: map_at_5 value: 32.505 - type: mrr_at_1 value: 29.794999999999998 - type: mrr_at_10 value: 39.28 - type: mrr_at_100 value: 40.215 - type: mrr_at_1000 value: 40.276 - type: mrr_at_3 value: 36.701 - type: mrr_at_5 value: 38.105 - type: ndcg_at_1 value: 29.794999999999998 - type: ndcg_at_10 value: 40.041 - type: ndcg_at_100 value: 45.884 - type: ndcg_at_1000 value: 48.271 - type: ndcg_at_3 value: 34.931 - type: ndcg_at_5 value: 37.044 - type: precision_at_1 value: 29.794999999999998 - type: precision_at_10 value: 7.546 - type: precision_at_100 value: 1.216 - type: precision_at_1000 value: 0.158 - type: precision_at_3 value: 16.933 - type: precision_at_5 value: 12.1 - type: recall_at_1 value: 23.905 - type: recall_at_10 value: 52.945 - type: recall_at_100 value: 77.551 - type: recall_at_1000 value: 93.793 - type: recall_at_3 value: 38.364 - type: recall_at_5 value: 44.044 - type: map_at_1 value: 25.24441666666667 - type: map_at_10 value: 34.4595 - type: map_at_100 value: 35.699999999999996 - type: map_at_1000 value: 35.8155 - type: map_at_3 value: 31.608333333333338 - type: map_at_5 value: 33.189416666666666 - type: mrr_at_1 value: 29.825250000000004 - type: mrr_at_10 value: 38.60875 - type: mrr_at_100 value: 39.46575 - type: mrr_at_1000 value: 39.52458333333333 - type: mrr_at_3 value: 36.145166666666675 - type: mrr_at_5 value: 37.57625 - type: ndcg_at_1 value: 29.825250000000004 - type: ndcg_at_10 value: 39.88741666666667 - type: ndcg_at_100 value: 45.17966666666667 - type: ndcg_at_1000 value: 47.440583333333336 - type: ndcg_at_3 value: 35.04591666666666 - type: ndcg_at_5 value: 37.32025 - type: precision_at_1 value: 29.825250000000004 - type: precision_at_10 value: 7.07225 - type: precision_at_100 value: 1.1462499999999998 - type: precision_at_1000 value: 0.15325 - type: precision_at_3 value: 16.18375 - type: precision_at_5 value: 11.526833333333334 - type: recall_at_1 value: 25.24441666666667 - type: recall_at_10 value: 51.744916666666676 - type: recall_at_100 value: 75.04574999999998 - type: recall_at_1000 value: 90.65558333333334 - type: recall_at_3 value: 38.28349999999999 - type: recall_at_5 value: 44.16591666666667 - type: map_at_1 value: 24.237000000000002 - type: map_at_10 value: 30.667 - type: map_at_100 value: 31.592 - type: map_at_1000 value: 31.688 - type: map_at_3 value: 28.810999999999996 - type: map_at_5 value: 29.788999999999998 - type: mrr_at_1 value: 26.840000000000003 - type: mrr_at_10 value: 33.305 - type: mrr_at_100 value: 34.089000000000006 - type: mrr_at_1000 value: 34.159 - type: mrr_at_3 value: 31.518 - type: mrr_at_5 value: 32.469 - type: ndcg_at_1 value: 26.840000000000003 - type: ndcg_at_10 value: 34.541 - type: ndcg_at_100 value: 39.206 - type: ndcg_at_1000 value: 41.592 - type: ndcg_at_3 value: 31.005 - type: ndcg_at_5 value: 32.554 - type: precision_at_1 value: 26.840000000000003 - type: precision_at_10 value: 5.3069999999999995 - type: precision_at_100 value: 0.8340000000000001 - type: precision_at_1000 
value: 0.11199999999999999 - type: precision_at_3 value: 13.292000000000002 - type: precision_at_5 value: 9.049 - type: recall_at_1 value: 24.237000000000002 - type: recall_at_10 value: 43.862 - type: recall_at_100 value: 65.352 - type: recall_at_1000 value: 82.704 - type: recall_at_3 value: 34.009 - type: recall_at_5 value: 37.878 - type: map_at_1 value: 16.482 - type: map_at_10 value: 23.249 - type: map_at_100 value: 24.388 - type: map_at_1000 value: 24.519 - type: map_at_3 value: 20.971 - type: map_at_5 value: 22.192 - type: mrr_at_1 value: 19.993 - type: mrr_at_10 value: 26.985 - type: mrr_at_100 value: 27.975 - type: mrr_at_1000 value: 28.052 - type: mrr_at_3 value: 24.954 - type: mrr_at_5 value: 26.070999999999998 - type: ndcg_at_1 value: 19.993 - type: ndcg_at_10 value: 27.656 - type: ndcg_at_100 value: 33.256 - type: ndcg_at_1000 value: 36.275 - type: ndcg_at_3 value: 23.644000000000002 - type: ndcg_at_5 value: 25.466 - type: precision_at_1 value: 19.993 - type: precision_at_10 value: 5.093 - type: precision_at_100 value: 0.932 - type: precision_at_1000 value: 0.13699999999999998 - type: precision_at_3 value: 11.149000000000001 - type: precision_at_5 value: 8.149000000000001 - type: recall_at_1 value: 16.482 - type: recall_at_10 value: 37.141999999999996 - type: recall_at_100 value: 62.696 - type: recall_at_1000 value: 84.333 - type: recall_at_3 value: 26.031 - type: recall_at_5 value: 30.660999999999998 - type: map_at_1 value: 24.887999999999998 - type: map_at_10 value: 34.101 - type: map_at_100 value: 35.27 - type: map_at_1000 value: 35.370000000000005 - type: map_at_3 value: 31.283 - type: map_at_5 value: 32.72 - type: mrr_at_1 value: 29.011 - type: mrr_at_10 value: 38.004 - type: mrr_at_100 value: 38.879000000000005 - type: mrr_at_1000 value: 38.938 - type: mrr_at_3 value: 35.571999999999996 - type: mrr_at_5 value: 36.789 - type: ndcg_at_1 value: 29.011 - type: ndcg_at_10 value: 39.586 - type: ndcg_at_100 value: 44.939 - type: ndcg_at_1000 value: 47.236 - type: ndcg_at_3 value: 34.4 - type: ndcg_at_5 value: 36.519 - type: precision_at_1 value: 29.011 - type: precision_at_10 value: 6.763 - type: precision_at_100 value: 1.059 - type: precision_at_1000 value: 0.13699999999999998 - type: precision_at_3 value: 15.609 - type: precision_at_5 value: 10.896 - type: recall_at_1 value: 24.887999999999998 - type: recall_at_10 value: 52.42 - type: recall_at_100 value: 75.803 - type: recall_at_1000 value: 91.725 - type: recall_at_3 value: 38.080999999999996 - type: recall_at_5 value: 43.47 - type: map_at_1 value: 23.953 - type: map_at_10 value: 32.649 - type: map_at_100 value: 34.181 - type: map_at_1000 value: 34.398 - type: map_at_3 value: 29.567 - type: map_at_5 value: 31.263 - type: mrr_at_1 value: 29.051 - type: mrr_at_10 value: 37.419999999999995 - type: mrr_at_100 value: 38.396 - type: mrr_at_1000 value: 38.458 - type: mrr_at_3 value: 34.782999999999994 - type: mrr_at_5 value: 36.254999999999995 - type: ndcg_at_1 value: 29.051 - type: ndcg_at_10 value: 38.595 - type: ndcg_at_100 value: 44.6 - type: ndcg_at_1000 value: 47.158 - type: ndcg_at_3 value: 33.56 - type: ndcg_at_5 value: 35.870000000000005 - type: precision_at_1 value: 29.051 - type: precision_at_10 value: 7.53 - type: precision_at_100 value: 1.538 - type: precision_at_1000 value: 0.24 - type: precision_at_3 value: 15.744 - type: precision_at_5 value: 11.542 - type: recall_at_1 value: 23.953 - type: recall_at_10 value: 50.08200000000001 - type: recall_at_100 value: 77.364 - type: recall_at_1000 value: 93.57799999999999 - type: 
recall_at_3 value: 35.432 - type: recall_at_5 value: 41.875 - type: map_at_1 value: 17.72 - type: map_at_10 value: 25.724000000000004 - type: map_at_100 value: 26.846999999999998 - type: map_at_1000 value: 26.964 - type: map_at_3 value: 22.909 - type: map_at_5 value: 24.596999999999998 - type: mrr_at_1 value: 18.854000000000003 - type: mrr_at_10 value: 27.182000000000002 - type: mrr_at_100 value: 28.182000000000002 - type: mrr_at_1000 value: 28.274 - type: mrr_at_3 value: 24.276 - type: mrr_at_5 value: 26.115 - type: ndcg_at_1 value: 18.854000000000003 - type: ndcg_at_10 value: 30.470000000000002 - type: ndcg_at_100 value: 35.854 - type: ndcg_at_1000 value: 38.701 - type: ndcg_at_3 value: 24.924 - type: ndcg_at_5 value: 27.895999999999997 - type: precision_at_1 value: 18.854000000000003 - type: precision_at_10 value: 5.009 - type: precision_at_100 value: 0.835 - type: precision_at_1000 value: 0.117 - type: precision_at_3 value: 10.721 - type: precision_at_5 value: 8.133 - type: recall_at_1 value: 17.72 - type: recall_at_10 value: 43.617 - type: recall_at_100 value: 67.941 - type: recall_at_1000 value: 88.979 - type: recall_at_3 value: 29.044999999999998 - type: recall_at_5 value: 36.044 - task: type: Retrieval dataset: name: MTEB ClimateFEVER type: climate-fever config: default split: test revision: None metrics: - type: map_at_1 value: 13.427 - type: map_at_10 value: 22.935 - type: map_at_100 value: 24.808 - type: map_at_1000 value: 24.994 - type: map_at_3 value: 19.533 - type: map_at_5 value: 21.261 - type: mrr_at_1 value: 30.945 - type: mrr_at_10 value: 43.242000000000004 - type: mrr_at_100 value: 44.013999999999996 - type: mrr_at_1000 value: 44.048 - type: mrr_at_3 value: 40.109 - type: mrr_at_5 value: 42.059999999999995 - type: ndcg_at_1 value: 30.945 - type: ndcg_at_10 value: 31.828 - type: ndcg_at_100 value: 38.801 - type: ndcg_at_1000 value: 42.126999999999995 - type: ndcg_at_3 value: 26.922 - type: ndcg_at_5 value: 28.483999999999998 - type: precision_at_1 value: 30.945 - type: precision_at_10 value: 9.844 - type: precision_at_100 value: 1.7309999999999999 - type: precision_at_1000 value: 0.23500000000000001 - type: precision_at_3 value: 20.477999999999998 - type: precision_at_5 value: 15.27 - type: recall_at_1 value: 13.427 - type: recall_at_10 value: 37.141000000000005 - type: recall_at_100 value: 61.007 - type: recall_at_1000 value: 79.742 - type: recall_at_3 value: 24.431 - type: recall_at_5 value: 29.725 - task: type: Retrieval dataset: name: MTEB DBPedia type: dbpedia-entity config: default split: test revision: None metrics: - type: map_at_1 value: 9.122 - type: map_at_10 value: 18.799 - type: map_at_100 value: 25.724999999999998 - type: map_at_1000 value: 27.205000000000002 - type: map_at_3 value: 14.194999999999999 - type: map_at_5 value: 16.225 - type: mrr_at_1 value: 68.0 - type: mrr_at_10 value: 76.035 - type: mrr_at_100 value: 76.292 - type: mrr_at_1000 value: 76.297 - type: mrr_at_3 value: 74.458 - type: mrr_at_5 value: 75.558 - type: ndcg_at_1 value: 56.00000000000001 - type: ndcg_at_10 value: 39.761 - type: ndcg_at_100 value: 43.736999999999995 - type: ndcg_at_1000 value: 51.146 - type: ndcg_at_3 value: 45.921 - type: ndcg_at_5 value: 42.756 - type: precision_at_1 value: 68.0 - type: precision_at_10 value: 30.275000000000002 - type: precision_at_100 value: 9.343 - type: precision_at_1000 value: 1.8270000000000002 - type: precision_at_3 value: 49.167 - type: precision_at_5 value: 40.699999999999996 - type: recall_at_1 value: 9.122 - type: recall_at_10 value: 
23.669999999999998 - type: recall_at_100 value: 48.719 - type: recall_at_1000 value: 72.033 - type: recall_at_3 value: 15.498999999999999 - type: recall_at_5 value: 18.657 - task: type: Classification dataset: name: MTEB EmotionClassification type: mteb/emotion config: default split: test revision: 4f58c6b202a23cf9a4da393831edf4f9183cad37 metrics: - type: accuracy value: 55.885000000000005 - type: f1 value: 50.70726446938571 - task: type: Retrieval dataset: name: MTEB FEVER type: fever config: default split: test revision: None metrics: - type: map_at_1 value: 75.709 - type: map_at_10 value: 83.345 - type: map_at_100 value: 83.557 - type: map_at_1000 value: 83.572 - type: map_at_3 value: 82.425 - type: map_at_5 value: 83.013 - type: mrr_at_1 value: 81.593 - type: mrr_at_10 value: 88.331 - type: mrr_at_100 value: 88.408 - type: mrr_at_1000 value: 88.41 - type: mrr_at_3 value: 87.714 - type: mrr_at_5 value: 88.122 - type: ndcg_at_1 value: 81.593 - type: ndcg_at_10 value: 86.925 - type: ndcg_at_100 value: 87.67 - type: ndcg_at_1000 value: 87.924 - type: ndcg_at_3 value: 85.5 - type: ndcg_at_5 value: 86.283 - type: precision_at_1 value: 81.593 - type: precision_at_10 value: 10.264 - type: precision_at_100 value: 1.084 - type: precision_at_1000 value: 0.11199999999999999 - type: precision_at_3 value: 32.388 - type: precision_at_5 value: 19.991 - type: recall_at_1 value: 75.709 - type: recall_at_10 value: 93.107 - type: recall_at_100 value: 96.024 - type: recall_at_1000 value: 97.603 - type: recall_at_3 value: 89.08500000000001 - type: recall_at_5 value: 91.15299999999999 - task: type: Retrieval dataset: name: MTEB FiQA2018 type: fiqa config: default split: test revision: None metrics: - type: map_at_1 value: 19.121 - type: map_at_10 value: 31.78 - type: map_at_100 value: 33.497 - type: map_at_1000 value: 33.696 - type: map_at_3 value: 27.893 - type: map_at_5 value: 30.087000000000003 - type: mrr_at_1 value: 38.272 - type: mrr_at_10 value: 47.176 - type: mrr_at_100 value: 48.002 - type: mrr_at_1000 value: 48.044 - type: mrr_at_3 value: 45.086999999999996 - type: mrr_at_5 value: 46.337 - type: ndcg_at_1 value: 38.272 - type: ndcg_at_10 value: 39.145 - type: ndcg_at_100 value: 45.696999999999996 - type: ndcg_at_1000 value: 49.0 - type: ndcg_at_3 value: 36.148 - type: ndcg_at_5 value: 37.023 - type: precision_at_1 value: 38.272 - type: precision_at_10 value: 11.065 - type: precision_at_100 value: 1.7840000000000003 - type: precision_at_1000 value: 0.23600000000000002 - type: precision_at_3 value: 24.587999999999997 - type: precision_at_5 value: 18.056 - type: recall_at_1 value: 19.121 - type: recall_at_10 value: 44.857 - type: recall_at_100 value: 69.774 - type: recall_at_1000 value: 89.645 - type: recall_at_3 value: 32.588 - type: recall_at_5 value: 37.939 - task: type: Retrieval dataset: name: MTEB HotpotQA type: hotpotqa config: default split: test revision: None metrics: - type: map_at_1 value: 36.428 - type: map_at_10 value: 56.891999999999996 - type: map_at_100 value: 57.82899999999999 - type: map_at_1000 value: 57.896 - type: map_at_3 value: 53.762 - type: map_at_5 value: 55.718 - type: mrr_at_1 value: 72.856 - type: mrr_at_10 value: 79.245 - type: mrr_at_100 value: 79.515 - type: mrr_at_1000 value: 79.525 - type: mrr_at_3 value: 78.143 - type: mrr_at_5 value: 78.822 - type: ndcg_at_1 value: 72.856 - type: ndcg_at_10 value: 65.204 - type: ndcg_at_100 value: 68.552 - type: ndcg_at_1000 value: 69.902 - type: ndcg_at_3 value: 60.632 - type: ndcg_at_5 value: 63.161 - type: precision_at_1 value: 
72.856 - type: precision_at_10 value: 13.65 - type: precision_at_100 value: 1.6260000000000001 - type: precision_at_1000 value: 0.181 - type: precision_at_3 value: 38.753 - type: precision_at_5 value: 25.251 - type: recall_at_1 value: 36.428 - type: recall_at_10 value: 68.25099999999999 - type: recall_at_100 value: 81.317 - type: recall_at_1000 value: 90.27 - type: recall_at_3 value: 58.13 - type: recall_at_5 value: 63.126000000000005 - task: type: Classification dataset: name: MTEB ImdbClassification type: mteb/imdb config: default split: test revision: 3d86128a09e091d6018b6d26cad27f2739fc2db7 metrics: - type: accuracy value: 89.4868 - type: ap value: 84.88319192880247 - type: f1 value: 89.46144458052846 - task: type: Retrieval dataset: name: MTEB MSMARCO type: msmarco config: default split: dev revision: None metrics: - type: map_at_1 value: 21.282999999999998 - type: map_at_10 value: 33.045 - type: map_at_100 value: 34.238 - type: map_at_1000 value: 34.29 - type: map_at_3 value: 29.305999999999997 - type: map_at_5 value: 31.391000000000002 - type: mrr_at_1 value: 21.92 - type: mrr_at_10 value: 33.649 - type: mrr_at_100 value: 34.791 - type: mrr_at_1000 value: 34.837 - type: mrr_at_3 value: 30.0 - type: mrr_at_5 value: 32.039 - type: ndcg_at_1 value: 21.92 - type: ndcg_at_10 value: 39.729 - type: ndcg_at_100 value: 45.484 - type: ndcg_at_1000 value: 46.817 - type: ndcg_at_3 value: 32.084 - type: ndcg_at_5 value: 35.789 - type: precision_at_1 value: 21.92 - type: precision_at_10 value: 6.297 - type: precision_at_100 value: 0.918 - type: precision_at_1000 value: 0.10300000000000001 - type: precision_at_3 value: 13.639000000000001 - type: precision_at_5 value: 10.054 - type: recall_at_1 value: 21.282999999999998 - type: recall_at_10 value: 60.343999999999994 - type: recall_at_100 value: 86.981 - type: recall_at_1000 value: 97.205 - type: recall_at_3 value: 39.452999999999996 - type: recall_at_5 value: 48.333 - task: type: Classification dataset: name: MTEB MTOPDomainClassification (en) type: mteb/mtop_domain config: en split: test revision: d80d48c1eb48d3562165c59d59d0034df9fff0bf metrics: - type: accuracy value: 95.47879616963064 - type: f1 value: 95.21800589958251 - task: type: Classification dataset: name: MTEB MTOPIntentClassification (en) type: mteb/mtop_intent config: en split: test revision: ae001d0e6b1228650b7bd1c2c65fb50ad11a8aba metrics: - type: accuracy value: 79.09256725946192 - type: f1 value: 60.554043889452515 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (en) type: mteb/amazon_massive_intent config: en split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 75.53463349024882 - type: f1 value: 73.14418495756476 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (en) type: mteb/amazon_massive_scenario config: en split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 79.22663080026899 - type: f1 value: 79.331456217501 - task: type: Clustering dataset: name: MTEB MedrxivClusteringP2P type: mteb/medrxiv-clustering-p2p config: default split: test revision: e7a26af6f3ae46b30dde8737f02c07b1505bcc73 metrics: - type: v_measure value: 34.50316010430136 - task: type: Clustering dataset: name: MTEB MedrxivClusteringS2S type: mteb/medrxiv-clustering-s2s config: default split: test revision: 35191c8c0dca72d8ff3efcd72aa802307d469663 metrics: - type: v_measure value: 32.15612040042282 - task: type: Reranking dataset: name: MTEB MindSmallReranking type: 
mteb/mind_small config: default split: test revision: 3bdac13927fdc888b903db93b2ffdbd90b295a69 metrics: - type: map value: 32.36227552557184 - type: mrr value: 33.57901344209811 - task: type: Retrieval dataset: name: MTEB NFCorpus type: nfcorpus config: default split: test revision: None metrics: - type: map_at_1 value: 5.6610000000000005 - type: map_at_10 value: 12.992 - type: map_at_100 value: 16.756999999999998 - type: map_at_1000 value: 18.25 - type: map_at_3 value: 9.471 - type: map_at_5 value: 11.116 - type: mrr_at_1 value: 43.653 - type: mrr_at_10 value: 53.388999999999996 - type: mrr_at_100 value: 53.982 - type: mrr_at_1000 value: 54.033 - type: mrr_at_3 value: 51.858000000000004 - type: mrr_at_5 value: 53.019000000000005 - type: ndcg_at_1 value: 41.641 - type: ndcg_at_10 value: 34.691 - type: ndcg_at_100 value: 32.305 - type: ndcg_at_1000 value: 41.132999999999996 - type: ndcg_at_3 value: 40.614 - type: ndcg_at_5 value: 38.456 - type: precision_at_1 value: 43.344 - type: precision_at_10 value: 25.881999999999998 - type: precision_at_100 value: 8.483 - type: precision_at_1000 value: 2.131 - type: precision_at_3 value: 38.803 - type: precision_at_5 value: 33.87 - type: recall_at_1 value: 5.6610000000000005 - type: recall_at_10 value: 16.826 - type: recall_at_100 value: 32.939 - type: recall_at_1000 value: 65.161 - type: recall_at_3 value: 10.756 - type: recall_at_5 value: 13.331000000000001 - task: type: Retrieval dataset: name: MTEB NQ type: nq config: default split: test revision: None metrics: - type: map_at_1 value: 26.692 - type: map_at_10 value: 41.065000000000005 - type: map_at_100 value: 42.235 - type: map_at_1000 value: 42.27 - type: map_at_3 value: 36.635 - type: map_at_5 value: 39.219 - type: mrr_at_1 value: 30.214000000000002 - type: mrr_at_10 value: 43.443 - type: mrr_at_100 value: 44.326 - type: mrr_at_1000 value: 44.352000000000004 - type: mrr_at_3 value: 39.623999999999995 - type: mrr_at_5 value: 41.898 - type: ndcg_at_1 value: 30.214000000000002 - type: ndcg_at_10 value: 48.692 - type: ndcg_at_100 value: 53.671 - type: ndcg_at_1000 value: 54.522000000000006 - type: ndcg_at_3 value: 40.245 - type: ndcg_at_5 value: 44.580999999999996 - type: precision_at_1 value: 30.214000000000002 - type: precision_at_10 value: 8.3 - type: precision_at_100 value: 1.1079999999999999 - type: precision_at_1000 value: 0.11900000000000001 - type: precision_at_3 value: 18.521 - type: precision_at_5 value: 13.627 - type: recall_at_1 value: 26.692 - type: recall_at_10 value: 69.699 - type: recall_at_100 value: 91.425 - type: recall_at_1000 value: 97.78099999999999 - type: recall_at_3 value: 47.711 - type: recall_at_5 value: 57.643 - task: type: Retrieval dataset: name: MTEB QuoraRetrieval type: quora config: default split: test revision: None metrics: - type: map_at_1 value: 70.962 - type: map_at_10 value: 84.772 - type: map_at_100 value: 85.402 - type: map_at_1000 value: 85.418 - type: map_at_3 value: 81.89 - type: map_at_5 value: 83.685 - type: mrr_at_1 value: 81.67 - type: mrr_at_10 value: 87.681 - type: mrr_at_100 value: 87.792 - type: mrr_at_1000 value: 87.79299999999999 - type: mrr_at_3 value: 86.803 - type: mrr_at_5 value: 87.392 - type: ndcg_at_1 value: 81.69 - type: ndcg_at_10 value: 88.429 - type: ndcg_at_100 value: 89.66 - type: ndcg_at_1000 value: 89.762 - type: ndcg_at_3 value: 85.75 - type: ndcg_at_5 value: 87.20700000000001 - type: precision_at_1 value: 81.69 - type: precision_at_10 value: 13.395000000000001 - type: precision_at_100 value: 1.528 - type: precision_at_1000 
value: 0.157 - type: precision_at_3 value: 37.507000000000005 - type: precision_at_5 value: 24.614 - type: recall_at_1 value: 70.962 - type: recall_at_10 value: 95.339 - type: recall_at_100 value: 99.543 - type: recall_at_1000 value: 99.984 - type: recall_at_3 value: 87.54899999999999 - type: recall_at_5 value: 91.726 - task: type: Clustering dataset: name: MTEB RedditClustering type: mteb/reddit-clustering config: default split: test revision: 24640382cdbf8abc73003fb0fa6d111a705499eb metrics: - type: v_measure value: 55.506631779239555 - task: type: Clustering dataset: name: MTEB RedditClusteringP2P type: mteb/reddit-clustering-p2p config: default split: test revision: 282350215ef01743dc01b456c7f5241fa8937f16 metrics: - type: v_measure value: 60.63731341848479 - task: type: Retrieval dataset: name: MTEB SCIDOCS type: scidocs config: default split: test revision: None metrics: - type: map_at_1 value: 4.852 - type: map_at_10 value: 13.175 - type: map_at_100 value: 15.623999999999999 - type: map_at_1000 value: 16.002 - type: map_at_3 value: 9.103 - type: map_at_5 value: 11.068999999999999 - type: mrr_at_1 value: 23.9 - type: mrr_at_10 value: 35.847 - type: mrr_at_100 value: 36.968 - type: mrr_at_1000 value: 37.018 - type: mrr_at_3 value: 32.300000000000004 - type: mrr_at_5 value: 34.14 - type: ndcg_at_1 value: 23.9 - type: ndcg_at_10 value: 21.889 - type: ndcg_at_100 value: 30.903000000000002 - type: ndcg_at_1000 value: 36.992000000000004 - type: ndcg_at_3 value: 20.274 - type: ndcg_at_5 value: 17.773 - type: precision_at_1 value: 23.9 - type: precision_at_10 value: 11.61 - type: precision_at_100 value: 2.4539999999999997 - type: precision_at_1000 value: 0.391 - type: precision_at_3 value: 19.133 - type: precision_at_5 value: 15.740000000000002 - type: recall_at_1 value: 4.852 - type: recall_at_10 value: 23.507 - type: recall_at_100 value: 49.775000000000006 - type: recall_at_1000 value: 79.308 - type: recall_at_3 value: 11.637 - type: recall_at_5 value: 15.947 - task: type: STS dataset: name: MTEB SICK-R type: mteb/sickr-sts config: default split: test revision: a6ea5a8cab320b040a23452cc28066d9beae2cee metrics: - type: cos_sim_pearson value: 86.03345827446948 - type: cos_sim_spearman value: 80.53174518259549 - type: euclidean_pearson value: 83.44538971660883 - type: euclidean_spearman value: 80.57344324098692 - type: manhattan_pearson value: 83.36528808195459 - type: manhattan_spearman value: 80.48931287157902 - task: type: STS dataset: name: MTEB STS12 type: mteb/sts12-sts config: default split: test revision: a0d554a64d88156834ff5ae9920b964011b16384 metrics: - type: cos_sim_pearson value: 85.21363088257881 - type: cos_sim_spearman value: 75.56589127055523 - type: euclidean_pearson value: 82.32868324521908 - type: euclidean_spearman value: 75.31928550664554 - type: manhattan_pearson value: 82.31332875713211 - type: manhattan_spearman value: 75.35376322099196 - task: type: STS dataset: name: MTEB STS13 type: mteb/sts13-sts config: default split: test revision: 7e90230a92c190f1bf69ae9002b8cea547a64cca metrics: - type: cos_sim_pearson value: 85.09085593258487 - type: cos_sim_spearman value: 86.26355088415221 - type: euclidean_pearson value: 85.49646115361156 - type: euclidean_spearman value: 86.20652472228703 - type: manhattan_pearson value: 85.44084081123815 - type: manhattan_spearman value: 86.1162623448951 - task: type: STS dataset: name: MTEB STS14 type: mteb/sts14-sts config: default split: test revision: 6031580fec1f6af667f0bd2da0a551cf4f0b2375 metrics: - type: cos_sim_pearson value: 
84.68250248349368 - type: cos_sim_spearman value: 82.29883673695083 - type: euclidean_pearson value: 84.17633035446019 - type: euclidean_spearman value: 82.19990511264791 - type: manhattan_pearson value: 84.17408410692279 - type: manhattan_spearman value: 82.249873895981 - task: type: STS dataset: name: MTEB STS15 type: mteb/sts15-sts config: default split: test revision: ae752c7c21bf194d8b67fd573edf7ae58183cbe3 metrics: - type: cos_sim_pearson value: 87.31878760045024 - type: cos_sim_spearman value: 88.7364409031183 - type: euclidean_pearson value: 88.230537618603 - type: euclidean_spearman value: 88.76484309646318 - type: manhattan_pearson value: 88.17689071136469 - type: manhattan_spearman value: 88.72809249037928 - task: type: STS dataset: name: MTEB STS16 type: mteb/sts16-sts config: default split: test revision: 4d8694f8f0e0100860b497b999b3dbed754a0513 metrics: - type: cos_sim_pearson value: 83.41078559110638 - type: cos_sim_spearman value: 85.27439135411049 - type: euclidean_pearson value: 84.5333571592088 - type: euclidean_spearman value: 85.25645460575957 - type: manhattan_pearson value: 84.38428921610226 - type: manhattan_spearman value: 85.07796040798796 - task: type: STS dataset: name: MTEB STS17 (en-en) type: mteb/sts17-crosslingual-sts config: en-en split: test revision: af5e6fb845001ecf41f4c1e033ce921939a2a68d metrics: - type: cos_sim_pearson value: 88.82374132382576 - type: cos_sim_spearman value: 89.02101343562433 - type: euclidean_pearson value: 89.50729765458932 - type: euclidean_spearman value: 89.04184772869253 - type: manhattan_pearson value: 89.51737904059856 - type: manhattan_spearman value: 89.12925950440676 - task: type: STS dataset: name: MTEB STS22 (en) type: mteb/sts22-crosslingual-sts config: en split: test revision: 6d1ba47164174a496b7fa5d3569dae26a6813b80 metrics: - type: cos_sim_pearson value: 67.56051823873482 - type: cos_sim_spearman value: 68.50988748185463 - type: euclidean_pearson value: 69.16524346147456 - type: euclidean_spearman value: 68.61859952449579 - type: manhattan_pearson value: 69.10618915706995 - type: manhattan_spearman value: 68.36401769459522 - task: type: STS dataset: name: MTEB STSBenchmark type: mteb/stsbenchmark-sts config: default split: test revision: b0fddb56ed78048fa8b90373c8a3cfc37b684831 metrics: - type: cos_sim_pearson value: 85.4159693872625 - type: cos_sim_spearman value: 87.07819121764247 - type: euclidean_pearson value: 87.03013260863153 - type: euclidean_spearman value: 87.06547293631309 - type: manhattan_pearson value: 86.8129744446062 - type: manhattan_spearman value: 86.88494096335627 - task: type: Reranking dataset: name: MTEB SciDocsRR type: mteb/scidocs-reranking config: default split: test revision: d3c5e1fc0b855ab6097bf1cda04dd73947d7caab metrics: - type: map value: 86.47758088996575 - type: mrr value: 96.17891458577733 - task: type: Retrieval dataset: name: MTEB SciFact type: scifact config: default split: test revision: None metrics: - type: map_at_1 value: 57.538999999999994 - type: map_at_10 value: 66.562 - type: map_at_100 value: 67.254 - type: map_at_1000 value: 67.284 - type: map_at_3 value: 63.722 - type: map_at_5 value: 65.422 - type: mrr_at_1 value: 60.0 - type: mrr_at_10 value: 67.354 - type: mrr_at_100 value: 67.908 - type: mrr_at_1000 value: 67.93299999999999 - type: mrr_at_3 value: 65.056 - type: mrr_at_5 value: 66.43900000000001 - type: ndcg_at_1 value: 60.0 - type: ndcg_at_10 value: 70.858 - type: ndcg_at_100 value: 73.67099999999999 - type: ndcg_at_1000 value: 74.26700000000001 - type: ndcg_at_3 
value: 65.911 - type: ndcg_at_5 value: 68.42200000000001 - type: precision_at_1 value: 60.0 - type: precision_at_10 value: 9.4 - type: precision_at_100 value: 1.083 - type: precision_at_1000 value: 0.11299999999999999 - type: precision_at_3 value: 25.444 - type: precision_at_5 value: 17.0 - type: recall_at_1 value: 57.538999999999994 - type: recall_at_10 value: 83.233 - type: recall_at_100 value: 95.667 - type: recall_at_1000 value: 100.0 - type: recall_at_3 value: 69.883 - type: recall_at_5 value: 76.19399999999999 - task: type: PairClassification dataset: name: MTEB SprintDuplicateQuestions type: mteb/sprintduplicatequestions-pairclassification config: default split: test revision: d66bd1f72af766a5cc4b0ca5e00c162f89e8cc46 metrics: - type: cos_sim_accuracy value: 99.82574257425742 - type: cos_sim_ap value: 95.78722833053911 - type: cos_sim_f1 value: 90.94650205761316 - type: cos_sim_precision value: 93.64406779661016 - type: cos_sim_recall value: 88.4 - type: dot_accuracy value: 99.83366336633664 - type: dot_ap value: 95.89733601612964 - type: dot_f1 value: 91.41981613891727 - type: dot_precision value: 93.42379958246346 - type: dot_recall value: 89.5 - type: euclidean_accuracy value: 99.82574257425742 - type: euclidean_ap value: 95.75227035138846 - type: euclidean_f1 value: 90.96509240246407 - type: euclidean_precision value: 93.45991561181435 - type: euclidean_recall value: 88.6 - type: manhattan_accuracy value: 99.82574257425742 - type: manhattan_ap value: 95.76278266220176 - type: manhattan_f1 value: 91.08409321175279 - type: manhattan_precision value: 92.29979466119097 - type: manhattan_recall value: 89.9 - type: max_accuracy value: 99.83366336633664 - type: max_ap value: 95.89733601612964 - type: max_f1 value: 91.41981613891727 - task: type: Clustering dataset: name: MTEB StackExchangeClustering type: mteb/stackexchange-clustering config: default split: test revision: 6cbc1f7b2bc0622f2e39d2c77fa502909748c259 metrics: - type: v_measure value: 61.905425988638605 - task: type: Clustering dataset: name: MTEB StackExchangeClusteringP2P type: mteb/stackexchange-clustering-p2p config: default split: test revision: 815ca46b2622cec33ccafc3735d572c266efdb44 metrics: - type: v_measure value: 36.159589881679736 - task: type: Reranking dataset: name: MTEB StackOverflowDupQuestions type: mteb/stackoverflowdupquestions-reranking config: default split: test revision: e185fbe320c72810689fc5848eb6114e1ef5ec69 metrics: - type: map value: 53.0605499476397 - type: mrr value: 53.91594516594517 - task: type: Summarization dataset: name: MTEB SummEval type: mteb/summeval config: default split: test revision: cda12ad7615edc362dbf25a00fdd61d3b1eaf93c metrics: - type: cos_sim_pearson value: 30.202718009067 - type: cos_sim_spearman value: 31.136199912366987 - type: dot_pearson value: 30.66329011927951 - type: dot_spearman value: 30.107664909625107 - task: type: Retrieval dataset: name: MTEB TRECCOVID type: trec-covid config: default split: test revision: None metrics: - type: map_at_1 value: 0.209 - type: map_at_10 value: 1.712 - type: map_at_100 value: 9.464 - type: map_at_1000 value: 23.437 - type: map_at_3 value: 0.609 - type: map_at_5 value: 0.9440000000000001 - type: mrr_at_1 value: 78.0 - type: mrr_at_10 value: 86.833 - type: mrr_at_100 value: 86.833 - type: mrr_at_1000 value: 86.833 - type: mrr_at_3 value: 85.333 - type: mrr_at_5 value: 86.833 - type: ndcg_at_1 value: 74.0 - type: ndcg_at_10 value: 69.14 - type: ndcg_at_100 value: 53.047999999999995 - type: ndcg_at_1000 value: 48.577 - type: ndcg_at_3 
value: 75.592 - type: ndcg_at_5 value: 72.509 - type: precision_at_1 value: 78.0 - type: precision_at_10 value: 73.0 - type: precision_at_100 value: 54.44 - type: precision_at_1000 value: 21.326 - type: precision_at_3 value: 80.667 - type: precision_at_5 value: 77.2 - type: recall_at_1 value: 0.209 - type: recall_at_10 value: 1.932 - type: recall_at_100 value: 13.211999999999998 - type: recall_at_1000 value: 45.774 - type: recall_at_3 value: 0.644 - type: recall_at_5 value: 1.0290000000000001 - task: type: Retrieval dataset: name: MTEB Touche2020 type: webis-touche2020 config: default split: test revision: None metrics: - type: map_at_1 value: 2.609 - type: map_at_10 value: 8.334999999999999 - type: map_at_100 value: 14.604000000000001 - type: map_at_1000 value: 16.177 - type: map_at_3 value: 4.87 - type: map_at_5 value: 6.3149999999999995 - type: mrr_at_1 value: 32.653 - type: mrr_at_10 value: 45.047 - type: mrr_at_100 value: 45.808 - type: mrr_at_1000 value: 45.808 - type: mrr_at_3 value: 41.497 - type: mrr_at_5 value: 43.231 - type: ndcg_at_1 value: 30.612000000000002 - type: ndcg_at_10 value: 21.193 - type: ndcg_at_100 value: 34.97 - type: ndcg_at_1000 value: 46.69 - type: ndcg_at_3 value: 24.823 - type: ndcg_at_5 value: 22.872999999999998 - type: precision_at_1 value: 32.653 - type: precision_at_10 value: 17.959 - type: precision_at_100 value: 7.4079999999999995 - type: precision_at_1000 value: 1.537 - type: precision_at_3 value: 25.85 - type: precision_at_5 value: 22.448999999999998 - type: recall_at_1 value: 2.609 - type: recall_at_10 value: 13.63 - type: recall_at_100 value: 47.014 - type: recall_at_1000 value: 83.176 - type: recall_at_3 value: 5.925 - type: recall_at_5 value: 8.574 - task: type: Classification dataset: name: MTEB ToxicConversationsClassification type: mteb/toxic_conversations_50k config: default split: test revision: d7c0de2777da35d6aae2200a62c6e0e5af397c4c metrics: - type: accuracy value: 72.80239999999999 - type: ap value: 15.497911013214791 - type: f1 value: 56.258411577947285 - task: type: Classification dataset: name: MTEB TweetSentimentExtractionClassification type: mteb/tweet_sentiment_extraction config: default split: test revision: d604517c81ca91fe16a244d1248fc021f9ecee7a metrics: - type: accuracy value: 61.00452744765139 - type: f1 value: 61.42228624410908 - task: type: Clustering dataset: name: MTEB TwentyNewsgroupsClustering type: mteb/twentynewsgroups-clustering config: default split: test revision: 6125ec4e24fa026cec8a478383ee943acfbd5449 metrics: - type: v_measure value: 50.00516915962345 - task: type: PairClassification dataset: name: MTEB TwitterSemEval2015 type: mteb/twittersemeval2015-pairclassification config: default split: test revision: 70970daeab8776df92f5ea462b6173c0b46fd2d1 metrics: - type: cos_sim_accuracy value: 85.62317458425225 - type: cos_sim_ap value: 72.95115658063823 - type: cos_sim_f1 value: 66.78976523344764 - type: cos_sim_precision value: 66.77215189873418 - type: cos_sim_recall value: 66.80738786279683 - type: dot_accuracy value: 85.62317458425225 - type: dot_ap value: 73.10385271517778 - type: dot_f1 value: 66.94853829427399 - type: dot_precision value: 61.74242424242424 - type: dot_recall value: 73.11345646437995 - type: euclidean_accuracy value: 85.65893783155511 - type: euclidean_ap value: 72.87428208473992 - type: euclidean_f1 value: 66.70919994896005 - type: euclidean_precision value: 64.5910551025451 - type: euclidean_recall value: 68.97097625329816 - type: manhattan_accuracy value: 85.59933241938367 - type: 
manhattan_ap value: 72.67282695064966 - type: manhattan_f1 value: 66.67537215983286 - type: manhattan_precision value: 66.00310237849017 - type: manhattan_recall value: 67.36147757255937 - type: max_accuracy value: 85.65893783155511 - type: max_ap value: 73.10385271517778 - type: max_f1 value: 66.94853829427399 - task: type: PairClassification dataset: name: MTEB TwitterURLCorpus type: mteb/twitterurlcorpus-pairclassification config: default split: test revision: 8b6510b0b1fa4e4c4f879467980e9be563ec1cdf metrics: - type: cos_sim_accuracy value: 88.69096130709822 - type: cos_sim_ap value: 85.30326978668063 - type: cos_sim_f1 value: 77.747088683189 - type: cos_sim_precision value: 75.4491451753115 - type: cos_sim_recall value: 80.189405605174 - type: dot_accuracy value: 88.43870066363954 - type: dot_ap value: 84.62999949222983 - type: dot_f1 value: 77.3074661963551 - type: dot_precision value: 73.93871239808828 - type: dot_recall value: 80.99784416384355 - type: euclidean_accuracy value: 88.70066363953894 - type: euclidean_ap value: 85.34184508966621 - type: euclidean_f1 value: 77.76871756856931 - type: euclidean_precision value: 74.97855917667239 - type: euclidean_recall value: 80.77456113335386 - type: manhattan_accuracy value: 88.68319944114566 - type: manhattan_ap value: 85.3026464242333 - type: manhattan_f1 value: 77.66561049296294 - type: manhattan_precision value: 74.4665818849795 - type: manhattan_recall value: 81.15183246073299 - type: max_accuracy value: 88.70066363953894 - type: max_ap value: 85.34184508966621 - type: max_f1 value: 77.76871756856931 --- <h1 align="center">GIST small Embedding v0</h1> *GISTEmbed: Guided In-sample Selection of Training Negatives for Text Embedding Fine-tuning* The model is fine-tuned on top of the [BAAI/bge-small-en-v1.5](https://huggingface.co/BAAI/bge-small-en-v1.5) using the [MEDI dataset](https://github.com/xlang-ai/instructor-embedding.git) augmented with mined triplets from the [MTEB Classification](https://huggingface.co/mteb) training dataset (excluding data from the Amazon Polarity Classification task). The model does not require any instruction for generating embeddings. This means that queries for retrieval tasks can be directly encoded without crafting instructions. Technical paper: [GISTEmbed: Guided In-sample Selection of Training Negatives for Text Embedding Fine-tuning](https://arxiv.org/abs/2402.16829) # Data The dataset used is a compilation of the MEDI and MTEB Classification training datasets. Third-party datasets may be subject to additional terms and conditions under their associated licenses. A HuggingFace Dataset version of the compiled dataset, and the specific revision used to train the model, is available: - Dataset: [avsolatorio/medi-data-mteb_avs_triplets](https://huggingface.co/datasets/avsolatorio/medi-data-mteb_avs_triplets) - Revision: 238a0499b6e6b690cc64ea56fde8461daa8341bb The dataset contains a `task_type` key, which can be used to select only the mteb classification tasks (prefixed with `mteb_`). The **MEDI Dataset** is published in the following paper: [One Embedder, Any Task: Instruction-Finetuned Text Embeddings](https://arxiv.org/abs/2212.09741). The MTEB Benchmark results of the GIST embedding model, compared with the base model, suggest that the fine-tuning dataset has perturbed the model considerably, which resulted in significant improvements in certain tasks while adversely degrading performance in some. The retrieval performance for the TRECCOVID task is of note. 
The fine-tuning dataset does not contain significant knowledge about COVID-19, and this lack of coverage could have caused the observed performance degradation. We found some evidence, detailed in the paper, that thematic coverage of the fine-tuning data can affect downstream performance.

# Usage

The model can be easily loaded using the Sentence Transformers library.

```python
import torch.nn.functional as F
from sentence_transformers import SentenceTransformer

revision = None  # Replace with the specific revision to ensure reproducibility if the model is updated.

model = SentenceTransformer("avsolatorio/GIST-small-Embedding-v0", revision=revision)

texts = [
    "Illustration of the REaLTabFormer model. The left block shows the non-relational tabular data model using GPT-2 with a causal LM head. In contrast, the right block shows how a relational dataset's child table is modeled using a sequence-to-sequence (Seq2Seq) model. The Seq2Seq model uses the observations in the parent table to condition the generation of the observations in the child table. The trained GPT-2 model on the parent table, with weights frozen, is also used as the encoder in the Seq2Seq model.",
    "Predicting human mobility holds significant practical value, with applications ranging from enhancing disaster risk planning to simulating epidemic spread. In this paper, we present the GeoFormer, a decoder-only transformer model adapted from the GPT architecture to forecast human mobility.",
    "As the economies of Southeast Asia continue adopting digital technologies, policy makers increasingly ask how to prepare the workforce for emerging labor demands. However, little is known about the skills that workers need to adapt to these changes",
]

# Compute embeddings
embeddings = model.encode(texts, convert_to_tensor=True)

# Compute cosine-similarity for each pair of sentences
scores = F.cosine_similarity(embeddings.unsqueeze(1), embeddings.unsqueeze(0), dim=-1)

print(scores.cpu().numpy())
```

# Training Parameters

Below are the training parameters used to fine-tune the model:

```
Epochs = 40
Warmup ratio = 0.1
Learning rate = 5e-6
Batch size = 16
Checkpoint step = 102000
Contrastive loss temperature = 0.01
```

# Evaluation

The model was evaluated using the [MTEB Evaluation](https://huggingface.co/mteb) suite.

# Citation

Please cite our work if you use GISTEmbed or the datasets we published in your projects or research. 🤗

```
@article{solatorio2024gistembed,
  title={GISTEmbed: Guided In-sample Selection of Training Negatives for Text Embedding Fine-tuning},
  author={Aivin V. Solatorio},
  journal={arXiv preprint arXiv:2402.16829},
  year={2024},
  URL={https://arxiv.org/abs/2402.16829},
  eprint={2402.16829},
  archivePrefix={arXiv},
  primaryClass={cs.LG}
}
```

# Acknowledgements

This work is supported by the "KCP IV - Exploring Data Use in the Development Economics Literature using Large Language Models (AI and LLMs)" project funded by the [Knowledge for Change Program (KCP)](https://www.worldbank.org/en/programs/knowledge-for-change) of the World Bank - RA-P503405-RESE-TF0C3444.

The findings, interpretations, and conclusions expressed in this material are entirely those of the authors. They do not necessarily represent the views of the International Bank for Reconstruction and Development/World Bank and its affiliated organizations, or those of the Executive Directors of the World Bank or the governments they represent.
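As a quick complement to the Evaluation section above, the sketch below shows one way the model could be scored with the `mteb` package. The task choice (`Banking77Classification`) and the output folder are illustrative assumptions, not part of the original setup, and the exact API may differ between `mteb` releases.

```python
# Minimal sketch (assumes the pre-2.x `mteb` API); not the exact evaluation script used for the card.
from mteb import MTEB
from sentence_transformers import SentenceTransformer

model = SentenceTransformer("avsolatorio/GIST-small-Embedding-v0")

# Pick any MTEB task name here; Banking77Classification is only an example.
evaluation = MTEB(tasks=["Banking77Classification"])
results = evaluation.run(model, output_folder="results/GIST-small-Embedding-v0")
print(results)
```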
[ "BIOSSES", "SCIFACT" ]
answerdotai/answerai-colbert-small-v1
answerdotai
null
[ "onnx", "safetensors", "bert", "ColBERT", "RAGatouille", "passage-retrieval", "en", "arxiv:2407.20750", "license:apache-2.0", "region:us" ]
"2024-08-12T13:02:24Z"
2024-11-18T23:45:37+00:00
2,961,317
144
---
language:
- en
license: apache-2.0
tags:
- ColBERT
- RAGatouille
- passage-retrieval
---

# answerai-colbert-small-v1

**answerai-colbert-small-v1** is a new, proof-of-concept model by [Answer.AI](https://answer.ai), showing the strong performance that multi-vector models can reach with the new [JaColBERTv2.5 training recipe](https://arxiv.org/abs/2407.20750) and some extra tweaks, even with just **33 million parameters**.

While being MiniLM-sized, it outperforms all previous similarly-sized models on common benchmarks, and even outperforms much larger popular models such as e5-large-v2 or bge-base-en-v1.5.

For more information about this model or how it was trained, head over to the [announcement blogpost](https://www.answer.ai/posts/2024-08-13-small-but-mighty-colbert.html).

## Usage

### Installation

This model was designed with the upcoming RAGatouille overhaul in mind. However, it's compatible with all recent ColBERT implementations!

To use it, you can either use the Stanford ColBERT library or RAGatouille. You can install both, or either, by simply running:

```sh
pip install --upgrade ragatouille
pip install --upgrade colbert-ai
```

If you're interested in using this model as a re-ranker (it vastly outperforms cross-encoders of its size!), you can do so via the [rerankers](https://github.com/AnswerDotAI/rerankers) library:

```sh
pip install --upgrade rerankers[transformers]
```

### Rerankers

```python
from rerankers import Reranker

ranker = Reranker("answerdotai/answerai-colbert-small-v1", model_type='colbert')

docs = ['Hayao Miyazaki is a Japanese director, born on [...]', 'Walt Disney is an American author, director and [...]', ...]

query = 'Who directed spirited away?'
ranker.rank(query=query, docs=docs)
```

### RAGatouille

```python
from ragatouille import RAGPretrainedModel

RAG = RAGPretrainedModel.from_pretrained("answerdotai/answerai-colbert-small-v1")

docs = ['Hayao Miyazaki is a Japanese director, born on [...]', 'Walt Disney is an American author, director and [...]', ...]

RAG.index(docs, index_name="ghibli")

query = 'Who directed spirited away?'
results = RAG.search(query)
```

### Stanford ColBERT

#### Indexing

```python
from colbert import Indexer
from colbert.infra import Run, RunConfig, ColBERTConfig

INDEX_NAME = "DEFINE_HERE"

if __name__ == "__main__":
    config = ColBERTConfig(
        doc_maxlen=512,
        nbits=2
    )
    indexer = Indexer(
        checkpoint="answerdotai/answerai-colbert-small-v1",
        config=config,
    )

    docs = ['Hayao Miyazaki is a Japanese director, born on [...]', 'Walt Disney is an American author, director and [...]', ...]

    indexer.index(name=INDEX_NAME, collection=docs)
```

#### Querying

```python
from colbert import Searcher
from colbert.infra import Run, RunConfig, ColBERTConfig

INDEX_NAME = "THE_INDEX_YOU_CREATED"
k = 10

if __name__ == "__main__":
    config = ColBERTConfig(
        query_maxlen=32  # Adjust as needed; we recommend the nearest higher multiple of 16 to your expected query length
    )
    searcher = Searcher(
        index=INDEX_NAME,
        config=config
    )

    query = 'Who directed spirited away?'
    results = searcher.search(query, k=k)
```

#### Extracting Vectors

Finally, if you want to extract individual vectors, you can use the model this way:

```python
from colbert.infra import ColBERTConfig
from colbert.modeling.checkpoint import Checkpoint

ckpt = Checkpoint("answerdotai/answerai-colbert-small-v1", colbert_config=ColBERTConfig())
embedded_query = ckpt.queryFromText(["Who dubs Howl's in English?"], bsize=16)
```

## Results

### Against single-vector models

![](https://www.answer.ai/posts/images/minicolbert/small_results.png)

| Dataset / Model | answer-colbert-s | snowflake-s | bge-small-en | bge-base-en |
|:-----------------|:-----------------:|:-------------:|:-------------:|:-------------:|
| **Size** | 33M (1x) | 33M (1x) | 33M (1x) | **109M (3.3x)** |
| **BEIR AVG** | **53.79** | 51.99 | 51.68 | 53.25 |
| **FiQA2018** | **41.15** | 40.65 | 40.34 | 40.65 |
| **HotpotQA** | **76.11** | 66.54 | 69.94 | 72.6 |
| **MSMARCO** | **43.5** | 40.23 | 40.83 | 41.35 |
| **NQ** | **59.1** | 50.9 | 50.18 | 54.15 |
| **TRECCOVID** | **84.59** | 80.12 | 75.9 | 78.07 |
| **ArguAna** | 50.09 | 57.59 | 59.55 | **63.61** |
| **ClimateFEVER** | 33.07 | **35.2** | 31.84 | 31.17 |
| **CQADupstackRetrieval** | 38.75 | 39.65 | 39.05 | **42.35** |
| **DBPedia** | **45.58** | 41.02 | 40.03 | 40.77 |
| **FEVER** | **90.96** | 87.13 | 86.64 | 86.29 |
| **NFCorpus** | 37.3 | 34.92 | 34.3 | **37.39** |
| **QuoraRetrieval** | 87.72 | 88.41 | 88.78 | **88.9** |
| **SCIDOCS** | 18.42 | **21.82** | 20.52 | 21.73 |
| **SciFact** | **74.77** | 72.22 | 71.28 | 74.04 |
| **Touche2020** | 25.69 | 23.48 | **26.04** | 25.7 |

### Against ColBERTv2.0

| Dataset / Model | answerai-colbert-small-v1 | ColBERTv2.0 |
|:-----------------|:-----------------------:|:------------:|
| **BEIR AVG** | **53.79** | 50.02 |
| **DBPedia** | **45.58** | 44.6 |
| **FiQA2018** | **41.15** | 35.6 |
| **NQ** | **59.1** | 56.2 |
| **HotpotQA** | **76.11** | 66.7 |
| **NFCorpus** | **37.3** | 33.8 |
| **TRECCOVID** | **84.59** | 73.3 |
| **Touche2020** | 25.69 | **26.3** |
| **ArguAna** | **50.09** | 46.3 |
| **ClimateFEVER** | **33.07** | 17.6 |
| **FEVER** | **90.96** | 78.5 |
| **QuoraRetrieval** | **87.72** | 85.2 |
| **SCIDOCS** | **18.42** | 15.4 |
| **SciFact** | **74.77** | 69.3 |

## Referencing

We'll most likely eventually release a technical report. In the meantime, if you use this model or other models following the JaColBERTv2.5 recipe and would like to give us credit, please cite the JaColBERTv2.5 journal pre-print:

```
@article{clavie2024jacolbertv2,
  title={JaColBERTv2.5: Optimising Multi-Vector Retrievers to Create State-of-the-Art Japanese Retrievers with Constrained Resources},
  author={Clavi{\'e}, Benjamin},
  journal={arXiv preprint arXiv:2407.20750},
  year={2024}
}
```
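One practical detail in the querying example above is the `query_maxlen` guidance (the nearest higher multiple of 16 to your expected query length). The helper below is a small, hedged sketch of one reading of that rounding rule; the function name and the whitespace-based token count are illustrative assumptions, and a real setup would count tokens with the model's tokenizer.

```python
# Hedged sketch: round a query's token count up to the nearest multiple of 16,
# mirroring the query_maxlen guidance in the Stanford ColBERT querying example above.
def recommended_query_maxlen(query_token_count: int, multiple: int = 16) -> int:
    # e.g. 4 -> 16, 20 -> 32, 33 -> 48
    return ((query_token_count + multiple - 1) // multiple) * multiple

# Illustrative usage with a rough whitespace split; use the model's tokenizer for real counts.
query = 'Who directed spirited away?'
print(recommended_query_maxlen(len(query.split())))  # -> 16
```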
[ "SCIFACT" ]
Alibaba-NLP/gte-base-en-v1.5
Alibaba-NLP
sentence-similarity
[ "transformers", "onnx", "safetensors", "new", "feature-extraction", "sentence-transformers", "gte", "mteb", "transformers.js", "sentence-similarity", "custom_code", "en", "arxiv:2407.19669", "arxiv:2308.03281", "license:apache-2.0", "model-index", "text-embeddings-inference", "endpoints_compatible", "region:us" ]
"2024-04-20T02:53:42Z"
2024-11-15T14:10:57+00:00
2,607,332
63
--- language: - en library_name: transformers license: apache-2.0 tags: - sentence-transformers - gte - mteb - transformers.js - sentence-similarity model-index: - name: gte-base-en-v1.5 results: - task: type: Classification dataset: name: MTEB AmazonCounterfactualClassification (en) type: mteb/amazon_counterfactual config: en split: test revision: e8379541af4e31359cca9fbcf4b00f2671dba205 metrics: - type: accuracy value: 74.7910447761194 - type: ap value: 37.053785713650626 - type: f1 value: 68.51101510998551 - task: type: Classification dataset: name: MTEB AmazonPolarityClassification type: mteb/amazon_polarity config: default split: test revision: e2d317d38cd51312af73b3d32a06d1a08b442046 metrics: - type: accuracy value: 93.016875 - type: ap value: 89.17750268426342 - type: f1 value: 92.9970977240524 - task: type: Classification dataset: name: MTEB AmazonReviewsClassification (en) type: mteb/amazon_reviews_multi config: en split: test revision: 1399c76144fd37290681b995c656ef9b2e06e26d metrics: - type: accuracy value: 53.312000000000005 - type: f1 value: 52.98175784163017 - task: type: Retrieval dataset: name: MTEB ArguAna type: mteb/arguana config: default split: test revision: c22ab2a51041ffd869aaddef7af8d8215647e41a metrics: - type: map_at_1 value: 38.193 - type: map_at_10 value: 54.848 - type: map_at_100 value: 55.388000000000005 - type: map_at_1000 value: 55.388999999999996 - type: map_at_3 value: 50.427 - type: map_at_5 value: 53.105000000000004 - type: mrr_at_1 value: 39.047 - type: mrr_at_10 value: 55.153 - type: mrr_at_100 value: 55.686 - type: mrr_at_1000 value: 55.688 - type: mrr_at_3 value: 50.676 - type: mrr_at_5 value: 53.417 - type: ndcg_at_1 value: 38.193 - type: ndcg_at_10 value: 63.486 - type: ndcg_at_100 value: 65.58 - type: ndcg_at_1000 value: 65.61 - type: ndcg_at_3 value: 54.494 - type: ndcg_at_5 value: 59.339 - type: precision_at_1 value: 38.193 - type: precision_at_10 value: 9.075 - type: precision_at_100 value: 0.9939999999999999 - type: precision_at_1000 value: 0.1 - type: precision_at_3 value: 22.096 - type: precision_at_5 value: 15.619 - type: recall_at_1 value: 38.193 - type: recall_at_10 value: 90.754 - type: recall_at_100 value: 99.431 - type: recall_at_1000 value: 99.644 - type: recall_at_3 value: 66.28699999999999 - type: recall_at_5 value: 78.094 - task: type: Clustering dataset: name: MTEB ArxivClusteringP2P type: mteb/arxiv-clustering-p2p config: default split: test revision: a122ad7f3f0291bf49cc6f4d32aa80929df69d5d metrics: - type: v_measure value: 47.508221208908964 - task: type: Clustering dataset: name: MTEB ArxivClusteringS2S type: mteb/arxiv-clustering-s2s config: default split: test revision: f910caf1a6075f7329cdf8c1a6135696f37dbd53 metrics: - type: v_measure value: 42.04668382560096 - task: type: Reranking dataset: name: MTEB AskUbuntuDupQuestions type: mteb/askubuntudupquestions-reranking config: default split: test revision: 2000358ca161889fa9c082cb41daa8dcfb161a54 metrics: - type: map value: 61.828759903716815 - type: mrr value: 74.37343358395991 - task: type: STS dataset: name: MTEB BIOSSES type: mteb/biosses-sts config: default split: test revision: d3fb88f8f02e40887cd149695127462bbcf29b4a metrics: - type: cos_sim_pearson value: 85.03673698773017 - type: cos_sim_spearman value: 83.6470866785058 - type: euclidean_pearson value: 82.64048673096565 - type: euclidean_spearman value: 83.63142367101115 - type: manhattan_pearson value: 82.71493099760228 - type: manhattan_spearman value: 83.60491704294326 - task: type: Classification dataset: name: 
MTEB Banking77Classification type: mteb/banking77 config: default split: test revision: 0fd18e25b25c072e09e0d92ab615fda904d66300 metrics: - type: accuracy value: 86.73376623376623 - type: f1 value: 86.70294049278262 - task: type: Clustering dataset: name: MTEB BiorxivClusteringP2P type: mteb/biorxiv-clustering-p2p config: default split: test revision: 65b79d1d13f80053f67aca9498d9402c2d9f1f40 metrics: - type: v_measure value: 40.31923804167062 - task: type: Clustering dataset: name: MTEB BiorxivClusteringS2S type: mteb/biorxiv-clustering-s2s config: default split: test revision: 258694dd0231531bc1fd9de6ceb52a0853c6d908 metrics: - type: v_measure value: 37.552547125348454 - task: type: Retrieval dataset: name: MTEB CQADupstackAndroidRetrieval type: mteb/cqadupstack-android config: default split: test revision: f46a197baaae43b4f621051089b82a364682dfeb metrics: - type: map_at_1 value: 30.567 - type: map_at_10 value: 41.269 - type: map_at_100 value: 42.689 - type: map_at_1000 value: 42.84 - type: map_at_3 value: 37.567 - type: map_at_5 value: 39.706 - type: mrr_at_1 value: 37.053000000000004 - type: mrr_at_10 value: 46.900999999999996 - type: mrr_at_100 value: 47.662 - type: mrr_at_1000 value: 47.713 - type: mrr_at_3 value: 43.801 - type: mrr_at_5 value: 45.689 - type: ndcg_at_1 value: 37.053000000000004 - type: ndcg_at_10 value: 47.73 - type: ndcg_at_100 value: 53.128 - type: ndcg_at_1000 value: 55.300000000000004 - type: ndcg_at_3 value: 42.046 - type: ndcg_at_5 value: 44.782 - type: precision_at_1 value: 37.053000000000004 - type: precision_at_10 value: 9.142 - type: precision_at_100 value: 1.485 - type: precision_at_1000 value: 0.197 - type: precision_at_3 value: 20.076 - type: precision_at_5 value: 14.535 - type: recall_at_1 value: 30.567 - type: recall_at_10 value: 60.602999999999994 - type: recall_at_100 value: 83.22800000000001 - type: recall_at_1000 value: 96.696 - type: recall_at_3 value: 44.336999999999996 - type: recall_at_5 value: 51.949 - task: type: Retrieval dataset: name: MTEB CQADupstackEnglishRetrieval type: mteb/cqadupstack-english config: default split: test revision: ad9991cb51e31e31e430383c75ffb2885547b5f0 metrics: - type: map_at_1 value: 28.538000000000004 - type: map_at_10 value: 38.757999999999996 - type: map_at_100 value: 40.129 - type: map_at_1000 value: 40.262 - type: map_at_3 value: 35.866 - type: map_at_5 value: 37.417 - type: mrr_at_1 value: 36.051 - type: mrr_at_10 value: 44.868 - type: mrr_at_100 value: 45.568999999999996 - type: mrr_at_1000 value: 45.615 - type: mrr_at_3 value: 42.558 - type: mrr_at_5 value: 43.883 - type: ndcg_at_1 value: 36.051 - type: ndcg_at_10 value: 44.584 - type: ndcg_at_100 value: 49.356 - type: ndcg_at_1000 value: 51.39 - type: ndcg_at_3 value: 40.389 - type: ndcg_at_5 value: 42.14 - type: precision_at_1 value: 36.051 - type: precision_at_10 value: 8.446 - type: precision_at_100 value: 1.411 - type: precision_at_1000 value: 0.19 - type: precision_at_3 value: 19.639 - type: precision_at_5 value: 13.796 - type: recall_at_1 value: 28.538000000000004 - type: recall_at_10 value: 54.99000000000001 - type: recall_at_100 value: 75.098 - type: recall_at_1000 value: 87.848 - type: recall_at_3 value: 42.236000000000004 - type: recall_at_5 value: 47.377 - task: type: Retrieval dataset: name: MTEB CQADupstackGamingRetrieval type: mteb/cqadupstack-gaming config: default split: test revision: 4885aa143210c98657558c04aaf3dc47cfb54340 metrics: - type: map_at_1 value: 37.188 - type: map_at_10 value: 50.861000000000004 - type: map_at_100 value: 51.917 - 
type: map_at_1000 value: 51.964999999999996 - type: map_at_3 value: 47.144000000000005 - type: map_at_5 value: 49.417 - type: mrr_at_1 value: 42.571 - type: mrr_at_10 value: 54.086999999999996 - type: mrr_at_100 value: 54.739000000000004 - type: mrr_at_1000 value: 54.762 - type: mrr_at_3 value: 51.285000000000004 - type: mrr_at_5 value: 53.0 - type: ndcg_at_1 value: 42.571 - type: ndcg_at_10 value: 57.282 - type: ndcg_at_100 value: 61.477000000000004 - type: ndcg_at_1000 value: 62.426 - type: ndcg_at_3 value: 51.0 - type: ndcg_at_5 value: 54.346000000000004 - type: precision_at_1 value: 42.571 - type: precision_at_10 value: 9.467 - type: precision_at_100 value: 1.2550000000000001 - type: precision_at_1000 value: 0.13799999999999998 - type: precision_at_3 value: 23.114 - type: precision_at_5 value: 16.250999999999998 - type: recall_at_1 value: 37.188 - type: recall_at_10 value: 73.068 - type: recall_at_100 value: 91.203 - type: recall_at_1000 value: 97.916 - type: recall_at_3 value: 56.552 - type: recall_at_5 value: 64.567 - task: type: Retrieval dataset: name: MTEB CQADupstackGisRetrieval type: mteb/cqadupstack-gis config: default split: test revision: 5003b3064772da1887988e05400cf3806fe491f2 metrics: - type: map_at_1 value: 25.041000000000004 - type: map_at_10 value: 33.86 - type: map_at_100 value: 34.988 - type: map_at_1000 value: 35.064 - type: map_at_3 value: 31.049 - type: map_at_5 value: 32.845 - type: mrr_at_1 value: 26.893 - type: mrr_at_10 value: 35.594 - type: mrr_at_100 value: 36.617 - type: mrr_at_1000 value: 36.671 - type: mrr_at_3 value: 33.051 - type: mrr_at_5 value: 34.61 - type: ndcg_at_1 value: 26.893 - type: ndcg_at_10 value: 38.674 - type: ndcg_at_100 value: 44.178 - type: ndcg_at_1000 value: 46.089999999999996 - type: ndcg_at_3 value: 33.485 - type: ndcg_at_5 value: 36.402 - type: precision_at_1 value: 26.893 - type: precision_at_10 value: 5.989 - type: precision_at_100 value: 0.918 - type: precision_at_1000 value: 0.11100000000000002 - type: precision_at_3 value: 14.2 - type: precision_at_5 value: 10.26 - type: recall_at_1 value: 25.041000000000004 - type: recall_at_10 value: 51.666000000000004 - type: recall_at_100 value: 76.896 - type: recall_at_1000 value: 91.243 - type: recall_at_3 value: 38.035999999999994 - type: recall_at_5 value: 44.999 - task: type: Retrieval dataset: name: MTEB CQADupstackMathematicaRetrieval type: mteb/cqadupstack-mathematica config: default split: test revision: 90fceea13679c63fe563ded68f3b6f06e50061de metrics: - type: map_at_1 value: 15.909999999999998 - type: map_at_10 value: 23.901 - type: map_at_100 value: 25.165 - type: map_at_1000 value: 25.291000000000004 - type: map_at_3 value: 21.356 - type: map_at_5 value: 22.816 - type: mrr_at_1 value: 20.025000000000002 - type: mrr_at_10 value: 28.382 - type: mrr_at_100 value: 29.465000000000003 - type: mrr_at_1000 value: 29.535 - type: mrr_at_3 value: 25.933 - type: mrr_at_5 value: 27.332 - type: ndcg_at_1 value: 20.025000000000002 - type: ndcg_at_10 value: 29.099000000000004 - type: ndcg_at_100 value: 35.127 - type: ndcg_at_1000 value: 38.096000000000004 - type: ndcg_at_3 value: 24.464 - type: ndcg_at_5 value: 26.709 - type: precision_at_1 value: 20.025000000000002 - type: precision_at_10 value: 5.398 - type: precision_at_100 value: 0.9690000000000001 - type: precision_at_1000 value: 0.13699999999999998 - type: precision_at_3 value: 11.774 - type: precision_at_5 value: 8.632 - type: recall_at_1 value: 15.909999999999998 - type: recall_at_10 value: 40.672000000000004 - type: recall_at_100 
value: 66.855 - type: recall_at_1000 value: 87.922 - type: recall_at_3 value: 28.069 - type: recall_at_5 value: 33.812 - task: type: Retrieval dataset: name: MTEB CQADupstackPhysicsRetrieval type: mteb/cqadupstack-physics config: default split: test revision: 79531abbd1fb92d06c6d6315a0cbbbf5bb247ea4 metrics: - type: map_at_1 value: 30.175 - type: map_at_10 value: 41.36 - type: map_at_100 value: 42.701 - type: map_at_1000 value: 42.817 - type: map_at_3 value: 37.931 - type: map_at_5 value: 39.943 - type: mrr_at_1 value: 35.611 - type: mrr_at_10 value: 46.346 - type: mrr_at_100 value: 47.160000000000004 - type: mrr_at_1000 value: 47.203 - type: mrr_at_3 value: 43.712 - type: mrr_at_5 value: 45.367000000000004 - type: ndcg_at_1 value: 35.611 - type: ndcg_at_10 value: 47.532000000000004 - type: ndcg_at_100 value: 53.003 - type: ndcg_at_1000 value: 55.007 - type: ndcg_at_3 value: 42.043 - type: ndcg_at_5 value: 44.86 - type: precision_at_1 value: 35.611 - type: precision_at_10 value: 8.624 - type: precision_at_100 value: 1.332 - type: precision_at_1000 value: 0.169 - type: precision_at_3 value: 20.083000000000002 - type: precision_at_5 value: 14.437 - type: recall_at_1 value: 30.175 - type: recall_at_10 value: 60.5 - type: recall_at_100 value: 83.399 - type: recall_at_1000 value: 96.255 - type: recall_at_3 value: 45.448 - type: recall_at_5 value: 52.432 - task: type: Retrieval dataset: name: MTEB CQADupstackProgrammersRetrieval type: mteb/cqadupstack-programmers config: default split: test revision: 6184bc1440d2dbc7612be22b50686b8826d22b32 metrics: - type: map_at_1 value: 22.467000000000002 - type: map_at_10 value: 33.812999999999995 - type: map_at_100 value: 35.248000000000005 - type: map_at_1000 value: 35.359 - type: map_at_3 value: 30.316 - type: map_at_5 value: 32.233000000000004 - type: mrr_at_1 value: 28.310999999999996 - type: mrr_at_10 value: 38.979 - type: mrr_at_100 value: 39.937 - type: mrr_at_1000 value: 39.989999999999995 - type: mrr_at_3 value: 36.244 - type: mrr_at_5 value: 37.871 - type: ndcg_at_1 value: 28.310999999999996 - type: ndcg_at_10 value: 40.282000000000004 - type: ndcg_at_100 value: 46.22 - type: ndcg_at_1000 value: 48.507 - type: ndcg_at_3 value: 34.596 - type: ndcg_at_5 value: 37.267 - type: precision_at_1 value: 28.310999999999996 - type: precision_at_10 value: 7.831 - type: precision_at_100 value: 1.257 - type: precision_at_1000 value: 0.164 - type: precision_at_3 value: 17.275 - type: precision_at_5 value: 12.556999999999999 - type: recall_at_1 value: 22.467000000000002 - type: recall_at_10 value: 54.14099999999999 - type: recall_at_100 value: 79.593 - type: recall_at_1000 value: 95.063 - type: recall_at_3 value: 38.539 - type: recall_at_5 value: 45.403 - task: type: Retrieval dataset: name: MTEB CQADupstackRetrieval type: mteb/cqadupstack config: default split: test revision: 4ffe81d471b1924886b33c7567bfb200e9eec5c4 metrics: - type: map_at_1 value: 24.18591666666667 - type: map_at_10 value: 33.84258333333333 - type: map_at_100 value: 35.11391666666666 - type: map_at_1000 value: 35.23258333333333 - type: map_at_3 value: 30.764249999999997 - type: map_at_5 value: 32.52333333333334 - type: mrr_at_1 value: 28.54733333333333 - type: mrr_at_10 value: 37.81725 - type: mrr_at_100 value: 38.716499999999996 - type: mrr_at_1000 value: 38.77458333333333 - type: mrr_at_3 value: 35.157833333333336 - type: mrr_at_5 value: 36.69816666666667 - type: ndcg_at_1 value: 28.54733333333333 - type: ndcg_at_10 value: 39.51508333333334 - type: ndcg_at_100 value: 44.95316666666666 - type: 
ndcg_at_1000 value: 47.257083333333334 - type: ndcg_at_3 value: 34.205833333333324 - type: ndcg_at_5 value: 36.78266666666667 - type: precision_at_1 value: 28.54733333333333 - type: precision_at_10 value: 7.082583333333334 - type: precision_at_100 value: 1.1590833333333332 - type: precision_at_1000 value: 0.15516666666666662 - type: precision_at_3 value: 15.908750000000001 - type: precision_at_5 value: 11.505416666666669 - type: recall_at_1 value: 24.18591666666667 - type: recall_at_10 value: 52.38758333333333 - type: recall_at_100 value: 76.13666666666667 - type: recall_at_1000 value: 91.99066666666667 - type: recall_at_3 value: 37.78333333333334 - type: recall_at_5 value: 44.30141666666666 - task: type: Retrieval dataset: name: MTEB CQADupstackStatsRetrieval type: mteb/cqadupstack-stats config: default split: test revision: 65ac3a16b8e91f9cee4c9828cc7c335575432a2a metrics: - type: map_at_1 value: 21.975 - type: map_at_10 value: 29.781000000000002 - type: map_at_100 value: 30.847 - type: map_at_1000 value: 30.94 - type: map_at_3 value: 27.167 - type: map_at_5 value: 28.633999999999997 - type: mrr_at_1 value: 24.387 - type: mrr_at_10 value: 32.476 - type: mrr_at_100 value: 33.337 - type: mrr_at_1000 value: 33.403 - type: mrr_at_3 value: 29.881999999999998 - type: mrr_at_5 value: 31.339 - type: ndcg_at_1 value: 24.387 - type: ndcg_at_10 value: 34.596 - type: ndcg_at_100 value: 39.635 - type: ndcg_at_1000 value: 42.079 - type: ndcg_at_3 value: 29.516 - type: ndcg_at_5 value: 31.959 - type: precision_at_1 value: 24.387 - type: precision_at_10 value: 5.6129999999999995 - type: precision_at_100 value: 0.8909999999999999 - type: precision_at_1000 value: 0.117 - type: precision_at_3 value: 12.73 - type: precision_at_5 value: 9.171999999999999 - type: recall_at_1 value: 21.975 - type: recall_at_10 value: 46.826 - type: recall_at_100 value: 69.554 - type: recall_at_1000 value: 87.749 - type: recall_at_3 value: 33.016 - type: recall_at_5 value: 38.97 - task: type: Retrieval dataset: name: MTEB CQADupstackTexRetrieval type: mteb/cqadupstack-tex config: default split: test revision: 46989137a86843e03a6195de44b09deda022eec7 metrics: - type: map_at_1 value: 15.614 - type: map_at_10 value: 22.927 - type: map_at_100 value: 24.185000000000002 - type: map_at_1000 value: 24.319 - type: map_at_3 value: 20.596 - type: map_at_5 value: 21.854000000000003 - type: mrr_at_1 value: 18.858 - type: mrr_at_10 value: 26.535999999999998 - type: mrr_at_100 value: 27.582 - type: mrr_at_1000 value: 27.665 - type: mrr_at_3 value: 24.295 - type: mrr_at_5 value: 25.532 - type: ndcg_at_1 value: 18.858 - type: ndcg_at_10 value: 27.583000000000002 - type: ndcg_at_100 value: 33.635 - type: ndcg_at_1000 value: 36.647 - type: ndcg_at_3 value: 23.348 - type: ndcg_at_5 value: 25.257 - type: precision_at_1 value: 18.858 - type: precision_at_10 value: 5.158 - type: precision_at_100 value: 0.964 - type: precision_at_1000 value: 0.13999999999999999 - type: precision_at_3 value: 11.092 - type: precision_at_5 value: 8.1 - type: recall_at_1 value: 15.614 - type: recall_at_10 value: 37.916 - type: recall_at_100 value: 65.205 - type: recall_at_1000 value: 86.453 - type: recall_at_3 value: 26.137 - type: recall_at_5 value: 31.087999999999997 - task: type: Retrieval dataset: name: MTEB CQADupstackUnixRetrieval type: mteb/cqadupstack-unix config: default split: test revision: 6c6430d3a6d36f8d2a829195bc5dc94d7e063e53 metrics: - type: map_at_1 value: 23.078000000000003 - type: map_at_10 value: 31.941999999999997 - type: map_at_100 value: 
33.196999999999996 - type: map_at_1000 value: 33.303 - type: map_at_3 value: 28.927000000000003 - type: map_at_5 value: 30.707 - type: mrr_at_1 value: 26.866 - type: mrr_at_10 value: 35.557 - type: mrr_at_100 value: 36.569 - type: mrr_at_1000 value: 36.632 - type: mrr_at_3 value: 32.897999999999996 - type: mrr_at_5 value: 34.437 - type: ndcg_at_1 value: 26.866 - type: ndcg_at_10 value: 37.372 - type: ndcg_at_100 value: 43.248 - type: ndcg_at_1000 value: 45.632 - type: ndcg_at_3 value: 31.852999999999998 - type: ndcg_at_5 value: 34.582 - type: precision_at_1 value: 26.866 - type: precision_at_10 value: 6.511 - type: precision_at_100 value: 1.078 - type: precision_at_1000 value: 0.13899999999999998 - type: precision_at_3 value: 14.582999999999998 - type: precision_at_5 value: 10.634 - type: recall_at_1 value: 23.078000000000003 - type: recall_at_10 value: 50.334 - type: recall_at_100 value: 75.787 - type: recall_at_1000 value: 92.485 - type: recall_at_3 value: 35.386 - type: recall_at_5 value: 42.225 - task: type: Retrieval dataset: name: MTEB CQADupstackWebmastersRetrieval type: mteb/cqadupstack-webmasters config: default split: test revision: 160c094312a0e1facb97e55eeddb698c0abe3571 metrics: - type: map_at_1 value: 22.203999999999997 - type: map_at_10 value: 31.276 - type: map_at_100 value: 32.844 - type: map_at_1000 value: 33.062999999999995 - type: map_at_3 value: 27.733999999999998 - type: map_at_5 value: 29.64 - type: mrr_at_1 value: 27.272999999999996 - type: mrr_at_10 value: 36.083 - type: mrr_at_100 value: 37.008 - type: mrr_at_1000 value: 37.076 - type: mrr_at_3 value: 33.004 - type: mrr_at_5 value: 34.664 - type: ndcg_at_1 value: 27.272999999999996 - type: ndcg_at_10 value: 37.763000000000005 - type: ndcg_at_100 value: 43.566 - type: ndcg_at_1000 value: 46.356 - type: ndcg_at_3 value: 31.673000000000002 - type: ndcg_at_5 value: 34.501 - type: precision_at_1 value: 27.272999999999996 - type: precision_at_10 value: 7.470000000000001 - type: precision_at_100 value: 1.502 - type: precision_at_1000 value: 0.24 - type: precision_at_3 value: 14.756 - type: precision_at_5 value: 11.225 - type: recall_at_1 value: 22.203999999999997 - type: recall_at_10 value: 51.437999999999995 - type: recall_at_100 value: 76.845 - type: recall_at_1000 value: 94.38600000000001 - type: recall_at_3 value: 34.258 - type: recall_at_5 value: 41.512 - task: type: Retrieval dataset: name: MTEB CQADupstackWordpressRetrieval type: mteb/cqadupstack-wordpress config: default split: test revision: 4ffe81d471b1924886b33c7567bfb200e9eec5c4 metrics: - type: map_at_1 value: 17.474 - type: map_at_10 value: 26.362999999999996 - type: map_at_100 value: 27.456999999999997 - type: map_at_1000 value: 27.567999999999998 - type: map_at_3 value: 23.518 - type: map_at_5 value: 25.068 - type: mrr_at_1 value: 18.669 - type: mrr_at_10 value: 27.998 - type: mrr_at_100 value: 28.953 - type: mrr_at_1000 value: 29.03 - type: mrr_at_3 value: 25.230999999999998 - type: mrr_at_5 value: 26.654 - type: ndcg_at_1 value: 18.669 - type: ndcg_at_10 value: 31.684 - type: ndcg_at_100 value: 36.864999999999995 - type: ndcg_at_1000 value: 39.555 - type: ndcg_at_3 value: 26.057000000000002 - type: ndcg_at_5 value: 28.587 - type: precision_at_1 value: 18.669 - type: precision_at_10 value: 5.3420000000000005 - type: precision_at_100 value: 0.847 - type: precision_at_1000 value: 0.12 - type: precision_at_3 value: 11.583 - type: precision_at_5 value: 8.466 - type: recall_at_1 value: 17.474 - type: recall_at_10 value: 46.497 - type: recall_at_100 value: 
69.977 - type: recall_at_1000 value: 89.872 - type: recall_at_3 value: 31.385999999999996 - type: recall_at_5 value: 37.283 - task: type: Retrieval dataset: name: MTEB ClimateFEVER type: mteb/climate-fever config: default split: test revision: 47f2ac6acb640fc46020b02a5b59fdda04d39380 metrics: - type: map_at_1 value: 17.173 - type: map_at_10 value: 30.407 - type: map_at_100 value: 32.528 - type: map_at_1000 value: 32.698 - type: map_at_3 value: 25.523 - type: map_at_5 value: 28.038 - type: mrr_at_1 value: 38.958 - type: mrr_at_10 value: 51.515 - type: mrr_at_100 value: 52.214000000000006 - type: mrr_at_1000 value: 52.237 - type: mrr_at_3 value: 48.502 - type: mrr_at_5 value: 50.251000000000005 - type: ndcg_at_1 value: 38.958 - type: ndcg_at_10 value: 40.355000000000004 - type: ndcg_at_100 value: 47.68 - type: ndcg_at_1000 value: 50.370000000000005 - type: ndcg_at_3 value: 33.946 - type: ndcg_at_5 value: 36.057 - type: precision_at_1 value: 38.958 - type: precision_at_10 value: 12.508 - type: precision_at_100 value: 2.054 - type: precision_at_1000 value: 0.256 - type: precision_at_3 value: 25.581 - type: precision_at_5 value: 19.256999999999998 - type: recall_at_1 value: 17.173 - type: recall_at_10 value: 46.967 - type: recall_at_100 value: 71.47200000000001 - type: recall_at_1000 value: 86.238 - type: recall_at_3 value: 30.961 - type: recall_at_5 value: 37.539 - task: type: Retrieval dataset: name: MTEB DBPedia type: mteb/dbpedia config: default split: test revision: c0f706b76e590d620bd6618b3ca8efdd34e2d659 metrics: - type: map_at_1 value: 8.999 - type: map_at_10 value: 18.989 - type: map_at_100 value: 26.133 - type: map_at_1000 value: 27.666 - type: map_at_3 value: 13.918 - type: map_at_5 value: 16.473 - type: mrr_at_1 value: 66.25 - type: mrr_at_10 value: 74.161 - type: mrr_at_100 value: 74.516 - type: mrr_at_1000 value: 74.524 - type: mrr_at_3 value: 72.875 - type: mrr_at_5 value: 73.613 - type: ndcg_at_1 value: 54.37499999999999 - type: ndcg_at_10 value: 39.902 - type: ndcg_at_100 value: 44.212 - type: ndcg_at_1000 value: 51.62 - type: ndcg_at_3 value: 45.193 - type: ndcg_at_5 value: 42.541000000000004 - type: precision_at_1 value: 66.25 - type: precision_at_10 value: 30.425 - type: precision_at_100 value: 9.754999999999999 - type: precision_at_1000 value: 2.043 - type: precision_at_3 value: 48.25 - type: precision_at_5 value: 40.65 - type: recall_at_1 value: 8.999 - type: recall_at_10 value: 24.133 - type: recall_at_100 value: 49.138999999999996 - type: recall_at_1000 value: 72.639 - type: recall_at_3 value: 15.287999999999998 - type: recall_at_5 value: 19.415 - task: type: Classification dataset: name: MTEB EmotionClassification type: mteb/emotion config: default split: test revision: 4f58c6b202a23cf9a4da393831edf4f9183cad37 metrics: - type: accuracy value: 46.38999999999999 - type: f1 value: 41.444205512055234 - task: type: Retrieval dataset: name: MTEB FEVER type: mteb/fever config: default split: test revision: bea83ef9e8fb933d90a2f1d5515737465d613e12 metrics: - type: map_at_1 value: 87.35000000000001 - type: map_at_10 value: 92.837 - type: map_at_100 value: 92.996 - type: map_at_1000 value: 93.006 - type: map_at_3 value: 92.187 - type: map_at_5 value: 92.595 - type: mrr_at_1 value: 93.864 - type: mrr_at_10 value: 96.723 - type: mrr_at_100 value: 96.72500000000001 - type: mrr_at_1000 value: 96.72500000000001 - type: mrr_at_3 value: 96.64 - type: mrr_at_5 value: 96.71499999999999 - type: ndcg_at_1 value: 93.864 - type: ndcg_at_10 value: 94.813 - type: ndcg_at_100 value: 95.243 - 
type: ndcg_at_1000 value: 95.38600000000001 - type: ndcg_at_3 value: 94.196 - type: ndcg_at_5 value: 94.521 - type: precision_at_1 value: 93.864 - type: precision_at_10 value: 10.951 - type: precision_at_100 value: 1.1400000000000001 - type: precision_at_1000 value: 0.117 - type: precision_at_3 value: 35.114000000000004 - type: precision_at_5 value: 21.476 - type: recall_at_1 value: 87.35000000000001 - type: recall_at_10 value: 96.941 - type: recall_at_100 value: 98.397 - type: recall_at_1000 value: 99.21600000000001 - type: recall_at_3 value: 95.149 - type: recall_at_5 value: 96.131 - task: type: Retrieval dataset: name: MTEB FiQA2018 type: mteb/fiqa config: default split: test revision: 27a168819829fe9bcd655c2df245fb19452e8e06 metrics: - type: map_at_1 value: 24.476 - type: map_at_10 value: 40.11 - type: map_at_100 value: 42.229 - type: map_at_1000 value: 42.378 - type: map_at_3 value: 34.512 - type: map_at_5 value: 38.037 - type: mrr_at_1 value: 47.839999999999996 - type: mrr_at_10 value: 57.053 - type: mrr_at_100 value: 57.772 - type: mrr_at_1000 value: 57.799 - type: mrr_at_3 value: 54.552 - type: mrr_at_5 value: 56.011 - type: ndcg_at_1 value: 47.839999999999996 - type: ndcg_at_10 value: 48.650999999999996 - type: ndcg_at_100 value: 55.681000000000004 - type: ndcg_at_1000 value: 57.979 - type: ndcg_at_3 value: 43.923 - type: ndcg_at_5 value: 46.037 - type: precision_at_1 value: 47.839999999999996 - type: precision_at_10 value: 13.395000000000001 - type: precision_at_100 value: 2.0660000000000003 - type: precision_at_1000 value: 0.248 - type: precision_at_3 value: 29.064 - type: precision_at_5 value: 22.006 - type: recall_at_1 value: 24.476 - type: recall_at_10 value: 56.216 - type: recall_at_100 value: 81.798 - type: recall_at_1000 value: 95.48299999999999 - type: recall_at_3 value: 39.357 - type: recall_at_5 value: 47.802 - task: type: Retrieval dataset: name: MTEB HotpotQA type: mteb/hotpotqa config: default split: test revision: ab518f4d6fcca38d87c25209f94beba119d02014 metrics: - type: map_at_1 value: 42.728 - type: map_at_10 value: 57.737 - type: map_at_100 value: 58.531 - type: map_at_1000 value: 58.594 - type: map_at_3 value: 54.869 - type: map_at_5 value: 56.55 - type: mrr_at_1 value: 85.456 - type: mrr_at_10 value: 90.062 - type: mrr_at_100 value: 90.159 - type: mrr_at_1000 value: 90.16 - type: mrr_at_3 value: 89.37899999999999 - type: mrr_at_5 value: 89.81 - type: ndcg_at_1 value: 85.456 - type: ndcg_at_10 value: 67.755 - type: ndcg_at_100 value: 70.341 - type: ndcg_at_1000 value: 71.538 - type: ndcg_at_3 value: 63.735 - type: ndcg_at_5 value: 65.823 - type: precision_at_1 value: 85.456 - type: precision_at_10 value: 13.450000000000001 - type: precision_at_100 value: 1.545 - type: precision_at_1000 value: 0.16999999999999998 - type: precision_at_3 value: 38.861000000000004 - type: precision_at_5 value: 24.964 - type: recall_at_1 value: 42.728 - type: recall_at_10 value: 67.252 - type: recall_at_100 value: 77.265 - type: recall_at_1000 value: 85.246 - type: recall_at_3 value: 58.292 - type: recall_at_5 value: 62.41100000000001 - task: type: Classification dataset: name: MTEB ImdbClassification type: mteb/imdb config: default split: test revision: 3d86128a09e091d6018b6d26cad27f2739fc2db7 metrics: - type: accuracy value: 87.4836 - type: ap value: 82.29552224030336 - type: f1 value: 87.42791432227448 - task: type: Retrieval dataset: name: MTEB MSMARCO type: mteb/msmarco config: default split: dev revision: c5a29a104738b98a9e76336939199e264163d4a0 metrics: - type: map_at_1 value: 
23.015 - type: map_at_10 value: 35.621 - type: map_at_100 value: 36.809 - type: map_at_1000 value: 36.853 - type: map_at_3 value: 31.832 - type: map_at_5 value: 34.006 - type: mrr_at_1 value: 23.738999999999997 - type: mrr_at_10 value: 36.309999999999995 - type: mrr_at_100 value: 37.422 - type: mrr_at_1000 value: 37.461 - type: mrr_at_3 value: 32.592999999999996 - type: mrr_at_5 value: 34.736 - type: ndcg_at_1 value: 23.724999999999998 - type: ndcg_at_10 value: 42.617 - type: ndcg_at_100 value: 48.217999999999996 - type: ndcg_at_1000 value: 49.309 - type: ndcg_at_3 value: 34.905 - type: ndcg_at_5 value: 38.769 - type: precision_at_1 value: 23.724999999999998 - type: precision_at_10 value: 6.689 - type: precision_at_100 value: 0.9480000000000001 - type: precision_at_1000 value: 0.104 - type: precision_at_3 value: 14.89 - type: precision_at_5 value: 10.897 - type: recall_at_1 value: 23.015 - type: recall_at_10 value: 64.041 - type: recall_at_100 value: 89.724 - type: recall_at_1000 value: 98.00999999999999 - type: recall_at_3 value: 43.064 - type: recall_at_5 value: 52.31099999999999 - task: type: Classification dataset: name: MTEB MTOPDomainClassification (en) type: mteb/mtop_domain config: en split: test revision: d80d48c1eb48d3562165c59d59d0034df9fff0bf metrics: - type: accuracy value: 96.49794801641588 - type: f1 value: 96.28931114498003 - task: type: Classification dataset: name: MTEB MTOPIntentClassification (en) type: mteb/mtop_intent config: en split: test revision: ae001d0e6b1228650b7bd1c2c65fb50ad11a8aba metrics: - type: accuracy value: 82.81121751025992 - type: f1 value: 63.18740125901853 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (en) type: mteb/amazon_massive_intent config: en split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 77.66644250168123 - type: f1 value: 74.93211186867839 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (en) type: mteb/amazon_massive_scenario config: en split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 81.77202420981843 - type: f1 value: 81.63681969283554 - task: type: Clustering dataset: name: MTEB MedrxivClusteringP2P type: mteb/medrxiv-clustering-p2p config: default split: test revision: e7a26af6f3ae46b30dde8737f02c07b1505bcc73 metrics: - type: v_measure value: 34.596687684870645 - task: type: Clustering dataset: name: MTEB MedrxivClusteringS2S type: mteb/medrxiv-clustering-s2s config: default split: test revision: 35191c8c0dca72d8ff3efcd72aa802307d469663 metrics: - type: v_measure value: 32.26965660101405 - task: type: Reranking dataset: name: MTEB MindSmallReranking type: mteb/mind_small config: default split: test revision: 3bdac13927fdc888b903db93b2ffdbd90b295a69 metrics: - type: map value: 31.33619694846802 - type: mrr value: 32.53719657720334 - task: type: Retrieval dataset: name: MTEB NFCorpus type: mteb/nfcorpus config: default split: test revision: ec0fa4fe99da2ff19ca1214b7966684033a58814 metrics: - type: map_at_1 value: 6.0729999999999995 - type: map_at_10 value: 13.245999999999999 - type: map_at_100 value: 16.747999999999998 - type: map_at_1000 value: 18.163 - type: map_at_3 value: 10.064 - type: map_at_5 value: 11.513 - type: mrr_at_1 value: 49.536 - type: mrr_at_10 value: 58.092 - type: mrr_at_100 value: 58.752 - type: mrr_at_1000 value: 58.78 - type: mrr_at_3 value: 56.398 - type: mrr_at_5 value: 57.389 - type: ndcg_at_1 value: 47.059 - type: ndcg_at_10 value: 35.881 - type: 
ndcg_at_100 value: 32.751999999999995 - type: ndcg_at_1000 value: 41.498000000000005 - type: ndcg_at_3 value: 42.518 - type: ndcg_at_5 value: 39.550999999999995 - type: precision_at_1 value: 49.536 - type: precision_at_10 value: 26.316 - type: precision_at_100 value: 8.084 - type: precision_at_1000 value: 2.081 - type: precision_at_3 value: 39.938 - type: precision_at_5 value: 34.056 - type: recall_at_1 value: 6.0729999999999995 - type: recall_at_10 value: 16.593 - type: recall_at_100 value: 32.883 - type: recall_at_1000 value: 64.654 - type: recall_at_3 value: 11.174000000000001 - type: recall_at_5 value: 13.528 - task: type: Retrieval dataset: name: MTEB NQ type: mteb/nq config: default split: test revision: b774495ed302d8c44a3a7ea25c90dbce03968f31 metrics: - type: map_at_1 value: 30.043 - type: map_at_10 value: 45.318999999999996 - type: map_at_100 value: 46.381 - type: map_at_1000 value: 46.412 - type: map_at_3 value: 40.941 - type: map_at_5 value: 43.662 - type: mrr_at_1 value: 33.98 - type: mrr_at_10 value: 47.870000000000005 - type: mrr_at_100 value: 48.681999999999995 - type: mrr_at_1000 value: 48.703 - type: mrr_at_3 value: 44.341 - type: mrr_at_5 value: 46.547 - type: ndcg_at_1 value: 33.98 - type: ndcg_at_10 value: 52.957 - type: ndcg_at_100 value: 57.434 - type: ndcg_at_1000 value: 58.103 - type: ndcg_at_3 value: 44.896 - type: ndcg_at_5 value: 49.353 - type: precision_at_1 value: 33.98 - type: precision_at_10 value: 8.786 - type: precision_at_100 value: 1.1280000000000001 - type: precision_at_1000 value: 0.11900000000000001 - type: precision_at_3 value: 20.577 - type: precision_at_5 value: 14.942 - type: recall_at_1 value: 30.043 - type: recall_at_10 value: 73.593 - type: recall_at_100 value: 93.026 - type: recall_at_1000 value: 97.943 - type: recall_at_3 value: 52.955 - type: recall_at_5 value: 63.132 - task: type: Retrieval dataset: name: MTEB QuoraRetrieval type: mteb/quora config: default split: test revision: None metrics: - type: map_at_1 value: 70.808 - type: map_at_10 value: 84.675 - type: map_at_100 value: 85.322 - type: map_at_1000 value: 85.33800000000001 - type: map_at_3 value: 81.68900000000001 - type: map_at_5 value: 83.543 - type: mrr_at_1 value: 81.5 - type: mrr_at_10 value: 87.59700000000001 - type: mrr_at_100 value: 87.705 - type: mrr_at_1000 value: 87.70599999999999 - type: mrr_at_3 value: 86.607 - type: mrr_at_5 value: 87.289 - type: ndcg_at_1 value: 81.51 - type: ndcg_at_10 value: 88.41799999999999 - type: ndcg_at_100 value: 89.644 - type: ndcg_at_1000 value: 89.725 - type: ndcg_at_3 value: 85.49900000000001 - type: ndcg_at_5 value: 87.078 - type: precision_at_1 value: 81.51 - type: precision_at_10 value: 13.438 - type: precision_at_100 value: 1.532 - type: precision_at_1000 value: 0.157 - type: precision_at_3 value: 37.363 - type: precision_at_5 value: 24.57 - type: recall_at_1 value: 70.808 - type: recall_at_10 value: 95.575 - type: recall_at_100 value: 99.667 - type: recall_at_1000 value: 99.98899999999999 - type: recall_at_3 value: 87.223 - type: recall_at_5 value: 91.682 - task: type: Clustering dataset: name: MTEB RedditClustering type: mteb/reddit-clustering config: default split: test revision: 24640382cdbf8abc73003fb0fa6d111a705499eb metrics: - type: v_measure value: 58.614831329137715 - task: type: Clustering dataset: name: MTEB RedditClusteringP2P type: mteb/reddit-clustering-p2p config: default split: test revision: 282350215ef01743dc01b456c7f5241fa8937f16 metrics: - type: v_measure value: 66.86580408560826 - task: type: Retrieval dataset: 
name: MTEB SCIDOCS type: mteb/scidocs config: default split: test revision: None metrics: - type: map_at_1 value: 5.093 - type: map_at_10 value: 13.014000000000001 - type: map_at_100 value: 15.412999999999998 - type: map_at_1000 value: 15.756999999999998 - type: map_at_3 value: 9.216000000000001 - type: map_at_5 value: 11.036999999999999 - type: mrr_at_1 value: 25.1 - type: mrr_at_10 value: 37.133 - type: mrr_at_100 value: 38.165 - type: mrr_at_1000 value: 38.198 - type: mrr_at_3 value: 33.217 - type: mrr_at_5 value: 35.732 - type: ndcg_at_1 value: 25.1 - type: ndcg_at_10 value: 21.918000000000003 - type: ndcg_at_100 value: 30.983 - type: ndcg_at_1000 value: 36.629 - type: ndcg_at_3 value: 20.544999999999998 - type: ndcg_at_5 value: 18.192 - type: precision_at_1 value: 25.1 - type: precision_at_10 value: 11.44 - type: precision_at_100 value: 2.459 - type: precision_at_1000 value: 0.381 - type: precision_at_3 value: 19.267 - type: precision_at_5 value: 16.16 - type: recall_at_1 value: 5.093 - type: recall_at_10 value: 23.215 - type: recall_at_100 value: 49.902 - type: recall_at_1000 value: 77.403 - type: recall_at_3 value: 11.733 - type: recall_at_5 value: 16.372999999999998 - task: type: STS dataset: name: MTEB SICK-R type: mteb/sickr-sts config: default split: test revision: a6ea5a8cab320b040a23452cc28066d9beae2cee metrics: - type: cos_sim_pearson value: 82.9365442977452 - type: cos_sim_spearman value: 79.36960687383745 - type: euclidean_pearson value: 79.6045204840714 - type: euclidean_spearman value: 79.26382712751337 - type: manhattan_pearson value: 79.4805084789529 - type: manhattan_spearman value: 79.21847863209523 - task: type: STS dataset: name: MTEB STS12 type: mteb/sts12-sts config: default split: test revision: a0d554a64d88156834ff5ae9920b964011b16384 metrics: - type: cos_sim_pearson value: 83.27906192961453 - type: cos_sim_spearman value: 74.38364712099211 - type: euclidean_pearson value: 78.54358927241223 - type: euclidean_spearman value: 74.22185560806376 - type: manhattan_pearson value: 78.50904327377751 - type: manhattan_spearman value: 74.2627500781748 - task: type: STS dataset: name: MTEB STS13 type: mteb/sts13-sts config: default split: test revision: 7e90230a92c190f1bf69ae9002b8cea547a64cca metrics: - type: cos_sim_pearson value: 84.66863742649639 - type: cos_sim_spearman value: 84.70630905216271 - type: euclidean_pearson value: 84.64498334705334 - type: euclidean_spearman value: 84.87204770690148 - type: manhattan_pearson value: 84.65774227976077 - type: manhattan_spearman value: 84.91251851797985 - task: type: STS dataset: name: MTEB STS14 type: mteb/sts14-sts config: default split: test revision: 6031580fec1f6af667f0bd2da0a551cf4f0b2375 metrics: - type: cos_sim_pearson value: 83.1577763924467 - type: cos_sim_spearman value: 80.10314039230198 - type: euclidean_pearson value: 81.51346991046043 - type: euclidean_spearman value: 80.08678485109435 - type: manhattan_pearson value: 81.57058914661894 - type: manhattan_spearman value: 80.1516230725106 - task: type: STS dataset: name: MTEB STS15 type: mteb/sts15-sts config: default split: test revision: ae752c7c21bf194d8b67fd573edf7ae58183cbe3 metrics: - type: cos_sim_pearson value: 86.40310839662533 - type: cos_sim_spearman value: 87.16293477217867 - type: euclidean_pearson value: 86.50688711184775 - type: euclidean_spearman value: 87.08651444923031 - type: manhattan_pearson value: 86.54674677557857 - type: manhattan_spearman value: 87.15079017870971 - task: type: STS dataset: name: MTEB STS16 type: mteb/sts16-sts config: 
default split: test revision: 4d8694f8f0e0100860b497b999b3dbed754a0513 metrics: - type: cos_sim_pearson value: 84.32886275207817 - type: cos_sim_spearman value: 85.0190460590732 - type: euclidean_pearson value: 84.42553652784679 - type: euclidean_spearman value: 85.20027364279328 - type: manhattan_pearson value: 84.42926246281078 - type: manhattan_spearman value: 85.20187419804306 - task: type: STS dataset: name: MTEB STS17 (en-en) type: mteb/sts17-crosslingual-sts config: en-en split: test revision: af5e6fb845001ecf41f4c1e033ce921939a2a68d metrics: - type: cos_sim_pearson value: 90.76732216967812 - type: cos_sim_spearman value: 90.63701653633909 - type: euclidean_pearson value: 90.26678186114682 - type: euclidean_spearman value: 90.67288073455427 - type: manhattan_pearson value: 90.20772020584582 - type: manhattan_spearman value: 90.60764863983702 - task: type: STS dataset: name: MTEB STS22 (en) type: mteb/sts22-crosslingual-sts config: en split: test revision: eea2b4fe26a775864c896887d910b76a8098ad3f metrics: - type: cos_sim_pearson value: 69.09280387698125 - type: cos_sim_spearman value: 68.62743151172162 - type: euclidean_pearson value: 69.89386398104689 - type: euclidean_spearman value: 68.71191066733556 - type: manhattan_pearson value: 69.92516500604872 - type: manhattan_spearman value: 68.80452846992576 - task: type: STS dataset: name: MTEB STSBenchmark type: mteb/stsbenchmark-sts config: default split: test revision: b0fddb56ed78048fa8b90373c8a3cfc37b684831 metrics: - type: cos_sim_pearson value: 86.13178592019887 - type: cos_sim_spearman value: 86.03947178806887 - type: euclidean_pearson value: 85.87029414285313 - type: euclidean_spearman value: 86.04960843306998 - type: manhattan_pearson value: 85.92946858580146 - type: manhattan_spearman value: 86.12575341860442 - task: type: Reranking dataset: name: MTEB SciDocsRR type: mteb/scidocs-reranking config: default split: test revision: d3c5e1fc0b855ab6097bf1cda04dd73947d7caab metrics: - type: map value: 85.16657063002837 - type: mrr value: 95.73671063867141 - task: type: Retrieval dataset: name: MTEB SciFact type: mteb/scifact config: default split: test revision: 0228b52cf27578f30900b9e5271d331663a030d7 metrics: - type: map_at_1 value: 63.510999999999996 - type: map_at_10 value: 72.76899999999999 - type: map_at_100 value: 73.303 - type: map_at_1000 value: 73.32499999999999 - type: map_at_3 value: 70.514 - type: map_at_5 value: 71.929 - type: mrr_at_1 value: 66.333 - type: mrr_at_10 value: 73.75 - type: mrr_at_100 value: 74.119 - type: mrr_at_1000 value: 74.138 - type: mrr_at_3 value: 72.222 - type: mrr_at_5 value: 73.122 - type: ndcg_at_1 value: 66.333 - type: ndcg_at_10 value: 76.774 - type: ndcg_at_100 value: 78.78500000000001 - type: ndcg_at_1000 value: 79.254 - type: ndcg_at_3 value: 73.088 - type: ndcg_at_5 value: 75.002 - type: precision_at_1 value: 66.333 - type: precision_at_10 value: 9.833 - type: precision_at_100 value: 1.093 - type: precision_at_1000 value: 0.11299999999999999 - type: precision_at_3 value: 28.222 - type: precision_at_5 value: 18.333 - type: recall_at_1 value: 63.510999999999996 - type: recall_at_10 value: 87.98899999999999 - type: recall_at_100 value: 96.5 - type: recall_at_1000 value: 100.0 - type: recall_at_3 value: 77.86699999999999 - type: recall_at_5 value: 82.73899999999999 - task: type: PairClassification dataset: name: MTEB SprintDuplicateQuestions type: mteb/sprintduplicatequestions-pairclassification config: default split: test revision: d66bd1f72af766a5cc4b0ca5e00c162f89e8cc46 metrics: - type: 
cos_sim_accuracy value: 99.78514851485149 - type: cos_sim_ap value: 94.94214383862038 - type: cos_sim_f1 value: 89.02255639097744 - type: cos_sim_precision value: 89.2462311557789 - type: cos_sim_recall value: 88.8 - type: dot_accuracy value: 99.78217821782178 - type: dot_ap value: 94.69965247836805 - type: dot_f1 value: 88.78695208970439 - type: dot_precision value: 90.54054054054053 - type: dot_recall value: 87.1 - type: euclidean_accuracy value: 99.78118811881188 - type: euclidean_ap value: 94.9865187695411 - type: euclidean_f1 value: 88.99950223992036 - type: euclidean_precision value: 88.60257680872151 - type: euclidean_recall value: 89.4 - type: manhattan_accuracy value: 99.78811881188119 - type: manhattan_ap value: 95.0021236766459 - type: manhattan_f1 value: 89.12071535022356 - type: manhattan_precision value: 88.54886475814413 - type: manhattan_recall value: 89.7 - type: max_accuracy value: 99.78811881188119 - type: max_ap value: 95.0021236766459 - type: max_f1 value: 89.12071535022356 - task: type: Clustering dataset: name: MTEB StackExchangeClustering type: mteb/stackexchange-clustering config: default split: test revision: 6cbc1f7b2bc0622f2e39d2c77fa502909748c259 metrics: - type: v_measure value: 68.93190546593995 - task: type: Clustering dataset: name: MTEB StackExchangeClusteringP2P type: mteb/stackexchange-clustering-p2p config: default split: test revision: 815ca46b2622cec33ccafc3735d572c266efdb44 metrics: - type: v_measure value: 37.602808534760655 - task: type: Reranking dataset: name: MTEB StackOverflowDupQuestions type: mteb/stackoverflowdupquestions-reranking config: default split: test revision: e185fbe320c72810689fc5848eb6114e1ef5ec69 metrics: - type: map value: 52.29214480978073 - type: mrr value: 53.123169722434426 - task: type: Summarization dataset: name: MTEB SummEval type: mteb/summeval config: default split: test revision: cda12ad7615edc362dbf25a00fdd61d3b1eaf93c metrics: - type: cos_sim_pearson value: 30.967800769650022 - type: cos_sim_spearman value: 31.168490040206926 - type: dot_pearson value: 30.888603021128553 - type: dot_spearman value: 31.028241262520385 - task: type: Retrieval dataset: name: MTEB TRECCOVID type: mteb/trec-covid config: default split: test revision: None metrics: - type: map_at_1 value: 0.22300000000000003 - type: map_at_10 value: 1.781 - type: map_at_100 value: 9.905999999999999 - type: map_at_1000 value: 23.455000000000002 - type: map_at_3 value: 0.569 - type: map_at_5 value: 0.918 - type: mrr_at_1 value: 84.0 - type: mrr_at_10 value: 91.067 - type: mrr_at_100 value: 91.067 - type: mrr_at_1000 value: 91.067 - type: mrr_at_3 value: 90.667 - type: mrr_at_5 value: 91.067 - type: ndcg_at_1 value: 78.0 - type: ndcg_at_10 value: 73.13499999999999 - type: ndcg_at_100 value: 55.32 - type: ndcg_at_1000 value: 49.532 - type: ndcg_at_3 value: 73.715 - type: ndcg_at_5 value: 72.74199999999999 - type: precision_at_1 value: 84.0 - type: precision_at_10 value: 78.8 - type: precision_at_100 value: 56.32 - type: precision_at_1000 value: 21.504 - type: precision_at_3 value: 77.333 - type: precision_at_5 value: 78.0 - type: recall_at_1 value: 0.22300000000000003 - type: recall_at_10 value: 2.049 - type: recall_at_100 value: 13.553 - type: recall_at_1000 value: 46.367999999999995 - type: recall_at_3 value: 0.604 - type: recall_at_5 value: 1.015 - task: type: Retrieval dataset: name: MTEB Touche2020 type: mteb/touche2020 config: default split: test revision: a34f9a33db75fa0cbb21bb5cfc3dae8dc8bec93f metrics: - type: map_at_1 value: 3.0380000000000003 - 
type: map_at_10 value: 10.188 - type: map_at_100 value: 16.395 - type: map_at_1000 value: 18.024 - type: map_at_3 value: 6.236 - type: map_at_5 value: 7.276000000000001 - type: mrr_at_1 value: 34.694 - type: mrr_at_10 value: 46.292 - type: mrr_at_100 value: 47.446 - type: mrr_at_1000 value: 47.446 - type: mrr_at_3 value: 41.156 - type: mrr_at_5 value: 44.32 - type: ndcg_at_1 value: 32.653 - type: ndcg_at_10 value: 25.219 - type: ndcg_at_100 value: 37.802 - type: ndcg_at_1000 value: 49.274 - type: ndcg_at_3 value: 28.605999999999998 - type: ndcg_at_5 value: 26.21 - type: precision_at_1 value: 34.694 - type: precision_at_10 value: 21.837 - type: precision_at_100 value: 7.776 - type: precision_at_1000 value: 1.522 - type: precision_at_3 value: 28.571 - type: precision_at_5 value: 25.306 - type: recall_at_1 value: 3.0380000000000003 - type: recall_at_10 value: 16.298000000000002 - type: recall_at_100 value: 48.712 - type: recall_at_1000 value: 83.16799999999999 - type: recall_at_3 value: 7.265000000000001 - type: recall_at_5 value: 9.551 - task: type: Classification dataset: name: MTEB ToxicConversationsClassification type: mteb/toxic_conversations_50k config: default split: test revision: d7c0de2777da35d6aae2200a62c6e0e5af397c4c metrics: - type: accuracy value: 83.978 - type: ap value: 24.751887949330015 - type: f1 value: 66.8685134049279 - task: type: Classification dataset: name: MTEB TweetSentimentExtractionClassification type: mteb/tweet_sentiment_extraction config: default split: test revision: d604517c81ca91fe16a244d1248fc021f9ecee7a metrics: - type: accuracy value: 61.573288058856825 - type: f1 value: 61.973261751726604 - task: type: Clustering dataset: name: MTEB TwentyNewsgroupsClustering type: mteb/twentynewsgroups-clustering config: default split: test revision: 6125ec4e24fa026cec8a478383ee943acfbd5449 metrics: - type: v_measure value: 48.75483298792469 - task: type: PairClassification dataset: name: MTEB TwitterSemEval2015 type: mteb/twittersemeval2015-pairclassification config: default split: test revision: 70970daeab8776df92f5ea462b6173c0b46fd2d1 metrics: - type: cos_sim_accuracy value: 86.36824223639506 - type: cos_sim_ap value: 75.53126388573047 - type: cos_sim_f1 value: 67.9912831688245 - type: cos_sim_precision value: 66.11817501869858 - type: cos_sim_recall value: 69.9736147757256 - type: dot_accuracy value: 86.39804494248078 - type: dot_ap value: 75.27598891718046 - type: dot_f1 value: 67.91146284159763 - type: dot_precision value: 63.90505003490807 - type: dot_recall value: 72.45382585751979 - type: euclidean_accuracy value: 86.36228169517793 - type: euclidean_ap value: 75.51438087434647 - type: euclidean_f1 value: 68.02370523061066 - type: euclidean_precision value: 66.46525679758308 - type: euclidean_recall value: 69.65699208443272 - type: manhattan_accuracy value: 86.46361089586935 - type: manhattan_ap value: 75.50800785730111 - type: manhattan_f1 value: 67.9220437187253 - type: manhattan_precision value: 67.79705573080967 - type: manhattan_recall value: 68.04749340369392 - type: max_accuracy value: 86.46361089586935 - type: max_ap value: 75.53126388573047 - type: max_f1 value: 68.02370523061066 - task: type: PairClassification dataset: name: MTEB TwitterURLCorpus type: mteb/twitterurlcorpus-pairclassification config: default split: test revision: 8b6510b0b1fa4e4c4f879467980e9be563ec1cdf metrics: - type: cos_sim_accuracy value: 88.80350836341057 - type: cos_sim_ap value: 85.51101933260743 - type: cos_sim_f1 value: 77.9152271629704 - type: cos_sim_precision value: 
75.27815662910056 - type: cos_sim_recall value: 80.74376347397599 - type: dot_accuracy value: 88.84425815966158 - type: dot_ap value: 85.49726945962519 - type: dot_f1 value: 77.94445269567801 - type: dot_precision value: 75.27251864601261 - type: dot_recall value: 80.81305820757623 - type: euclidean_accuracy value: 88.80350836341057 - type: euclidean_ap value: 85.4882880790211 - type: euclidean_f1 value: 77.87063284615103 - type: euclidean_precision value: 74.61022927689595 - type: euclidean_recall value: 81.42901139513397 - type: manhattan_accuracy value: 88.7161873714441 - type: manhattan_ap value: 85.45753871906821 - type: manhattan_f1 value: 77.8686401480111 - type: manhattan_precision value: 74.95903683123174 - type: manhattan_recall value: 81.01324299353249 - type: max_accuracy value: 88.84425815966158 - type: max_ap value: 85.51101933260743 - type: max_f1 value: 77.94445269567801 --- <!-- **English** | [中文](./README_zh.md) --> # gte-base-en-v1.5 We introduce the `gte-v1.5` series, upgraded `gte` embeddings that support a context length of up to **8192** while further enhancing model performance. The models are built upon the `transformer++` encoder [backbone](https://huggingface.co/Alibaba-NLP/new-impl) (BERT + RoPE + GLU). The `gte-v1.5` series achieves state-of-the-art scores on the MTEB benchmark within the same model size category and provides competitive performance on the LoCo long-context retrieval tests (refer to [Evaluation](#evaluation)). We also present [`gte-Qwen1.5-7B-instruct`](https://huggingface.co/Alibaba-NLP/gte-Qwen1.5-7B-instruct), a SOTA instruction-tuned multilingual embedding model that ranked 2nd on MTEB and 1st on C-MTEB. <!-- Provide a longer summary of what this model is. --> - **Developed by:** Institute for Intelligent Computing, Alibaba Group - **Model type:** Text Embeddings - **Paper:** [mGTE: Generalized Long-Context Text Representation and Reranking Models for Multilingual Text Retrieval](https://arxiv.org/pdf/2407.19669) <!-- - **Demo [optional]:** [More Information Needed] --> ### Model list | Models | Language | Model Size (M) | Max Seq. Length | Dimension | MTEB-en | LoCo | |:-----: | :-----: |:-----: |:-----: |:-----: | :-----: | :-----: | |[`gte-Qwen1.5-7B-instruct`](https://huggingface.co/Alibaba-NLP/gte-Qwen1.5-7B-instruct)| Multiple | 7720 | 32768 | 4096 | 67.34 | 87.57 | |[`gte-large-en-v1.5`](https://huggingface.co/Alibaba-NLP/gte-large-en-v1.5) | English | 434 | 8192 | 1024 | 65.39 | 86.71 | |[`gte-base-en-v1.5`](https://huggingface.co/Alibaba-NLP/gte-base-en-v1.5) | English | 137 | 8192 | 768 | 64.11 | 87.44 | ## How to Get Started with the Model Use the code below to get started with the model.
```python # Requires transformers>=4.36.0 import torch.nn.functional as F from transformers import AutoModel, AutoTokenizer input_texts = [ "what is the capital of China?", "how to implement quick sort in python?", "Beijing", "sorting algorithms" ] model_path = 'Alibaba-NLP/gte-base-en-v1.5' tokenizer = AutoTokenizer.from_pretrained(model_path) model = AutoModel.from_pretrained(model_path, trust_remote_code=True) # Tokenize the input texts batch_dict = tokenizer(input_texts, max_length=8192, padding=True, truncation=True, return_tensors='pt') outputs = model(**batch_dict) embeddings = outputs.last_hidden_state[:, 0] # (Optionally) normalize embeddings embeddings = F.normalize(embeddings, p=2, dim=1) scores = (embeddings[:1] @ embeddings[1:].T) * 100 print(scores.tolist()) ``` **It is recommended to install xformers and enable unpadding for acceleration; refer to [enable-unpadding-and-xformers](https://huggingface.co/Alibaba-NLP/new-impl#recommendation-enable-unpadding-and-acceleration-with-xformers).** Use with `sentence-transformers`: ```python # Requires sentence_transformers>=2.7.0 from sentence_transformers import SentenceTransformer from sentence_transformers.util import cos_sim sentences = ['That is a happy person', 'That is a very happy person'] model = SentenceTransformer('Alibaba-NLP/gte-base-en-v1.5', trust_remote_code=True) embeddings = model.encode(sentences) print(cos_sim(embeddings[0], embeddings[1])) ``` Use with `transformers.js`: ```js // npm i @xenova/transformers import { pipeline, dot } from '@xenova/transformers'; // Create feature extraction pipeline const extractor = await pipeline('feature-extraction', 'Alibaba-NLP/gte-base-en-v1.5', { quantized: false, // Comment out this line to use the quantized version }); // Generate sentence embeddings const sentences = [ "what is the capital of China?", "how to implement quick sort in python?", "Beijing", "sorting algorithms" ] const output = await extractor(sentences, { normalize: true, pooling: 'cls' }); // Compute similarity scores const [source_embeddings, ...document_embeddings ] = output.tolist(); const similarities = document_embeddings.map(x => 100 * dot(source_embeddings, x)); console.log(similarities); // [34.504930869007296, 64.03973265120138, 19.520042686034362] ``` Use with infinity: [Infinity](https://github.com/michaelfeil/infinity) is an MIT-licensed server for OpenAI-compatible deployment. ``` docker run --gpus all -v $PWD/data:/app/.cache -p "7997":"7997" \ michaelf34/infinity:0.0.68 \ v2 --model-id Alibaba-NLP/gte-base-en-v1.5 --revision "4c742dc2b781e4ab062a4a77f4f7cbad4bdee970" --dtype bfloat16 --batch-size 32 --device cuda --engine torch --port 7997 ``` ## Training Details ### Training Data - Masked language modeling (MLM): `c4-en` - Weakly-supervised contrastive pre-training (CPT): [GTE](https://arxiv.org/pdf/2308.03281.pdf) pre-training data - Supervised contrastive fine-tuning: [GTE](https://arxiv.org/pdf/2308.03281.pdf) fine-tuning data ### Training Procedure To enable the backbone model to support a context length of 8192, we adopted a multi-stage training strategy. The model first undergoes preliminary MLM pre-training on shorter lengths. Then we resample the data, reducing the proportion of short texts, and continue the MLM pre-training.
The entire training process is as follows: - MLM-2048: lr 5e-4, mlm_probability 0.3, batch_size 4096, num_steps 70000, rope_base 10000 - [MLM-8192](https://huggingface.co/Alibaba-NLP/gte-en-mlm-base): lr 5e-5, mlm_probability 0.3, batch_size 1024, num_steps 20000, rope_base 500000 - CPT: max_len 512, lr 2e-4, batch_size 32768, num_steps 100000 - Fine-tuning: TODO ## Evaluation ### MTEB The results of other models are retrieved from the [MTEB leaderboard](https://huggingface.co/spaces/mteb/leaderboard). The gte evaluation setting: `mteb==1.2.0`, fp16 automatic mixed precision, `max_length=8192`, and the NTK scaling factor set to 2 (equivalent to rope_base * 2); a minimal evaluation sketch is shown after the citation below. | Model Name | Param Size (M) | Dimension | Sequence Length | Average (56) | Class. (12) | Clust. (11) | Pair Class. (3) | Reran. (4) | Retr. (15) | STS (10) | Summ. (1) | |:----:|:---:|:---:|:---:|:---:|:---:|:---:|:---:|:---:|:---:|:---:|:---:| | [**gte-large-en-v1.5**](https://huggingface.co/Alibaba-NLP/gte-large-en-v1.5) | 434 | 1024 | 8192 | **65.39** | 77.75 | 47.95 | 84.63 | 58.50 | 57.91 | 81.43 | 30.91 | | [mxbai-embed-large-v1](https://huggingface.co/mixedbread-ai/mxbai-embed-large-v1) | 335 | 1024 | 512 | 64.68 | 75.64 | 46.71 | 87.2 | 60.11 | 54.39 | 85 | 32.71 | | [multilingual-e5-large-instruct](https://huggingface.co/intfloat/multilingual-e5-large-instruct) | 560 | 1024 | 514 | 64.41 | 77.56 | 47.1 | 86.19 | 58.58 | 52.47 | 84.78 | 30.39 | | [bge-large-en-v1.5](https://huggingface.co/BAAI/bge-large-en-v1.5)| 335 | 1024 | 512 | 64.23 | 75.97 | 46.08 | 87.12 | 60.03 | 54.29 | 83.11 | 31.61 | | [**gte-base-en-v1.5**](https://huggingface.co/Alibaba-NLP/gte-base-en-v1.5) | 137 | 768 | 8192 | **64.11** | 77.17 | 46.82 | 85.33 | 57.66 | 54.09 | 81.97 | 31.17 | | [bge-base-en-v1.5](https://huggingface.co/BAAI/bge-base-en-v1.5)| 109 | 768 | 512 | 63.55 | 75.53 | 45.77 | 86.55 | 58.86 | 53.25 | 82.4 | 31.07 | ### LoCo | Model Name | Dimension | Sequence Length | Average (5) | QmsumRetrieval | SummScreenRetrieval | QasperAbstractRetrieval | QasperTitleRetrieval | GovReportRetrieval | |:----:|:---:|:---:|:---:|:---:|:---:|:---:|:---:|:---:| | [gte-qwen1.5-7b](https://huggingface.co/Alibaba-NLP/gte-qwen1.5-7b) | 4096 | 32768 | 87.57 | 49.37 | 93.10 | 99.67 | 97.54 | 98.21 | | [gte-large-v1.5](https://huggingface.co/Alibaba-NLP/gte-large-v1.5) |1024 | 8192 | 86.71 | 44.55 | 92.61 | 99.82 | 97.81 | 98.74 | | [gte-base-v1.5](https://huggingface.co/Alibaba-NLP/gte-base-v1.5) | 768 | 8192 | 87.44 | 49.91 | 91.78 | 99.82 | 97.13 | 98.58 | ## Citation If you find our paper or models helpful, please consider citing them as follows: ``` @misc{zhang2024mgte, title={mGTE: Generalized Long-Context Text Representation and Reranking Models for Multilingual Text Retrieval}, author={Xin Zhang and Yanzhao Zhang and Dingkun Long and Wen Xie and Ziqi Dai and Jialong Tang and Huan Lin and Baosong Yang and Pengjun Xie and Fei Huang and Meishan Zhang and Wenjie Li and Min Zhang}, year={2024}, eprint={2407.19669}, archivePrefix={arXiv}, primaryClass={cs.CL}, url={https://arxiv.org/abs/2407.19669}, } @misc{li2023gte, title={Towards General Text Embeddings with Multi-stage Contrastive Learning}, author={Zehan Li and Xin Zhang and Yanzhao Zhang and Dingkun Long and Pengjun Xie and Meishan Zhang}, year={2023}, eprint={2308.03281}, archivePrefix={arXiv}, primaryClass={cs.CL}, url={https://arxiv.org/abs/2308.03281}, } ```
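As a rough illustration of the evaluation setting described above, the snippet below sketches how the MTEB harness pinned at `mteb==1.2.0` can be pointed at `gte-base-en-v1.5`. The task list, output folder, and plain `SentenceTransformer` loading are illustrative assumptions rather than the authors' exact evaluation script; in particular, the fp16 and NTK/rope_base adjustments mentioned above are not applied here.

```python
# Minimal sketch: run a couple of MTEB tasks against gte-base-en-v1.5.
# Assumes mteb==1.2.0 and sentence-transformers>=2.7.0 are installed;
# the task selection and output folder are illustrative, not the official setup.
from mteb import MTEB
from sentence_transformers import SentenceTransformer

model = SentenceTransformer("Alibaba-NLP/gte-base-en-v1.5", trust_remote_code=True)
model.max_seq_length = 8192  # match the max_length used in the reported evaluation

evaluation = MTEB(tasks=["STSBenchmark", "SciFact"])
evaluation.run(model, output_folder="results/gte-base-en-v1.5")
```

Each task writes its scores as JSON files into the output folder, which can then be compared against the MTEB and LoCo tables above.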
[ "BIOSSES", "SCIFACT" ]
mixedbread-ai/mxbai-embed-large-v1
mixedbread-ai
feature-extraction
[ "sentence-transformers", "onnx", "safetensors", "openvino", "gguf", "bert", "feature-extraction", "mteb", "transformers.js", "transformers", "en", "arxiv:2309.12871", "license:apache-2.0", "model-index", "autotrain_compatible", "text-embeddings-inference", "endpoints_compatible", "region:us" ]
"2024-03-07T15:45:34Z"
2025-03-13T04:15:03+00:00
2,390,539
639
--- language: - en library_name: sentence-transformers license: apache-2.0 pipeline_tag: feature-extraction tags: - mteb - transformers.js - transformers model-index: - name: mxbai-angle-large-v1 results: - task: type: Classification dataset: name: MTEB AmazonCounterfactualClassification (en) type: mteb/amazon_counterfactual config: en split: test revision: e8379541af4e31359cca9fbcf4b00f2671dba205 metrics: - type: accuracy value: 75.044776119403 - type: ap value: 37.7362433623053 - type: f1 value: 68.92736573359774 - task: type: Classification dataset: name: MTEB AmazonPolarityClassification type: mteb/amazon_polarity config: default split: test revision: e2d317d38cd51312af73b3d32a06d1a08b442046 metrics: - type: accuracy value: 93.84025000000001 - type: ap value: 90.93190875404055 - type: f1 value: 93.8297833897293 - task: type: Classification dataset: name: MTEB AmazonReviewsClassification (en) type: mteb/amazon_reviews_multi config: en split: test revision: 1399c76144fd37290681b995c656ef9b2e06e26d metrics: - type: accuracy value: 49.184 - type: f1 value: 48.74163227751588 - task: type: Retrieval dataset: name: MTEB ArguAna type: arguana config: default split: test revision: None metrics: - type: map_at_1 value: 41.252 - type: map_at_10 value: 57.778 - type: map_at_100 value: 58.233000000000004 - type: map_at_1000 value: 58.23700000000001 - type: map_at_3 value: 53.449999999999996 - type: map_at_5 value: 56.376000000000005 - type: mrr_at_1 value: 41.679 - type: mrr_at_10 value: 57.92699999999999 - type: mrr_at_100 value: 58.389 - type: mrr_at_1000 value: 58.391999999999996 - type: mrr_at_3 value: 53.651 - type: mrr_at_5 value: 56.521 - type: ndcg_at_1 value: 41.252 - type: ndcg_at_10 value: 66.018 - type: ndcg_at_100 value: 67.774 - type: ndcg_at_1000 value: 67.84400000000001 - type: ndcg_at_3 value: 57.372 - type: ndcg_at_5 value: 62.646 - type: precision_at_1 value: 41.252 - type: precision_at_10 value: 9.189 - type: precision_at_100 value: 0.991 - type: precision_at_1000 value: 0.1 - type: precision_at_3 value: 22.902 - type: precision_at_5 value: 16.302 - type: recall_at_1 value: 41.252 - type: recall_at_10 value: 91.892 - type: recall_at_100 value: 99.14699999999999 - type: recall_at_1000 value: 99.644 - type: recall_at_3 value: 68.706 - type: recall_at_5 value: 81.50800000000001 - task: type: Clustering dataset: name: MTEB ArxivClusteringP2P type: mteb/arxiv-clustering-p2p config: default split: test revision: a122ad7f3f0291bf49cc6f4d32aa80929df69d5d metrics: - type: v_measure value: 48.97294504317859 - task: type: Clustering dataset: name: MTEB ArxivClusteringS2S type: mteb/arxiv-clustering-s2s config: default split: test revision: f910caf1a6075f7329cdf8c1a6135696f37dbd53 metrics: - type: v_measure value: 42.98071077674629 - task: type: Reranking dataset: name: MTEB AskUbuntuDupQuestions type: mteb/askubuntudupquestions-reranking config: default split: test revision: 2000358ca161889fa9c082cb41daa8dcfb161a54 metrics: - type: map value: 65.16477858490782 - type: mrr value: 78.23583080508287 - task: type: STS dataset: name: MTEB BIOSSES type: mteb/biosses-sts config: default split: test revision: d3fb88f8f02e40887cd149695127462bbcf29b4a metrics: - type: cos_sim_pearson value: 89.6277629421789 - type: cos_sim_spearman value: 88.4056288400568 - type: euclidean_pearson value: 87.94871847578163 - type: euclidean_spearman value: 88.4056288400568 - type: manhattan_pearson value: 87.73271254229648 - type: manhattan_spearman value: 87.91826833762677 - task: type: Classification dataset: name: 
MTEB Banking77Classification type: mteb/banking77 config: default split: test revision: 0fd18e25b25c072e09e0d92ab615fda904d66300 metrics: - type: accuracy value: 87.81818181818181 - type: f1 value: 87.79879337316918 - task: type: Clustering dataset: name: MTEB BiorxivClusteringP2P type: mteb/biorxiv-clustering-p2p config: default split: test revision: 65b79d1d13f80053f67aca9498d9402c2d9f1f40 metrics: - type: v_measure value: 39.91773608582761 - task: type: Clustering dataset: name: MTEB BiorxivClusteringS2S type: mteb/biorxiv-clustering-s2s config: default split: test revision: 258694dd0231531bc1fd9de6ceb52a0853c6d908 metrics: - type: v_measure value: 36.73059477462478 - task: type: Retrieval dataset: name: MTEB CQADupstackAndroidRetrieval type: BeIR/cqadupstack config: default split: test revision: None metrics: - type: map_at_1 value: 32.745999999999995 - type: map_at_10 value: 43.632 - type: map_at_100 value: 45.206 - type: map_at_1000 value: 45.341 - type: map_at_3 value: 39.956 - type: map_at_5 value: 42.031 - type: mrr_at_1 value: 39.485 - type: mrr_at_10 value: 49.537 - type: mrr_at_100 value: 50.249 - type: mrr_at_1000 value: 50.294000000000004 - type: mrr_at_3 value: 46.757 - type: mrr_at_5 value: 48.481 - type: ndcg_at_1 value: 39.485 - type: ndcg_at_10 value: 50.058 - type: ndcg_at_100 value: 55.586 - type: ndcg_at_1000 value: 57.511 - type: ndcg_at_3 value: 44.786 - type: ndcg_at_5 value: 47.339999999999996 - type: precision_at_1 value: 39.485 - type: precision_at_10 value: 9.557 - type: precision_at_100 value: 1.552 - type: precision_at_1000 value: 0.202 - type: precision_at_3 value: 21.412 - type: precision_at_5 value: 15.479000000000001 - type: recall_at_1 value: 32.745999999999995 - type: recall_at_10 value: 62.056 - type: recall_at_100 value: 85.088 - type: recall_at_1000 value: 96.952 - type: recall_at_3 value: 46.959 - type: recall_at_5 value: 54.06999999999999 - type: map_at_1 value: 31.898 - type: map_at_10 value: 42.142 - type: map_at_100 value: 43.349 - type: map_at_1000 value: 43.483 - type: map_at_3 value: 39.18 - type: map_at_5 value: 40.733000000000004 - type: mrr_at_1 value: 39.617999999999995 - type: mrr_at_10 value: 47.922 - type: mrr_at_100 value: 48.547000000000004 - type: mrr_at_1000 value: 48.597 - type: mrr_at_3 value: 45.86 - type: mrr_at_5 value: 46.949000000000005 - type: ndcg_at_1 value: 39.617999999999995 - type: ndcg_at_10 value: 47.739 - type: ndcg_at_100 value: 51.934999999999995 - type: ndcg_at_1000 value: 54.007000000000005 - type: ndcg_at_3 value: 43.748 - type: ndcg_at_5 value: 45.345 - type: precision_at_1 value: 39.617999999999995 - type: precision_at_10 value: 8.962 - type: precision_at_100 value: 1.436 - type: precision_at_1000 value: 0.192 - type: precision_at_3 value: 21.083 - type: precision_at_5 value: 14.752 - type: recall_at_1 value: 31.898 - type: recall_at_10 value: 57.587999999999994 - type: recall_at_100 value: 75.323 - type: recall_at_1000 value: 88.304 - type: recall_at_3 value: 45.275 - type: recall_at_5 value: 49.99 - type: map_at_1 value: 40.458 - type: map_at_10 value: 52.942 - type: map_at_100 value: 53.974 - type: map_at_1000 value: 54.031 - type: map_at_3 value: 49.559999999999995 - type: map_at_5 value: 51.408 - type: mrr_at_1 value: 46.27 - type: mrr_at_10 value: 56.31699999999999 - type: mrr_at_100 value: 56.95099999999999 - type: mrr_at_1000 value: 56.98 - type: mrr_at_3 value: 53.835 - type: mrr_at_5 value: 55.252 - type: ndcg_at_1 value: 46.27 - type: ndcg_at_10 value: 58.964000000000006 - type: ndcg_at_100 value: 
62.875 - type: ndcg_at_1000 value: 63.969 - type: ndcg_at_3 value: 53.297000000000004 - type: ndcg_at_5 value: 55.938 - type: precision_at_1 value: 46.27 - type: precision_at_10 value: 9.549000000000001 - type: precision_at_100 value: 1.2409999999999999 - type: precision_at_1000 value: 0.13799999999999998 - type: precision_at_3 value: 23.762 - type: precision_at_5 value: 16.262999999999998 - type: recall_at_1 value: 40.458 - type: recall_at_10 value: 73.446 - type: recall_at_100 value: 90.12400000000001 - type: recall_at_1000 value: 97.795 - type: recall_at_3 value: 58.123000000000005 - type: recall_at_5 value: 64.68 - type: map_at_1 value: 27.443 - type: map_at_10 value: 36.081 - type: map_at_100 value: 37.163000000000004 - type: map_at_1000 value: 37.232 - type: map_at_3 value: 33.308 - type: map_at_5 value: 34.724 - type: mrr_at_1 value: 29.492 - type: mrr_at_10 value: 38.138 - type: mrr_at_100 value: 39.065 - type: mrr_at_1000 value: 39.119 - type: mrr_at_3 value: 35.593 - type: mrr_at_5 value: 36.785000000000004 - type: ndcg_at_1 value: 29.492 - type: ndcg_at_10 value: 41.134 - type: ndcg_at_100 value: 46.300999999999995 - type: ndcg_at_1000 value: 48.106 - type: ndcg_at_3 value: 35.77 - type: ndcg_at_5 value: 38.032 - type: precision_at_1 value: 29.492 - type: precision_at_10 value: 6.249 - type: precision_at_100 value: 0.9299999999999999 - type: precision_at_1000 value: 0.11199999999999999 - type: precision_at_3 value: 15.065999999999999 - type: precision_at_5 value: 10.373000000000001 - type: recall_at_1 value: 27.443 - type: recall_at_10 value: 54.80199999999999 - type: recall_at_100 value: 78.21900000000001 - type: recall_at_1000 value: 91.751 - type: recall_at_3 value: 40.211000000000006 - type: recall_at_5 value: 45.599000000000004 - type: map_at_1 value: 18.731 - type: map_at_10 value: 26.717999999999996 - type: map_at_100 value: 27.897 - type: map_at_1000 value: 28.029 - type: map_at_3 value: 23.91 - type: map_at_5 value: 25.455 - type: mrr_at_1 value: 23.134 - type: mrr_at_10 value: 31.769 - type: mrr_at_100 value: 32.634 - type: mrr_at_1000 value: 32.707 - type: mrr_at_3 value: 28.938999999999997 - type: mrr_at_5 value: 30.531000000000002 - type: ndcg_at_1 value: 23.134 - type: ndcg_at_10 value: 32.249 - type: ndcg_at_100 value: 37.678 - type: ndcg_at_1000 value: 40.589999999999996 - type: ndcg_at_3 value: 26.985999999999997 - type: ndcg_at_5 value: 29.457 - type: precision_at_1 value: 23.134 - type: precision_at_10 value: 5.8709999999999996 - type: precision_at_100 value: 0.988 - type: precision_at_1000 value: 0.13799999999999998 - type: precision_at_3 value: 12.852 - type: precision_at_5 value: 9.428 - type: recall_at_1 value: 18.731 - type: recall_at_10 value: 44.419 - type: recall_at_100 value: 67.851 - type: recall_at_1000 value: 88.103 - type: recall_at_3 value: 29.919 - type: recall_at_5 value: 36.230000000000004 - type: map_at_1 value: 30.324 - type: map_at_10 value: 41.265 - type: map_at_100 value: 42.559000000000005 - type: map_at_1000 value: 42.669000000000004 - type: map_at_3 value: 38.138 - type: map_at_5 value: 39.881 - type: mrr_at_1 value: 36.67 - type: mrr_at_10 value: 46.774 - type: mrr_at_100 value: 47.554 - type: mrr_at_1000 value: 47.593 - type: mrr_at_3 value: 44.338 - type: mrr_at_5 value: 45.723 - type: ndcg_at_1 value: 36.67 - type: ndcg_at_10 value: 47.367 - type: ndcg_at_100 value: 52.623 - type: ndcg_at_1000 value: 54.59 - type: ndcg_at_3 value: 42.323 - type: ndcg_at_5 value: 44.727 - type: precision_at_1 value: 36.67 - type: precision_at_10 
value: 8.518 - type: precision_at_100 value: 1.2890000000000001 - type: precision_at_1000 value: 0.163 - type: precision_at_3 value: 19.955000000000002 - type: precision_at_5 value: 14.11 - type: recall_at_1 value: 30.324 - type: recall_at_10 value: 59.845000000000006 - type: recall_at_100 value: 81.77499999999999 - type: recall_at_1000 value: 94.463 - type: recall_at_3 value: 46.019 - type: recall_at_5 value: 52.163000000000004 - type: map_at_1 value: 24.229 - type: map_at_10 value: 35.004000000000005 - type: map_at_100 value: 36.409000000000006 - type: map_at_1000 value: 36.521 - type: map_at_3 value: 31.793 - type: map_at_5 value: 33.432 - type: mrr_at_1 value: 30.365 - type: mrr_at_10 value: 40.502 - type: mrr_at_100 value: 41.372 - type: mrr_at_1000 value: 41.435 - type: mrr_at_3 value: 37.804 - type: mrr_at_5 value: 39.226 - type: ndcg_at_1 value: 30.365 - type: ndcg_at_10 value: 41.305 - type: ndcg_at_100 value: 47.028999999999996 - type: ndcg_at_1000 value: 49.375 - type: ndcg_at_3 value: 35.85 - type: ndcg_at_5 value: 38.12 - type: precision_at_1 value: 30.365 - type: precision_at_10 value: 7.808 - type: precision_at_100 value: 1.228 - type: precision_at_1000 value: 0.161 - type: precision_at_3 value: 17.352 - type: precision_at_5 value: 12.42 - type: recall_at_1 value: 24.229 - type: recall_at_10 value: 54.673 - type: recall_at_100 value: 78.766 - type: recall_at_1000 value: 94.625 - type: recall_at_3 value: 39.602 - type: recall_at_5 value: 45.558 - type: map_at_1 value: 26.695 - type: map_at_10 value: 36.0895 - type: map_at_100 value: 37.309416666666664 - type: map_at_1000 value: 37.42558333333334 - type: map_at_3 value: 33.19616666666666 - type: map_at_5 value: 34.78641666666667 - type: mrr_at_1 value: 31.486083333333337 - type: mrr_at_10 value: 40.34774999999999 - type: mrr_at_100 value: 41.17533333333333 - type: mrr_at_1000 value: 41.231583333333326 - type: mrr_at_3 value: 37.90075 - type: mrr_at_5 value: 39.266999999999996 - type: ndcg_at_1 value: 31.486083333333337 - type: ndcg_at_10 value: 41.60433333333334 - type: ndcg_at_100 value: 46.74525 - type: ndcg_at_1000 value: 48.96166666666667 - type: ndcg_at_3 value: 36.68825 - type: ndcg_at_5 value: 38.966499999999996 - type: precision_at_1 value: 31.486083333333337 - type: precision_at_10 value: 7.29675 - type: precision_at_100 value: 1.1621666666666666 - type: precision_at_1000 value: 0.1545 - type: precision_at_3 value: 16.8815 - type: precision_at_5 value: 11.974583333333333 - type: recall_at_1 value: 26.695 - type: recall_at_10 value: 53.651916666666665 - type: recall_at_100 value: 76.12083333333332 - type: recall_at_1000 value: 91.31191666666668 - type: recall_at_3 value: 40.03575 - type: recall_at_5 value: 45.876666666666665 - type: map_at_1 value: 25.668000000000003 - type: map_at_10 value: 32.486 - type: map_at_100 value: 33.371 - type: map_at_1000 value: 33.458 - type: map_at_3 value: 30.261 - type: map_at_5 value: 31.418000000000003 - type: mrr_at_1 value: 28.988000000000003 - type: mrr_at_10 value: 35.414 - type: mrr_at_100 value: 36.149 - type: mrr_at_1000 value: 36.215 - type: mrr_at_3 value: 33.333 - type: mrr_at_5 value: 34.43 - type: ndcg_at_1 value: 28.988000000000003 - type: ndcg_at_10 value: 36.732 - type: ndcg_at_100 value: 41.331 - type: ndcg_at_1000 value: 43.575 - type: ndcg_at_3 value: 32.413 - type: ndcg_at_5 value: 34.316 - type: precision_at_1 value: 28.988000000000003 - type: precision_at_10 value: 5.7059999999999995 - type: precision_at_100 value: 0.882 - type: precision_at_1000 value: 
0.11299999999999999 - type: precision_at_3 value: 13.65 - type: precision_at_5 value: 9.417 - type: recall_at_1 value: 25.668000000000003 - type: recall_at_10 value: 47.147 - type: recall_at_100 value: 68.504 - type: recall_at_1000 value: 85.272 - type: recall_at_3 value: 35.19 - type: recall_at_5 value: 39.925 - type: map_at_1 value: 17.256 - type: map_at_10 value: 24.58 - type: map_at_100 value: 25.773000000000003 - type: map_at_1000 value: 25.899 - type: map_at_3 value: 22.236 - type: map_at_5 value: 23.507 - type: mrr_at_1 value: 20.957 - type: mrr_at_10 value: 28.416000000000004 - type: mrr_at_100 value: 29.447000000000003 - type: mrr_at_1000 value: 29.524 - type: mrr_at_3 value: 26.245 - type: mrr_at_5 value: 27.451999999999998 - type: ndcg_at_1 value: 20.957 - type: ndcg_at_10 value: 29.285 - type: ndcg_at_100 value: 35.003 - type: ndcg_at_1000 value: 37.881 - type: ndcg_at_3 value: 25.063000000000002 - type: ndcg_at_5 value: 26.983 - type: precision_at_1 value: 20.957 - type: precision_at_10 value: 5.344 - type: precision_at_100 value: 0.958 - type: precision_at_1000 value: 0.13799999999999998 - type: precision_at_3 value: 11.918 - type: precision_at_5 value: 8.596 - type: recall_at_1 value: 17.256 - type: recall_at_10 value: 39.644 - type: recall_at_100 value: 65.279 - type: recall_at_1000 value: 85.693 - type: recall_at_3 value: 27.825 - type: recall_at_5 value: 32.792 - type: map_at_1 value: 26.700000000000003 - type: map_at_10 value: 36.205999999999996 - type: map_at_100 value: 37.316 - type: map_at_1000 value: 37.425000000000004 - type: map_at_3 value: 33.166000000000004 - type: map_at_5 value: 35.032999999999994 - type: mrr_at_1 value: 31.436999999999998 - type: mrr_at_10 value: 40.61 - type: mrr_at_100 value: 41.415 - type: mrr_at_1000 value: 41.48 - type: mrr_at_3 value: 37.966 - type: mrr_at_5 value: 39.599000000000004 - type: ndcg_at_1 value: 31.436999999999998 - type: ndcg_at_10 value: 41.771 - type: ndcg_at_100 value: 46.784 - type: ndcg_at_1000 value: 49.183 - type: ndcg_at_3 value: 36.437000000000005 - type: ndcg_at_5 value: 39.291 - type: precision_at_1 value: 31.436999999999998 - type: precision_at_10 value: 6.987 - type: precision_at_100 value: 1.072 - type: precision_at_1000 value: 0.13899999999999998 - type: precision_at_3 value: 16.448999999999998 - type: precision_at_5 value: 11.866 - type: recall_at_1 value: 26.700000000000003 - type: recall_at_10 value: 54.301 - type: recall_at_100 value: 75.871 - type: recall_at_1000 value: 92.529 - type: recall_at_3 value: 40.201 - type: recall_at_5 value: 47.208 - type: map_at_1 value: 24.296 - type: map_at_10 value: 33.116 - type: map_at_100 value: 34.81 - type: map_at_1000 value: 35.032000000000004 - type: map_at_3 value: 30.105999999999998 - type: map_at_5 value: 31.839000000000002 - type: mrr_at_1 value: 29.051 - type: mrr_at_10 value: 37.803 - type: mrr_at_100 value: 38.856 - type: mrr_at_1000 value: 38.903999999999996 - type: mrr_at_3 value: 35.211 - type: mrr_at_5 value: 36.545 - type: ndcg_at_1 value: 29.051 - type: ndcg_at_10 value: 39.007 - type: ndcg_at_100 value: 45.321 - type: ndcg_at_1000 value: 47.665 - type: ndcg_at_3 value: 34.1 - type: ndcg_at_5 value: 36.437000000000005 - type: precision_at_1 value: 29.051 - type: precision_at_10 value: 7.668 - type: precision_at_100 value: 1.542 - type: precision_at_1000 value: 0.24 - type: precision_at_3 value: 16.14 - type: precision_at_5 value: 11.897 - type: recall_at_1 value: 24.296 - type: recall_at_10 value: 49.85 - type: recall_at_100 value: 78.457 - type: 
recall_at_1000 value: 92.618 - type: recall_at_3 value: 36.138999999999996 - type: recall_at_5 value: 42.223 - type: map_at_1 value: 20.591 - type: map_at_10 value: 28.902 - type: map_at_100 value: 29.886000000000003 - type: map_at_1000 value: 29.987000000000002 - type: map_at_3 value: 26.740000000000002 - type: map_at_5 value: 27.976 - type: mrr_at_1 value: 22.366 - type: mrr_at_10 value: 30.971 - type: mrr_at_100 value: 31.865 - type: mrr_at_1000 value: 31.930999999999997 - type: mrr_at_3 value: 28.927999999999997 - type: mrr_at_5 value: 30.231 - type: ndcg_at_1 value: 22.366 - type: ndcg_at_10 value: 33.641 - type: ndcg_at_100 value: 38.477 - type: ndcg_at_1000 value: 41.088 - type: ndcg_at_3 value: 29.486 - type: ndcg_at_5 value: 31.612000000000002 - type: precision_at_1 value: 22.366 - type: precision_at_10 value: 5.3420000000000005 - type: precision_at_100 value: 0.828 - type: precision_at_1000 value: 0.11800000000000001 - type: precision_at_3 value: 12.939 - type: precision_at_5 value: 9.094 - type: recall_at_1 value: 20.591 - type: recall_at_10 value: 46.052 - type: recall_at_100 value: 68.193 - type: recall_at_1000 value: 87.638 - type: recall_at_3 value: 34.966 - type: recall_at_5 value: 40.082 - task: type: Retrieval dataset: name: MTEB ClimateFEVER type: climate-fever config: default split: test revision: None metrics: - type: map_at_1 value: 15.091 - type: map_at_10 value: 26.38 - type: map_at_100 value: 28.421999999999997 - type: map_at_1000 value: 28.621999999999996 - type: map_at_3 value: 21.597 - type: map_at_5 value: 24.12 - type: mrr_at_1 value: 34.266999999999996 - type: mrr_at_10 value: 46.864 - type: mrr_at_100 value: 47.617 - type: mrr_at_1000 value: 47.644 - type: mrr_at_3 value: 43.312 - type: mrr_at_5 value: 45.501000000000005 - type: ndcg_at_1 value: 34.266999999999996 - type: ndcg_at_10 value: 36.095 - type: ndcg_at_100 value: 43.447 - type: ndcg_at_1000 value: 46.661 - type: ndcg_at_3 value: 29.337999999999997 - type: ndcg_at_5 value: 31.824 - type: precision_at_1 value: 34.266999999999996 - type: precision_at_10 value: 11.472 - type: precision_at_100 value: 1.944 - type: precision_at_1000 value: 0.255 - type: precision_at_3 value: 21.933 - type: precision_at_5 value: 17.224999999999998 - type: recall_at_1 value: 15.091 - type: recall_at_10 value: 43.022 - type: recall_at_100 value: 68.075 - type: recall_at_1000 value: 85.76 - type: recall_at_3 value: 26.564 - type: recall_at_5 value: 33.594 - task: type: Retrieval dataset: name: MTEB DBPedia type: dbpedia-entity config: default split: test revision: None metrics: - type: map_at_1 value: 9.252 - type: map_at_10 value: 20.923 - type: map_at_100 value: 30.741000000000003 - type: map_at_1000 value: 32.542 - type: map_at_3 value: 14.442 - type: map_at_5 value: 17.399 - type: mrr_at_1 value: 70.25 - type: mrr_at_10 value: 78.17 - type: mrr_at_100 value: 78.444 - type: mrr_at_1000 value: 78.45100000000001 - type: mrr_at_3 value: 76.958 - type: mrr_at_5 value: 77.571 - type: ndcg_at_1 value: 58.375 - type: ndcg_at_10 value: 44.509 - type: ndcg_at_100 value: 49.897999999999996 - type: ndcg_at_1000 value: 57.269999999999996 - type: ndcg_at_3 value: 48.64 - type: ndcg_at_5 value: 46.697 - type: precision_at_1 value: 70.25 - type: precision_at_10 value: 36.05 - type: precision_at_100 value: 11.848 - type: precision_at_1000 value: 2.213 - type: precision_at_3 value: 52.917 - type: precision_at_5 value: 45.7 - type: recall_at_1 value: 9.252 - type: recall_at_10 value: 27.006999999999998 - type: recall_at_100 value: 57.008 - 
type: recall_at_1000 value: 80.697 - type: recall_at_3 value: 15.798000000000002 - type: recall_at_5 value: 20.4 - task: type: Classification dataset: name: MTEB EmotionClassification type: mteb/emotion config: default split: test revision: 4f58c6b202a23cf9a4da393831edf4f9183cad37 metrics: - type: accuracy value: 50.88 - type: f1 value: 45.545495028653384 - task: type: Retrieval dataset: name: MTEB FEVER type: fever config: default split: test revision: None metrics: - type: map_at_1 value: 75.424 - type: map_at_10 value: 83.435 - type: map_at_100 value: 83.66900000000001 - type: map_at_1000 value: 83.685 - type: map_at_3 value: 82.39800000000001 - type: map_at_5 value: 83.07 - type: mrr_at_1 value: 81.113 - type: mrr_at_10 value: 87.77199999999999 - type: mrr_at_100 value: 87.862 - type: mrr_at_1000 value: 87.86500000000001 - type: mrr_at_3 value: 87.17099999999999 - type: mrr_at_5 value: 87.616 - type: ndcg_at_1 value: 81.113 - type: ndcg_at_10 value: 86.909 - type: ndcg_at_100 value: 87.746 - type: ndcg_at_1000 value: 88.017 - type: ndcg_at_3 value: 85.368 - type: ndcg_at_5 value: 86.28099999999999 - type: precision_at_1 value: 81.113 - type: precision_at_10 value: 10.363 - type: precision_at_100 value: 1.102 - type: precision_at_1000 value: 0.11399999999999999 - type: precision_at_3 value: 32.507999999999996 - type: precision_at_5 value: 20.138 - type: recall_at_1 value: 75.424 - type: recall_at_10 value: 93.258 - type: recall_at_100 value: 96.545 - type: recall_at_1000 value: 98.284 - type: recall_at_3 value: 89.083 - type: recall_at_5 value: 91.445 - task: type: Retrieval dataset: name: MTEB FiQA2018 type: fiqa config: default split: test revision: None metrics: - type: map_at_1 value: 22.532 - type: map_at_10 value: 37.141999999999996 - type: map_at_100 value: 39.162 - type: map_at_1000 value: 39.322 - type: map_at_3 value: 32.885 - type: map_at_5 value: 35.093999999999994 - type: mrr_at_1 value: 44.29 - type: mrr_at_10 value: 53.516 - type: mrr_at_100 value: 54.24 - type: mrr_at_1000 value: 54.273 - type: mrr_at_3 value: 51.286 - type: mrr_at_5 value: 52.413 - type: ndcg_at_1 value: 44.29 - type: ndcg_at_10 value: 45.268 - type: ndcg_at_100 value: 52.125 - type: ndcg_at_1000 value: 54.778000000000006 - type: ndcg_at_3 value: 41.829 - type: ndcg_at_5 value: 42.525 - type: precision_at_1 value: 44.29 - type: precision_at_10 value: 12.5 - type: precision_at_100 value: 1.9720000000000002 - type: precision_at_1000 value: 0.245 - type: precision_at_3 value: 28.035 - type: precision_at_5 value: 20.093 - type: recall_at_1 value: 22.532 - type: recall_at_10 value: 52.419000000000004 - type: recall_at_100 value: 77.43299999999999 - type: recall_at_1000 value: 93.379 - type: recall_at_3 value: 38.629000000000005 - type: recall_at_5 value: 43.858000000000004 - task: type: Retrieval dataset: name: MTEB HotpotQA type: hotpotqa config: default split: test revision: None metrics: - type: map_at_1 value: 39.359 - type: map_at_10 value: 63.966 - type: map_at_100 value: 64.87 - type: map_at_1000 value: 64.92599999999999 - type: map_at_3 value: 60.409 - type: map_at_5 value: 62.627 - type: mrr_at_1 value: 78.717 - type: mrr_at_10 value: 84.468 - type: mrr_at_100 value: 84.655 - type: mrr_at_1000 value: 84.661 - type: mrr_at_3 value: 83.554 - type: mrr_at_5 value: 84.133 - type: ndcg_at_1 value: 78.717 - type: ndcg_at_10 value: 72.03399999999999 - type: ndcg_at_100 value: 75.158 - type: ndcg_at_1000 value: 76.197 - type: ndcg_at_3 value: 67.049 - type: ndcg_at_5 value: 69.808 - type: precision_at_1 
value: 78.717 - type: precision_at_10 value: 15.201 - type: precision_at_100 value: 1.764 - type: precision_at_1000 value: 0.19 - type: precision_at_3 value: 43.313 - type: precision_at_5 value: 28.165000000000003 - type: recall_at_1 value: 39.359 - type: recall_at_10 value: 76.003 - type: recall_at_100 value: 88.197 - type: recall_at_1000 value: 95.003 - type: recall_at_3 value: 64.97 - type: recall_at_5 value: 70.41199999999999 - task: type: Classification dataset: name: MTEB ImdbClassification type: mteb/imdb config: default split: test revision: 3d86128a09e091d6018b6d26cad27f2739fc2db7 metrics: - type: accuracy value: 92.83200000000001 - type: ap value: 89.33560571859861 - type: f1 value: 92.82322915005167 - task: type: Retrieval dataset: name: MTEB MSMARCO type: msmarco config: default split: dev revision: None metrics: - type: map_at_1 value: 21.983 - type: map_at_10 value: 34.259 - type: map_at_100 value: 35.432 - type: map_at_1000 value: 35.482 - type: map_at_3 value: 30.275999999999996 - type: map_at_5 value: 32.566 - type: mrr_at_1 value: 22.579 - type: mrr_at_10 value: 34.882999999999996 - type: mrr_at_100 value: 35.984 - type: mrr_at_1000 value: 36.028 - type: mrr_at_3 value: 30.964999999999996 - type: mrr_at_5 value: 33.245000000000005 - type: ndcg_at_1 value: 22.564 - type: ndcg_at_10 value: 41.258 - type: ndcg_at_100 value: 46.824 - type: ndcg_at_1000 value: 48.037 - type: ndcg_at_3 value: 33.17 - type: ndcg_at_5 value: 37.263000000000005 - type: precision_at_1 value: 22.564 - type: precision_at_10 value: 6.572 - type: precision_at_100 value: 0.935 - type: precision_at_1000 value: 0.104 - type: precision_at_3 value: 14.130999999999998 - type: precision_at_5 value: 10.544 - type: recall_at_1 value: 21.983 - type: recall_at_10 value: 62.775000000000006 - type: recall_at_100 value: 88.389 - type: recall_at_1000 value: 97.603 - type: recall_at_3 value: 40.878 - type: recall_at_5 value: 50.690000000000005 - task: type: Classification dataset: name: MTEB MTOPDomainClassification (en) type: mteb/mtop_domain config: en split: test revision: d80d48c1eb48d3562165c59d59d0034df9fff0bf metrics: - type: accuracy value: 93.95120839033288 - type: f1 value: 93.73824125055208 - task: type: Classification dataset: name: MTEB MTOPIntentClassification (en) type: mteb/mtop_intent config: en split: test revision: ae001d0e6b1228650b7bd1c2c65fb50ad11a8aba metrics: - type: accuracy value: 76.78978568171455 - type: f1 value: 57.50180552858304 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (en) type: mteb/amazon_massive_intent config: en split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 76.24411566913248 - type: f1 value: 74.37851403532832 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (en) type: mteb/amazon_massive_scenario config: en split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 79.94620040349699 - type: f1 value: 80.21293397970435 - task: type: Clustering dataset: name: MTEB MedrxivClusteringP2P type: mteb/medrxiv-clustering-p2p config: default split: test revision: e7a26af6f3ae46b30dde8737f02c07b1505bcc73 metrics: - type: v_measure value: 33.44403096245675 - task: type: Clustering dataset: name: MTEB MedrxivClusteringS2S type: mteb/medrxiv-clustering-s2s config: default split: test revision: 35191c8c0dca72d8ff3efcd72aa802307d469663 metrics: - type: v_measure value: 31.659594631336812 - task: type: Reranking dataset: name: MTEB 
MindSmallReranking type: mteb/mind_small config: default split: test revision: 3bdac13927fdc888b903db93b2ffdbd90b295a69 metrics: - type: map value: 32.53833075108798 - type: mrr value: 33.78840823218308 - task: type: Retrieval dataset: name: MTEB NFCorpus type: nfcorpus config: default split: test revision: None metrics: - type: map_at_1 value: 7.185999999999999 - type: map_at_10 value: 15.193999999999999 - type: map_at_100 value: 19.538 - type: map_at_1000 value: 21.178 - type: map_at_3 value: 11.208 - type: map_at_5 value: 12.745999999999999 - type: mrr_at_1 value: 48.916 - type: mrr_at_10 value: 58.141 - type: mrr_at_100 value: 58.656 - type: mrr_at_1000 value: 58.684999999999995 - type: mrr_at_3 value: 55.521 - type: mrr_at_5 value: 57.239 - type: ndcg_at_1 value: 47.059 - type: ndcg_at_10 value: 38.644 - type: ndcg_at_100 value: 36.272999999999996 - type: ndcg_at_1000 value: 44.996 - type: ndcg_at_3 value: 43.293 - type: ndcg_at_5 value: 40.819 - type: precision_at_1 value: 48.916 - type: precision_at_10 value: 28.607 - type: precision_at_100 value: 9.195 - type: precision_at_1000 value: 2.225 - type: precision_at_3 value: 40.454 - type: precision_at_5 value: 34.985 - type: recall_at_1 value: 7.185999999999999 - type: recall_at_10 value: 19.654 - type: recall_at_100 value: 37.224000000000004 - type: recall_at_1000 value: 68.663 - type: recall_at_3 value: 12.158 - type: recall_at_5 value: 14.674999999999999 - task: type: Retrieval dataset: name: MTEB NQ type: nq config: default split: test revision: None metrics: - type: map_at_1 value: 31.552000000000003 - type: map_at_10 value: 47.75 - type: map_at_100 value: 48.728 - type: map_at_1000 value: 48.754 - type: map_at_3 value: 43.156 - type: map_at_5 value: 45.883 - type: mrr_at_1 value: 35.66 - type: mrr_at_10 value: 50.269 - type: mrr_at_100 value: 50.974 - type: mrr_at_1000 value: 50.991 - type: mrr_at_3 value: 46.519 - type: mrr_at_5 value: 48.764 - type: ndcg_at_1 value: 35.632000000000005 - type: ndcg_at_10 value: 55.786 - type: ndcg_at_100 value: 59.748999999999995 - type: ndcg_at_1000 value: 60.339 - type: ndcg_at_3 value: 47.292 - type: ndcg_at_5 value: 51.766999999999996 - type: precision_at_1 value: 35.632000000000005 - type: precision_at_10 value: 9.267 - type: precision_at_100 value: 1.149 - type: precision_at_1000 value: 0.12 - type: precision_at_3 value: 21.601 - type: precision_at_5 value: 15.539 - type: recall_at_1 value: 31.552000000000003 - type: recall_at_10 value: 77.62400000000001 - type: recall_at_100 value: 94.527 - type: recall_at_1000 value: 98.919 - type: recall_at_3 value: 55.898 - type: recall_at_5 value: 66.121 - task: type: Retrieval dataset: name: MTEB QuoraRetrieval type: quora config: default split: test revision: None metrics: - type: map_at_1 value: 71.414 - type: map_at_10 value: 85.37400000000001 - type: map_at_100 value: 86.01100000000001 - type: map_at_1000 value: 86.027 - type: map_at_3 value: 82.562 - type: map_at_5 value: 84.284 - type: mrr_at_1 value: 82.24000000000001 - type: mrr_at_10 value: 88.225 - type: mrr_at_100 value: 88.324 - type: mrr_at_1000 value: 88.325 - type: mrr_at_3 value: 87.348 - type: mrr_at_5 value: 87.938 - type: ndcg_at_1 value: 82.24000000000001 - type: ndcg_at_10 value: 88.97699999999999 - type: ndcg_at_100 value: 90.16 - type: ndcg_at_1000 value: 90.236 - type: ndcg_at_3 value: 86.371 - type: ndcg_at_5 value: 87.746 - type: precision_at_1 value: 82.24000000000001 - type: precision_at_10 value: 13.481000000000002 - type: precision_at_100 value: 1.534 - type: 
precision_at_1000 value: 0.157 - type: precision_at_3 value: 37.86 - type: precision_at_5 value: 24.738 - type: recall_at_1 value: 71.414 - type: recall_at_10 value: 95.735 - type: recall_at_100 value: 99.696 - type: recall_at_1000 value: 99.979 - type: recall_at_3 value: 88.105 - type: recall_at_5 value: 92.17999999999999 - task: type: Clustering dataset: name: MTEB RedditClustering type: mteb/reddit-clustering config: default split: test revision: 24640382cdbf8abc73003fb0fa6d111a705499eb metrics: - type: v_measure value: 60.22146692057259 - task: type: Clustering dataset: name: MTEB RedditClusteringP2P type: mteb/reddit-clustering-p2p config: default split: test revision: 282350215ef01743dc01b456c7f5241fa8937f16 metrics: - type: v_measure value: 65.29273320614578 - task: type: Retrieval dataset: name: MTEB SCIDOCS type: scidocs config: default split: test revision: None metrics: - type: map_at_1 value: 5.023 - type: map_at_10 value: 14.161000000000001 - type: map_at_100 value: 16.68 - type: map_at_1000 value: 17.072000000000003 - type: map_at_3 value: 9.763 - type: map_at_5 value: 11.977 - type: mrr_at_1 value: 24.8 - type: mrr_at_10 value: 37.602999999999994 - type: mrr_at_100 value: 38.618 - type: mrr_at_1000 value: 38.659 - type: mrr_at_3 value: 34.117 - type: mrr_at_5 value: 36.082 - type: ndcg_at_1 value: 24.8 - type: ndcg_at_10 value: 23.316 - type: ndcg_at_100 value: 32.613 - type: ndcg_at_1000 value: 38.609 - type: ndcg_at_3 value: 21.697 - type: ndcg_at_5 value: 19.241 - type: precision_at_1 value: 24.8 - type: precision_at_10 value: 12.36 - type: precision_at_100 value: 2.593 - type: precision_at_1000 value: 0.402 - type: precision_at_3 value: 20.767 - type: precision_at_5 value: 17.34 - type: recall_at_1 value: 5.023 - type: recall_at_10 value: 25.069999999999997 - type: recall_at_100 value: 52.563 - type: recall_at_1000 value: 81.525 - type: recall_at_3 value: 12.613 - type: recall_at_5 value: 17.583 - task: type: STS dataset: name: MTEB SICK-R type: mteb/sickr-sts config: default split: test revision: a6ea5a8cab320b040a23452cc28066d9beae2cee metrics: - type: cos_sim_pearson value: 87.71506247604255 - type: cos_sim_spearman value: 82.91813463738802 - type: euclidean_pearson value: 85.5154616194479 - type: euclidean_spearman value: 82.91815254466314 - type: manhattan_pearson value: 85.5280917850374 - type: manhattan_spearman value: 82.92276537286398 - task: type: STS dataset: name: MTEB STS12 type: mteb/sts12-sts config: default split: test revision: a0d554a64d88156834ff5ae9920b964011b16384 metrics: - type: cos_sim_pearson value: 87.43772054228462 - type: cos_sim_spearman value: 78.75750601716682 - type: euclidean_pearson value: 85.76074482955764 - type: euclidean_spearman value: 78.75651057223058 - type: manhattan_pearson value: 85.73390291701668 - type: manhattan_spearman value: 78.72699385957797 - task: type: STS dataset: name: MTEB STS13 type: mteb/sts13-sts config: default split: test revision: 7e90230a92c190f1bf69ae9002b8cea547a64cca metrics: - type: cos_sim_pearson value: 89.58144067172472 - type: cos_sim_spearman value: 90.3524512966946 - type: euclidean_pearson value: 89.71365391594237 - type: euclidean_spearman value: 90.35239632843408 - type: manhattan_pearson value: 89.66905421746478 - type: manhattan_spearman value: 90.31508211683513 - task: type: STS dataset: name: MTEB STS14 type: mteb/sts14-sts config: default split: test revision: 6031580fec1f6af667f0bd2da0a551cf4f0b2375 metrics: - type: cos_sim_pearson value: 87.77692637102102 - type: cos_sim_spearman value: 
85.45710562643485 - type: euclidean_pearson value: 87.42456979928723 - type: euclidean_spearman value: 85.45709386240908 - type: manhattan_pearson value: 87.40754529526272 - type: manhattan_spearman value: 85.44834854173303 - task: type: STS dataset: name: MTEB STS15 type: mteb/sts15-sts config: default split: test revision: ae752c7c21bf194d8b67fd573edf7ae58183cbe3 metrics: - type: cos_sim_pearson value: 88.28491331695997 - type: cos_sim_spearman value: 89.62037029566964 - type: euclidean_pearson value: 89.02479391362826 - type: euclidean_spearman value: 89.62036733618466 - type: manhattan_pearson value: 89.00394756040342 - type: manhattan_spearman value: 89.60867744215236 - task: type: STS dataset: name: MTEB STS16 type: mteb/sts16-sts config: default split: test revision: 4d8694f8f0e0100860b497b999b3dbed754a0513 metrics: - type: cos_sim_pearson value: 85.08911381280191 - type: cos_sim_spearman value: 86.5791780765767 - type: euclidean_pearson value: 86.16063473577861 - type: euclidean_spearman value: 86.57917745378766 - type: manhattan_pearson value: 86.13677924604175 - type: manhattan_spearman value: 86.56115615768685 - task: type: STS dataset: name: MTEB STS17 (en-en) type: mteb/sts17-crosslingual-sts config: en-en split: test revision: af5e6fb845001ecf41f4c1e033ce921939a2a68d metrics: - type: cos_sim_pearson value: 89.58029496205235 - type: cos_sim_spearman value: 89.49551253826998 - type: euclidean_pearson value: 90.13714840963748 - type: euclidean_spearman value: 89.49551253826998 - type: manhattan_pearson value: 90.13039633601363 - type: manhattan_spearman value: 89.4513453745516 - task: type: STS dataset: name: MTEB STS22 (en) type: mteb/sts22-crosslingual-sts config: en split: test revision: 6d1ba47164174a496b7fa5d3569dae26a6813b80 metrics: - type: cos_sim_pearson value: 69.01546399666435 - type: cos_sim_spearman value: 69.33824484595624 - type: euclidean_pearson value: 70.76511642998874 - type: euclidean_spearman value: 69.33824484595624 - type: manhattan_pearson value: 70.84320785047453 - type: manhattan_spearman value: 69.54233632223537 - task: type: STS dataset: name: MTEB STSBenchmark type: mteb/stsbenchmark-sts config: default split: test revision: b0fddb56ed78048fa8b90373c8a3cfc37b684831 metrics: - type: cos_sim_pearson value: 87.26389196390119 - type: cos_sim_spearman value: 89.09721478341385 - type: euclidean_pearson value: 88.97208685922517 - type: euclidean_spearman value: 89.09720927308881 - type: manhattan_pearson value: 88.97513670502573 - type: manhattan_spearman value: 89.07647853984004 - task: type: Reranking dataset: name: MTEB SciDocsRR type: mteb/scidocs-reranking config: default split: test revision: d3c5e1fc0b855ab6097bf1cda04dd73947d7caab metrics: - type: map value: 87.53075025771936 - type: mrr value: 96.24327651288436 - task: type: Retrieval dataset: name: MTEB SciFact type: scifact config: default split: test revision: None metrics: - type: map_at_1 value: 60.428000000000004 - type: map_at_10 value: 70.088 - type: map_at_100 value: 70.589 - type: map_at_1000 value: 70.614 - type: map_at_3 value: 67.191 - type: map_at_5 value: 68.515 - type: mrr_at_1 value: 63.333 - type: mrr_at_10 value: 71.13000000000001 - type: mrr_at_100 value: 71.545 - type: mrr_at_1000 value: 71.569 - type: mrr_at_3 value: 68.944 - type: mrr_at_5 value: 70.078 - type: ndcg_at_1 value: 63.333 - type: ndcg_at_10 value: 74.72800000000001 - type: ndcg_at_100 value: 76.64999999999999 - type: ndcg_at_1000 value: 77.176 - type: ndcg_at_3 value: 69.659 - type: ndcg_at_5 value: 71.626 - 
type: precision_at_1 value: 63.333 - type: precision_at_10 value: 10 - type: precision_at_100 value: 1.09 - type: precision_at_1000 value: 0.11299999999999999 - type: precision_at_3 value: 27.111 - type: precision_at_5 value: 17.666999999999998 - type: recall_at_1 value: 60.428000000000004 - type: recall_at_10 value: 87.98899999999999 - type: recall_at_100 value: 96.167 - type: recall_at_1000 value: 100 - type: recall_at_3 value: 74.006 - type: recall_at_5 value: 79.05 - task: type: PairClassification dataset: name: MTEB SprintDuplicateQuestions type: mteb/sprintduplicatequestions-pairclassification config: default split: test revision: d66bd1f72af766a5cc4b0ca5e00c162f89e8cc46 metrics: - type: cos_sim_accuracy value: 99.87326732673267 - type: cos_sim_ap value: 96.81770773701805 - type: cos_sim_f1 value: 93.6318407960199 - type: cos_sim_precision value: 93.16831683168317 - type: cos_sim_recall value: 94.1 - type: dot_accuracy value: 99.87326732673267 - type: dot_ap value: 96.8174218946665 - type: dot_f1 value: 93.6318407960199 - type: dot_precision value: 93.16831683168317 - type: dot_recall value: 94.1 - type: euclidean_accuracy value: 99.87326732673267 - type: euclidean_ap value: 96.81770773701807 - type: euclidean_f1 value: 93.6318407960199 - type: euclidean_precision value: 93.16831683168317 - type: euclidean_recall value: 94.1 - type: manhattan_accuracy value: 99.87227722772278 - type: manhattan_ap value: 96.83164126821747 - type: manhattan_f1 value: 93.54677338669335 - type: manhattan_precision value: 93.5935935935936 - type: manhattan_recall value: 93.5 - type: max_accuracy value: 99.87326732673267 - type: max_ap value: 96.83164126821747 - type: max_f1 value: 93.6318407960199 - task: type: Clustering dataset: name: MTEB StackExchangeClustering type: mteb/stackexchange-clustering config: default split: test revision: 6cbc1f7b2bc0622f2e39d2c77fa502909748c259 metrics: - type: v_measure value: 65.6212042420246 - task: type: Clustering dataset: name: MTEB StackExchangeClusteringP2P type: mteb/stackexchange-clustering-p2p config: default split: test revision: 815ca46b2622cec33ccafc3735d572c266efdb44 metrics: - type: v_measure value: 35.779230635982564 - task: type: Reranking dataset: name: MTEB StackOverflowDupQuestions type: mteb/stackoverflowdupquestions-reranking config: default split: test revision: e185fbe320c72810689fc5848eb6114e1ef5ec69 metrics: - type: map value: 55.217701909036286 - type: mrr value: 56.17658995416349 - task: type: Summarization dataset: name: MTEB SummEval type: mteb/summeval config: default split: test revision: cda12ad7615edc362dbf25a00fdd61d3b1eaf93c metrics: - type: cos_sim_pearson value: 30.954206018888453 - type: cos_sim_spearman value: 32.71062599450096 - type: dot_pearson value: 30.95420929056943 - type: dot_spearman value: 32.71062599450096 - task: type: Retrieval dataset: name: MTEB TRECCOVID type: trec-covid config: default split: test revision: None metrics: - type: map_at_1 value: 0.22699999999999998 - type: map_at_10 value: 1.924 - type: map_at_100 value: 10.525 - type: map_at_1000 value: 24.973 - type: map_at_3 value: 0.638 - type: map_at_5 value: 1.0659999999999998 - type: mrr_at_1 value: 84 - type: mrr_at_10 value: 91.067 - type: mrr_at_100 value: 91.067 - type: mrr_at_1000 value: 91.067 - type: mrr_at_3 value: 90.667 - type: mrr_at_5 value: 91.067 - type: ndcg_at_1 value: 81 - type: ndcg_at_10 value: 75.566 - type: ndcg_at_100 value: 56.387 - type: ndcg_at_1000 value: 49.834 - type: ndcg_at_3 value: 80.899 - type: ndcg_at_5 value: 
80.75099999999999 - type: precision_at_1 value: 84 - type: precision_at_10 value: 79 - type: precision_at_100 value: 57.56 - type: precision_at_1000 value: 21.8 - type: precision_at_3 value: 84.667 - type: precision_at_5 value: 85.2 - type: recall_at_1 value: 0.22699999999999998 - type: recall_at_10 value: 2.136 - type: recall_at_100 value: 13.861 - type: recall_at_1000 value: 46.299 - type: recall_at_3 value: 0.6649999999999999 - type: recall_at_5 value: 1.145 - task: type: Retrieval dataset: name: MTEB Touche2020 type: webis-touche2020 config: default split: test revision: None metrics: - type: map_at_1 value: 2.752 - type: map_at_10 value: 9.951 - type: map_at_100 value: 16.794999999999998 - type: map_at_1000 value: 18.251 - type: map_at_3 value: 5.288 - type: map_at_5 value: 6.954000000000001 - type: mrr_at_1 value: 38.775999999999996 - type: mrr_at_10 value: 50.458000000000006 - type: mrr_at_100 value: 51.324999999999996 - type: mrr_at_1000 value: 51.339999999999996 - type: mrr_at_3 value: 46.939 - type: mrr_at_5 value: 47.857 - type: ndcg_at_1 value: 36.735 - type: ndcg_at_10 value: 25.198999999999998 - type: ndcg_at_100 value: 37.938 - type: ndcg_at_1000 value: 49.145 - type: ndcg_at_3 value: 29.348000000000003 - type: ndcg_at_5 value: 25.804 - type: precision_at_1 value: 38.775999999999996 - type: precision_at_10 value: 22.041 - type: precision_at_100 value: 7.939 - type: precision_at_1000 value: 1.555 - type: precision_at_3 value: 29.932 - type: precision_at_5 value: 24.490000000000002 - type: recall_at_1 value: 2.752 - type: recall_at_10 value: 16.197 - type: recall_at_100 value: 49.166 - type: recall_at_1000 value: 84.18900000000001 - type: recall_at_3 value: 6.438000000000001 - type: recall_at_5 value: 9.093 - task: type: Classification dataset: name: MTEB ToxicConversationsClassification type: mteb/toxic_conversations_50k config: default split: test revision: d7c0de2777da35d6aae2200a62c6e0e5af397c4c metrics: - type: accuracy value: 71.47980000000001 - type: ap value: 14.605194452178754 - type: f1 value: 55.07362924988948 - task: type: Classification dataset: name: MTEB TweetSentimentExtractionClassification type: mteb/tweet_sentiment_extraction config: default split: test revision: d604517c81ca91fe16a244d1248fc021f9ecee7a metrics: - type: accuracy value: 59.708545557441994 - type: f1 value: 60.04751270975683 - task: type: Clustering dataset: name: MTEB TwentyNewsgroupsClustering type: mteb/twentynewsgroups-clustering config: default split: test revision: 6125ec4e24fa026cec8a478383ee943acfbd5449 metrics: - type: v_measure value: 53.21105960597211 - task: type: PairClassification dataset: name: MTEB TwitterSemEval2015 type: mteb/twittersemeval2015-pairclassification config: default split: test revision: 70970daeab8776df92f5ea462b6173c0b46fd2d1 metrics: - type: cos_sim_accuracy value: 87.58419264469214 - type: cos_sim_ap value: 78.55300004517404 - type: cos_sim_f1 value: 71.49673530889001 - type: cos_sim_precision value: 68.20795400095831 - type: cos_sim_recall value: 75.11873350923483 - type: dot_accuracy value: 87.58419264469214 - type: dot_ap value: 78.55297659559511 - type: dot_f1 value: 71.49673530889001 - type: dot_precision value: 68.20795400095831 - type: dot_recall value: 75.11873350923483 - type: euclidean_accuracy value: 87.58419264469214 - type: euclidean_ap value: 78.55300477331477 - type: euclidean_f1 value: 71.49673530889001 - type: euclidean_precision value: 68.20795400095831 - type: euclidean_recall value: 75.11873350923483 - type: manhattan_accuracy value: 
87.5663110210407 - type: manhattan_ap value: 78.49982050876562 - type: manhattan_f1 value: 71.35488740722104 - type: manhattan_precision value: 68.18946862226497 - type: manhattan_recall value: 74.82849604221636 - type: max_accuracy value: 87.58419264469214 - type: max_ap value: 78.55300477331477 - type: max_f1 value: 71.49673530889001 - task: type: PairClassification dataset: name: MTEB TwitterURLCorpus type: mteb/twitterurlcorpus-pairclassification config: default split: test revision: 8b6510b0b1fa4e4c4f879467980e9be563ec1cdf metrics: - type: cos_sim_accuracy value: 89.09069740365584 - type: cos_sim_ap value: 86.22749303724757 - type: cos_sim_f1 value: 78.36863452005407 - type: cos_sim_precision value: 76.49560117302053 - type: cos_sim_recall value: 80.33569448721897 - type: dot_accuracy value: 89.09069740365584 - type: dot_ap value: 86.22750233655673 - type: dot_f1 value: 78.36863452005407 - type: dot_precision value: 76.49560117302053 - type: dot_recall value: 80.33569448721897 - type: euclidean_accuracy value: 89.09069740365584 - type: euclidean_ap value: 86.22749355597347 - type: euclidean_f1 value: 78.36863452005407 - type: euclidean_precision value: 76.49560117302053 - type: euclidean_recall value: 80.33569448721897 - type: manhattan_accuracy value: 89.08293553770326 - type: manhattan_ap value: 86.21913616084771 - type: manhattan_f1 value: 78.3907031479847 - type: manhattan_precision value: 75.0352013517319 - type: manhattan_recall value: 82.06036341238065 - type: max_accuracy value: 89.09069740365584 - type: max_ap value: 86.22750233655673 - type: max_f1 value: 78.3907031479847 --- <br><br> <p align="center"> <svg xmlns="http://www.w3.org/2000/svg" xml:space="preserve" viewBox="0 0 2020 1130" width="150" height="150" aria-hidden="true"><path fill="#e95a0f" d="M398.167 621.992c-1.387-20.362-4.092-40.739-3.851-61.081.355-30.085 6.873-59.139 21.253-85.976 10.487-19.573 24.09-36.822 40.662-51.515 16.394-14.535 34.338-27.046 54.336-36.182 15.224-6.955 31.006-12.609 47.829-14.168 11.809-1.094 23.753-2.514 35.524-1.836 23.033 1.327 45.131 7.255 66.255 16.75 16.24 7.3 31.497 16.165 45.651 26.969 12.997 9.921 24.412 21.37 34.158 34.509 11.733 15.817 20.849 33.037 25.987 52.018 3.468 12.81 6.438 25.928 7.779 39.097 1.722 16.908 1.642 34.003 2.235 51.021.427 12.253.224 24.547 1.117 36.762 1.677 22.93 4.062 45.764 11.8 67.7 5.376 15.239 12.499 29.55 20.846 43.681l-18.282 20.328c-1.536 1.71-2.795 3.665-4.254 5.448l-19.323 23.533c-13.859-5.449-27.446-11.803-41.657-16.086-13.622-4.106-27.793-6.765-41.905-8.775-15.256-2.173-30.701-3.475-46.105-4.049-23.571-.879-47.178-1.056-70.769-1.029-10.858.013-21.723 1.116-32.57 1.926-5.362.4-10.69 1.255-16.464 1.477-2.758-7.675-5.284-14.865-7.367-22.181-3.108-10.92-4.325-22.554-13.16-31.095-2.598-2.512-5.069-5.341-6.883-8.443-6.366-10.884-12.48-21.917-18.571-32.959-4.178-7.573-8.411-14.375-17.016-18.559-10.34-5.028-19.538-12.387-29.311-18.611-3.173-2.021-6.414-4.312-9.952-5.297-5.857-1.63-11.98-2.301-17.991-3.376z"></path><path fill="#ed6d7b" d="M1478.998 758.842c-12.025.042-24.05.085-36.537-.373-.14-8.536.231-16.569.453-24.607.033-1.179-.315-2.986-1.081-3.4-.805-.434-2.376.338-3.518.81-.856.354-1.562 1.069-3.589 2.521-.239-3.308-.664-5.586-.519-7.827.488-7.544 2.212-15.166 1.554-22.589-1.016-11.451 1.397-14.592-12.332-14.419-3.793.048-3.617-2.803-3.332-5.331.499-4.422 1.45-8.803 1.77-13.233.311-4.316.068-8.672.068-12.861-2.554-.464-4.326-.86-6.12-1.098-4.415-.586-6.051-2.251-5.065-7.31 1.224-6.279.848-12.862 
1.276-19.306.19-2.86-.971-4.473-3.794-4.753-4.113-.407-8.242-1.057-12.352-.975-4.663.093-5.192-2.272-4.751-6.012.733-6.229 1.252-12.483 1.875-18.726l1.102-10.495c-5.905-.309-11.146-.805-16.385-.778-3.32.017-5.174-1.4-5.566-4.4-1.172-8.968-2.479-17.944-3.001-26.96-.26-4.484-1.936-5.705-6.005-5.774-9.284-.158-18.563-.594-27.843-.953-7.241-.28-10.137-2.764-11.3-9.899-.746-4.576-2.715-7.801-7.777-8.207-7.739-.621-15.511-.992-23.207-1.961-7.327-.923-14.587-2.415-21.853-3.777-5.021-.941-10.003-2.086-15.003-3.14 4.515-22.952 13.122-44.382 26.284-63.587 18.054-26.344 41.439-47.239 69.102-63.294 15.847-9.197 32.541-16.277 50.376-20.599 16.655-4.036 33.617-5.715 50.622-4.385 33.334 2.606 63.836 13.955 92.415 31.15 15.864 9.545 30.241 20.86 42.269 34.758 8.113 9.374 15.201 19.78 21.718 30.359 10.772 17.484 16.846 36.922 20.611 56.991 1.783 9.503 2.815 19.214 3.318 28.876.758 14.578.755 29.196.65 44.311l-51.545 20.013c-7.779 3.059-15.847 5.376-21.753 12.365-4.73 5.598-10.658 10.316-16.547 14.774-9.9 7.496-18.437 15.988-25.083 26.631-3.333 5.337-7.901 10.381-12.999 14.038-11.355 8.144-17.397 18.973-19.615 32.423l-6.988 41.011z"></path><path fill="#ec663e" d="M318.11 923.047c-.702 17.693-.832 35.433-2.255 53.068-1.699 21.052-6.293 41.512-14.793 61.072-9.001 20.711-21.692 38.693-38.496 53.583-16.077 14.245-34.602 24.163-55.333 30.438-21.691 6.565-43.814 8.127-66.013 6.532-22.771-1.636-43.88-9.318-62.74-22.705-20.223-14.355-35.542-32.917-48.075-54.096-9.588-16.203-16.104-33.55-19.201-52.015-2.339-13.944-2.307-28.011-.403-42.182 2.627-19.545 9.021-37.699 17.963-55.067 11.617-22.564 27.317-41.817 48.382-56.118 15.819-10.74 33.452-17.679 52.444-20.455 8.77-1.282 17.696-1.646 26.568-2.055 11.755-.542 23.534-.562 35.289-1.11 8.545-.399 17.067-1.291 26.193-1.675 1.349 1.77 2.24 3.199 2.835 4.742 4.727 12.261 10.575 23.865 18.636 34.358 7.747 10.084 14.83 20.684 22.699 30.666 3.919 4.972 8.37 9.96 13.609 13.352 7.711 4.994 16.238 8.792 24.617 12.668 5.852 2.707 12.037 4.691 18.074 6.998z"></path><path fill="#ea580e" d="M1285.167 162.995c3.796-29.75 13.825-56.841 32.74-80.577 16.339-20.505 36.013-36.502 59.696-47.614 14.666-6.881 29.971-11.669 46.208-12.749 10.068-.669 20.239-1.582 30.255-.863 16.6 1.191 32.646 5.412 47.9 12.273 19.39 8.722 36.44 20.771 50.582 36.655 15.281 17.162 25.313 37.179 31.49 59.286 5.405 19.343 6.31 39.161 4.705 58.825-2.37 29.045-11.836 55.923-30.451 78.885-10.511 12.965-22.483 24.486-37.181 33.649-5.272-5.613-10.008-11.148-14.539-16.846-5.661-7.118-10.958-14.533-16.78-21.513-4.569-5.478-9.548-10.639-14.624-15.658-3.589-3.549-7.411-6.963-11.551-9.827-5.038-3.485-10.565-6.254-15.798-9.468-8.459-5.195-17.011-9.669-26.988-11.898-12.173-2.72-24.838-4.579-35.622-11.834-1.437-.967-3.433-1.192-5.213-1.542-12.871-2.529-25.454-5.639-36.968-12.471-5.21-3.091-11.564-4.195-17.011-6.965-4.808-2.445-8.775-6.605-13.646-8.851-8.859-4.085-18.114-7.311-27.204-10.896z"></path><path fill="#f8ab00" d="M524.963 311.12c-9.461-5.684-19.513-10.592-28.243-17.236-12.877-9.801-24.031-21.578-32.711-35.412-11.272-17.965-19.605-37.147-21.902-58.403-1.291-11.951-2.434-24.073-1.87-36.034.823-17.452 4.909-34.363 11.581-50.703 8.82-21.603 22.25-39.792 39.568-55.065 18.022-15.894 39.162-26.07 62.351-32.332 19.22-5.19 38.842-6.177 58.37-4.674 23.803 1.831 45.56 10.663 65.062 24.496 17.193 12.195 31.688 27.086 42.894 45.622-11.403 8.296-22.633 16.117-34.092 23.586-17.094 11.142-34.262 22.106-48.036 37.528-8.796 9.848-17.201 20.246-27.131 28.837-16.859 14.585-27.745 33.801-41.054 51.019-11.865 15.349-20.663 33.117-30.354 
50.08-5.303 9.283-9.654 19.11-14.434 28.692z"></path><path fill="#ea5227" d="M1060.11 1122.049c-7.377 1.649-14.683 4.093-22.147 4.763-11.519 1.033-23.166 1.441-34.723 1.054-19.343-.647-38.002-4.7-55.839-12.65-15.078-6.72-28.606-15.471-40.571-26.836-24.013-22.81-42.053-49.217-49.518-81.936-1.446-6.337-1.958-12.958-2.235-19.477-.591-13.926-.219-27.909-1.237-41.795-.916-12.5-3.16-24.904-4.408-37.805 1.555-1.381 3.134-2.074 3.778-3.27 4.729-8.79 12.141-15.159 19.083-22.03 5.879-5.818 10.688-12.76 16.796-18.293 6.993-6.335 11.86-13.596 14.364-22.612l8.542-29.993c8.015 1.785 15.984 3.821 24.057 5.286 8.145 1.478 16.371 2.59 24.602 3.493 8.453.927 16.956 1.408 25.891 2.609 1.119 16.09 1.569 31.667 2.521 47.214.676 11.045 1.396 22.154 3.234 33.043 2.418 14.329 5.708 28.527 9.075 42.674 3.499 14.705 4.028 29.929 10.415 44.188 10.157 22.674 18.29 46.25 28.281 69.004 7.175 16.341 12.491 32.973 15.078 50.615.645 4.4 3.256 8.511 4.963 12.755z"></path><path fill="#ea5330" d="M1060.512 1122.031c-2.109-4.226-4.72-8.337-5.365-12.737-2.587-17.642-7.904-34.274-15.078-50.615-9.991-22.755-18.124-46.33-28.281-69.004-6.387-14.259-6.916-29.482-10.415-44.188-3.366-14.147-6.656-28.346-9.075-42.674-1.838-10.889-2.558-21.999-3.234-33.043-.951-15.547-1.401-31.124-2.068-47.146 8.568-.18 17.146.487 25.704.286l41.868-1.4c.907 3.746 1.245 7.04 1.881 10.276l8.651 42.704c.903 4.108 2.334 8.422 4.696 11.829 7.165 10.338 14.809 20.351 22.456 30.345 4.218 5.512 8.291 11.304 13.361 15.955 8.641 7.927 18.065 14.995 27.071 22.532 12.011 10.052 24.452 19.302 40.151 22.854-1.656 11.102-2.391 22.44-5.172 33.253-4.792 18.637-12.38 36.209-23.412 52.216-13.053 18.94-29.086 34.662-49.627 45.055-10.757 5.443-22.443 9.048-34.111 13.501z"></path><path fill="#f8aa05" d="M1989.106 883.951c5.198 8.794 11.46 17.148 15.337 26.491 5.325 12.833 9.744 26.207 12.873 39.737 2.95 12.757 3.224 25.908 1.987 39.219-1.391 14.973-4.643 29.268-10.349 43.034-5.775 13.932-13.477 26.707-23.149 38.405-14.141 17.104-31.215 30.458-50.807 40.488-14.361 7.352-29.574 12.797-45.741 14.594-10.297 1.144-20.732 2.361-31.031 1.894-24.275-1.1-47.248-7.445-68.132-20.263-6.096-3.741-11.925-7.917-17.731-12.342 5.319-5.579 10.361-10.852 15.694-15.811l37.072-34.009c.975-.892 2.113-1.606 3.08-2.505 6.936-6.448 14.765-12.2 20.553-19.556 8.88-11.285 20.064-19.639 31.144-28.292 4.306-3.363 9.06-6.353 12.673-10.358 5.868-6.504 10.832-13.814 16.422-20.582 6.826-8.264 13.727-16.481 20.943-24.401 4.065-4.461 8.995-8.121 13.249-12.424 14.802-14.975 28.77-30.825 45.913-43.317z"></path><path fill="#ed6876" d="M1256.099 523.419c5.065.642 10.047 1.787 15.068 2.728 7.267 1.362 14.526 2.854 21.853 3.777 7.696.97 15.468 1.34 23.207 1.961 5.062.406 7.031 3.631 7.777 8.207 1.163 7.135 4.059 9.62 11.3 9.899l27.843.953c4.069.069 5.745 1.291 6.005 5.774.522 9.016 1.829 17.992 3.001 26.96.392 3 2.246 4.417 5.566 4.4 5.239-.026 10.48.469 16.385.778l-1.102 10.495-1.875 18.726c-.44 3.74.088 6.105 4.751 6.012 4.11-.082 8.239.568 12.352.975 2.823.28 3.984 1.892 3.794 4.753-.428 6.444-.052 13.028-1.276 19.306-.986 5.059.651 6.724 5.065 7.31 1.793.238 3.566.634 6.12 1.098 0 4.189.243 8.545-.068 12.861-.319 4.43-1.27 8.811-1.77 13.233-.285 2.528-.461 5.379 3.332 5.331 13.729-.173 11.316 2.968 12.332 14.419.658 7.423-1.066 15.045-1.554 22.589-.145 2.241.28 4.519.519 7.827 2.026-1.452 2.733-2.167 3.589-2.521 1.142-.472 2.713-1.244 3.518-.81.767.414 1.114 2.221 1.081 3.4l-.917 24.539c-11.215.82-22.45.899-33.636 1.674l-43.952 
3.436c-1.086-3.01-2.319-5.571-2.296-8.121.084-9.297-4.468-16.583-9.091-24.116-3.872-6.308-8.764-13.052-9.479-19.987-1.071-10.392-5.716-15.936-14.889-18.979-1.097-.364-2.16-.844-3.214-1.327-7.478-3.428-15.548-5.918-19.059-14.735-.904-2.27-3.657-3.775-5.461-5.723-2.437-2.632-4.615-5.525-7.207-7.987-2.648-2.515-5.352-5.346-8.589-6.777-4.799-2.121-10.074-3.185-15.175-4.596l-15.785-4.155c.274-12.896 1.722-25.901.54-38.662-1.647-17.783-3.457-35.526-2.554-53.352.528-10.426 2.539-20.777 3.948-31.574z"></path><path fill="#f6a200" d="M525.146 311.436c4.597-9.898 8.947-19.725 14.251-29.008 9.691-16.963 18.49-34.73 30.354-50.08 13.309-17.218 24.195-36.434 41.054-51.019 9.93-8.591 18.335-18.989 27.131-28.837 13.774-15.422 30.943-26.386 48.036-37.528 11.459-7.469 22.688-15.29 34.243-23.286 11.705 16.744 19.716 35.424 22.534 55.717 2.231 16.066 2.236 32.441 2.753 49.143-4.756 1.62-9.284 2.234-13.259 4.056-6.43 2.948-12.193 7.513-18.774 9.942-19.863 7.331-33.806 22.349-47.926 36.784-7.86 8.035-13.511 18.275-19.886 27.705-4.434 6.558-9.345 13.037-12.358 20.254-4.249 10.177-6.94 21.004-10.296 31.553-12.33.053-24.741 1.027-36.971-.049-20.259-1.783-40.227-5.567-58.755-14.69-.568-.28-1.295-.235-2.132-.658z"></path><path fill="#f7a80d" d="M1989.057 883.598c-17.093 12.845-31.061 28.695-45.863 43.67-4.254 4.304-9.184 7.963-13.249 12.424-7.216 7.92-14.117 16.137-20.943 24.401-5.59 6.768-10.554 14.078-16.422 20.582-3.614 4.005-8.367 6.995-12.673 10.358-11.08 8.653-22.264 17.007-31.144 28.292-5.788 7.356-13.617 13.108-20.553 19.556-.967.899-2.105 1.614-3.08 2.505l-37.072 34.009c-5.333 4.96-10.375 10.232-15.859 15.505-21.401-17.218-37.461-38.439-48.623-63.592 3.503-1.781 7.117-2.604 9.823-4.637 8.696-6.536 20.392-8.406 27.297-17.714.933-1.258 2.646-1.973 4.065-2.828 17.878-10.784 36.338-20.728 53.441-32.624 10.304-7.167 18.637-17.23 27.583-26.261 3.819-3.855 7.436-8.091 10.3-12.681 12.283-19.68 24.43-39.446 40.382-56.471 12.224-13.047 17.258-29.524 22.539-45.927 15.85 4.193 29.819 12.129 42.632 22.08 10.583 8.219 19.782 17.883 27.42 29.351z"></path><path fill="#ef7a72" d="M1479.461 758.907c1.872-13.734 4.268-27.394 6.525-41.076 2.218-13.45 8.26-24.279 19.615-32.423 5.099-3.657 9.667-8.701 12.999-14.038 6.646-10.643 15.183-19.135 25.083-26.631 5.888-4.459 11.817-9.176 16.547-14.774 5.906-6.99 13.974-9.306 21.753-12.365l51.48-19.549c.753 11.848.658 23.787 1.641 35.637 1.771 21.353 4.075 42.672 11.748 62.955.17.449.107.985-.019 2.158-6.945 4.134-13.865 7.337-20.437 11.143-3.935 2.279-7.752 5.096-10.869 8.384-6.011 6.343-11.063 13.624-17.286 19.727-9.096 8.92-12.791 20.684-18.181 31.587-.202.409-.072.984-.096 1.481-8.488-1.72-16.937-3.682-25.476-5.094-9.689-1.602-19.426-3.084-29.201-3.949-15.095-1.335-30.241-2.1-45.828-3.172z"></path><path fill="#e94e3b" d="M957.995 766.838c-20.337-5.467-38.791-14.947-55.703-27.254-8.2-5.967-15.451-13.238-22.958-20.37 2.969-3.504 5.564-6.772 8.598-9.563 7.085-6.518 11.283-14.914 15.8-23.153 4.933-8.996 10.345-17.743 14.966-26.892 2.642-5.231 5.547-11.01 5.691-16.611.12-4.651.194-8.932 2.577-12.742 8.52-13.621 15.483-28.026 18.775-43.704 2.11-10.049 7.888-18.774 7.81-29.825-.064-9.089 4.291-18.215 6.73-27.313 3.212-11.983 7.369-23.797 9.492-35.968 3.202-18.358 5.133-36.945 7.346-55.466l4.879-45.8c6.693.288 13.386.575 20.54 1.365.13 3.458-.41 6.407-.496 9.37l-1.136 42.595c-.597 11.552-2.067 23.058-3.084 34.59l-3.845 44.478c-.939 10.202-1.779 20.432-3.283 30.557-.96 6.464-4.46 12.646-1.136 19.383.348.706-.426 1.894-.448 2.864-.224 9.918-5.99 19.428-2.196 
29.646.103.279-.033.657-.092.983l-8.446 46.205c-1.231 6.469-2.936 12.846-4.364 19.279-1.5 6.757-2.602 13.621-4.456 20.277-3.601 12.93-10.657 25.3-5.627 39.47.368 1.036.234 2.352.017 3.476l-5.949 30.123z"></path><path fill="#ea5043" d="M958.343 767.017c1.645-10.218 3.659-20.253 5.602-30.302.217-1.124.351-2.44-.017-3.476-5.03-14.17 2.026-26.539 5.627-39.47 1.854-6.656 2.956-13.52 4.456-20.277 1.428-6.433 3.133-12.81 4.364-19.279l8.446-46.205c.059-.326.196-.705.092-.983-3.794-10.218 1.972-19.728 2.196-29.646.022-.97.796-2.158.448-2.864-3.324-6.737.176-12.919 1.136-19.383 1.504-10.125 2.344-20.355 3.283-30.557l3.845-44.478c1.017-11.532 2.488-23.038 3.084-34.59.733-14.18.722-28.397 1.136-42.595.086-2.963.626-5.912.956-9.301 5.356-.48 10.714-.527 16.536-.081 2.224 15.098 1.855 29.734 1.625 44.408-.157 10.064 1.439 20.142 1.768 30.23.334 10.235-.035 20.49.116 30.733.084 5.713.789 11.418.861 17.13.054 4.289-.469 8.585-.702 12.879-.072 1.323-.138 2.659-.031 3.975l2.534 34.405-1.707 36.293-1.908 48.69c-.182 8.103.993 16.237.811 24.34-.271 12.076-1.275 24.133-1.787 36.207-.102 2.414-.101 5.283 1.06 7.219 4.327 7.22 4.463 15.215 4.736 23.103.365 10.553.088 21.128.086 31.693-11.44 2.602-22.84.688-34.106-.916-11.486-1.635-22.806-4.434-34.546-6.903z"></path><path fill="#eb5d19" d="M398.091 622.45c6.086.617 12.21 1.288 18.067 2.918 3.539.985 6.779 3.277 9.952 5.297 9.773 6.224 18.971 13.583 29.311 18.611 8.606 4.184 12.839 10.986 17.016 18.559l18.571 32.959c1.814 3.102 4.285 5.931 6.883 8.443 8.835 8.542 10.052 20.175 13.16 31.095 2.082 7.317 4.609 14.507 6.946 22.127-29.472 3.021-58.969 5.582-87.584 15.222-1.185-2.302-1.795-4.362-2.769-6.233-4.398-8.449-6.703-18.174-14.942-24.299-2.511-1.866-5.103-3.814-7.047-6.218-8.358-10.332-17.028-20.276-28.772-26.973 4.423-11.478 9.299-22.806 13.151-34.473 4.406-13.348 6.724-27.18 6.998-41.313.098-5.093.643-10.176 1.06-15.722z"></path><path fill="#e94c32" d="M981.557 392.109c-1.172 15.337-2.617 30.625-4.438 45.869-2.213 18.521-4.144 37.108-7.346 55.466-2.123 12.171-6.28 23.985-9.492 35.968-2.439 9.098-6.794 18.224-6.73 27.313.078 11.051-5.7 19.776-7.81 29.825-3.292 15.677-10.255 30.082-18.775 43.704-2.383 3.81-2.458 8.091-2.577 12.742-.144 5.6-3.049 11.38-5.691 16.611-4.621 9.149-10.033 17.896-14.966 26.892-4.517 8.239-8.715 16.635-15.8 23.153-3.034 2.791-5.629 6.06-8.735 9.255-12.197-10.595-21.071-23.644-29.301-37.24-7.608-12.569-13.282-25.962-17.637-40.37 13.303-6.889 25.873-13.878 35.311-25.315.717-.869 1.934-1.312 2.71-2.147 5.025-5.405 10.515-10.481 14.854-16.397 6.141-8.374 10.861-17.813 17.206-26.008 8.22-10.618 13.657-22.643 20.024-34.466 4.448-.626 6.729-3.21 8.114-6.89 1.455-3.866 2.644-7.895 4.609-11.492 4.397-8.05 9.641-15.659 13.708-23.86 3.354-6.761 5.511-14.116 8.203-21.206 5.727-15.082 7.277-31.248 12.521-46.578 3.704-10.828 3.138-23.116 4.478-34.753l7.56-.073z"></path><path fill="#f7a617" d="M1918.661 831.99c-4.937 16.58-9.971 33.057-22.196 46.104-15.952 17.025-28.099 36.791-40.382 56.471-2.864 4.59-6.481 8.825-10.3 12.681-8.947 9.031-17.279 19.094-27.583 26.261-17.103 11.896-35.564 21.84-53.441 32.624-1.419.856-3.132 1.571-4.065 2.828-6.904 9.308-18.6 11.178-27.297 17.714-2.705 2.033-6.319 2.856-9.874 4.281-3.413-9.821-6.916-19.583-9.36-29.602-1.533-6.284-1.474-12.957-1.665-19.913 1.913-.78 3.374-1.057 4.81-1.431 15.822-4.121 31.491-8.029 43.818-20.323 9.452-9.426 20.371-17.372 30.534-26.097 6.146-5.277 13.024-10.052 17.954-16.326 14.812-18.848 28.876-38.285 43.112-57.581 2.624-3.557 5.506-7.264 6.83-11.367 2.681-8.311 4.375-16.94 6.476-25.438 
17.89.279 35.333 3.179 52.629 9.113z"></path><path fill="#ea553a" d="M1172.91 977.582c-15.775-3.127-28.215-12.377-40.227-22.43-9.005-7.537-18.43-14.605-27.071-22.532-5.07-4.651-9.143-10.443-13.361-15.955-7.647-9.994-15.291-20.007-22.456-30.345-2.361-3.407-3.792-7.72-4.696-11.829-3.119-14.183-5.848-28.453-8.651-42.704-.636-3.236-.974-6.53-1.452-10.209 15.234-2.19 30.471-3.969 46.408-5.622 2.692 5.705 4.882 11.222 6.63 16.876 2.9 9.381 7.776 17.194 15.035 24.049 7.056 6.662 13.305 14.311 19.146 22.099 9.509 12.677 23.01 19.061 36.907 25.054-1.048 7.441-2.425 14.854-3.066 22.33-.956 11.162-1.393 22.369-2.052 33.557l-1.096 17.661z"></path><path fill="#ea5453" d="M1163.123 704.036c-4.005 5.116-7.685 10.531-12.075 15.293-12.842 13.933-27.653 25.447-44.902 34.538-3.166-5.708-5.656-11.287-8.189-17.251-3.321-12.857-6.259-25.431-9.963-37.775-4.6-15.329-10.6-30.188-11.349-46.562-.314-6.871-1.275-14.287-7.114-19.644-1.047-.961-1.292-3.053-1.465-4.67l-4.092-39.927c-.554-5.245-.383-10.829-2.21-15.623-3.622-9.503-4.546-19.253-4.688-29.163-.088-6.111 1.068-12.256.782-18.344-.67-14.281-1.76-28.546-2.9-42.8-.657-8.222-1.951-16.395-2.564-24.62-.458-6.137-.285-12.322-.104-18.21.959 5.831 1.076 11.525 2.429 16.909 2.007 7.986 5.225 15.664 7.324 23.632 3.222 12.23 1.547 25.219 6.728 37.355 4.311 10.099 6.389 21.136 9.732 31.669 2.228 7.02 6.167 13.722 7.121 20.863 1.119 8.376 6.1 13.974 10.376 20.716l2.026 10.576c1.711 9.216 3.149 18.283 8.494 26.599 6.393 9.946 11.348 20.815 16.943 31.276 4.021 7.519 6.199 16.075 12.925 22.065l24.462 22.26c.556.503 1.507.571 2.274.841z"></path><path fill="#ea5b15" d="M1285.092 163.432c9.165 3.148 18.419 6.374 27.279 10.459 4.871 2.246 8.838 6.406 13.646 8.851 5.446 2.77 11.801 3.874 17.011 6.965 11.514 6.831 24.097 9.942 36.968 12.471 1.78.35 3.777.576 5.213 1.542 10.784 7.255 23.448 9.114 35.622 11.834 9.977 2.23 18.529 6.703 26.988 11.898 5.233 3.214 10.76 5.983 15.798 9.468 4.14 2.864 7.962 6.279 11.551 9.827 5.076 5.02 10.056 10.181 14.624 15.658 5.822 6.98 11.119 14.395 16.78 21.513 4.531 5.698 9.267 11.233 14.222 16.987-10.005 5.806-20.07 12.004-30.719 16.943-7.694 3.569-16.163 5.464-24.688 7.669-2.878-7.088-5.352-13.741-7.833-20.392-.802-2.15-1.244-4.55-2.498-6.396-4.548-6.7-9.712-12.999-14.011-19.847-6.672-10.627-15.34-18.93-26.063-25.376-9.357-5.625-18.367-11.824-27.644-17.587-6.436-3.997-12.902-8.006-19.659-11.405-5.123-2.577-11.107-3.536-16.046-6.37-17.187-9.863-35.13-17.887-54.031-23.767-4.403-1.37-8.953-2.267-13.436-3.382l.926-27.565z"></path><path fill="#ea504b" d="M1098 737l7.789 16.893c-15.04 9.272-31.679 15.004-49.184 17.995-9.464 1.617-19.122 2.097-29.151 3.019-.457-10.636-.18-21.211-.544-31.764-.273-7.888-.409-15.883-4.736-23.103-1.16-1.936-1.162-4.805-1.06-7.219l1.787-36.207c.182-8.103-.993-16.237-.811-24.34.365-16.236 1.253-32.461 1.908-48.69.484-12 .942-24.001 1.98-36.069 5.57 10.19 10.632 20.42 15.528 30.728 1.122 2.362 2.587 5.09 2.339 7.488-1.536 14.819 5.881 26.839 12.962 38.33 10.008 16.241 16.417 33.54 20.331 51.964 2.285 10.756 4.729 21.394 11.958 30.165L1098 737z"></path><path fill="#f6a320" d="M1865.78 822.529c-1.849 8.846-3.544 17.475-6.224 25.786-1.323 4.102-4.206 7.81-6.83 11.367l-43.112 57.581c-4.93 6.273-11.808 11.049-17.954 16.326-10.162 8.725-21.082 16.671-30.534 26.097-12.327 12.294-27.997 16.202-43.818 20.323-1.436.374-2.897.651-4.744.986-1.107-17.032-1.816-34.076-2.079-51.556 1.265-.535 2.183-.428 2.888-.766 10.596-5.072 20.8-11.059 32.586-13.273 1.69-.317 3.307-1.558 4.732-2.662l26.908-21.114c4.992-4.003 11.214-7.393 14.381-12.585 
11.286-18.5 22.363-37.263 27.027-58.87l36.046 1.811c3.487.165 6.983.14 10.727.549z"></path><path fill="#ec6333" d="M318.448 922.814c-6.374-2.074-12.56-4.058-18.412-6.765-8.379-3.876-16.906-7.675-24.617-12.668-5.239-3.392-9.69-8.381-13.609-13.352-7.87-9.983-14.953-20.582-22.699-30.666-8.061-10.493-13.909-22.097-18.636-34.358-.595-1.543-1.486-2.972-2.382-4.783 6.84-1.598 13.797-3.023 20.807-4.106 18.852-2.912 36.433-9.493 53.737-17.819.697.888.889 1.555 1.292 2.051l17.921 21.896c4.14 4.939 8.06 10.191 12.862 14.412 5.67 4.984 12.185 9.007 18.334 13.447-8.937 16.282-16.422 33.178-20.696 51.31-1.638 6.951-2.402 14.107-3.903 21.403z"></path><path fill="#f49700" d="M623.467 326.903c2.893-10.618 5.584-21.446 9.833-31.623 3.013-7.217 7.924-13.696 12.358-20.254 6.375-9.43 12.026-19.67 19.886-27.705 14.12-14.434 28.063-29.453 47.926-36.784 6.581-2.429 12.344-6.994 18.774-9.942 3.975-1.822 8.503-2.436 13.186-3.592 1.947 18.557 3.248 37.15 8.307 55.686-15.453 7.931-28.853 18.092-40.46 29.996-10.417 10.683-19.109 23.111-28.013 35.175-3.238 4.388-4.888 9.948-7.262 14.973-17.803-3.987-35.767-6.498-54.535-5.931z"></path><path fill="#ea544c" d="M1097.956 736.615c-2.925-3.218-5.893-6.822-8.862-10.425-7.229-8.771-9.672-19.409-11.958-30.165-3.914-18.424-10.323-35.722-20.331-51.964-7.081-11.491-14.498-23.511-12.962-38.33.249-2.398-1.217-5.126-2.339-7.488l-15.232-31.019-3.103-34.338c-.107-1.316-.041-2.653.031-3.975.233-4.294.756-8.59.702-12.879-.072-5.713-.776-11.417-.861-17.13l-.116-30.733c-.329-10.088-1.926-20.166-1.768-30.23.23-14.674.599-29.31-1.162-44.341 9.369-.803 18.741-1.179 28.558-1.074 1.446 15.814 2.446 31.146 3.446 46.478.108 6.163-.064 12.348.393 18.485.613 8.225 1.907 16.397 2.564 24.62l2.9 42.8c.286 6.088-.869 12.234-.782 18.344.142 9.91 1.066 19.661 4.688 29.163 1.827 4.794 1.657 10.377 2.21 15.623l4.092 39.927c.172 1.617.417 3.71 1.465 4.67 5.839 5.357 6.8 12.773 7.114 19.644.749 16.374 6.749 31.233 11.349 46.562 3.704 12.344 6.642 24.918 9.963 37.775z"></path><path fill="#ec5c61" d="M1204.835 568.008c1.254 25.351-1.675 50.16-10.168 74.61-8.598-4.883-18.177-8.709-24.354-15.59-7.44-8.289-13.929-17.442-21.675-25.711-8.498-9.072-16.731-18.928-21.084-31.113-.54-1.513-1.691-2.807-2.594-4.564-4.605-9.247-7.706-18.544-7.96-29.09-.835-7.149-1.214-13.944-2.609-20.523-2.215-10.454-5.626-20.496-7.101-31.302-2.513-18.419-7.207-36.512-5.347-55.352.24-2.43-.17-4.949-.477-7.402l-4.468-34.792c2.723-.379 5.446-.757 8.585-.667 1.749 8.781 2.952 17.116 4.448 25.399 1.813 10.037 3.64 20.084 5.934 30.017 1.036 4.482 3.953 8.573 4.73 13.064 1.794 10.377 4.73 20.253 9.272 29.771 2.914 6.105 4.761 12.711 7.496 18.912 2.865 6.496 6.264 12.755 9.35 19.156 3.764 7.805 7.667 15.013 16.1 19.441 7.527 3.952 13.713 10.376 20.983 14.924 6.636 4.152 13.932 7.25 20.937 10.813z"></path><path fill="#ed676f" d="M1140.75 379.231c18.38-4.858 36.222-11.21 53.979-18.971 3.222 3.368 5.693 6.744 8.719 9.512 2.333 2.134 5.451 5.07 8.067 4.923 7.623-.429 12.363 2.688 17.309 8.215 5.531 6.18 12.744 10.854 19.224 16.184-5.121 7.193-10.461 14.241-15.323 21.606-13.691 20.739-22.99 43.255-26.782 67.926-.543 3.536-1.281 7.043-2.366 10.925-14.258-6.419-26.411-14.959-32.731-29.803-1.087-2.553-2.596-4.93-3.969-7.355-1.694-2.993-3.569-5.89-5.143-8.943-1.578-3.062-2.922-6.249-4.295-9.413-1.57-3.621-3.505-7.163-4.47-10.946-1.257-4.93-.636-10.572-2.725-15.013-5.831-12.397-7.467-25.628-9.497-38.847z"></path><path fill="#ed656e" d="M1254.103 647.439c5.325.947 10.603 2.272 15.847 3.722 5.101 1.41 10.376 2.475 15.175 4.596 3.237 1.431 5.942 4.262 8.589 
6.777 2.592 2.462 4.77 5.355 7.207 7.987 1.804 1.948 4.557 3.453 5.461 5.723 3.51 8.817 11.581 11.307 19.059 14.735 1.053.483 2.116.963 3.214 1.327 9.172 3.043 13.818 8.587 14.889 18.979.715 6.935 5.607 13.679 9.479 19.987 4.623 7.533 9.175 14.819 9.091 24.116-.023 2.55 1.21 5.111 1.874 8.055-19.861 2.555-39.795 4.296-59.597 9.09l-11.596-23.203c-1.107-2.169-2.526-4.353-4.307-5.975-7.349-6.694-14.863-13.209-22.373-19.723l-17.313-14.669c-2.776-2.245-5.935-4.017-8.92-6.003l11.609-38.185c1.508-5.453 1.739-11.258 2.613-17.336z"></path><path fill="#ec6168" d="M1140.315 379.223c2.464 13.227 4.101 26.459 9.931 38.856 2.089 4.441 1.468 10.083 2.725 15.013.965 3.783 2.9 7.325 4.47 10.946 1.372 3.164 2.716 6.351 4.295 9.413 1.574 3.053 3.449 5.95 5.143 8.943 1.372 2.425 2.882 4.803 3.969 7.355 6.319 14.844 18.473 23.384 32.641 30.212.067 5.121-.501 10.201-.435 15.271l.985 38.117c.151 4.586.616 9.162.868 14.201-7.075-3.104-14.371-6.202-21.007-10.354-7.269-4.548-13.456-10.972-20.983-14.924-8.434-4.428-12.337-11.637-16.1-19.441-3.087-6.401-6.485-12.66-9.35-19.156-2.735-6.201-4.583-12.807-7.496-18.912-4.542-9.518-7.477-19.394-9.272-29.771-.777-4.491-3.694-8.581-4.73-13.064-2.294-9.933-4.121-19.98-5.934-30.017-1.496-8.283-2.699-16.618-4.036-25.335 10.349-2.461 20.704-4.511 31.054-6.582.957-.191 1.887-.515 3.264-.769z"></path><path fill="#e94c28" d="M922 537c-6.003 11.784-11.44 23.81-19.66 34.428-6.345 8.196-11.065 17.635-17.206 26.008-4.339 5.916-9.828 10.992-14.854 16.397-.776.835-1.993 1.279-2.71 2.147-9.439 11.437-22.008 18.427-35.357 24.929-4.219-10.885-6.942-22.155-7.205-33.905l-.514-49.542c7.441-2.893 14.452-5.197 21.334-7.841 1.749-.672 3.101-2.401 4.604-3.681 6.749-5.745 12.845-12.627 20.407-16.944 7.719-4.406 14.391-9.101 18.741-16.889.626-1.122 1.689-2.077 2.729-2.877 7.197-5.533 12.583-12.51 16.906-20.439.68-1.247 2.495-1.876 4.105-2.651 2.835 1.408 5.267 2.892 7.884 3.892 3.904 1.491 4.392 3.922 2.833 7.439-1.47 3.318-2.668 6.756-4.069 10.106-1.247 2.981-.435 5.242 2.413 6.544 2.805 1.282 3.125 3.14 1.813 5.601l-6.907 12.799L922 537z"></path><path fill="#eb5659" d="M1124.995 566c.868 1.396 2.018 2.691 2.559 4.203 4.353 12.185 12.586 22.041 21.084 31.113 7.746 8.269 14.235 17.422 21.675 25.711 6.176 6.881 15.756 10.707 24.174 15.932-6.073 22.316-16.675 42.446-31.058 60.937-1.074-.131-2.025-.199-2.581-.702l-24.462-22.26c-6.726-5.99-8.904-14.546-12.925-22.065-5.594-10.461-10.55-21.33-16.943-31.276-5.345-8.315-6.783-17.383-8.494-26.599-.63-3.394-1.348-6.772-1.738-10.848-.371-6.313-1.029-11.934-1.745-18.052l6.34 4.04 1.288-.675-2.143-15.385 9.454 1.208v-8.545L1124.995 566z"></path><path fill="#f5a02d" d="M1818.568 820.096c-4.224 21.679-15.302 40.442-26.587 58.942-3.167 5.192-9.389 8.582-14.381 12.585l-26.908 21.114c-1.425 1.104-3.042 2.345-4.732 2.662-11.786 2.214-21.99 8.201-32.586 13.273-.705.338-1.624.231-2.824.334a824.35 824.35 0 0 1-8.262-42.708c4.646-2.14 9.353-3.139 13.269-5.47 5.582-3.323 11.318-6.942 15.671-11.652 7.949-8.6 14.423-18.572 22.456-27.081 8.539-9.046 13.867-19.641 18.325-30.922l46.559 8.922z"></path><path fill="#eb5a57" d="M1124.96 565.639c-5.086-4.017-10.208-8.395-15.478-12.901v8.545l-9.454-1.208 2.143 15.385-1.288.675-6.34-4.04c.716 6.118 1.375 11.74 1.745 17.633-4.564-6.051-9.544-11.649-10.663-20.025-.954-7.141-4.892-13.843-7.121-20.863-3.344-10.533-5.421-21.57-9.732-31.669-5.181-12.135-3.506-25.125-6.728-37.355-2.099-7.968-5.317-15.646-7.324-23.632-1.353-5.384-1.47-11.078-2.429-16.909l-3.294-46.689a278.63 278.63 0 0 1 27.57-2.084c2.114 12.378 3.647 24.309 5.479 36.195 1.25 
8.111 2.832 16.175 4.422 24.23 1.402 7.103 2.991 14.169 4.55 21.241 1.478 6.706.273 14.002 4.6 20.088 5.401 7.597 7.176 16.518 9.467 25.337 1.953 7.515 5.804 14.253 11.917 19.406.254 10.095 3.355 19.392 7.96 28.639z"></path><path fill="#ea541c" d="M911.651 810.999c-2.511 10.165-5.419 20.146-8.2 30.162-2.503 9.015-7.37 16.277-14.364 22.612-6.108 5.533-10.917 12.475-16.796 18.293-6.942 6.871-14.354 13.24-19.083 22.03-.644 1.196-2.222 1.889-3.705 2.857-2.39-7.921-4.101-15.991-6.566-23.823-5.451-17.323-12.404-33.976-23.414-48.835l21.627-21.095c3.182-3.29 5.532-7.382 8.295-11.083l10.663-14.163c9.528 4.78 18.925 9.848 28.625 14.247 7.324 3.321 15.036 5.785 22.917 8.799z"></path><path fill="#eb5d19" d="M1284.092 191.421c4.557.69 9.107 1.587 13.51 2.957 18.901 5.881 36.844 13.904 54.031 23.767 4.938 2.834 10.923 3.792 16.046 6.37 6.757 3.399 13.224 7.408 19.659 11.405l27.644 17.587c10.723 6.446 19.392 14.748 26.063 25.376 4.299 6.848 9.463 13.147 14.011 19.847 1.254 1.847 1.696 4.246 2.498 6.396l7.441 20.332c-11.685 1.754-23.379 3.133-35.533 4.037-.737-2.093-.995-3.716-1.294-5.33-3.157-17.057-14.048-30.161-23.034-44.146-3.027-4.71-7.786-8.529-12.334-11.993-9.346-7.116-19.004-13.834-28.688-20.491-6.653-4.573-13.311-9.251-20.431-13.002-8.048-4.24-16.479-7.85-24.989-11.091-11.722-4.465-23.673-8.328-35.527-12.449l.927-19.572z"></path><path fill="#eb5e24" d="M1283.09 211.415c11.928 3.699 23.88 7.562 35.602 12.027 8.509 3.241 16.941 6.852 24.989 11.091 7.12 3.751 13.778 8.429 20.431 13.002 9.684 6.657 19.342 13.375 28.688 20.491 4.548 3.463 9.307 7.283 12.334 11.993 8.986 13.985 19.877 27.089 23.034 44.146.299 1.615.557 3.237.836 5.263-13.373-.216-26.749-.839-40.564-1.923-2.935-9.681-4.597-18.92-12.286-26.152-15.577-14.651-30.4-30.102-45.564-45.193-.686-.683-1.626-1.156-2.516-1.584l-47.187-22.615 2.203-20.546z"></path><path fill="#e9511f" d="M913 486.001c-1.29.915-3.105 1.543-3.785 2.791-4.323 7.929-9.709 14.906-16.906 20.439-1.04.8-2.103 1.755-2.729 2.877-4.35 7.788-11.022 12.482-18.741 16.889-7.562 4.317-13.658 11.199-20.407 16.944-1.503 1.28-2.856 3.009-4.604 3.681-6.881 2.643-13.893 4.948-21.262 7.377-.128-11.151.202-22.302.378-33.454.03-1.892-.6-3.795-.456-6.12 13.727-1.755 23.588-9.527 33.278-17.663 2.784-2.337 6.074-4.161 8.529-6.784l29.057-31.86c1.545-1.71 3.418-3.401 4.221-5.459 5.665-14.509 11.49-28.977 16.436-43.736 2.817-8.407 4.074-17.338 6.033-26.032 5.039.714 10.078 1.427 15.536 2.629-.909 8.969-2.31 17.438-3.546 25.931-2.41 16.551-5.84 32.839-11.991 48.461L913 486.001z"></path><path fill="#ea5741" d="M1179.451 903.828c-14.224-5.787-27.726-12.171-37.235-24.849-5.841-7.787-12.09-15.436-19.146-22.099-7.259-6.854-12.136-14.667-15.035-24.049-1.748-5.654-3.938-11.171-6.254-17.033 15.099-4.009 30.213-8.629 44.958-15.533l28.367 36.36c6.09 8.015 13.124 14.75 22.72 18.375-7.404 14.472-13.599 29.412-17.48 45.244-.271 1.106-.382 2.25-.895 3.583z"></path><path fill="#ea522a" d="M913.32 486.141c2.693-7.837 5.694-15.539 8.722-23.231 6.151-15.622 9.581-31.91 11.991-48.461l3.963-25.861c7.582.317 15.168 1.031 22.748 1.797 4.171.421 8.333.928 12.877 1.596-.963 11.836-.398 24.125-4.102 34.953-5.244 15.33-6.794 31.496-12.521 46.578-2.692 7.09-4.849 14.445-8.203 21.206-4.068 8.201-9.311 15.81-13.708 23.86-1.965 3.597-3.154 7.627-4.609 11.492-1.385 3.68-3.666 6.265-8.114 6.89-1.994-1.511-3.624-3.059-5.077-4.44l6.907-12.799c1.313-2.461.993-4.318-1.813-5.601-2.849-1.302-3.66-3.563-2.413-6.544 1.401-3.35 2.599-6.788 4.069-10.106 1.558-3.517 1.071-5.948-2.833-7.439-2.617-1-5.049-2.484-7.884-3.892z"></path><path 
fill="#eb5e24" d="M376.574 714.118c12.053 6.538 20.723 16.481 29.081 26.814 1.945 2.404 4.537 4.352 7.047 6.218 8.24 6.125 10.544 15.85 14.942 24.299.974 1.871 1.584 3.931 2.376 6.29-7.145 3.719-14.633 6.501-21.386 10.517-9.606 5.713-18.673 12.334-28.425 18.399-3.407-3.73-6.231-7.409-9.335-10.834l-30.989-33.862c11.858-11.593 22.368-24.28 31.055-38.431 1.86-3.031 3.553-6.164 5.632-9.409z"></path><path fill="#e95514" d="M859.962 787.636c-3.409 5.037-6.981 9.745-10.516 14.481-2.763 3.701-5.113 7.792-8.295 11.083-6.885 7.118-14.186 13.834-21.65 20.755-13.222-17.677-29.417-31.711-48.178-42.878-.969-.576-2.068-.934-3.27-1.709 6.28-8.159 12.733-15.993 19.16-23.849 1.459-1.783 2.718-3.738 4.254-5.448l18.336-19.969c4.909 5.34 9.619 10.738 14.081 16.333 9.72 12.19 21.813 21.566 34.847 29.867.411.262.725.674 1.231 1.334z"></path><path fill="#eb5f2d" d="M339.582 762.088l31.293 33.733c3.104 3.425 5.928 7.104 9.024 10.979-12.885 11.619-24.548 24.139-33.899 38.704-.872 1.359-1.56 2.837-2.644 4.428-6.459-4.271-12.974-8.294-18.644-13.278-4.802-4.221-8.722-9.473-12.862-14.412l-17.921-21.896c-.403-.496-.595-1.163-.926-2.105 16.738-10.504 32.58-21.87 46.578-36.154z"></path><path fill="#f28d00" d="M678.388 332.912c1.989-5.104 3.638-10.664 6.876-15.051 8.903-12.064 17.596-24.492 28.013-35.175 11.607-11.904 25.007-22.064 40.507-29.592 4.873 11.636 9.419 23.412 13.67 35.592-5.759 4.084-11.517 7.403-16.594 11.553-4.413 3.607-8.124 8.092-12.023 12.301-5.346 5.772-10.82 11.454-15.782 17.547-3.929 4.824-7.17 10.208-10.716 15.344l-33.95-12.518z"></path><path fill="#f08369" d="M1580.181 771.427c-.191-.803-.322-1.377-.119-1.786 5.389-10.903 9.084-22.666 18.181-31.587 6.223-6.103 11.276-13.385 17.286-19.727 3.117-3.289 6.933-6.105 10.869-8.384 6.572-3.806 13.492-7.009 20.461-10.752 1.773 3.23 3.236 6.803 4.951 10.251l12.234 24.993c-1.367 1.966-2.596 3.293-3.935 4.499-7.845 7.07-16.315 13.564-23.407 21.32-6.971 7.623-12.552 16.517-18.743 24.854l-37.777-13.68z"></path><path fill="#f18b5e" d="M1618.142 785.4c6.007-8.63 11.588-17.524 18.559-25.147 7.092-7.755 15.562-14.249 23.407-21.32 1.338-1.206 2.568-2.534 3.997-4.162l28.996 33.733c1.896 2.205 4.424 3.867 6.66 6.394-6.471 7.492-12.967 14.346-19.403 21.255l-18.407 19.953c-12.958-12.409-27.485-22.567-43.809-30.706z"></path><path fill="#f49c3a" d="M1771.617 811.1c-4.066 11.354-9.394 21.949-17.933 30.995-8.032 8.509-14.507 18.481-22.456 27.081-4.353 4.71-10.089 8.329-15.671 11.652-3.915 2.331-8.623 3.331-13.318 5.069-4.298-9.927-8.255-19.998-12.1-30.743 4.741-4.381 9.924-7.582 13.882-11.904 7.345-8.021 14.094-16.603 20.864-25.131 4.897-6.168 9.428-12.626 14.123-18.955l32.61 11.936z"></path><path fill="#f08000" d="M712.601 345.675c3.283-5.381 6.524-10.765 10.453-15.589 4.962-6.093 10.435-11.774 15.782-17.547 3.899-4.21 7.61-8.695 12.023-12.301 5.078-4.15 10.836-7.469 16.636-11.19a934.12 934.12 0 0 1 23.286 35.848c-4.873 6.234-9.676 11.895-14.63 17.421l-25.195 27.801c-11.713-9.615-24.433-17.645-38.355-24.443z"></path><path fill="#ed6e04" d="M751.11 370.42c8.249-9.565 16.693-18.791 25.041-28.103 4.954-5.526 9.757-11.187 14.765-17.106 7.129 6.226 13.892 13.041 21.189 19.225 5.389 4.567 11.475 8.312 17.53 12.92-5.51 7.863-10.622 15.919-17.254 22.427-8.881 8.716-18.938 16.233-28.49 24.264-5.703-6.587-11.146-13.427-17.193-19.682-4.758-4.921-10.261-9.121-15.587-13.944z"></path><path fill="#ea541c" d="M921.823 385.544c-1.739 9.04-2.995 17.971-5.813 26.378-4.946 14.759-10.771 29.227-16.436 43.736-.804 2.058-2.676 3.749-4.221 5.459l-29.057 31.86c-2.455 2.623-5.745 4.447-8.529 6.784-9.69 
8.135-19.551 15.908-33.208 17.237-1.773-9.728-3.147-19.457-4.091-29.6l36.13-16.763c.581-.267 1.046-.812 1.525-1.269 8.033-7.688 16.258-15.19 24.011-23.152 4.35-4.467 9.202-9.144 11.588-14.69 6.638-15.425 15.047-30.299 17.274-47.358 3.536.344 7.072.688 10.829 1.377z"></path><path fill="#f3944d" d="M1738.688 798.998c-4.375 6.495-8.906 12.953-13.803 19.121-6.771 8.528-13.519 17.11-20.864 25.131-3.958 4.322-9.141 7.523-13.925 11.54-8.036-13.464-16.465-26.844-27.999-38.387 5.988-6.951 12.094-13.629 18.261-20.25l19.547-20.95 38.783 23.794z"></path><path fill="#ec6168" d="M1239.583 703.142c3.282 1.805 6.441 3.576 9.217 5.821 5.88 4.755 11.599 9.713 17.313 14.669l22.373 19.723c1.781 1.622 3.2 3.806 4.307 5.975 3.843 7.532 7.477 15.171 11.194 23.136-10.764 4.67-21.532 8.973-32.69 12.982l-22.733-27.366c-2.003-2.416-4.096-4.758-6.194-7.093-3.539-3.94-6.927-8.044-10.74-11.701-2.57-2.465-5.762-4.283-8.675-6.39l16.627-29.755z"></path><path fill="#ec663e" d="M1351.006 332.839l-28.499 10.33c-.294.107-.533.367-1.194.264-11.067-19.018-27.026-32.559-44.225-44.855-4.267-3.051-8.753-5.796-13.138-8.682l9.505-24.505c10.055 4.069 19.821 8.227 29.211 13.108 3.998 2.078 7.299 5.565 10.753 8.598 3.077 2.701 5.743 5.891 8.926 8.447 4.116 3.304 9.787 5.345 12.62 9.432 6.083 8.777 10.778 18.517 16.041 27.863z"></path><path fill="#eb5e5b" d="M1222.647 733.051c3.223 1.954 6.415 3.771 8.985 6.237 3.813 3.658 7.201 7.761 10.74 11.701l6.194 7.093 22.384 27.409c-13.056 6.836-25.309 14.613-36.736 24.161l-39.323-44.7 24.494-27.846c1.072-1.224 1.974-2.598 3.264-4.056z"></path><path fill="#ea580e" d="M876.001 376.171c5.874 1.347 11.748 2.694 17.812 4.789-.81 5.265-2.687 9.791-2.639 14.296.124 11.469-4.458 20.383-12.73 27.863-2.075 1.877-3.659 4.286-5.668 6.248l-22.808 21.967c-.442.422-1.212.488-1.813.757l-23.113 10.389-9.875 4.514c-2.305-6.09-4.609-12.181-6.614-18.676 7.64-4.837 15.567-8.54 22.18-13.873 9.697-7.821 18.931-16.361 27.443-25.455 5.613-5.998 12.679-11.331 14.201-20.475.699-4.2 2.384-8.235 3.623-12.345z"></path><path fill="#e95514" d="M815.103 467.384c3.356-1.894 6.641-3.415 9.94-4.903l23.113-10.389c.6-.269 1.371-.335 1.813-.757l22.808-21.967c2.008-1.962 3.593-4.371 5.668-6.248 8.272-7.48 12.854-16.394 12.73-27.863-.049-4.505 1.828-9.031 2.847-13.956 5.427.559 10.836 1.526 16.609 2.68-1.863 17.245-10.272 32.119-16.91 47.544-2.387 5.546-7.239 10.223-11.588 14.69-7.753 7.962-15.978 15.464-24.011 23.152-.478.458-.944 1.002-1.525 1.269l-36.069 16.355c-2.076-6.402-3.783-12.81-5.425-19.607z"></path><path fill="#eb620b" d="M783.944 404.402c9.499-8.388 19.556-15.905 28.437-24.621 6.631-6.508 11.744-14.564 17.575-22.273 9.271 4.016 18.501 8.375 27.893 13.43-4.134 7.07-8.017 13.778-12.833 19.731-5.785 7.15-12.109 13.917-18.666 20.376-7.99 7.869-16.466 15.244-24.731 22.832l-17.674-29.475z"></path><path fill="#ea544c" d="M1197.986 854.686c-9.756-3.309-16.79-10.044-22.88-18.059l-28.001-36.417c8.601-5.939 17.348-11.563 26.758-17.075 1.615 1.026 2.639 1.876 3.505 2.865l26.664 30.44c3.723 4.139 7.995 7.785 12.017 11.656l-18.064 26.591z"></path><path fill="#ec6333" d="M1351.41 332.903c-5.667-9.409-10.361-19.149-16.445-27.926-2.833-4.087-8.504-6.128-12.62-9.432-3.184-2.555-5.849-5.745-8.926-8.447-3.454-3.033-6.756-6.52-10.753-8.598-9.391-4.88-19.157-9.039-29.138-13.499 1.18-5.441 2.727-10.873 4.81-16.607 11.918 4.674 24.209 8.261 34.464 14.962 14.239 9.304 29.011 18.453 39.595 32.464 2.386 3.159 5.121 6.077 7.884 8.923 6.564 6.764 10.148 14.927 11.723 24.093l-20.594 4.067z"></path><path fill="#eb5e5b" d="M1117 
536.549c-6.113-4.702-9.965-11.44-11.917-18.955-2.292-8.819-4.066-17.74-9.467-25.337-4.327-6.085-3.122-13.382-4.6-20.088l-4.55-21.241c-1.59-8.054-3.172-16.118-4.422-24.23l-5.037-36.129c6.382-1.43 12.777-2.462 19.582-3.443 1.906 11.646 3.426 23.24 4.878 34.842.307 2.453.717 4.973.477 7.402-1.86 18.84 2.834 36.934 5.347 55.352 1.474 10.806 4.885 20.848 7.101 31.302 1.394 6.579 1.774 13.374 2.609 20.523z"></path><path fill="#ec644b" d="M1263.638 290.071c4.697 2.713 9.183 5.458 13.45 8.509 17.199 12.295 33.158 25.836 43.873 44.907-8.026 4.725-16.095 9.106-24.83 13.372-11.633-15.937-25.648-28.515-41.888-38.689-1.609-1.008-3.555-1.48-5.344-2.2 2.329-3.852 4.766-7.645 6.959-11.573l7.78-14.326z"></path><path fill="#eb5f2d" d="M1372.453 328.903c-2.025-9.233-5.608-17.396-12.172-24.16-2.762-2.846-5.498-5.764-7.884-8.923-10.584-14.01-25.356-23.16-39.595-32.464-10.256-6.701-22.546-10.289-34.284-15.312.325-5.246 1.005-10.444 2.027-15.863l47.529 22.394c.89.428 1.83.901 2.516 1.584l45.564 45.193c7.69 7.233 9.352 16.472 11.849 26.084-5.032.773-10.066 1.154-15.55 1.466z"></path><path fill="#e95a0f" d="M801.776 434.171c8.108-7.882 16.584-15.257 24.573-23.126 6.558-6.459 12.881-13.226 18.666-20.376 4.817-5.953 8.7-12.661 13.011-19.409 5.739 1.338 11.463 3.051 17.581 4.838-.845 4.183-2.53 8.219-3.229 12.418-1.522 9.144-8.588 14.477-14.201 20.475-8.512 9.094-17.745 17.635-27.443 25.455-6.613 5.333-14.54 9.036-22.223 13.51-2.422-4.469-4.499-8.98-6.735-13.786z"></path><path fill="#eb5e5b" d="M1248.533 316.002c2.155.688 4.101 1.159 5.71 2.168 16.24 10.174 30.255 22.752 41.532 38.727-7.166 5.736-14.641 11.319-22.562 16.731-1.16-1.277-1.684-2.585-2.615-3.46l-38.694-36.2 14.203-15.029c.803-.86 1.38-1.93 2.427-2.936z"></path><path fill="#eb5a57" d="M1216.359 827.958c-4.331-3.733-8.603-7.379-12.326-11.518l-26.664-30.44c-.866-.989-1.89-1.839-3.152-2.902 6.483-6.054 13.276-11.959 20.371-18.005l39.315 44.704c-5.648 6.216-11.441 12.12-17.544 18.161z"></path><path fill="#ec6168" d="M1231.598 334.101l38.999 36.066c.931.876 1.456 2.183 2.303 3.608-4.283 4.279-8.7 8.24-13.769 12.091-4.2-3.051-7.512-6.349-11.338-8.867-12.36-8.136-22.893-18.27-32.841-29.093l16.646-13.805z"></path><path fill="#ed656e" d="M1214.597 347.955c10.303 10.775 20.836 20.908 33.196 29.044 3.825 2.518 7.137 5.816 10.992 8.903-3.171 4.397-6.65 8.648-10.432 13.046-6.785-5.184-13.998-9.858-19.529-16.038-4.946-5.527-9.687-8.644-17.309-8.215-2.616.147-5.734-2.788-8.067-4.923-3.026-2.769-5.497-6.144-8.35-9.568 6.286-4.273 12.715-8.237 19.499-12.25z"></path></svg> </p> <p align="center"> <b>The crispy sentence embedding family from <a href="https://mixedbread.com"><b>Mixedbread</b></a>.</b> </p> <p align="center"> <sup> 🍞 Looking for a simple end-to-end retrieval solution? Meet Omni, our multimodal and multilingual model. <a href="https://mixedbread.com"><b>Get in touch for access.</a> </sup> </p> # mixedbread-ai/mxbai-embed-large-v1 Here, we provide several ways to produce sentence embeddings. Please note that you have to provide the prompt `Represent this sentence for searching relevant passages:` for query if you want to use it for retrieval. Besides that you don't need any prompt. Our model also supports [Matryoshka Representation Learning and binary quantization](https://www.mixedbread.ai/blog/binary-mrl). ## Quickstart Here, we provide several ways to produce sentence embeddings. Please note that you have to provide the prompt `Represent this sentence for searching relevant passages: ` for query if you want to use it for retrieval. 
Besides that you don't need any prompt.

### sentence-transformers

```
python -m pip install -U sentence-transformers
```

```python
from sentence_transformers import SentenceTransformer
from sentence_transformers.util import cos_sim
from sentence_transformers.quantization import quantize_embeddings

# 1. Specify preferred dimensions
dimensions = 512

# 2. Load the model
model = SentenceTransformer("mixedbread-ai/mxbai-embed-large-v1", truncate_dim=dimensions)

# The prompt used for query retrieval tasks:
# query_prompt = 'Represent this sentence for searching relevant passages: '

query = "A man is eating a piece of bread"
docs = [
    "A man is eating food.",
    "A man is eating pasta.",
    "The girl is carrying a baby.",
    "A man is riding a horse.",
]

# 3. Encode
query_embedding = model.encode(query, prompt_name="query")
# Equivalent alternatives:
# query_embedding = model.encode(query_prompt + query)
# query_embedding = model.encode(query, prompt=query_prompt)

docs_embeddings = model.encode(docs)

# Optional: quantize the embeddings
binary_query_embedding = quantize_embeddings(query_embedding, precision="ubinary")
binary_docs_embeddings = quantize_embeddings(docs_embeddings, precision="ubinary")

similarities = cos_sim(query_embedding, docs_embeddings)
print('similarities:', similarities)
```

### Transformers

```python
from typing import Dict

import torch
import numpy as np
from transformers import AutoModel, AutoTokenizer
from sentence_transformers.util import cos_sim

# For retrieval you need to pass this prompt. Please find out more in our blog post.
def transform_query(query: str) -> str:
    """ For retrieval, add the prompt for query (not for documents). """
    return f'Represent this sentence for searching relevant passages: {query}'

# The model works really well with cls pooling (default) but also with mean pooling.
def pooling(outputs: torch.Tensor, inputs: Dict, strategy: str = 'cls') -> np.ndarray:
    if strategy == 'cls':
        outputs = outputs[:, 0]
    elif strategy == 'mean':
        outputs = torch.sum(
            outputs * inputs["attention_mask"][:, :, None], dim=1) / torch.sum(inputs["attention_mask"], dim=1, keepdim=True)
    else:
        raise NotImplementedError
    return outputs.detach().cpu().numpy()

# 1. Load the model
model_id = 'mixedbread-ai/mxbai-embed-large-v1'
tokenizer = AutoTokenizer.from_pretrained(model_id)
model = AutoModel.from_pretrained(model_id).cuda()

docs = [
    transform_query('A man is eating a piece of bread'),
    "A man is eating food.",
    "A man is eating pasta.",
    "The girl is carrying a baby.",
    "A man is riding a horse.",
]

# 2. Encode
inputs = tokenizer(docs, padding=True, return_tensors='pt')
for k, v in inputs.items():
    inputs[k] = v.cuda()
outputs = model(**inputs).last_hidden_state
embeddings = pooling(outputs, inputs, 'cls')

similarities = cos_sim(embeddings[0], embeddings[1:])
print('similarities:', similarities)
```

### Transformers.js

If you haven't already, you can install the [Transformers.js](https://huggingface.co/docs/transformers.js) JavaScript library from [NPM](https://www.npmjs.com/package/@xenova/transformers) using:

```
npm i @xenova/transformers
```

You can then use the model to compute embeddings like this:

```javascript
import { pipeline, cos_sim } from '@xenova/transformers';

// Create a feature extraction pipeline
const extractor = await pipeline('feature-extraction', 'mixedbread-ai/mxbai-embed-large-v1', {
    quantized: false, // Comment out this line to use the quantized version
});

// Generate sentence embeddings
const docs = [
    'Represent this sentence for searching relevant passages: A man is eating a piece of bread',
    'A man is eating food.',
    'A man is eating pasta.',
    'The girl is carrying a baby.',
    'A man is riding a horse.',
]
const output = await extractor(docs, { pooling: 'cls' });

// Compute similarity scores
const [source_embeddings, ...document_embeddings ] = output.tolist();
const similarities = document_embeddings.map(x => cos_sim(source_embeddings, x));
console.log(similarities); // [0.7919578577247139, 0.6369278664248345, 0.16512018371357193, 0.3620778366720027]
```

### Using API

You can use the model via our API as follows:

```python
from mixedbread_ai.client import MixedbreadAI, EncodingFormat
from sklearn.metrics.pairwise import cosine_similarity
import os

mxbai = MixedbreadAI(api_key="{MIXEDBREAD_API_KEY}")

english_sentences = [
    'What is the capital of Australia?',
    'Canberra is the capital of Australia.'
]

res = mxbai.embeddings(
    input=english_sentences,
    model="mixedbread-ai/mxbai-embed-large-v1",
    normalized=True,
    encoding_format=[EncodingFormat.FLOAT, EncodingFormat.UBINARY, EncodingFormat.INT_8],
    dimensions=512
)

encoded_embeddings = res.data[0].embedding
print(res.dimensions, encoded_embeddings.ubinary, encoded_embeddings.float_, encoded_embeddings.int_8)
```

The API comes with native int8 and binary quantization support! Check out the [docs](https://mixedbread.ai/docs) for more information.

### Infinity

```bash
docker run --gpus all -v $PWD/data:/app/.cache -p "7997":"7997" \
michaelf34/infinity:0.0.68 \
v2 --model-id mixedbread-ai/mxbai-embed-large-v1 --revision "main" --dtype float16 --engine torch --port 7997
```

## Evaluation

As of March 2024, our model achieves SOTA performance for BERT-large sized models on the [MTEB](https://huggingface.co/spaces/mteb/leaderboard). It outperforms commercial models like OpenAI's text-embedding-3-large and matches the performance of models 20x its size like [echo-mistral-7b](https://huggingface.co/jspringer/echo-mistral-7b-instruct-lasttoken). Our model was trained with no overlap of the MTEB data, which indicates that it generalizes well across several domains, tasks and text lengths. We know there are some limitations with this model, which will be fixed in v2.
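If you want to verify scores like these yourself, the snippet below is a minimal sketch (not the official evaluation setup) of running the model on a couple of MTEB tasks with the open-source `mteb` package; the task selection and output folder are illustrative, and retrieval tasks additionally require the query prompt described above.

```python
# Minimal sketch, assuming the `mteb` and `sentence-transformers` packages are installed.
# The two task names below are illustrative; the leaderboard average covers 56 datasets.
from mteb import MTEB
from sentence_transformers import SentenceTransformer

model = SentenceTransformer("mixedbread-ai/mxbai-embed-large-v1")

# Two prompt-free tasks as examples; for retrieval tasks, queries must be encoded with the
# "Represent this sentence for searching relevant passages: " prompt.
evaluation = MTEB(tasks=["Banking77Classification", "STSBenchmark"])
results = evaluation.run(model, output_folder="results/mxbai-embed-large-v1")
print(results)
```

The full MTEB comparison table follows below.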
| Model | Avg (56 datasets) | Classification (12 datasets) | Clustering (11 datasets) | PairClassification (3 datasets) | Reranking (4 datasets) | Retrieval (15 datasets) | STS (10 datasets) | Summarization (1 dataset) |
| --- | --- | --- | --- | --- | --- | --- | --- | --- |
| **mxbai-embed-large-v1** | **64.68** | 75.64 | 46.71 | 87.2 | 60.11 | 54.39 | 85.00 | 32.71 |
| [bge-large-en-v1.5](https://huggingface.co/BAAI/bge-large-en-v1.5) | 64.23 | 75.97 | 46.08 | 87.12 | 60.03 | 54.29 | 83.11 | 31.61 |
| [mxbai-embed-2d-large-v1](https://huggingface.co/mixedbread-ai/mxbai-embed-2d-large-v1) | 63.25 | 74.14 | 46.07 | 85.89 | 58.94 | 51.42 | 84.9 | 31.55 |
| [nomic-embed-text-v1](https://huggingface.co/nomic-ai/nomic-embed-text-v1) | 62.39 | 74.12 | 43.91 | 85.15 | 55.69 | 52.81 | 82.06 | 30.08 |
| [jina-embeddings-v2-base-en](https://huggingface.co/jinaai/jina-embeddings-v2-base-en) | 60.38 | 73.45 | 41.73 | 85.38 | 56.98 | 47.87 | 80.7 | 31.6 |
| *Proprietary Models* | | | | | | | | |
| [OpenAI text-embedding-3-large](https://openai.com/blog/new-embedding-models-and-api-updates) | 64.58 | 75.45 | 49.01 | 85.72 | 59.16 | 55.44 | 81.73 | 29.92 |
| [Cohere embed-english-v3.0](https://txt.cohere.com/introducing-embed-v3/) | 64.47 | 76.49 | 47.43 | 85.84 | 58.01 | 55.00 | 82.62 | 30.18 |
| [OpenAI text-embedding-ada-002](https://openai.com/blog/new-and-improved-embedding-model) | 60.99 | 70.93 | 45.90 | 84.89 | 56.32 | 49.25 | 80.97 | 30.80 |

Please find more information in our [blog post](https://mixedbread.ai/blog/mxbai-embed-large-v1).

## Matryoshka and Binary Quantization

Embeddings in their commonly used form (float arrays) have a high memory footprint when used at scale. Two approaches to solve this problem are Matryoshka Representation Learning (MRL) and (Binary) Quantization. While MRL reduces the number of dimensions of an embedding, binary quantization transforms the value of each dimension from a float32 into a lower precision (int8 or even binary). <b> The model supports both approaches! </b>

You can also take it one step further and combine both MRL and quantization. This combination of binary quantization and MRL allows you to reduce the memory usage of your embeddings significantly. This leads to much lower costs when using a vector database in particular. You can read more about the technology and its advantages in our [blog post](https://www.mixedbread.ai/blog/binary-mrl).

## Community

Please join our [Discord Community](https://discord.gg/jDfMHzAVfU) and share your feedback and thoughts! We are here to help and also always happy to chat.

## License

Apache 2.0

## Citation

```bibtex
@online{emb2024mxbai,
  title={Open Source Strikes Bread - New Fluffy Embeddings Model},
  author={Sean Lee and Aamir Shakir and Darius Koenig and Julius Lipp},
  year={2024},
  url={https://www.mixedbread.ai/blog/mxbai-embed-large-v1},
}

@article{li2023angle,
  title={AnglE-optimized Text Embeddings},
  author={Li, Xianming and Li, Jing},
  journal={arXiv preprint arXiv:2309.12871},
  year={2023}
}
```
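As a complement to the Matryoshka and binary quantization section above, here is a minimal sketch of combining both techniques. It assumes a recent `sentence-transformers` release that provides `truncate_dim` and `quantize_embeddings`; the embedding is first truncated to 512 of its 1024 dimensions via MRL and then packed into bits, shrinking each vector from 4096 bytes (1024 × float32) to 64 bytes.

```python
# Minimal sketch: MRL truncation followed by binary quantization.
from sentence_transformers import SentenceTransformer
from sentence_transformers.quantization import quantize_embeddings

# MRL step: keep only the first 512 of the model's 1024 dimensions.
model = SentenceTransformer("mixedbread-ai/mxbai-embed-large-v1", truncate_dim=512)

docs = [
    "A man is eating food.",
    "A man is riding a horse.",
]
embeddings = model.encode(docs)  # float32, shape (2, 512)

# Quantization step: pack each dimension's sign into a bit (8 dimensions per byte).
binary_embeddings = quantize_embeddings(embeddings, precision="ubinary")

print(embeddings.nbytes, "bytes ->", binary_embeddings.nbytes, "bytes")  # 4096 -> 128
```

In a retrieval setting, such binary vectors are typically used for a fast first-stage search, with float or int8 embeddings rescoring the top candidates.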
[ "BIOSSES", "SCIFACT" ]
intfloat/multilingual-e5-small
intfloat
sentence-similarity
[ "sentence-transformers", "pytorch", "onnx", "safetensors", "openvino", "bert", "mteb", "Sentence Transformers", "sentence-similarity", "multilingual", "af", "am", "ar", "as", "az", "be", "bg", "bn", "br", "bs", "ca", "cs", "cy", "da", "de", "el", "en", "eo", "es", "et", "eu", "fa", "fi", "fr", "fy", "ga", "gd", "gl", "gu", "ha", "he", "hi", "hr", "hu", "hy", "id", "is", "it", "ja", "jv", "ka", "kk", "km", "kn", "ko", "ku", "ky", "la", "lo", "lt", "lv", "mg", "mk", "ml", "mn", "mr", "ms", "my", "ne", "nl", "no", "om", "or", "pa", "pl", "ps", "pt", "ro", "ru", "sa", "sd", "si", "sk", "sl", "so", "sq", "sr", "su", "sv", "sw", "ta", "te", "th", "tl", "tr", "ug", "uk", "ur", "uz", "vi", "xh", "yi", "zh", "arxiv:2402.05672", "arxiv:2108.08787", "arxiv:2104.08663", "arxiv:2210.07316", "license:mit", "model-index", "autotrain_compatible", "text-embeddings-inference", "endpoints_compatible", "region:us" ]
"2023-06-30T07:31:03Z"
2025-02-17T03:22:45+00:00
2,371,021
183
--- language: - multilingual - af - am - ar - as - az - be - bg - bn - br - bs - ca - cs - cy - da - de - el - en - eo - es - et - eu - fa - fi - fr - fy - ga - gd - gl - gu - ha - he - hi - hr - hu - hy - id - is - it - ja - jv - ka - kk - km - kn - ko - ku - ky - la - lo - lt - lv - mg - mk - ml - mn - mr - ms - my - ne - nl - 'no' - om - or - pa - pl - ps - pt - ro - ru - sa - sd - si - sk - sl - so - sq - sr - su - sv - sw - ta - te - th - tl - tr - ug - uk - ur - uz - vi - xh - yi - zh license: mit tags: - mteb - Sentence Transformers - sentence-similarity - sentence-transformers model-index: - name: intfloat/multilingual-e5-small results: - task: type: Classification dataset: name: MTEB AmazonCounterfactualClassification (en) type: mteb/amazon_counterfactual config: en split: test revision: e8379541af4e31359cca9fbcf4b00f2671dba205 metrics: - type: accuracy value: 73.79104477611939 - type: ap value: 36.9996434842022 - type: f1 value: 67.95453679103099 - task: type: Classification dataset: name: MTEB AmazonCounterfactualClassification (de) type: mteb/amazon_counterfactual config: de split: test revision: e8379541af4e31359cca9fbcf4b00f2671dba205 metrics: - type: accuracy value: 71.64882226980728 - type: ap value: 82.11942130026586 - type: f1 value: 69.87963421606715 - task: type: Classification dataset: name: MTEB AmazonCounterfactualClassification (en-ext) type: mteb/amazon_counterfactual config: en-ext split: test revision: e8379541af4e31359cca9fbcf4b00f2671dba205 metrics: - type: accuracy value: 75.8095952023988 - type: ap value: 24.46869495579561 - type: f1 value: 63.00108480037597 - task: type: Classification dataset: name: MTEB AmazonCounterfactualClassification (ja) type: mteb/amazon_counterfactual config: ja split: test revision: e8379541af4e31359cca9fbcf4b00f2671dba205 metrics: - type: accuracy value: 64.186295503212 - type: ap value: 15.496804690197042 - type: f1 value: 52.07153895475031 - task: type: Classification dataset: name: MTEB AmazonPolarityClassification type: mteb/amazon_polarity config: default split: test revision: e2d317d38cd51312af73b3d32a06d1a08b442046 metrics: - type: accuracy value: 88.699325 - type: ap value: 85.27039559917269 - type: f1 value: 88.65556295032513 - task: type: Classification dataset: name: MTEB AmazonReviewsClassification (en) type: mteb/amazon_reviews_multi config: en split: test revision: 1399c76144fd37290681b995c656ef9b2e06e26d metrics: - type: accuracy value: 44.69799999999999 - type: f1 value: 43.73187348654165 - task: type: Classification dataset: name: MTEB AmazonReviewsClassification (de) type: mteb/amazon_reviews_multi config: de split: test revision: 1399c76144fd37290681b995c656ef9b2e06e26d metrics: - type: accuracy value: 40.245999999999995 - type: f1 value: 39.3863530637684 - task: type: Classification dataset: name: MTEB AmazonReviewsClassification (es) type: mteb/amazon_reviews_multi config: es split: test revision: 1399c76144fd37290681b995c656ef9b2e06e26d metrics: - type: accuracy value: 40.394 - type: f1 value: 39.301223469483446 - task: type: Classification dataset: name: MTEB AmazonReviewsClassification (fr) type: mteb/amazon_reviews_multi config: fr split: test revision: 1399c76144fd37290681b995c656ef9b2e06e26d metrics: - type: accuracy value: 38.864 - type: f1 value: 37.97974261868003 - task: type: Classification dataset: name: MTEB AmazonReviewsClassification (ja) type: mteb/amazon_reviews_multi config: ja split: test revision: 1399c76144fd37290681b995c656ef9b2e06e26d metrics: - type: accuracy value: 37.682 - type: f1 
value: 37.07399369768313 - task: type: Classification dataset: name: MTEB AmazonReviewsClassification (zh) type: mteb/amazon_reviews_multi config: zh split: test revision: 1399c76144fd37290681b995c656ef9b2e06e26d metrics: - type: accuracy value: 37.504 - type: f1 value: 36.62317273874278 - task: type: Retrieval dataset: name: MTEB ArguAna type: arguana config: default split: test revision: None metrics: - type: map_at_1 value: 19.061 - type: map_at_10 value: 31.703 - type: map_at_100 value: 32.967 - type: map_at_1000 value: 33.001000000000005 - type: map_at_3 value: 27.466 - type: map_at_5 value: 29.564 - type: mrr_at_1 value: 19.559 - type: mrr_at_10 value: 31.874999999999996 - type: mrr_at_100 value: 33.146 - type: mrr_at_1000 value: 33.18 - type: mrr_at_3 value: 27.667 - type: mrr_at_5 value: 29.74 - type: ndcg_at_1 value: 19.061 - type: ndcg_at_10 value: 39.062999999999995 - type: ndcg_at_100 value: 45.184000000000005 - type: ndcg_at_1000 value: 46.115 - type: ndcg_at_3 value: 30.203000000000003 - type: ndcg_at_5 value: 33.953 - type: precision_at_1 value: 19.061 - type: precision_at_10 value: 6.279999999999999 - type: precision_at_100 value: 0.9129999999999999 - type: precision_at_1000 value: 0.099 - type: precision_at_3 value: 12.706999999999999 - type: precision_at_5 value: 9.431000000000001 - type: recall_at_1 value: 19.061 - type: recall_at_10 value: 62.802 - type: recall_at_100 value: 91.323 - type: recall_at_1000 value: 98.72 - type: recall_at_3 value: 38.122 - type: recall_at_5 value: 47.155 - task: type: Clustering dataset: name: MTEB ArxivClusteringP2P type: mteb/arxiv-clustering-p2p config: default split: test revision: a122ad7f3f0291bf49cc6f4d32aa80929df69d5d metrics: - type: v_measure value: 39.22266660528253 - task: type: Clustering dataset: name: MTEB ArxivClusteringS2S type: mteb/arxiv-clustering-s2s config: default split: test revision: f910caf1a6075f7329cdf8c1a6135696f37dbd53 metrics: - type: v_measure value: 30.79980849482483 - task: type: Reranking dataset: name: MTEB AskUbuntuDupQuestions type: mteb/askubuntudupquestions-reranking config: default split: test revision: 2000358ca161889fa9c082cb41daa8dcfb161a54 metrics: - type: map value: 57.8790068352054 - type: mrr value: 71.78791276436706 - task: type: STS dataset: name: MTEB BIOSSES type: mteb/biosses-sts config: default split: test revision: d3fb88f8f02e40887cd149695127462bbcf29b4a metrics: - type: cos_sim_pearson value: 82.36328364043163 - type: cos_sim_spearman value: 82.26211536195868 - type: euclidean_pearson value: 80.3183865039173 - type: euclidean_spearman value: 79.88495276296132 - type: manhattan_pearson value: 80.14484480692127 - type: manhattan_spearman value: 80.39279565980743 - task: type: BitextMining dataset: name: MTEB BUCC (de-en) type: mteb/bucc-bitext-mining config: de-en split: test revision: d51519689f32196a32af33b075a01d0e7c51e252 metrics: - type: accuracy value: 98.0375782881002 - type: f1 value: 97.86012526096033 - type: precision value: 97.77139874739039 - type: recall value: 98.0375782881002 - task: type: BitextMining dataset: name: MTEB BUCC (fr-en) type: mteb/bucc-bitext-mining config: fr-en split: test revision: d51519689f32196a32af33b075a01d0e7c51e252 metrics: - type: accuracy value: 93.35241030156286 - type: f1 value: 92.66050333846944 - type: precision value: 92.3306919069631 - type: recall value: 93.35241030156286 - task: type: BitextMining dataset: name: MTEB BUCC (ru-en) type: mteb/bucc-bitext-mining config: ru-en split: test revision: d51519689f32196a32af33b075a01d0e7c51e252 
metrics: - type: accuracy value: 94.0699688257707 - type: f1 value: 93.50236693222492 - type: precision value: 93.22791825424315 - type: recall value: 94.0699688257707 - task: type: BitextMining dataset: name: MTEB BUCC (zh-en) type: mteb/bucc-bitext-mining config: zh-en split: test revision: d51519689f32196a32af33b075a01d0e7c51e252 metrics: - type: accuracy value: 89.25750394944708 - type: f1 value: 88.79234684921889 - type: precision value: 88.57293312269616 - type: recall value: 89.25750394944708 - task: type: Classification dataset: name: MTEB Banking77Classification type: mteb/banking77 config: default split: test revision: 0fd18e25b25c072e09e0d92ab615fda904d66300 metrics: - type: accuracy value: 79.41558441558442 - type: f1 value: 79.25886487487219 - task: type: Clustering dataset: name: MTEB BiorxivClusteringP2P type: mteb/biorxiv-clustering-p2p config: default split: test revision: 65b79d1d13f80053f67aca9498d9402c2d9f1f40 metrics: - type: v_measure value: 35.747820820329736 - task: type: Clustering dataset: name: MTEB BiorxivClusteringS2S type: mteb/biorxiv-clustering-s2s config: default split: test revision: 258694dd0231531bc1fd9de6ceb52a0853c6d908 metrics: - type: v_measure value: 27.045143830596146 - task: type: Retrieval dataset: name: MTEB CQADupstackRetrieval type: BeIR/cqadupstack config: default split: test revision: None metrics: - type: map_at_1 value: 24.252999999999997 - type: map_at_10 value: 31.655916666666666 - type: map_at_100 value: 32.680749999999996 - type: map_at_1000 value: 32.79483333333334 - type: map_at_3 value: 29.43691666666666 - type: map_at_5 value: 30.717416666666665 - type: mrr_at_1 value: 28.602750000000004 - type: mrr_at_10 value: 35.56875 - type: mrr_at_100 value: 36.3595 - type: mrr_at_1000 value: 36.427749999999996 - type: mrr_at_3 value: 33.586166666666664 - type: mrr_at_5 value: 34.73641666666666 - type: ndcg_at_1 value: 28.602750000000004 - type: ndcg_at_10 value: 36.06933333333334 - type: ndcg_at_100 value: 40.70141666666667 - type: ndcg_at_1000 value: 43.24341666666667 - type: ndcg_at_3 value: 32.307916666666664 - type: ndcg_at_5 value: 34.129999999999995 - type: precision_at_1 value: 28.602750000000004 - type: precision_at_10 value: 6.097666666666667 - type: precision_at_100 value: 0.9809166666666668 - type: precision_at_1000 value: 0.13766666666666663 - type: precision_at_3 value: 14.628166666666667 - type: precision_at_5 value: 10.266916666666667 - type: recall_at_1 value: 24.252999999999997 - type: recall_at_10 value: 45.31916666666667 - type: recall_at_100 value: 66.03575000000001 - type: recall_at_1000 value: 83.94708333333334 - type: recall_at_3 value: 34.71941666666666 - type: recall_at_5 value: 39.46358333333333 - task: type: Retrieval dataset: name: MTEB ClimateFEVER type: climate-fever config: default split: test revision: None metrics: - type: map_at_1 value: 9.024000000000001 - type: map_at_10 value: 15.644 - type: map_at_100 value: 17.154 - type: map_at_1000 value: 17.345 - type: map_at_3 value: 13.028 - type: map_at_5 value: 14.251 - type: mrr_at_1 value: 19.674 - type: mrr_at_10 value: 29.826999999999998 - type: mrr_at_100 value: 30.935000000000002 - type: mrr_at_1000 value: 30.987 - type: mrr_at_3 value: 26.645000000000003 - type: mrr_at_5 value: 28.29 - type: ndcg_at_1 value: 19.674 - type: ndcg_at_10 value: 22.545 - type: ndcg_at_100 value: 29.207 - type: ndcg_at_1000 value: 32.912 - type: ndcg_at_3 value: 17.952 - type: ndcg_at_5 value: 19.363 - type: precision_at_1 value: 19.674 - type: precision_at_10 value: 
7.212000000000001 - type: precision_at_100 value: 1.435 - type: precision_at_1000 value: 0.212 - type: precision_at_3 value: 13.507 - type: precision_at_5 value: 10.397 - type: recall_at_1 value: 9.024000000000001 - type: recall_at_10 value: 28.077999999999996 - type: recall_at_100 value: 51.403 - type: recall_at_1000 value: 72.406 - type: recall_at_3 value: 16.768 - type: recall_at_5 value: 20.737 - task: type: Retrieval dataset: name: MTEB DBPedia type: dbpedia-entity config: default split: test revision: None metrics: - type: map_at_1 value: 8.012 - type: map_at_10 value: 17.138 - type: map_at_100 value: 24.146 - type: map_at_1000 value: 25.622 - type: map_at_3 value: 12.552 - type: map_at_5 value: 14.435 - type: mrr_at_1 value: 62.25000000000001 - type: mrr_at_10 value: 71.186 - type: mrr_at_100 value: 71.504 - type: mrr_at_1000 value: 71.514 - type: mrr_at_3 value: 69.333 - type: mrr_at_5 value: 70.408 - type: ndcg_at_1 value: 49.75 - type: ndcg_at_10 value: 37.76 - type: ndcg_at_100 value: 42.071 - type: ndcg_at_1000 value: 49.309 - type: ndcg_at_3 value: 41.644 - type: ndcg_at_5 value: 39.812999999999995 - type: precision_at_1 value: 62.25000000000001 - type: precision_at_10 value: 30.15 - type: precision_at_100 value: 9.753 - type: precision_at_1000 value: 1.9189999999999998 - type: precision_at_3 value: 45.667 - type: precision_at_5 value: 39.15 - type: recall_at_1 value: 8.012 - type: recall_at_10 value: 22.599 - type: recall_at_100 value: 48.068 - type: recall_at_1000 value: 71.328 - type: recall_at_3 value: 14.043 - type: recall_at_5 value: 17.124 - task: type: Classification dataset: name: MTEB EmotionClassification type: mteb/emotion config: default split: test revision: 4f58c6b202a23cf9a4da393831edf4f9183cad37 metrics: - type: accuracy value: 42.455 - type: f1 value: 37.59462649781862 - task: type: Retrieval dataset: name: MTEB FEVER type: fever config: default split: test revision: None metrics: - type: map_at_1 value: 58.092 - type: map_at_10 value: 69.586 - type: map_at_100 value: 69.968 - type: map_at_1000 value: 69.982 - type: map_at_3 value: 67.48100000000001 - type: map_at_5 value: 68.915 - type: mrr_at_1 value: 62.166 - type: mrr_at_10 value: 73.588 - type: mrr_at_100 value: 73.86399999999999 - type: mrr_at_1000 value: 73.868 - type: mrr_at_3 value: 71.6 - type: mrr_at_5 value: 72.99 - type: ndcg_at_1 value: 62.166 - type: ndcg_at_10 value: 75.27199999999999 - type: ndcg_at_100 value: 76.816 - type: ndcg_at_1000 value: 77.09700000000001 - type: ndcg_at_3 value: 71.36 - type: ndcg_at_5 value: 73.785 - type: precision_at_1 value: 62.166 - type: precision_at_10 value: 9.716 - type: precision_at_100 value: 1.065 - type: precision_at_1000 value: 0.11 - type: precision_at_3 value: 28.278 - type: precision_at_5 value: 18.343999999999998 - type: recall_at_1 value: 58.092 - type: recall_at_10 value: 88.73400000000001 - type: recall_at_100 value: 95.195 - type: recall_at_1000 value: 97.04599999999999 - type: recall_at_3 value: 78.45 - type: recall_at_5 value: 84.316 - task: type: Retrieval dataset: name: MTEB FiQA2018 type: fiqa config: default split: test revision: None metrics: - type: map_at_1 value: 16.649 - type: map_at_10 value: 26.457000000000004 - type: map_at_100 value: 28.169 - type: map_at_1000 value: 28.352 - type: map_at_3 value: 23.305 - type: map_at_5 value: 25.169000000000004 - type: mrr_at_1 value: 32.407000000000004 - type: mrr_at_10 value: 40.922 - type: mrr_at_100 value: 41.931000000000004 - type: mrr_at_1000 value: 41.983 - type: mrr_at_3 value: 38.786 - 
type: mrr_at_5 value: 40.205999999999996 - type: ndcg_at_1 value: 32.407000000000004 - type: ndcg_at_10 value: 33.314 - type: ndcg_at_100 value: 40.312 - type: ndcg_at_1000 value: 43.685 - type: ndcg_at_3 value: 30.391000000000002 - type: ndcg_at_5 value: 31.525 - type: precision_at_1 value: 32.407000000000004 - type: precision_at_10 value: 8.966000000000001 - type: precision_at_100 value: 1.6019999999999999 - type: precision_at_1000 value: 0.22200000000000003 - type: precision_at_3 value: 20.165 - type: precision_at_5 value: 14.722 - type: recall_at_1 value: 16.649 - type: recall_at_10 value: 39.117000000000004 - type: recall_at_100 value: 65.726 - type: recall_at_1000 value: 85.784 - type: recall_at_3 value: 27.914 - type: recall_at_5 value: 33.289 - task: type: Retrieval dataset: name: MTEB HotpotQA type: hotpotqa config: default split: test revision: None metrics: - type: map_at_1 value: 36.253 - type: map_at_10 value: 56.16799999999999 - type: map_at_100 value: 57.06099999999999 - type: map_at_1000 value: 57.126 - type: map_at_3 value: 52.644999999999996 - type: map_at_5 value: 54.909 - type: mrr_at_1 value: 72.505 - type: mrr_at_10 value: 79.66 - type: mrr_at_100 value: 79.869 - type: mrr_at_1000 value: 79.88 - type: mrr_at_3 value: 78.411 - type: mrr_at_5 value: 79.19800000000001 - type: ndcg_at_1 value: 72.505 - type: ndcg_at_10 value: 65.094 - type: ndcg_at_100 value: 68.219 - type: ndcg_at_1000 value: 69.515 - type: ndcg_at_3 value: 59.99 - type: ndcg_at_5 value: 62.909000000000006 - type: precision_at_1 value: 72.505 - type: precision_at_10 value: 13.749 - type: precision_at_100 value: 1.619 - type: precision_at_1000 value: 0.179 - type: precision_at_3 value: 38.357 - type: precision_at_5 value: 25.313000000000002 - type: recall_at_1 value: 36.253 - type: recall_at_10 value: 68.744 - type: recall_at_100 value: 80.925 - type: recall_at_1000 value: 89.534 - type: recall_at_3 value: 57.535000000000004 - type: recall_at_5 value: 63.282000000000004 - task: type: Classification dataset: name: MTEB ImdbClassification type: mteb/imdb config: default split: test revision: 3d86128a09e091d6018b6d26cad27f2739fc2db7 metrics: - type: accuracy value: 80.82239999999999 - type: ap value: 75.65895781725314 - type: f1 value: 80.75880969095746 - task: type: Retrieval dataset: name: MTEB MSMARCO type: msmarco config: default split: dev revision: None metrics: - type: map_at_1 value: 21.624 - type: map_at_10 value: 34.075 - type: map_at_100 value: 35.229 - type: map_at_1000 value: 35.276999999999994 - type: map_at_3 value: 30.245 - type: map_at_5 value: 32.42 - type: mrr_at_1 value: 22.264 - type: mrr_at_10 value: 34.638000000000005 - type: mrr_at_100 value: 35.744 - type: mrr_at_1000 value: 35.787 - type: mrr_at_3 value: 30.891000000000002 - type: mrr_at_5 value: 33.042 - type: ndcg_at_1 value: 22.264 - type: ndcg_at_10 value: 40.991 - type: ndcg_at_100 value: 46.563 - type: ndcg_at_1000 value: 47.743 - type: ndcg_at_3 value: 33.198 - type: ndcg_at_5 value: 37.069 - type: precision_at_1 value: 22.264 - type: precision_at_10 value: 6.5089999999999995 - type: precision_at_100 value: 0.9299999999999999 - type: precision_at_1000 value: 0.10300000000000001 - type: precision_at_3 value: 14.216999999999999 - type: precision_at_5 value: 10.487 - type: recall_at_1 value: 21.624 - type: recall_at_10 value: 62.303 - type: recall_at_100 value: 88.124 - type: recall_at_1000 value: 97.08 - type: recall_at_3 value: 41.099999999999994 - type: recall_at_5 value: 50.381 - task: type: Classification dataset: name: 
MTEB MTOPDomainClassification (en) type: mteb/mtop_domain config: en split: test revision: d80d48c1eb48d3562165c59d59d0034df9fff0bf metrics: - type: accuracy value: 91.06703146374831 - type: f1 value: 90.86867815863172 - task: type: Classification dataset: name: MTEB MTOPDomainClassification (de) type: mteb/mtop_domain config: de split: test revision: d80d48c1eb48d3562165c59d59d0034df9fff0bf metrics: - type: accuracy value: 87.46970977740209 - type: f1 value: 86.36832872036588 - task: type: Classification dataset: name: MTEB MTOPDomainClassification (es) type: mteb/mtop_domain config: es split: test revision: d80d48c1eb48d3562165c59d59d0034df9fff0bf metrics: - type: accuracy value: 89.26951300867245 - type: f1 value: 88.93561193959502 - task: type: Classification dataset: name: MTEB MTOPDomainClassification (fr) type: mteb/mtop_domain config: fr split: test revision: d80d48c1eb48d3562165c59d59d0034df9fff0bf metrics: - type: accuracy value: 84.22799874725963 - type: f1 value: 84.30490069236556 - task: type: Classification dataset: name: MTEB MTOPDomainClassification (hi) type: mteb/mtop_domain config: hi split: test revision: d80d48c1eb48d3562165c59d59d0034df9fff0bf metrics: - type: accuracy value: 86.02007888131948 - type: f1 value: 85.39376041027991 - task: type: Classification dataset: name: MTEB MTOPDomainClassification (th) type: mteb/mtop_domain config: th split: test revision: d80d48c1eb48d3562165c59d59d0034df9fff0bf metrics: - type: accuracy value: 85.34900542495481 - type: f1 value: 85.39859673336713 - task: type: Classification dataset: name: MTEB MTOPIntentClassification (en) type: mteb/mtop_intent config: en split: test revision: ae001d0e6b1228650b7bd1c2c65fb50ad11a8aba metrics: - type: accuracy value: 71.078431372549 - type: f1 value: 53.45071102002276 - task: type: Classification dataset: name: MTEB MTOPIntentClassification (de) type: mteb/mtop_intent config: de split: test revision: ae001d0e6b1228650b7bd1c2c65fb50ad11a8aba metrics: - type: accuracy value: 65.85798816568047 - type: f1 value: 46.53112748993529 - task: type: Classification dataset: name: MTEB MTOPIntentClassification (es) type: mteb/mtop_intent config: es split: test revision: ae001d0e6b1228650b7bd1c2c65fb50ad11a8aba metrics: - type: accuracy value: 67.96864576384256 - type: f1 value: 45.966703022829506 - task: type: Classification dataset: name: MTEB MTOPIntentClassification (fr) type: mteb/mtop_intent config: fr split: test revision: ae001d0e6b1228650b7bd1c2c65fb50ad11a8aba metrics: - type: accuracy value: 61.31537738803633 - type: f1 value: 45.52601712835461 - task: type: Classification dataset: name: MTEB MTOPIntentClassification (hi) type: mteb/mtop_intent config: hi split: test revision: ae001d0e6b1228650b7bd1c2c65fb50ad11a8aba metrics: - type: accuracy value: 66.29616349946218 - type: f1 value: 47.24166485726613 - task: type: Classification dataset: name: MTEB MTOPIntentClassification (th) type: mteb/mtop_intent config: th split: test revision: ae001d0e6b1228650b7bd1c2c65fb50ad11a8aba metrics: - type: accuracy value: 67.51537070524412 - type: f1 value: 49.463476319014276 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (af) type: mteb/amazon_massive_intent config: af split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 57.06792199058508 - type: f1 value: 54.094921857502285 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (am) type: mteb/amazon_massive_intent config: am split: test revision: 
31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 51.960322797579025 - type: f1 value: 48.547371223370945 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (ar) type: mteb/amazon_massive_intent config: ar split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 54.425016812373904 - type: f1 value: 50.47069202054312 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (az) type: mteb/amazon_massive_intent config: az split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 59.798251513113655 - type: f1 value: 57.05013069086648 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (bn) type: mteb/amazon_massive_intent config: bn split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 59.37794216543376 - type: f1 value: 56.3607992649805 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (cy) type: mteb/amazon_massive_intent config: cy split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 46.56018829858777 - type: f1 value: 43.87319715715134 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (da) type: mteb/amazon_massive_intent config: da split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 62.9724277067922 - type: f1 value: 59.36480066245562 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (de) type: mteb/amazon_massive_intent config: de split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 62.72696704774715 - type: f1 value: 59.143595966615855 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (el) type: mteb/amazon_massive_intent config: el split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 61.5971755211836 - type: f1 value: 59.169445724946726 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (en) type: mteb/amazon_massive_intent config: en split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 70.29589778076665 - type: f1 value: 67.7577001808977 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (es) type: mteb/amazon_massive_intent config: es split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 66.31136516476126 - type: f1 value: 64.52032955983242 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (fa) type: mteb/amazon_massive_intent config: fa split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 65.54472091459314 - type: f1 value: 61.47903120066317 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (fi) type: mteb/amazon_massive_intent config: fi split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 61.45595158036314 - type: f1 value: 58.0891846024637 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (fr) type: mteb/amazon_massive_intent config: fr split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 65.47074646940149 - type: f1 value: 62.84830858877575 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (he) type: 
mteb/amazon_massive_intent config: he split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 58.046402151983855 - type: f1 value: 55.269074430533195 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (hi) type: mteb/amazon_massive_intent config: hi split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 64.06523201075991 - type: f1 value: 61.35339643021369 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (hu) type: mteb/amazon_massive_intent config: hu split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 60.954942837928726 - type: f1 value: 57.07035922704846 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (hy) type: mteb/amazon_massive_intent config: hy split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 57.404169468728995 - type: f1 value: 53.94259011839138 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (id) type: mteb/amazon_massive_intent config: id split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 64.16610625420309 - type: f1 value: 61.337103431499365 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (is) type: mteb/amazon_massive_intent config: is split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 52.262945527908535 - type: f1 value: 49.7610691598921 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (it) type: mteb/amazon_massive_intent config: it split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 65.54472091459314 - type: f1 value: 63.469099018440154 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (ja) type: mteb/amazon_massive_intent config: ja split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 68.22797579018157 - type: f1 value: 64.89098471083001 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (jv) type: mteb/amazon_massive_intent config: jv split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 50.847343644922674 - type: f1 value: 47.8536963168393 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (ka) type: mteb/amazon_massive_intent config: ka split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 48.45326160053799 - type: f1 value: 46.370078045805556 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (km) type: mteb/amazon_massive_intent config: km split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 42.83120376597175 - type: f1 value: 39.68948521599982 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (kn) type: mteb/amazon_massive_intent config: kn split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 57.5084061869536 - type: f1 value: 53.961876160401545 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (ko) type: mteb/amazon_massive_intent config: ko split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 63.7895090786819 - type: f1 value: 61.134223684676 - task: type: Classification dataset: name: MTEB 
MassiveIntentClassification (lv) type: mteb/amazon_massive_intent config: lv split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 54.98991257565569 - type: f1 value: 52.579862862826296 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (ml) type: mteb/amazon_massive_intent config: ml split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 61.90316072629456 - type: f1 value: 58.203024538290336 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (mn) type: mteb/amazon_massive_intent config: mn split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 57.09818426361802 - type: f1 value: 54.22718458445455 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (ms) type: mteb/amazon_massive_intent config: ms split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 58.991257565568255 - type: f1 value: 55.84892781767421 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (my) type: mteb/amazon_massive_intent config: my split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 55.901143241425686 - type: f1 value: 52.25264332199797 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (nb) type: mteb/amazon_massive_intent config: nb split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 61.96368527236047 - type: f1 value: 58.927243876153454 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (nl) type: mteb/amazon_massive_intent config: nl split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 65.64223268325489 - type: f1 value: 62.340453718379706 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (pl) type: mteb/amazon_massive_intent config: pl split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 64.52589105581708 - type: f1 value: 61.661113187022174 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (pt) type: mteb/amazon_massive_intent config: pt split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 66.84599865501009 - type: f1 value: 64.59342572873005 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (ro) type: mteb/amazon_massive_intent config: ro split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 60.81035642232684 - type: f1 value: 57.5169089806797 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (ru) type: mteb/amazon_massive_intent config: ru split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 58.652238071815056 - type: f1 value: 53.22732406426353 - type: f1_weighted value: 57.585586737209546 - type: main_score value: 58.652238071815056 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (sl) type: mteb/amazon_massive_intent config: sl split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 56.51647612642906 - type: f1 value: 54.33154780100043 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (sq) type: mteb/amazon_massive_intent config: sq split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 
metrics: - type: accuracy value: 57.985877605917956 - type: f1 value: 54.46187524463802 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (sv) type: mteb/amazon_massive_intent config: sv split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 65.03026227303296 - type: f1 value: 62.34377392877748 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (sw) type: mteb/amazon_massive_intent config: sw split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 53.567585743106925 - type: f1 value: 50.73770655983206 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (ta) type: mteb/amazon_massive_intent config: ta split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 57.2595830531271 - type: f1 value: 53.657327291708626 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (te) type: mteb/amazon_massive_intent config: te split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 57.82784129119032 - type: f1 value: 54.82518072665301 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (th) type: mteb/amazon_massive_intent config: th split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 64.06859448554137 - type: f1 value: 63.00185280500495 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (tl) type: mteb/amazon_massive_intent config: tl split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 58.91055817081371 - type: f1 value: 55.54116301224262 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (tr) type: mteb/amazon_massive_intent config: tr split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 63.54404841963686 - type: f1 value: 59.57650946030184 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (ur) type: mteb/amazon_massive_intent config: ur split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 59.27706792199059 - type: f1 value: 56.50010066083435 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (vi) type: mteb/amazon_massive_intent config: vi split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 64.0719569603228 - type: f1 value: 61.817075925647956 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (zh-CN) type: mteb/amazon_massive_intent config: zh-CN split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 68.23806321452591 - type: f1 value: 65.24917026029749 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (zh-TW) type: mteb/amazon_massive_intent config: zh-TW split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 62.53530598520511 - type: f1 value: 61.71131132295768 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (af) type: mteb/amazon_massive_scenario config: af split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 63.04303967720243 - type: f1 value: 60.3950085685985 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (am) type: mteb/amazon_massive_scenario config: am split: 
test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 56.83591123066578 - type: f1 value: 54.95059828830849 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (ar) type: mteb/amazon_massive_scenario config: ar split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 59.62340282447881 - type: f1 value: 59.525159996498225 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (az) type: mteb/amazon_massive_scenario config: az split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 60.85406859448555 - type: f1 value: 59.129299095681276 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (bn) type: mteb/amazon_massive_scenario config: bn split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 62.76731674512441 - type: f1 value: 61.159560612627715 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (cy) type: mteb/amazon_massive_scenario config: cy split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 50.181573638197705 - type: f1 value: 46.98422176289957 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (da) type: mteb/amazon_massive_scenario config: da split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 68.92737054472092 - type: f1 value: 67.69135611952979 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (de) type: mteb/amazon_massive_scenario config: de split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 69.18964357767318 - type: f1 value: 68.46106138186214 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (el) type: mteb/amazon_massive_scenario config: el split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 67.0712844653665 - type: f1 value: 66.75545422473901 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (en) type: mteb/amazon_massive_scenario config: en split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 74.4754539340955 - type: f1 value: 74.38427146553252 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (es) type: mteb/amazon_massive_scenario config: es split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 69.82515131136518 - type: f1 value: 69.63516462173847 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (fa) type: mteb/amazon_massive_scenario config: fa split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 68.70880968392737 - type: f1 value: 67.45420662567926 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (fi) type: mteb/amazon_massive_scenario config: fi split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 65.95494283792871 - type: f1 value: 65.06191009049222 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (fr) type: mteb/amazon_massive_scenario config: fr split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 68.75924680564896 - type: f1 value: 68.30833379585945 - task: type: Classification dataset: name: MTEB 
MassiveScenarioClassification (he) type: mteb/amazon_massive_scenario config: he split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 63.806321452589096 - type: f1 value: 63.273048243765054 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (hi) type: mteb/amazon_massive_scenario config: hi split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 67.68997982515133 - type: f1 value: 66.54703855381324 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (hu) type: mteb/amazon_massive_scenario config: hu split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 66.46940147948891 - type: f1 value: 65.91017343463396 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (hy) type: mteb/amazon_massive_scenario config: hy split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 59.49899125756556 - type: f1 value: 57.90333469917769 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (id) type: mteb/amazon_massive_scenario config: id split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 67.9219905850706 - type: f1 value: 67.23169403762938 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (is) type: mteb/amazon_massive_scenario config: is split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 56.486213853396094 - type: f1 value: 54.85282355583758 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (it) type: mteb/amazon_massive_scenario config: it split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 69.04169468728985 - type: f1 value: 68.83833333320462 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (ja) type: mteb/amazon_massive_scenario config: ja split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 73.88702084734365 - type: f1 value: 74.04474735232299 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (jv) type: mteb/amazon_massive_scenario config: jv split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 56.63416274377943 - type: f1 value: 55.11332211687954 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (ka) type: mteb/amazon_massive_scenario config: ka split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 52.23604572965702 - type: f1 value: 50.86529813991055 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (km) type: mteb/amazon_massive_scenario config: km split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 46.62407531943511 - type: f1 value: 43.63485467164535 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (kn) type: mteb/amazon_massive_scenario config: kn split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 59.15601882985878 - type: f1 value: 57.522837510959924 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (ko) type: mteb/amazon_massive_scenario config: ko split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 
69.84532616005382 - type: f1 value: 69.60021127179697 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (lv) type: mteb/amazon_massive_scenario config: lv split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 56.65770006724949 - type: f1 value: 55.84219135523227 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (ml) type: mteb/amazon_massive_scenario config: ml split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 66.53665097511768 - type: f1 value: 65.09087787792639 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (mn) type: mteb/amazon_massive_scenario config: mn split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 59.31405514458642 - type: f1 value: 58.06135303831491 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (ms) type: mteb/amazon_massive_scenario config: ms split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 64.88231338264964 - type: f1 value: 62.751099407787926 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (my) type: mteb/amazon_massive_scenario config: my split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 58.86012104909213 - type: f1 value: 56.29118323058282 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (nb) type: mteb/amazon_massive_scenario config: nb split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 67.37390719569602 - type: f1 value: 66.27922244885102 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (nl) type: mteb/amazon_massive_scenario config: nl split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 70.8675184936113 - type: f1 value: 70.22146529932019 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (pl) type: mteb/amazon_massive_scenario config: pl split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 68.2212508406187 - type: f1 value: 67.77454802056282 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (pt) type: mteb/amazon_massive_scenario config: pt split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 68.18090114324143 - type: f1 value: 68.03737625431621 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (ro) type: mteb/amazon_massive_scenario config: ro split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 64.65030262273034 - type: f1 value: 63.792945486912856 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (ru) type: mteb/amazon_massive_scenario config: ru split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 63.772749631087066 - type: f1 value: 63.4539101720024 - type: f1_weighted value: 62.778603897469566 - type: main_score value: 63.772749631087066 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (sl) type: mteb/amazon_massive_scenario config: sl split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 60.17821116341627 - type: f1 value: 59.3935969827171 - task: type: Classification dataset: name: MTEB 
MassiveScenarioClassification (sq) type: mteb/amazon_massive_scenario config: sq split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 62.86146603900471 - type: f1 value: 60.133692735032376 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (sv) type: mteb/amazon_massive_scenario config: sv split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 70.89441829186282 - type: f1 value: 70.03064076194089 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (sw) type: mteb/amazon_massive_scenario config: sw split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 58.15063887020847 - type: f1 value: 56.23326278499678 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (ta) type: mteb/amazon_massive_scenario config: ta split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 59.43846671149966 - type: f1 value: 57.70440450281974 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (te) type: mteb/amazon_massive_scenario config: te split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 60.8507061197041 - type: f1 value: 59.22916396061171 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (th) type: mteb/amazon_massive_scenario config: th split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 70.65568258238063 - type: f1 value: 69.90736239440633 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (tl) type: mteb/amazon_massive_scenario config: tl split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 60.8843308675185 - type: f1 value: 59.30332663713599 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (tr) type: mteb/amazon_massive_scenario config: tr split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 68.05312710154674 - type: f1 value: 67.44024062594775 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (ur) type: mteb/amazon_massive_scenario config: ur split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 62.111634162743776 - type: f1 value: 60.89083013084519 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (vi) type: mteb/amazon_massive_scenario config: vi split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 67.44115669132482 - type: f1 value: 67.92227541674552 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (zh-CN) type: mteb/amazon_massive_scenario config: zh-CN split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 74.4687289845326 - type: f1 value: 74.16376793486025 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (zh-TW) type: mteb/amazon_massive_scenario config: zh-TW split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 68.31876260928043 - type: f1 value: 68.5246745215607 - task: type: Clustering dataset: name: MTEB MedrxivClusteringP2P type: mteb/medrxiv-clustering-p2p config: default split: test revision: e7a26af6f3ae46b30dde8737f02c07b1505bcc73 metrics: - type: v_measure value: 30.90431696479766 - 
task: type: Clustering dataset: name: MTEB MedrxivClusteringS2S type: mteb/medrxiv-clustering-s2s config: default split: test revision: 35191c8c0dca72d8ff3efcd72aa802307d469663 metrics: - type: v_measure value: 27.259158476693774 - task: type: Reranking dataset: name: MTEB MindSmallReranking type: mteb/mind_small config: default split: test revision: 3bdac13927fdc888b903db93b2ffdbd90b295a69 metrics: - type: map value: 30.28445330838555 - type: mrr value: 31.15758529581164 - task: type: Retrieval dataset: name: MTEB NFCorpus type: nfcorpus config: default split: test revision: None metrics: - type: map_at_1 value: 5.353 - type: map_at_10 value: 11.565 - type: map_at_100 value: 14.097000000000001 - type: map_at_1000 value: 15.354999999999999 - type: map_at_3 value: 8.749 - type: map_at_5 value: 9.974 - type: mrr_at_1 value: 42.105 - type: mrr_at_10 value: 50.589 - type: mrr_at_100 value: 51.187000000000005 - type: mrr_at_1000 value: 51.233 - type: mrr_at_3 value: 48.246 - type: mrr_at_5 value: 49.546 - type: ndcg_at_1 value: 40.402 - type: ndcg_at_10 value: 31.009999999999998 - type: ndcg_at_100 value: 28.026 - type: ndcg_at_1000 value: 36.905 - type: ndcg_at_3 value: 35.983 - type: ndcg_at_5 value: 33.764 - type: precision_at_1 value: 42.105 - type: precision_at_10 value: 22.786 - type: precision_at_100 value: 6.916 - type: precision_at_1000 value: 1.981 - type: precision_at_3 value: 33.333 - type: precision_at_5 value: 28.731 - type: recall_at_1 value: 5.353 - type: recall_at_10 value: 15.039 - type: recall_at_100 value: 27.348 - type: recall_at_1000 value: 59.453 - type: recall_at_3 value: 9.792 - type: recall_at_5 value: 11.882 - task: type: Retrieval dataset: name: MTEB NQ type: nq config: default split: test revision: None metrics: - type: map_at_1 value: 33.852 - type: map_at_10 value: 48.924 - type: map_at_100 value: 49.854 - type: map_at_1000 value: 49.886 - type: map_at_3 value: 44.9 - type: map_at_5 value: 47.387 - type: mrr_at_1 value: 38.035999999999994 - type: mrr_at_10 value: 51.644 - type: mrr_at_100 value: 52.339 - type: mrr_at_1000 value: 52.35999999999999 - type: mrr_at_3 value: 48.421 - type: mrr_at_5 value: 50.468999999999994 - type: ndcg_at_1 value: 38.007000000000005 - type: ndcg_at_10 value: 56.293000000000006 - type: ndcg_at_100 value: 60.167 - type: ndcg_at_1000 value: 60.916000000000004 - type: ndcg_at_3 value: 48.903999999999996 - type: ndcg_at_5 value: 52.978 - type: precision_at_1 value: 38.007000000000005 - type: precision_at_10 value: 9.041 - type: precision_at_100 value: 1.1199999999999999 - type: precision_at_1000 value: 0.11900000000000001 - type: precision_at_3 value: 22.084 - type: precision_at_5 value: 15.608 - type: recall_at_1 value: 33.852 - type: recall_at_10 value: 75.893 - type: recall_at_100 value: 92.589 - type: recall_at_1000 value: 98.153 - type: recall_at_3 value: 56.969 - type: recall_at_5 value: 66.283 - task: type: Retrieval dataset: name: MTEB QuoraRetrieval type: quora config: default split: test revision: None metrics: - type: map_at_1 value: 69.174 - type: map_at_10 value: 82.891 - type: map_at_100 value: 83.545 - type: map_at_1000 value: 83.56700000000001 - type: map_at_3 value: 79.944 - type: map_at_5 value: 81.812 - type: mrr_at_1 value: 79.67999999999999 - type: mrr_at_10 value: 86.279 - type: mrr_at_100 value: 86.39 - type: mrr_at_1000 value: 86.392 - type: mrr_at_3 value: 85.21 - type: mrr_at_5 value: 85.92999999999999 - type: ndcg_at_1 value: 79.69000000000001 - type: ndcg_at_10 value: 86.929 - type: ndcg_at_100 value: 88.266 - 
type: ndcg_at_1000 value: 88.428 - type: ndcg_at_3 value: 83.899 - type: ndcg_at_5 value: 85.56700000000001 - type: precision_at_1 value: 79.69000000000001 - type: precision_at_10 value: 13.161000000000001 - type: precision_at_100 value: 1.513 - type: precision_at_1000 value: 0.156 - type: precision_at_3 value: 36.603 - type: precision_at_5 value: 24.138 - type: recall_at_1 value: 69.174 - type: recall_at_10 value: 94.529 - type: recall_at_100 value: 99.15 - type: recall_at_1000 value: 99.925 - type: recall_at_3 value: 85.86200000000001 - type: recall_at_5 value: 90.501 - task: type: Clustering dataset: name: MTEB RedditClustering type: mteb/reddit-clustering config: default split: test revision: 24640382cdbf8abc73003fb0fa6d111a705499eb metrics: - type: v_measure value: 39.13064340585255 - task: type: Clustering dataset: name: MTEB RedditClusteringP2P type: mteb/reddit-clustering-p2p config: default split: test revision: 282350215ef01743dc01b456c7f5241fa8937f16 metrics: - type: v_measure value: 58.97884249325877 - task: type: Retrieval dataset: name: MTEB SCIDOCS type: scidocs config: default split: test revision: None metrics: - type: map_at_1 value: 3.4680000000000004 - type: map_at_10 value: 7.865 - type: map_at_100 value: 9.332 - type: map_at_1000 value: 9.587 - type: map_at_3 value: 5.800000000000001 - type: map_at_5 value: 6.8790000000000004 - type: mrr_at_1 value: 17.0 - type: mrr_at_10 value: 25.629 - type: mrr_at_100 value: 26.806 - type: mrr_at_1000 value: 26.889000000000003 - type: mrr_at_3 value: 22.8 - type: mrr_at_5 value: 24.26 - type: ndcg_at_1 value: 17.0 - type: ndcg_at_10 value: 13.895 - type: ndcg_at_100 value: 20.491999999999997 - type: ndcg_at_1000 value: 25.759999999999998 - type: ndcg_at_3 value: 13.347999999999999 - type: ndcg_at_5 value: 11.61 - type: precision_at_1 value: 17.0 - type: precision_at_10 value: 7.090000000000001 - type: precision_at_100 value: 1.669 - type: precision_at_1000 value: 0.294 - type: precision_at_3 value: 12.3 - type: precision_at_5 value: 10.02 - type: recall_at_1 value: 3.4680000000000004 - type: recall_at_10 value: 14.363000000000001 - type: recall_at_100 value: 33.875 - type: recall_at_1000 value: 59.711999999999996 - type: recall_at_3 value: 7.483 - type: recall_at_5 value: 10.173 - task: type: STS dataset: name: MTEB SICK-R type: mteb/sickr-sts config: default split: test revision: a6ea5a8cab320b040a23452cc28066d9beae2cee metrics: - type: cos_sim_pearson value: 83.04084311714061 - type: cos_sim_spearman value: 77.51342467443078 - type: euclidean_pearson value: 80.0321166028479 - type: euclidean_spearman value: 77.29249114733226 - type: manhattan_pearson value: 80.03105964262431 - type: manhattan_spearman value: 77.22373689514794 - task: type: STS dataset: name: MTEB STS12 type: mteb/sts12-sts config: default split: test revision: a0d554a64d88156834ff5ae9920b964011b16384 metrics: - type: cos_sim_pearson value: 84.1680158034387 - type: cos_sim_spearman value: 76.55983344071117 - type: euclidean_pearson value: 79.75266678300143 - type: euclidean_spearman value: 75.34516823467025 - type: manhattan_pearson value: 79.75959151517357 - type: manhattan_spearman value: 75.42330344141912 - task: type: STS dataset: name: MTEB STS13 type: mteb/sts13-sts config: default split: test revision: 7e90230a92c190f1bf69ae9002b8cea547a64cca metrics: - type: cos_sim_pearson value: 76.48898993209346 - type: cos_sim_spearman value: 76.96954120323366 - type: euclidean_pearson value: 76.94139109279668 - type: euclidean_spearman value: 76.85860283201711 - type: 
manhattan_pearson value: 76.6944095091912 - type: manhattan_spearman value: 76.61096912972553 - task: type: STS dataset: name: MTEB STS14 type: mteb/sts14-sts config: default split: test revision: 6031580fec1f6af667f0bd2da0a551cf4f0b2375 metrics: - type: cos_sim_pearson value: 77.85082366246944 - type: cos_sim_spearman value: 75.52053350101731 - type: euclidean_pearson value: 77.1165845070926 - type: euclidean_spearman value: 75.31216065884388 - type: manhattan_pearson value: 77.06193941833494 - type: manhattan_spearman value: 75.31003701700112 - task: type: STS dataset: name: MTEB STS15 type: mteb/sts15-sts config: default split: test revision: ae752c7c21bf194d8b67fd573edf7ae58183cbe3 metrics: - type: cos_sim_pearson value: 86.36305246526497 - type: cos_sim_spearman value: 87.11704613927415 - type: euclidean_pearson value: 86.04199125810939 - type: euclidean_spearman value: 86.51117572414263 - type: manhattan_pearson value: 86.0805106816633 - type: manhattan_spearman value: 86.52798366512229 - task: type: STS dataset: name: MTEB STS16 type: mteb/sts16-sts config: default split: test revision: 4d8694f8f0e0100860b497b999b3dbed754a0513 metrics: - type: cos_sim_pearson value: 82.18536255599724 - type: cos_sim_spearman value: 83.63377151025418 - type: euclidean_pearson value: 83.24657467993141 - type: euclidean_spearman value: 84.02751481993825 - type: manhattan_pearson value: 83.11941806582371 - type: manhattan_spearman value: 83.84251281019304 - task: type: STS dataset: name: MTEB STS17 (ko-ko) type: mteb/sts17-crosslingual-sts config: ko-ko split: test revision: af5e6fb845001ecf41f4c1e033ce921939a2a68d metrics: - type: cos_sim_pearson value: 78.95816528475514 - type: cos_sim_spearman value: 78.86607380120462 - type: euclidean_pearson value: 78.51268699230545 - type: euclidean_spearman value: 79.11649316502229 - type: manhattan_pearson value: 78.32367302808157 - type: manhattan_spearman value: 78.90277699624637 - task: type: STS dataset: name: MTEB STS17 (ar-ar) type: mteb/sts17-crosslingual-sts config: ar-ar split: test revision: af5e6fb845001ecf41f4c1e033ce921939a2a68d metrics: - type: cos_sim_pearson value: 72.89126914997624 - type: cos_sim_spearman value: 73.0296921832678 - type: euclidean_pearson value: 71.50385903677738 - type: euclidean_spearman value: 73.13368899716289 - type: manhattan_pearson value: 71.47421463379519 - type: manhattan_spearman value: 73.03383242946575 - task: type: STS dataset: name: MTEB STS17 (en-ar) type: mteb/sts17-crosslingual-sts config: en-ar split: test revision: af5e6fb845001ecf41f4c1e033ce921939a2a68d metrics: - type: cos_sim_pearson value: 59.22923684492637 - type: cos_sim_spearman value: 57.41013211368396 - type: euclidean_pearson value: 61.21107388080905 - type: euclidean_spearman value: 60.07620768697254 - type: manhattan_pearson value: 59.60157142786555 - type: manhattan_spearman value: 59.14069604103739 - task: type: STS dataset: name: MTEB STS17 (en-de) type: mteb/sts17-crosslingual-sts config: en-de split: test revision: af5e6fb845001ecf41f4c1e033ce921939a2a68d metrics: - type: cos_sim_pearson value: 76.24345978774299 - type: cos_sim_spearman value: 77.24225743830719 - type: euclidean_pearson value: 76.66226095469165 - type: euclidean_spearman value: 77.60708820493146 - type: manhattan_pearson value: 76.05303324760429 - type: manhattan_spearman value: 76.96353149912348 - task: type: STS dataset: name: MTEB STS17 (en-en) type: mteb/sts17-crosslingual-sts config: en-en split: test revision: af5e6fb845001ecf41f4c1e033ce921939a2a68d metrics: - type: 
cos_sim_pearson value: 85.50879160160852 - type: cos_sim_spearman value: 86.43594662965224 - type: euclidean_pearson value: 86.06846012826577 - type: euclidean_spearman value: 86.02041395794136 - type: manhattan_pearson value: 86.10916255616904 - type: manhattan_spearman value: 86.07346068198953 - task: type: STS dataset: name: MTEB STS17 (en-tr) type: mteb/sts17-crosslingual-sts config: en-tr split: test revision: af5e6fb845001ecf41f4c1e033ce921939a2a68d metrics: - type: cos_sim_pearson value: 58.39803698977196 - type: cos_sim_spearman value: 55.96910950423142 - type: euclidean_pearson value: 58.17941175613059 - type: euclidean_spearman value: 55.03019330522745 - type: manhattan_pearson value: 57.333358138183286 - type: manhattan_spearman value: 54.04614023149965 - task: type: STS dataset: name: MTEB STS17 (es-en) type: mteb/sts17-crosslingual-sts config: es-en split: test revision: af5e6fb845001ecf41f4c1e033ce921939a2a68d metrics: - type: cos_sim_pearson value: 70.98304089637197 - type: cos_sim_spearman value: 72.44071656215888 - type: euclidean_pearson value: 72.19224359033983 - type: euclidean_spearman value: 73.89871188913025 - type: manhattan_pearson value: 71.21098311547406 - type: manhattan_spearman value: 72.93405764824821 - task: type: STS dataset: name: MTEB STS17 (es-es) type: mteb/sts17-crosslingual-sts config: es-es split: test revision: af5e6fb845001ecf41f4c1e033ce921939a2a68d metrics: - type: cos_sim_pearson value: 85.99792397466308 - type: cos_sim_spearman value: 84.83824377879495 - type: euclidean_pearson value: 85.70043288694438 - type: euclidean_spearman value: 84.70627558703686 - type: manhattan_pearson value: 85.89570850150801 - type: manhattan_spearman value: 84.95806105313007 - task: type: STS dataset: name: MTEB STS17 (fr-en) type: mteb/sts17-crosslingual-sts config: fr-en split: test revision: af5e6fb845001ecf41f4c1e033ce921939a2a68d metrics: - type: cos_sim_pearson value: 72.21850322994712 - type: cos_sim_spearman value: 72.28669398117248 - type: euclidean_pearson value: 73.40082510412948 - type: euclidean_spearman value: 73.0326539281865 - type: manhattan_pearson value: 71.8659633964841 - type: manhattan_spearman value: 71.57817425823303 - task: type: STS dataset: name: MTEB STS17 (it-en) type: mteb/sts17-crosslingual-sts config: it-en split: test revision: af5e6fb845001ecf41f4c1e033ce921939a2a68d metrics: - type: cos_sim_pearson value: 75.80921368595645 - type: cos_sim_spearman value: 77.33209091229315 - type: euclidean_pearson value: 76.53159540154829 - type: euclidean_spearman value: 78.17960842810093 - type: manhattan_pearson value: 76.13530186637601 - type: manhattan_spearman value: 78.00701437666875 - task: type: STS dataset: name: MTEB STS17 (nl-en) type: mteb/sts17-crosslingual-sts config: nl-en split: test revision: af5e6fb845001ecf41f4c1e033ce921939a2a68d metrics: - type: cos_sim_pearson value: 74.74980608267349 - type: cos_sim_spearman value: 75.37597374318821 - type: euclidean_pearson value: 74.90506081911661 - type: euclidean_spearman value: 75.30151613124521 - type: manhattan_pearson value: 74.62642745918002 - type: manhattan_spearman value: 75.18619716592303 - task: type: STS dataset: name: MTEB STS22 (en) type: mteb/sts22-crosslingual-sts config: en split: test revision: 6d1ba47164174a496b7fa5d3569dae26a6813b80 metrics: - type: cos_sim_pearson value: 59.632662289205584 - type: cos_sim_spearman value: 60.938543391610914 - type: euclidean_pearson value: 62.113200529767056 - type: euclidean_spearman value: 61.410312633261164 - type: 
manhattan_pearson value: 61.75494698945686 - type: manhattan_spearman value: 60.92726195322362 - task: type: STS dataset: name: MTEB STS22 (de) type: mteb/sts22-crosslingual-sts config: de split: test revision: 6d1ba47164174a496b7fa5d3569dae26a6813b80 metrics: - type: cos_sim_pearson value: 45.283470551557244 - type: cos_sim_spearman value: 53.44833015864201 - type: euclidean_pearson value: 41.17892011120893 - type: euclidean_spearman value: 53.81441383126767 - type: manhattan_pearson value: 41.17482200420659 - type: manhattan_spearman value: 53.82180269276363 - task: type: STS dataset: name: MTEB STS22 (es) type: mteb/sts22-crosslingual-sts config: es split: test revision: 6d1ba47164174a496b7fa5d3569dae26a6813b80 metrics: - type: cos_sim_pearson value: 60.5069165306236 - type: cos_sim_spearman value: 66.87803259033826 - type: euclidean_pearson value: 63.5428979418236 - type: euclidean_spearman value: 66.9293576586897 - type: manhattan_pearson value: 63.59789526178922 - type: manhattan_spearman value: 66.86555009875066 - task: type: STS dataset: name: MTEB STS22 (pl) type: mteb/sts22-crosslingual-sts config: pl split: test revision: 6d1ba47164174a496b7fa5d3569dae26a6813b80 metrics: - type: cos_sim_pearson value: 28.23026196280264 - type: cos_sim_spearman value: 35.79397812652861 - type: euclidean_pearson value: 17.828102102767353 - type: euclidean_spearman value: 35.721501145568894 - type: manhattan_pearson value: 17.77134274219677 - type: manhattan_spearman value: 35.98107902846267 - task: type: STS dataset: name: MTEB STS22 (tr) type: mteb/sts22-crosslingual-sts config: tr split: test revision: 6d1ba47164174a496b7fa5d3569dae26a6813b80 metrics: - type: cos_sim_pearson value: 56.51946541393812 - type: cos_sim_spearman value: 63.714686006214485 - type: euclidean_pearson value: 58.32104651305898 - type: euclidean_spearman value: 62.237110895702216 - type: manhattan_pearson value: 58.579416468759185 - type: manhattan_spearman value: 62.459738981727 - task: type: STS dataset: name: MTEB STS22 (ar) type: mteb/sts22-crosslingual-sts config: ar split: test revision: 6d1ba47164174a496b7fa5d3569dae26a6813b80 metrics: - type: cos_sim_pearson value: 48.76009839569795 - type: cos_sim_spearman value: 56.65188431953149 - type: euclidean_pearson value: 50.997682160915595 - type: euclidean_spearman value: 55.99910008818135 - type: manhattan_pearson value: 50.76220659606342 - type: manhattan_spearman value: 55.517347595391456 - task: type: STS dataset: name: MTEB STS22 (ru) type: mteb/sts22-crosslingual-sts config: ru split: test revision: 6d1ba47164174a496b7fa5d3569dae26a6813b80 metrics: - type: cosine_pearson value: 50.724322379215934 - type: cosine_spearman value: 59.90449732164651 - type: euclidean_pearson value: 50.227545226784024 - type: euclidean_spearman value: 59.898906527601085 - type: main_score value: 59.90449732164651 - type: manhattan_pearson value: 50.21762139819405 - type: manhattan_spearman value: 59.761039813759 - type: pearson value: 50.724322379215934 - type: spearman value: 59.90449732164651 - task: type: STS dataset: name: MTEB STS22 (zh) type: mteb/sts22-crosslingual-sts config: zh split: test revision: 6d1ba47164174a496b7fa5d3569dae26a6813b80 metrics: - type: cos_sim_pearson value: 54.717524559088005 - type: cos_sim_spearman value: 66.83570886252286 - type: euclidean_pearson value: 58.41338625505467 - type: euclidean_spearman value: 66.68991427704938 - type: manhattan_pearson value: 58.78638572916807 - type: manhattan_spearman value: 66.58684161046335 - task: type: STS dataset: 
name: MTEB STS22 (fr) type: mteb/sts22-crosslingual-sts config: fr split: test revision: 6d1ba47164174a496b7fa5d3569dae26a6813b80 metrics: - type: cos_sim_pearson value: 73.2962042954962 - type: cos_sim_spearman value: 76.58255504852025 - type: euclidean_pearson value: 75.70983192778257 - type: euclidean_spearman value: 77.4547684870542 - type: manhattan_pearson value: 75.75565853870485 - type: manhattan_spearman value: 76.90208974949428 - task: type: STS dataset: name: MTEB STS22 (de-en) type: mteb/sts22-crosslingual-sts config: de-en split: test revision: 6d1ba47164174a496b7fa5d3569dae26a6813b80 metrics: - type: cos_sim_pearson value: 54.47396266924846 - type: cos_sim_spearman value: 56.492267162048606 - type: euclidean_pearson value: 55.998505203070195 - type: euclidean_spearman value: 56.46447012960222 - type: manhattan_pearson value: 54.873172394430995 - type: manhattan_spearman value: 56.58111534551218 - task: type: STS dataset: name: MTEB STS22 (es-en) type: mteb/sts22-crosslingual-sts config: es-en split: test revision: 6d1ba47164174a496b7fa5d3569dae26a6813b80 metrics: - type: cos_sim_pearson value: 69.87177267688686 - type: cos_sim_spearman value: 74.57160943395763 - type: euclidean_pearson value: 70.88330406826788 - type: euclidean_spearman value: 74.29767636038422 - type: manhattan_pearson value: 71.38245248369536 - type: manhattan_spearman value: 74.53102232732175 - task: type: STS dataset: name: MTEB STS22 (it) type: mteb/sts22-crosslingual-sts config: it split: test revision: 6d1ba47164174a496b7fa5d3569dae26a6813b80 metrics: - type: cos_sim_pearson value: 72.80225656959544 - type: cos_sim_spearman value: 76.52646173725735 - type: euclidean_pearson value: 73.95710720200799 - type: euclidean_spearman value: 76.54040031984111 - type: manhattan_pearson value: 73.89679971946774 - type: manhattan_spearman value: 76.60886958161574 - task: type: STS dataset: name: MTEB STS22 (pl-en) type: mteb/sts22-crosslingual-sts config: pl-en split: test revision: 6d1ba47164174a496b7fa5d3569dae26a6813b80 metrics: - type: cos_sim_pearson value: 70.70844249898789 - type: cos_sim_spearman value: 72.68571783670241 - type: euclidean_pearson value: 72.38800772441031 - type: euclidean_spearman value: 72.86804422703312 - type: manhattan_pearson value: 71.29840508203515 - type: manhattan_spearman value: 71.86264441749513 - task: type: STS dataset: name: MTEB STS22 (zh-en) type: mteb/sts22-crosslingual-sts config: zh-en split: test revision: 6d1ba47164174a496b7fa5d3569dae26a6813b80 metrics: - type: cos_sim_pearson value: 58.647478923935694 - type: cos_sim_spearman value: 63.74453623540931 - type: euclidean_pearson value: 59.60138032437505 - type: euclidean_spearman value: 63.947930832166065 - type: manhattan_pearson value: 58.59735509491861 - type: manhattan_spearman value: 62.082503844627404 - task: type: STS dataset: name: MTEB STS22 (es-it) type: mteb/sts22-crosslingual-sts config: es-it split: test revision: 6d1ba47164174a496b7fa5d3569dae26a6813b80 metrics: - type: cos_sim_pearson value: 65.8722516867162 - type: cos_sim_spearman value: 71.81208592523012 - type: euclidean_pearson value: 67.95315252165956 - type: euclidean_spearman value: 73.00749822046009 - type: manhattan_pearson value: 68.07884688638924 - type: manhattan_spearman value: 72.34210325803069 - task: type: STS dataset: name: MTEB STS22 (de-fr) type: mteb/sts22-crosslingual-sts config: de-fr split: test revision: 6d1ba47164174a496b7fa5d3569dae26a6813b80 metrics: - type: cos_sim_pearson value: 54.5405814240949 - type: cos_sim_spearman value: 
60.56838649023775 - type: euclidean_pearson value: 53.011731611314104 - type: euclidean_spearman value: 58.533194841668426 - type: manhattan_pearson value: 53.623067729338494 - type: manhattan_spearman value: 58.018756154446926 - task: type: STS dataset: name: MTEB STS22 (de-pl) type: mteb/sts22-crosslingual-sts config: de-pl split: test revision: 6d1ba47164174a496b7fa5d3569dae26a6813b80 metrics: - type: cos_sim_pearson value: 13.611046866216112 - type: cos_sim_spearman value: 28.238192909158492 - type: euclidean_pearson value: 22.16189199885129 - type: euclidean_spearman value: 35.012895679076564 - type: manhattan_pearson value: 21.969771178698387 - type: manhattan_spearman value: 32.456985088607475 - task: type: STS dataset: name: MTEB STS22 (fr-pl) type: mteb/sts22-crosslingual-sts config: fr-pl split: test revision: 6d1ba47164174a496b7fa5d3569dae26a6813b80 metrics: - type: cos_sim_pearson value: 74.58077407011655 - type: cos_sim_spearman value: 84.51542547285167 - type: euclidean_pearson value: 74.64613843596234 - type: euclidean_spearman value: 84.51542547285167 - type: manhattan_pearson value: 75.15335973101396 - type: manhattan_spearman value: 84.51542547285167 - task: type: STS dataset: name: MTEB STSBenchmark type: mteb/stsbenchmark-sts config: default split: test revision: b0fddb56ed78048fa8b90373c8a3cfc37b684831 metrics: - type: cos_sim_pearson value: 82.0739825531578 - type: cos_sim_spearman value: 84.01057479311115 - type: euclidean_pearson value: 83.85453227433344 - type: euclidean_spearman value: 84.01630226898655 - type: manhattan_pearson value: 83.75323603028978 - type: manhattan_spearman value: 83.89677983727685 - task: type: Reranking dataset: name: MTEB SciDocsRR type: mteb/scidocs-reranking config: default split: test revision: d3c5e1fc0b855ab6097bf1cda04dd73947d7caab metrics: - type: map value: 78.12945623123957 - type: mrr value: 93.87738713719106 - task: type: Retrieval dataset: name: MTEB SciFact type: scifact config: default split: test revision: None metrics: - type: map_at_1 value: 52.983000000000004 - type: map_at_10 value: 62.946000000000005 - type: map_at_100 value: 63.514 - type: map_at_1000 value: 63.554 - type: map_at_3 value: 60.183 - type: map_at_5 value: 61.672000000000004 - type: mrr_at_1 value: 55.667 - type: mrr_at_10 value: 64.522 - type: mrr_at_100 value: 64.957 - type: mrr_at_1000 value: 64.995 - type: mrr_at_3 value: 62.388999999999996 - type: mrr_at_5 value: 63.639 - type: ndcg_at_1 value: 55.667 - type: ndcg_at_10 value: 67.704 - type: ndcg_at_100 value: 70.299 - type: ndcg_at_1000 value: 71.241 - type: ndcg_at_3 value: 62.866 - type: ndcg_at_5 value: 65.16999999999999 - type: precision_at_1 value: 55.667 - type: precision_at_10 value: 9.033 - type: precision_at_100 value: 1.053 - type: precision_at_1000 value: 0.11299999999999999 - type: precision_at_3 value: 24.444 - type: precision_at_5 value: 16.133 - type: recall_at_1 value: 52.983000000000004 - type: recall_at_10 value: 80.656 - type: recall_at_100 value: 92.5 - type: recall_at_1000 value: 99.667 - type: recall_at_3 value: 67.744 - type: recall_at_5 value: 73.433 - task: type: PairClassification dataset: name: MTEB SprintDuplicateQuestions type: mteb/sprintduplicatequestions-pairclassification config: default split: test revision: d66bd1f72af766a5cc4b0ca5e00c162f89e8cc46 metrics: - type: cos_sim_accuracy value: 99.72772277227723 - type: cos_sim_ap value: 92.17845897992215 - type: cos_sim_f1 value: 85.9746835443038 - type: cos_sim_precision value: 87.07692307692308 - type: cos_sim_recall 
value: 84.89999999999999 - type: dot_accuracy value: 99.3039603960396 - type: dot_ap value: 60.70244020124878 - type: dot_f1 value: 59.92742353551063 - type: dot_precision value: 62.21743810548978 - type: dot_recall value: 57.8 - type: euclidean_accuracy value: 99.71683168316832 - type: euclidean_ap value: 91.53997039964659 - type: euclidean_f1 value: 84.88372093023257 - type: euclidean_precision value: 90.02242152466367 - type: euclidean_recall value: 80.30000000000001 - type: manhattan_accuracy value: 99.72376237623763 - type: manhattan_ap value: 91.80756777790289 - type: manhattan_f1 value: 85.48468106479157 - type: manhattan_precision value: 85.8728557013118 - type: manhattan_recall value: 85.1 - type: max_accuracy value: 99.72772277227723 - type: max_ap value: 92.17845897992215 - type: max_f1 value: 85.9746835443038 - task: type: Clustering dataset: name: MTEB StackExchangeClustering type: mteb/stackexchange-clustering config: default split: test revision: 6cbc1f7b2bc0622f2e39d2c77fa502909748c259 metrics: - type: v_measure value: 53.52464042600003 - task: type: Clustering dataset: name: MTEB StackExchangeClusteringP2P type: mteb/stackexchange-clustering-p2p config: default split: test revision: 815ca46b2622cec33ccafc3735d572c266efdb44 metrics: - type: v_measure value: 32.071631948736 - task: type: Reranking dataset: name: MTEB StackOverflowDupQuestions type: mteb/stackoverflowdupquestions-reranking config: default split: test revision: e185fbe320c72810689fc5848eb6114e1ef5ec69 metrics: - type: map value: 49.19552407604654 - type: mrr value: 49.95269130379425 - task: type: Summarization dataset: name: MTEB SummEval type: mteb/summeval config: default split: test revision: cda12ad7615edc362dbf25a00fdd61d3b1eaf93c metrics: - type: cos_sim_pearson value: 29.345293033095427 - type: cos_sim_spearman value: 29.976931423258403 - type: dot_pearson value: 27.047078008958408 - type: dot_spearman value: 27.75894368380218 - task: type: Retrieval dataset: name: MTEB TRECCOVID type: trec-covid config: default split: test revision: None metrics: - type: map_at_1 value: 0.22 - type: map_at_10 value: 1.706 - type: map_at_100 value: 9.634 - type: map_at_1000 value: 23.665 - type: map_at_3 value: 0.5950000000000001 - type: map_at_5 value: 0.95 - type: mrr_at_1 value: 86.0 - type: mrr_at_10 value: 91.8 - type: mrr_at_100 value: 91.8 - type: mrr_at_1000 value: 91.8 - type: mrr_at_3 value: 91.0 - type: mrr_at_5 value: 91.8 - type: ndcg_at_1 value: 80.0 - type: ndcg_at_10 value: 72.573 - type: ndcg_at_100 value: 53.954 - type: ndcg_at_1000 value: 47.760999999999996 - type: ndcg_at_3 value: 76.173 - type: ndcg_at_5 value: 75.264 - type: precision_at_1 value: 86.0 - type: precision_at_10 value: 76.4 - type: precision_at_100 value: 55.50000000000001 - type: precision_at_1000 value: 21.802 - type: precision_at_3 value: 81.333 - type: precision_at_5 value: 80.4 - type: recall_at_1 value: 0.22 - type: recall_at_10 value: 1.925 - type: recall_at_100 value: 12.762 - type: recall_at_1000 value: 44.946000000000005 - type: recall_at_3 value: 0.634 - type: recall_at_5 value: 1.051 - task: type: BitextMining dataset: name: MTEB Tatoeba (sqi-eng) type: mteb/tatoeba-bitext-mining config: sqi-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 91.0 - type: f1 value: 88.55666666666666 - type: precision value: 87.46166666666667 - type: recall value: 91.0 - task: type: BitextMining dataset: name: MTEB Tatoeba (fry-eng) type: mteb/tatoeba-bitext-mining config: fry-eng split: test 
revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 57.22543352601156 - type: f1 value: 51.03220478943021 - type: precision value: 48.8150289017341 - type: recall value: 57.22543352601156 - task: type: BitextMining dataset: name: MTEB Tatoeba (kur-eng) type: mteb/tatoeba-bitext-mining config: kur-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 46.58536585365854 - type: f1 value: 39.66870798578116 - type: precision value: 37.416085946573745 - type: recall value: 46.58536585365854 - task: type: BitextMining dataset: name: MTEB Tatoeba (tur-eng) type: mteb/tatoeba-bitext-mining config: tur-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 89.7 - type: f1 value: 86.77999999999999 - type: precision value: 85.45333333333332 - type: recall value: 89.7 - task: type: BitextMining dataset: name: MTEB Tatoeba (deu-eng) type: mteb/tatoeba-bitext-mining config: deu-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 97.39999999999999 - type: f1 value: 96.58333333333331 - type: precision value: 96.2 - type: recall value: 97.39999999999999 - task: type: BitextMining dataset: name: MTEB Tatoeba (nld-eng) type: mteb/tatoeba-bitext-mining config: nld-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 92.4 - type: f1 value: 90.3 - type: precision value: 89.31666666666668 - type: recall value: 92.4 - task: type: BitextMining dataset: name: MTEB Tatoeba (ron-eng) type: mteb/tatoeba-bitext-mining config: ron-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 86.9 - type: f1 value: 83.67190476190476 - type: precision value: 82.23333333333332 - type: recall value: 86.9 - task: type: BitextMining dataset: name: MTEB Tatoeba (ang-eng) type: mteb/tatoeba-bitext-mining config: ang-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 50.0 - type: f1 value: 42.23229092632078 - type: precision value: 39.851634683724235 - type: recall value: 50.0 - task: type: BitextMining dataset: name: MTEB Tatoeba (ido-eng) type: mteb/tatoeba-bitext-mining config: ido-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 76.3 - type: f1 value: 70.86190476190477 - type: precision value: 68.68777777777777 - type: recall value: 76.3 - task: type: BitextMining dataset: name: MTEB Tatoeba (jav-eng) type: mteb/tatoeba-bitext-mining config: jav-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 57.073170731707314 - type: f1 value: 50.658958927251604 - type: precision value: 48.26480836236933 - type: recall value: 57.073170731707314 - task: type: BitextMining dataset: name: MTEB Tatoeba (isl-eng) type: mteb/tatoeba-bitext-mining config: isl-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 68.2 - type: f1 value: 62.156507936507936 - type: precision value: 59.84964285714286 - type: recall value: 68.2 - task: type: BitextMining dataset: name: MTEB Tatoeba (slv-eng) type: mteb/tatoeba-bitext-mining config: slv-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 77.52126366950182 - type: f1 value: 72.8496210148701 - type: precision value: 70.92171498003819 - type: recall value: 77.52126366950182 - task: type: BitextMining dataset: name: 
MTEB Tatoeba (cym-eng) type: mteb/tatoeba-bitext-mining config: cym-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 70.78260869565217 - type: f1 value: 65.32422360248447 - type: precision value: 63.063067367415194 - type: recall value: 70.78260869565217 - task: type: BitextMining dataset: name: MTEB Tatoeba (kaz-eng) type: mteb/tatoeba-bitext-mining config: kaz-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 78.43478260869566 - type: f1 value: 73.02608695652172 - type: precision value: 70.63768115942028 - type: recall value: 78.43478260869566 - task: type: BitextMining dataset: name: MTEB Tatoeba (est-eng) type: mteb/tatoeba-bitext-mining config: est-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 60.9 - type: f1 value: 55.309753694581275 - type: precision value: 53.130476190476195 - type: recall value: 60.9 - task: type: BitextMining dataset: name: MTEB Tatoeba (heb-eng) type: mteb/tatoeba-bitext-mining config: heb-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 72.89999999999999 - type: f1 value: 67.92023809523809 - type: precision value: 65.82595238095237 - type: recall value: 72.89999999999999 - task: type: BitextMining dataset: name: MTEB Tatoeba (gla-eng) type: mteb/tatoeba-bitext-mining config: gla-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 46.80337756332931 - type: f1 value: 39.42174900558496 - type: precision value: 36.97101116280851 - type: recall value: 46.80337756332931 - task: type: BitextMining dataset: name: MTEB Tatoeba (mar-eng) type: mteb/tatoeba-bitext-mining config: mar-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 89.8 - type: f1 value: 86.79 - type: precision value: 85.375 - type: recall value: 89.8 - task: type: BitextMining dataset: name: MTEB Tatoeba (lat-eng) type: mteb/tatoeba-bitext-mining config: lat-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 47.199999999999996 - type: f1 value: 39.95484348984349 - type: precision value: 37.561071428571424 - type: recall value: 47.199999999999996 - task: type: BitextMining dataset: name: MTEB Tatoeba (bel-eng) type: mteb/tatoeba-bitext-mining config: bel-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 87.8 - type: f1 value: 84.68190476190475 - type: precision value: 83.275 - type: recall value: 87.8 - task: type: BitextMining dataset: name: MTEB Tatoeba (pms-eng) type: mteb/tatoeba-bitext-mining config: pms-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 48.76190476190476 - type: f1 value: 42.14965986394558 - type: precision value: 39.96743626743626 - type: recall value: 48.76190476190476 - task: type: BitextMining dataset: name: MTEB Tatoeba (gle-eng) type: mteb/tatoeba-bitext-mining config: gle-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 66.10000000000001 - type: f1 value: 59.58580086580086 - type: precision value: 57.150238095238095 - type: recall value: 66.10000000000001 - task: type: BitextMining dataset: name: MTEB Tatoeba (pes-eng) type: mteb/tatoeba-bitext-mining config: pes-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 87.3 - type: f1 
value: 84.0 - type: precision value: 82.48666666666666 - type: recall value: 87.3 - task: type: BitextMining dataset: name: MTEB Tatoeba (nob-eng) type: mteb/tatoeba-bitext-mining config: nob-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 90.4 - type: f1 value: 87.79523809523809 - type: precision value: 86.6 - type: recall value: 90.4 - task: type: BitextMining dataset: name: MTEB Tatoeba (bul-eng) type: mteb/tatoeba-bitext-mining config: bul-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 87.0 - type: f1 value: 83.81 - type: precision value: 82.36666666666666 - type: recall value: 87.0 - task: type: BitextMining dataset: name: MTEB Tatoeba (cbk-eng) type: mteb/tatoeba-bitext-mining config: cbk-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 63.9 - type: f1 value: 57.76533189033189 - type: precision value: 55.50595238095239 - type: recall value: 63.9 - task: type: BitextMining dataset: name: MTEB Tatoeba (hun-eng) type: mteb/tatoeba-bitext-mining config: hun-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 76.1 - type: f1 value: 71.83690476190478 - type: precision value: 70.04928571428573 - type: recall value: 76.1 - task: type: BitextMining dataset: name: MTEB Tatoeba (uig-eng) type: mteb/tatoeba-bitext-mining config: uig-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 66.3 - type: f1 value: 59.32626984126984 - type: precision value: 56.62535714285713 - type: recall value: 66.3 - task: type: BitextMining dataset: name: MTEB Tatoeba (rus-eng) type: mteb/tatoeba-bitext-mining config: rus-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 92.10000000000001 - type: f1 value: 89.76666666666667 - type: main_score value: 89.76666666666667 - type: precision value: 88.64999999999999 - type: recall value: 92.10000000000001 - task: type: BitextMining dataset: name: MTEB Tatoeba (spa-eng) type: mteb/tatoeba-bitext-mining config: spa-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 93.10000000000001 - type: f1 value: 91.10000000000001 - type: precision value: 90.16666666666666 - type: recall value: 93.10000000000001 - task: type: BitextMining dataset: name: MTEB Tatoeba (hye-eng) type: mteb/tatoeba-bitext-mining config: hye-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 85.71428571428571 - type: f1 value: 82.29142600436403 - type: precision value: 80.8076626877166 - type: recall value: 85.71428571428571 - task: type: BitextMining dataset: name: MTEB Tatoeba (tel-eng) type: mteb/tatoeba-bitext-mining config: tel-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 88.88888888888889 - type: f1 value: 85.7834757834758 - type: precision value: 84.43732193732193 - type: recall value: 88.88888888888889 - task: type: BitextMining dataset: name: MTEB Tatoeba (afr-eng) type: mteb/tatoeba-bitext-mining config: afr-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 88.5 - type: f1 value: 85.67190476190476 - type: precision value: 84.43333333333332 - type: recall value: 88.5 - task: type: BitextMining dataset: name: MTEB Tatoeba (mon-eng) type: mteb/tatoeba-bitext-mining config: mon-eng split: test revision: 
9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 82.72727272727273 - type: f1 value: 78.21969696969695 - type: precision value: 76.18181818181819 - type: recall value: 82.72727272727273 - task: type: BitextMining dataset: name: MTEB Tatoeba (arz-eng) type: mteb/tatoeba-bitext-mining config: arz-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 61.0062893081761 - type: f1 value: 55.13976240391334 - type: precision value: 52.92112499659669 - type: recall value: 61.0062893081761 - task: type: BitextMining dataset: name: MTEB Tatoeba (hrv-eng) type: mteb/tatoeba-bitext-mining config: hrv-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 89.5 - type: f1 value: 86.86666666666666 - type: precision value: 85.69166666666668 - type: recall value: 89.5 - task: type: BitextMining dataset: name: MTEB Tatoeba (nov-eng) type: mteb/tatoeba-bitext-mining config: nov-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 73.54085603112841 - type: f1 value: 68.56031128404669 - type: precision value: 66.53047989623866 - type: recall value: 73.54085603112841 - task: type: BitextMining dataset: name: MTEB Tatoeba (gsw-eng) type: mteb/tatoeba-bitext-mining config: gsw-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 43.58974358974359 - type: f1 value: 36.45299145299145 - type: precision value: 33.81155881155882 - type: recall value: 43.58974358974359 - task: type: BitextMining dataset: name: MTEB Tatoeba (nds-eng) type: mteb/tatoeba-bitext-mining config: nds-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 59.599999999999994 - type: f1 value: 53.264689754689755 - type: precision value: 50.869166666666665 - type: recall value: 59.599999999999994 - task: type: BitextMining dataset: name: MTEB Tatoeba (ukr-eng) type: mteb/tatoeba-bitext-mining config: ukr-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 85.2 - type: f1 value: 81.61666666666665 - type: precision value: 80.02833333333335 - type: recall value: 85.2 - task: type: BitextMining dataset: name: MTEB Tatoeba (uzb-eng) type: mteb/tatoeba-bitext-mining config: uzb-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 63.78504672897196 - type: f1 value: 58.00029669188548 - type: precision value: 55.815809968847354 - type: recall value: 63.78504672897196 - task: type: BitextMining dataset: name: MTEB Tatoeba (lit-eng) type: mteb/tatoeba-bitext-mining config: lit-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 66.5 - type: f1 value: 61.518333333333345 - type: precision value: 59.622363699102834 - type: recall value: 66.5 - task: type: BitextMining dataset: name: MTEB Tatoeba (ina-eng) type: mteb/tatoeba-bitext-mining config: ina-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 88.6 - type: f1 value: 85.60222222222221 - type: precision value: 84.27916666666665 - type: recall value: 88.6 - task: type: BitextMining dataset: name: MTEB Tatoeba (lfn-eng) type: mteb/tatoeba-bitext-mining config: lfn-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 58.699999999999996 - type: f1 value: 52.732375957375965 - type: precision value: 50.63214035964035 - type: 
recall value: 58.699999999999996 - task: type: BitextMining dataset: name: MTEB Tatoeba (zsm-eng) type: mteb/tatoeba-bitext-mining config: zsm-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 92.10000000000001 - type: f1 value: 89.99666666666667 - type: precision value: 89.03333333333333 - type: recall value: 92.10000000000001 - task: type: BitextMining dataset: name: MTEB Tatoeba (ita-eng) type: mteb/tatoeba-bitext-mining config: ita-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 90.10000000000001 - type: f1 value: 87.55666666666667 - type: precision value: 86.36166666666668 - type: recall value: 90.10000000000001 - task: type: BitextMining dataset: name: MTEB Tatoeba (cmn-eng) type: mteb/tatoeba-bitext-mining config: cmn-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 91.4 - type: f1 value: 88.89000000000001 - type: precision value: 87.71166666666666 - type: recall value: 91.4 - task: type: BitextMining dataset: name: MTEB Tatoeba (lvs-eng) type: mteb/tatoeba-bitext-mining config: lvs-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 65.7 - type: f1 value: 60.67427750410509 - type: precision value: 58.71785714285714 - type: recall value: 65.7 - task: type: BitextMining dataset: name: MTEB Tatoeba (glg-eng) type: mteb/tatoeba-bitext-mining config: glg-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 85.39999999999999 - type: f1 value: 81.93190476190475 - type: precision value: 80.37833333333333 - type: recall value: 85.39999999999999 - task: type: BitextMining dataset: name: MTEB Tatoeba (ceb-eng) type: mteb/tatoeba-bitext-mining config: ceb-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 47.833333333333336 - type: f1 value: 42.006625781625786 - type: precision value: 40.077380952380956 - type: recall value: 47.833333333333336 - task: type: BitextMining dataset: name: MTEB Tatoeba (bre-eng) type: mteb/tatoeba-bitext-mining config: bre-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 10.4 - type: f1 value: 8.24465007215007 - type: precision value: 7.664597069597071 - type: recall value: 10.4 - task: type: BitextMining dataset: name: MTEB Tatoeba (ben-eng) type: mteb/tatoeba-bitext-mining config: ben-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 82.6 - type: f1 value: 77.76333333333334 - type: precision value: 75.57833333333332 - type: recall value: 82.6 - task: type: BitextMining dataset: name: MTEB Tatoeba (swg-eng) type: mteb/tatoeba-bitext-mining config: swg-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 52.67857142857143 - type: f1 value: 44.302721088435376 - type: precision value: 41.49801587301587 - type: recall value: 52.67857142857143 - task: type: BitextMining dataset: name: MTEB Tatoeba (arq-eng) type: mteb/tatoeba-bitext-mining config: arq-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 28.3205268935236 - type: f1 value: 22.426666605171157 - type: precision value: 20.685900116470915 - type: recall value: 28.3205268935236 - task: type: BitextMining dataset: name: MTEB Tatoeba (kab-eng) type: mteb/tatoeba-bitext-mining config: kab-eng split: test revision: 
9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 22.7 - type: f1 value: 17.833970473970474 - type: precision value: 16.407335164835164 - type: recall value: 22.7 - task: type: BitextMining dataset: name: MTEB Tatoeba (fra-eng) type: mteb/tatoeba-bitext-mining config: fra-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 92.2 - type: f1 value: 89.92999999999999 - type: precision value: 88.87 - type: recall value: 92.2 - task: type: BitextMining dataset: name: MTEB Tatoeba (por-eng) type: mteb/tatoeba-bitext-mining config: por-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 91.4 - type: f1 value: 89.25 - type: precision value: 88.21666666666667 - type: recall value: 91.4 - task: type: BitextMining dataset: name: MTEB Tatoeba (tat-eng) type: mteb/tatoeba-bitext-mining config: tat-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 69.19999999999999 - type: f1 value: 63.38269841269841 - type: precision value: 61.14773809523809 - type: recall value: 69.19999999999999 - task: type: BitextMining dataset: name: MTEB Tatoeba (oci-eng) type: mteb/tatoeba-bitext-mining config: oci-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 48.8 - type: f1 value: 42.839915639915645 - type: precision value: 40.770287114845935 - type: recall value: 48.8 - task: type: BitextMining dataset: name: MTEB Tatoeba (pol-eng) type: mteb/tatoeba-bitext-mining config: pol-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 88.8 - type: f1 value: 85.90666666666668 - type: precision value: 84.54166666666666 - type: recall value: 88.8 - task: type: BitextMining dataset: name: MTEB Tatoeba (war-eng) type: mteb/tatoeba-bitext-mining config: war-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 46.6 - type: f1 value: 40.85892920804686 - type: precision value: 38.838223114604695 - type: recall value: 46.6 - task: type: BitextMining dataset: name: MTEB Tatoeba (aze-eng) type: mteb/tatoeba-bitext-mining config: aze-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 84.0 - type: f1 value: 80.14190476190475 - type: precision value: 78.45333333333333 - type: recall value: 84.0 - task: type: BitextMining dataset: name: MTEB Tatoeba (vie-eng) type: mteb/tatoeba-bitext-mining config: vie-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 90.5 - type: f1 value: 87.78333333333333 - type: precision value: 86.5 - type: recall value: 90.5 - task: type: BitextMining dataset: name: MTEB Tatoeba (nno-eng) type: mteb/tatoeba-bitext-mining config: nno-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 74.5 - type: f1 value: 69.48397546897547 - type: precision value: 67.51869047619049 - type: recall value: 74.5 - task: type: BitextMining dataset: name: MTEB Tatoeba (cha-eng) type: mteb/tatoeba-bitext-mining config: cha-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 32.846715328467155 - type: f1 value: 27.828177499710343 - type: precision value: 26.63451511991658 - type: recall value: 32.846715328467155 - task: type: BitextMining dataset: name: MTEB Tatoeba (mhr-eng) type: mteb/tatoeba-bitext-mining config: mhr-eng split: test revision: 
9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 8.0 - type: f1 value: 6.07664116764988 - type: precision value: 5.544177607179943 - type: recall value: 8.0 - task: type: BitextMining dataset: name: MTEB Tatoeba (dan-eng) type: mteb/tatoeba-bitext-mining config: dan-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 87.6 - type: f1 value: 84.38555555555554 - type: precision value: 82.91583333333334 - type: recall value: 87.6 - task: type: BitextMining dataset: name: MTEB Tatoeba (ell-eng) type: mteb/tatoeba-bitext-mining config: ell-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 87.5 - type: f1 value: 84.08333333333331 - type: precision value: 82.47333333333333 - type: recall value: 87.5 - task: type: BitextMining dataset: name: MTEB Tatoeba (amh-eng) type: mteb/tatoeba-bitext-mining config: amh-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 80.95238095238095 - type: f1 value: 76.13095238095238 - type: precision value: 74.05753968253967 - type: recall value: 80.95238095238095 - task: type: BitextMining dataset: name: MTEB Tatoeba (pam-eng) type: mteb/tatoeba-bitext-mining config: pam-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 8.799999999999999 - type: f1 value: 6.971422975172975 - type: precision value: 6.557814916172301 - type: recall value: 8.799999999999999 - task: type: BitextMining dataset: name: MTEB Tatoeba (hsb-eng) type: mteb/tatoeba-bitext-mining config: hsb-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 44.099378881987576 - type: f1 value: 37.01649742022413 - type: precision value: 34.69420618488942 - type: recall value: 44.099378881987576 - task: type: BitextMining dataset: name: MTEB Tatoeba (srp-eng) type: mteb/tatoeba-bitext-mining config: srp-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 84.3 - type: f1 value: 80.32666666666667 - type: precision value: 78.60666666666665 - type: recall value: 84.3 - task: type: BitextMining dataset: name: MTEB Tatoeba (epo-eng) type: mteb/tatoeba-bitext-mining config: epo-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 92.5 - type: f1 value: 90.49666666666666 - type: precision value: 89.56666666666668 - type: recall value: 92.5 - task: type: BitextMining dataset: name: MTEB Tatoeba (kzj-eng) type: mteb/tatoeba-bitext-mining config: kzj-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 10.0 - type: f1 value: 8.268423529875141 - type: precision value: 7.878118605532398 - type: recall value: 10.0 - task: type: BitextMining dataset: name: MTEB Tatoeba (awa-eng) type: mteb/tatoeba-bitext-mining config: awa-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 79.22077922077922 - type: f1 value: 74.27128427128426 - type: precision value: 72.28715728715729 - type: recall value: 79.22077922077922 - task: type: BitextMining dataset: name: MTEB Tatoeba (fao-eng) type: mteb/tatoeba-bitext-mining config: fao-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 65.64885496183206 - type: f1 value: 58.87495456197747 - type: precision value: 55.992366412213734 - type: recall value: 65.64885496183206 - task: type: BitextMining 
dataset: name: MTEB Tatoeba (mal-eng) type: mteb/tatoeba-bitext-mining config: mal-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 96.06986899563319 - type: f1 value: 94.78408539543909 - type: precision value: 94.15332362930616 - type: recall value: 96.06986899563319 - task: type: BitextMining dataset: name: MTEB Tatoeba (ile-eng) type: mteb/tatoeba-bitext-mining config: ile-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 77.2 - type: f1 value: 71.72571428571428 - type: precision value: 69.41000000000001 - type: recall value: 77.2 - task: type: BitextMining dataset: name: MTEB Tatoeba (bos-eng) type: mteb/tatoeba-bitext-mining config: bos-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 86.4406779661017 - type: f1 value: 83.2391713747646 - type: precision value: 81.74199623352166 - type: recall value: 86.4406779661017 - task: type: BitextMining dataset: name: MTEB Tatoeba (cor-eng) type: mteb/tatoeba-bitext-mining config: cor-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 8.4 - type: f1 value: 6.017828743398003 - type: precision value: 5.4829865484756795 - type: recall value: 8.4 - task: type: BitextMining dataset: name: MTEB Tatoeba (cat-eng) type: mteb/tatoeba-bitext-mining config: cat-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 83.5 - type: f1 value: 79.74833333333333 - type: precision value: 78.04837662337664 - type: recall value: 83.5 - task: type: BitextMining dataset: name: MTEB Tatoeba (eus-eng) type: mteb/tatoeba-bitext-mining config: eus-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 60.4 - type: f1 value: 54.467301587301584 - type: precision value: 52.23242424242424 - type: recall value: 60.4 - task: type: BitextMining dataset: name: MTEB Tatoeba (yue-eng) type: mteb/tatoeba-bitext-mining config: yue-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 74.9 - type: f1 value: 69.68699134199134 - type: precision value: 67.59873015873016 - type: recall value: 74.9 - task: type: BitextMining dataset: name: MTEB Tatoeba (swe-eng) type: mteb/tatoeba-bitext-mining config: swe-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 88.0 - type: f1 value: 84.9652380952381 - type: precision value: 83.66166666666666 - type: recall value: 88.0 - task: type: BitextMining dataset: name: MTEB Tatoeba (dtp-eng) type: mteb/tatoeba-bitext-mining config: dtp-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 9.1 - type: f1 value: 7.681244588744588 - type: precision value: 7.370043290043291 - type: recall value: 9.1 - task: type: BitextMining dataset: name: MTEB Tatoeba (kat-eng) type: mteb/tatoeba-bitext-mining config: kat-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 80.9651474530831 - type: f1 value: 76.84220605132133 - type: precision value: 75.19606398962966 - type: recall value: 80.9651474530831 - task: type: BitextMining dataset: name: MTEB Tatoeba (jpn-eng) type: mteb/tatoeba-bitext-mining config: jpn-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 86.9 - type: f1 value: 83.705 - type: precision value: 82.3120634920635 - type: recall 
value: 86.9 - task: type: BitextMining dataset: name: MTEB Tatoeba (csb-eng) type: mteb/tatoeba-bitext-mining config: csb-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 29.64426877470356 - type: f1 value: 23.98763072676116 - type: precision value: 22.506399397703746 - type: recall value: 29.64426877470356 - task: type: BitextMining dataset: name: MTEB Tatoeba (xho-eng) type: mteb/tatoeba-bitext-mining config: xho-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 70.4225352112676 - type: f1 value: 62.84037558685445 - type: precision value: 59.56572769953053 - type: recall value: 70.4225352112676 - task: type: BitextMining dataset: name: MTEB Tatoeba (orv-eng) type: mteb/tatoeba-bitext-mining config: orv-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 19.64071856287425 - type: f1 value: 15.125271011207756 - type: precision value: 13.865019261197494 - type: recall value: 19.64071856287425 - task: type: BitextMining dataset: name: MTEB Tatoeba (ind-eng) type: mteb/tatoeba-bitext-mining config: ind-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 90.2 - type: f1 value: 87.80666666666666 - type: precision value: 86.70833333333331 - type: recall value: 90.2 - task: type: BitextMining dataset: name: MTEB Tatoeba (tuk-eng) type: mteb/tatoeba-bitext-mining config: tuk-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 23.15270935960591 - type: f1 value: 18.407224958949097 - type: precision value: 16.982385430661292 - type: recall value: 23.15270935960591 - task: type: BitextMining dataset: name: MTEB Tatoeba (max-eng) type: mteb/tatoeba-bitext-mining config: max-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 55.98591549295775 - type: f1 value: 49.94718309859154 - type: precision value: 47.77864154624717 - type: recall value: 55.98591549295775 - task: type: BitextMining dataset: name: MTEB Tatoeba (swh-eng) type: mteb/tatoeba-bitext-mining config: swh-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 73.07692307692307 - type: f1 value: 66.74358974358974 - type: precision value: 64.06837606837607 - type: recall value: 73.07692307692307 - task: type: BitextMining dataset: name: MTEB Tatoeba (hin-eng) type: mteb/tatoeba-bitext-mining config: hin-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 94.89999999999999 - type: f1 value: 93.25 - type: precision value: 92.43333333333332 - type: recall value: 94.89999999999999 - task: type: BitextMining dataset: name: MTEB Tatoeba (dsb-eng) type: mteb/tatoeba-bitext-mining config: dsb-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 37.78705636743215 - type: f1 value: 31.63899658680452 - type: precision value: 29.72264397629742 - type: recall value: 37.78705636743215 - task: type: BitextMining dataset: name: MTEB Tatoeba (ber-eng) type: mteb/tatoeba-bitext-mining config: ber-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 21.6 - type: f1 value: 16.91697302697303 - type: precision value: 15.71225147075147 - type: recall value: 21.6 - task: type: BitextMining dataset: name: MTEB Tatoeba (tam-eng) type: mteb/tatoeba-bitext-mining config: tam-eng split: test 
revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 85.01628664495115 - type: f1 value: 81.38514037536838 - type: precision value: 79.83170466883823 - type: recall value: 85.01628664495115 - task: type: BitextMining dataset: name: MTEB Tatoeba (slk-eng) type: mteb/tatoeba-bitext-mining config: slk-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 83.39999999999999 - type: f1 value: 79.96380952380952 - type: precision value: 78.48333333333333 - type: recall value: 83.39999999999999 - task: type: BitextMining dataset: name: MTEB Tatoeba (tgl-eng) type: mteb/tatoeba-bitext-mining config: tgl-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 83.2 - type: f1 value: 79.26190476190476 - type: precision value: 77.58833333333334 - type: recall value: 83.2 - task: type: BitextMining dataset: name: MTEB Tatoeba (ast-eng) type: mteb/tatoeba-bitext-mining config: ast-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 75.59055118110236 - type: f1 value: 71.66854143232096 - type: precision value: 70.30183727034121 - type: recall value: 75.59055118110236 - task: type: BitextMining dataset: name: MTEB Tatoeba (mkd-eng) type: mteb/tatoeba-bitext-mining config: mkd-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 65.5 - type: f1 value: 59.26095238095238 - type: precision value: 56.81909090909092 - type: recall value: 65.5 - task: type: BitextMining dataset: name: MTEB Tatoeba (khm-eng) type: mteb/tatoeba-bitext-mining config: khm-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 55.26315789473685 - type: f1 value: 47.986523325858506 - type: precision value: 45.33950006595436 - type: recall value: 55.26315789473685 - task: type: BitextMining dataset: name: MTEB Tatoeba (ces-eng) type: mteb/tatoeba-bitext-mining config: ces-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 82.89999999999999 - type: f1 value: 78.835 - type: precision value: 77.04761904761905 - type: recall value: 82.89999999999999 - task: type: BitextMining dataset: name: MTEB Tatoeba (tzl-eng) type: mteb/tatoeba-bitext-mining config: tzl-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 43.269230769230774 - type: f1 value: 36.20421245421245 - type: precision value: 33.57371794871795 - type: recall value: 43.269230769230774 - task: type: BitextMining dataset: name: MTEB Tatoeba (urd-eng) type: mteb/tatoeba-bitext-mining config: urd-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 88.0 - type: f1 value: 84.70666666666666 - type: precision value: 83.23166666666665 - type: recall value: 88.0 - task: type: BitextMining dataset: name: MTEB Tatoeba (ara-eng) type: mteb/tatoeba-bitext-mining config: ara-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 77.4 - type: f1 value: 72.54666666666667 - type: precision value: 70.54318181818181 - type: recall value: 77.4 - task: type: BitextMining dataset: name: MTEB Tatoeba (kor-eng) type: mteb/tatoeba-bitext-mining config: kor-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 78.60000000000001 - type: f1 value: 74.1588888888889 - type: precision value: 72.30250000000001 - type: recall 
value: 78.60000000000001 - task: type: BitextMining dataset: name: MTEB Tatoeba (yid-eng) type: mteb/tatoeba-bitext-mining config: yid-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 72.40566037735849 - type: f1 value: 66.82587328813744 - type: precision value: 64.75039308176099 - type: recall value: 72.40566037735849 - task: type: BitextMining dataset: name: MTEB Tatoeba (fin-eng) type: mteb/tatoeba-bitext-mining config: fin-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 73.8 - type: f1 value: 68.56357142857144 - type: precision value: 66.3178822055138 - type: recall value: 73.8 - task: type: BitextMining dataset: name: MTEB Tatoeba (tha-eng) type: mteb/tatoeba-bitext-mining config: tha-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 91.78832116788321 - type: f1 value: 89.3552311435523 - type: precision value: 88.20559610705597 - type: recall value: 91.78832116788321 - task: type: BitextMining dataset: name: MTEB Tatoeba (wuu-eng) type: mteb/tatoeba-bitext-mining config: wuu-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 74.3 - type: f1 value: 69.05085581085581 - type: precision value: 66.955 - type: recall value: 74.3 - task: type: Retrieval dataset: name: MTEB Touche2020 type: webis-touche2020 config: default split: test revision: None metrics: - type: map_at_1 value: 2.896 - type: map_at_10 value: 8.993 - type: map_at_100 value: 14.133999999999999 - type: map_at_1000 value: 15.668000000000001 - type: map_at_3 value: 5.862 - type: map_at_5 value: 7.17 - type: mrr_at_1 value: 34.694 - type: mrr_at_10 value: 42.931000000000004 - type: mrr_at_100 value: 44.81 - type: mrr_at_1000 value: 44.81 - type: mrr_at_3 value: 38.435 - type: mrr_at_5 value: 41.701 - type: ndcg_at_1 value: 31.633 - type: ndcg_at_10 value: 21.163 - type: ndcg_at_100 value: 33.306000000000004 - type: ndcg_at_1000 value: 45.275999999999996 - type: ndcg_at_3 value: 25.685999999999996 - type: ndcg_at_5 value: 23.732 - type: precision_at_1 value: 34.694 - type: precision_at_10 value: 17.755000000000003 - type: precision_at_100 value: 6.938999999999999 - type: precision_at_1000 value: 1.48 - type: precision_at_3 value: 25.85 - type: precision_at_5 value: 23.265 - type: recall_at_1 value: 2.896 - type: recall_at_10 value: 13.333999999999998 - type: recall_at_100 value: 43.517 - type: recall_at_1000 value: 79.836 - type: recall_at_3 value: 6.306000000000001 - type: recall_at_5 value: 8.825 - task: type: Classification dataset: name: MTEB ToxicConversationsClassification type: mteb/toxic_conversations_50k config: default split: test revision: d7c0de2777da35d6aae2200a62c6e0e5af397c4c metrics: - type: accuracy value: 69.3874 - type: ap value: 13.829909072469423 - type: f1 value: 53.54534203543492 - task: type: Classification dataset: name: MTEB TweetSentimentExtractionClassification type: mteb/tweet_sentiment_extraction config: default split: test revision: d604517c81ca91fe16a244d1248fc021f9ecee7a metrics: - type: accuracy value: 62.62026032823995 - type: f1 value: 62.85251350485221 - task: type: Clustering dataset: name: MTEB TwentyNewsgroupsClustering type: mteb/twentynewsgroups-clustering config: default split: test revision: 6125ec4e24fa026cec8a478383ee943acfbd5449 metrics: - type: v_measure value: 33.21527881409797 - task: type: PairClassification dataset: name: MTEB TwitterSemEval2015 type: 
mteb/twittersemeval2015-pairclassification config: default split: test revision: 70970daeab8776df92f5ea462b6173c0b46fd2d1 metrics: - type: cos_sim_accuracy value: 84.97943613280086 - type: cos_sim_ap value: 70.75454316885921 - type: cos_sim_f1 value: 65.38274012676743 - type: cos_sim_precision value: 60.761214318078835 - type: cos_sim_recall value: 70.76517150395777 - type: dot_accuracy value: 79.0546581629612 - type: dot_ap value: 47.3197121792147 - type: dot_f1 value: 49.20106524633821 - type: dot_precision value: 42.45499808502489 - type: dot_recall value: 58.49604221635884 - type: euclidean_accuracy value: 85.08076533349228 - type: euclidean_ap value: 70.95016106374474 - type: euclidean_f1 value: 65.43987900176455 - type: euclidean_precision value: 62.64478764478765 - type: euclidean_recall value: 68.49604221635884 - type: manhattan_accuracy value: 84.93771234428085 - type: manhattan_ap value: 70.63668388755362 - type: manhattan_f1 value: 65.23895401262398 - type: manhattan_precision value: 56.946084218811485 - type: manhattan_recall value: 76.35883905013192 - type: max_accuracy value: 85.08076533349228 - type: max_ap value: 70.95016106374474 - type: max_f1 value: 65.43987900176455 - task: type: PairClassification dataset: name: MTEB TwitterURLCorpus type: mteb/twitterurlcorpus-pairclassification config: default split: test revision: 8b6510b0b1fa4e4c4f879467980e9be563ec1cdf metrics: - type: cos_sim_accuracy value: 88.69096130709822 - type: cos_sim_ap value: 84.82526278228542 - type: cos_sim_f1 value: 77.65485060585536 - type: cos_sim_precision value: 75.94582658619167 - type: cos_sim_recall value: 79.44256236526024 - type: dot_accuracy value: 80.97954748321496 - type: dot_ap value: 64.81642914145866 - type: dot_f1 value: 60.631996987229975 - type: dot_precision value: 54.5897293631712 - type: dot_recall value: 68.17831844779796 - type: euclidean_accuracy value: 88.6987231730508 - type: euclidean_ap value: 84.80003825477253 - type: euclidean_f1 value: 77.67194179854496 - type: euclidean_precision value: 75.7128235122094 - type: euclidean_recall value: 79.73514012935017 - type: manhattan_accuracy value: 88.62692591298949 - type: manhattan_ap value: 84.80451408255276 - type: manhattan_f1 value: 77.69888949572183 - type: manhattan_precision value: 73.70311528631622 - type: manhattan_recall value: 82.15275639051433 - type: max_accuracy value: 88.6987231730508 - type: max_ap value: 84.82526278228542 - type: max_f1 value: 77.69888949572183 - task: type: BitextMining dataset: name: MTEB BUCC.v2 (ru-en) type: mteb/bucc-bitext-mining config: ru-en split: test revision: 1739dc11ffe9b7bfccd7f3d585aeb4c544fc6677 metrics: - type: accuracy value: 95.72566678212678 - type: f1 value: 94.42443135896548 - type: main_score value: 94.42443135896548 - type: precision value: 93.80868260016165 - type: recall value: 95.72566678212678 - task: type: Retrieval dataset: name: MTEB BelebeleRetrieval (rus_Cyrl-rus_Cyrl) type: facebook/belebele config: rus_Cyrl-rus_Cyrl split: test revision: 75b399394a9803252cfec289d103de462763db7c metrics: - type: main_score value: 92.23599999999999 - type: map_at_1 value: 87.111 - type: map_at_10 value: 90.717 - type: map_at_100 value: 90.879 - type: map_at_1000 value: 90.881 - type: map_at_20 value: 90.849 - type: map_at_3 value: 90.074 - type: map_at_5 value: 90.535 - type: mrr_at_1 value: 87.1111111111111 - type: mrr_at_10 value: 90.7173721340388 - type: mrr_at_100 value: 90.87859682638407 - type: mrr_at_1000 value: 90.88093553612326 - type: mrr_at_20 value: 90.84863516113515 - 
type: mrr_at_3 value: 90.07407407407409 - type: mrr_at_5 value: 90.53518518518521 - type: nauc_map_at_1000_diff1 value: 92.37373187280554 - type: nauc_map_at_1000_max value: 79.90465445423249 - type: nauc_map_at_1000_std value: -0.6220290556185463 - type: nauc_map_at_100_diff1 value: 92.37386697345335 - type: nauc_map_at_100_max value: 79.90991577223959 - type: nauc_map_at_100_std value: -0.602247514642845 - type: nauc_map_at_10_diff1 value: 92.30907447072467 - type: nauc_map_at_10_max value: 79.86831935337598 - type: nauc_map_at_10_std value: -0.7455191860719699 - type: nauc_map_at_1_diff1 value: 93.29828518358822 - type: nauc_map_at_1_max value: 78.69539619887887 - type: nauc_map_at_1_std value: -4.097150817605763 - type: nauc_map_at_20_diff1 value: 92.38414149703077 - type: nauc_map_at_20_max value: 79.94789814504661 - type: nauc_map_at_20_std value: -0.3928031130400773 - type: nauc_map_at_3_diff1 value: 92.21688899306734 - type: nauc_map_at_3_max value: 80.34586671780885 - type: nauc_map_at_3_std value: 0.24088319695435909 - type: nauc_map_at_5_diff1 value: 92.27931726042982 - type: nauc_map_at_5_max value: 79.99198834003367 - type: nauc_map_at_5_std value: -0.6296366922840796 - type: nauc_mrr_at_1000_diff1 value: 92.37373187280554 - type: nauc_mrr_at_1000_max value: 79.90465445423249 - type: nauc_mrr_at_1000_std value: -0.6220290556185463 - type: nauc_mrr_at_100_diff1 value: 92.37386697345335 - type: nauc_mrr_at_100_max value: 79.90991577223959 - type: nauc_mrr_at_100_std value: -0.602247514642845 - type: nauc_mrr_at_10_diff1 value: 92.30907447072467 - type: nauc_mrr_at_10_max value: 79.86831935337598 - type: nauc_mrr_at_10_std value: -0.7455191860719699 - type: nauc_mrr_at_1_diff1 value: 93.29828518358822 - type: nauc_mrr_at_1_max value: 78.69539619887887 - type: nauc_mrr_at_1_std value: -4.097150817605763 - type: nauc_mrr_at_20_diff1 value: 92.38414149703077 - type: nauc_mrr_at_20_max value: 79.94789814504661 - type: nauc_mrr_at_20_std value: -0.3928031130400773 - type: nauc_mrr_at_3_diff1 value: 92.21688899306734 - type: nauc_mrr_at_3_max value: 80.34586671780885 - type: nauc_mrr_at_3_std value: 0.24088319695435909 - type: nauc_mrr_at_5_diff1 value: 92.27931726042982 - type: nauc_mrr_at_5_max value: 79.99198834003367 - type: nauc_mrr_at_5_std value: -0.6296366922840796 - type: nauc_ndcg_at_1000_diff1 value: 92.30526497646306 - type: nauc_ndcg_at_1000_max value: 80.12734537480418 - type: nauc_ndcg_at_1000_std value: 0.22849408935578744 - type: nauc_ndcg_at_100_diff1 value: 92.31347123202318 - type: nauc_ndcg_at_100_max value: 80.29207038703142 - type: nauc_ndcg_at_100_std value: 0.816825944406239 - type: nauc_ndcg_at_10_diff1 value: 92.05430189845808 - type: nauc_ndcg_at_10_max value: 80.16515667442968 - type: nauc_ndcg_at_10_std value: 0.7486447532544893 - type: nauc_ndcg_at_1_diff1 value: 93.29828518358822 - type: nauc_ndcg_at_1_max value: 78.69539619887887 - type: nauc_ndcg_at_1_std value: -4.097150817605763 - type: nauc_ndcg_at_20_diff1 value: 92.40147868825079 - type: nauc_ndcg_at_20_max value: 80.5117307181802 - type: nauc_ndcg_at_20_std value: 2.0431351539517033 - type: nauc_ndcg_at_3_diff1 value: 91.88894444422789 - type: nauc_ndcg_at_3_max value: 81.09256084196045 - type: nauc_ndcg_at_3_std value: 2.422705909643621 - type: nauc_ndcg_at_5_diff1 value: 91.99711052955728 - type: nauc_ndcg_at_5_max value: 80.46996334573979 - type: nauc_ndcg_at_5_std value: 0.9086986899040708 - type: nauc_precision_at_1000_diff1 value: .nan - type: nauc_precision_at_1000_max value: .nan - type: 
nauc_precision_at_1000_std value: .nan - type: nauc_precision_at_100_diff1 value: 93.46405228758012 - type: nauc_precision_at_100_max value: 100.0 - type: nauc_precision_at_100_std value: 70.71661998132774 - type: nauc_precision_at_10_diff1 value: 90.13938908896874 - type: nauc_precision_at_10_max value: 82.21121782046167 - type: nauc_precision_at_10_std value: 13.075230092036083 - type: nauc_precision_at_1_diff1 value: 93.29828518358822 - type: nauc_precision_at_1_max value: 78.69539619887887 - type: nauc_precision_at_1_std value: -4.097150817605763 - type: nauc_precision_at_20_diff1 value: 94.9723479135242 - type: nauc_precision_at_20_max value: 91.04000574588684 - type: nauc_precision_at_20_std value: 48.764634058749586 - type: nauc_precision_at_3_diff1 value: 90.52690041533852 - type: nauc_precision_at_3_max value: 84.35075179497126 - type: nauc_precision_at_3_std value: 12.036768730480507 - type: nauc_precision_at_5_diff1 value: 90.44234360410769 - type: nauc_precision_at_5_max value: 83.21895424836558 - type: nauc_precision_at_5_std value: 9.974323062558037 - type: nauc_recall_at_1000_diff1 value: .nan - type: nauc_recall_at_1000_max value: .nan - type: nauc_recall_at_1000_std value: .nan - type: nauc_recall_at_100_diff1 value: 93.46405228758294 - type: nauc_recall_at_100_max value: 100.0 - type: nauc_recall_at_100_std value: 70.71661998132666 - type: nauc_recall_at_10_diff1 value: 90.13938908896864 - type: nauc_recall_at_10_max value: 82.21121782046124 - type: nauc_recall_at_10_std value: 13.075230092036506 - type: nauc_recall_at_1_diff1 value: 93.29828518358822 - type: nauc_recall_at_1_max value: 78.69539619887887 - type: nauc_recall_at_1_std value: -4.097150817605763 - type: nauc_recall_at_20_diff1 value: 94.97234791352489 - type: nauc_recall_at_20_max value: 91.04000574588774 - type: nauc_recall_at_20_std value: 48.764634058752065 - type: nauc_recall_at_3_diff1 value: 90.52690041533845 - type: nauc_recall_at_3_max value: 84.35075179497079 - type: nauc_recall_at_3_std value: 12.036768730480583 - type: nauc_recall_at_5_diff1 value: 90.44234360410861 - type: nauc_recall_at_5_max value: 83.21895424836595 - type: nauc_recall_at_5_std value: 9.974323062558147 - type: ndcg_at_1 value: 87.111 - type: ndcg_at_10 value: 92.23599999999999 - type: ndcg_at_100 value: 92.87100000000001 - type: ndcg_at_1000 value: 92.928 - type: ndcg_at_20 value: 92.67699999999999 - type: ndcg_at_3 value: 90.973 - type: ndcg_at_5 value: 91.801 - type: precision_at_1 value: 87.111 - type: precision_at_10 value: 9.689 - type: precision_at_100 value: 0.996 - type: precision_at_1000 value: 0.1 - type: precision_at_20 value: 4.928 - type: precision_at_3 value: 31.185000000000002 - type: precision_at_5 value: 19.111 - type: recall_at_1 value: 87.111 - type: recall_at_10 value: 96.88900000000001 - type: recall_at_100 value: 99.556 - type: recall_at_1000 value: 100.0 - type: recall_at_20 value: 98.556 - type: recall_at_3 value: 93.556 - type: recall_at_5 value: 95.556 - task: type: Retrieval dataset: name: MTEB BelebeleRetrieval (rus_Cyrl-eng_Latn) type: facebook/belebele config: rus_Cyrl-eng_Latn split: test revision: 75b399394a9803252cfec289d103de462763db7c metrics: - type: main_score value: 86.615 - type: map_at_1 value: 78.0 - type: map_at_10 value: 83.822 - type: map_at_100 value: 84.033 - type: map_at_1000 value: 84.03500000000001 - type: map_at_20 value: 83.967 - type: map_at_3 value: 82.315 - type: map_at_5 value: 83.337 - type: mrr_at_1 value: 78.0 - type: mrr_at_10 value: 83.82213403880073 - type: mrr_at_100 
value: 84.03281327810801 - type: mrr_at_1000 value: 84.03460051000452 - type: mrr_at_20 value: 83.9673773122303 - type: mrr_at_3 value: 82.31481481481484 - type: mrr_at_5 value: 83.33703703703708 - type: nauc_map_at_1000_diff1 value: 80.78467576987832 - type: nauc_map_at_1000_max value: 51.41718334647604 - type: nauc_map_at_1000_std value: -16.23873782768812 - type: nauc_map_at_100_diff1 value: 80.78490931240695 - type: nauc_map_at_100_max value: 51.41504597713061 - type: nauc_map_at_100_std value: -16.23538559475366 - type: nauc_map_at_10_diff1 value: 80.73989245374868 - type: nauc_map_at_10_max value: 51.43026079433827 - type: nauc_map_at_10_std value: -16.13414330905897 - type: nauc_map_at_1_diff1 value: 82.36966971144186 - type: nauc_map_at_1_max value: 52.988877039509916 - type: nauc_map_at_1_std value: -15.145824639495546 - type: nauc_map_at_20_diff1 value: 80.75923781626145 - type: nauc_map_at_20_max value: 51.40181079374639 - type: nauc_map_at_20_std value: -16.260566097377165 - type: nauc_map_at_3_diff1 value: 80.65242627065471 - type: nauc_map_at_3_max value: 50.623980338841214 - type: nauc_map_at_3_std value: -16.818343442794294 - type: nauc_map_at_5_diff1 value: 80.45976387021862 - type: nauc_map_at_5_max value: 51.533621728445866 - type: nauc_map_at_5_std value: -16.279891536945815 - type: nauc_mrr_at_1000_diff1 value: 80.78467576987832 - type: nauc_mrr_at_1000_max value: 51.41718334647604 - type: nauc_mrr_at_1000_std value: -16.23873782768812 - type: nauc_mrr_at_100_diff1 value: 80.78490931240695 - type: nauc_mrr_at_100_max value: 51.41504597713061 - type: nauc_mrr_at_100_std value: -16.23538559475366 - type: nauc_mrr_at_10_diff1 value: 80.73989245374868 - type: nauc_mrr_at_10_max value: 51.43026079433827 - type: nauc_mrr_at_10_std value: -16.13414330905897 - type: nauc_mrr_at_1_diff1 value: 82.36966971144186 - type: nauc_mrr_at_1_max value: 52.988877039509916 - type: nauc_mrr_at_1_std value: -15.145824639495546 - type: nauc_mrr_at_20_diff1 value: 80.75923781626145 - type: nauc_mrr_at_20_max value: 51.40181079374639 - type: nauc_mrr_at_20_std value: -16.260566097377165 - type: nauc_mrr_at_3_diff1 value: 80.65242627065471 - type: nauc_mrr_at_3_max value: 50.623980338841214 - type: nauc_mrr_at_3_std value: -16.818343442794294 - type: nauc_mrr_at_5_diff1 value: 80.45976387021862 - type: nauc_mrr_at_5_max value: 51.533621728445866 - type: nauc_mrr_at_5_std value: -16.279891536945815 - type: nauc_ndcg_at_1000_diff1 value: 80.60009446938174 - type: nauc_ndcg_at_1000_max value: 51.381708043594166 - type: nauc_ndcg_at_1000_std value: -16.054256944160848 - type: nauc_ndcg_at_100_diff1 value: 80.58971462930421 - type: nauc_ndcg_at_100_max value: 51.25436917735444 - type: nauc_ndcg_at_100_std value: -15.862944972269894 - type: nauc_ndcg_at_10_diff1 value: 80.37967179454489 - type: nauc_ndcg_at_10_max value: 51.590394257251006 - type: nauc_ndcg_at_10_std value: -15.489799384799591 - type: nauc_ndcg_at_1_diff1 value: 82.36966971144186 - type: nauc_ndcg_at_1_max value: 52.988877039509916 - type: nauc_ndcg_at_1_std value: -15.145824639495546 - type: nauc_ndcg_at_20_diff1 value: 80.40299527470081 - type: nauc_ndcg_at_20_max value: 51.395132284307074 - type: nauc_ndcg_at_20_std value: -15.906165526937203 - type: nauc_ndcg_at_3_diff1 value: 80.10347913649302 - type: nauc_ndcg_at_3_max value: 50.018431855573844 - type: nauc_ndcg_at_3_std value: -17.12743750163884 - type: nauc_ndcg_at_5_diff1 value: 79.65918647776613 - type: nauc_ndcg_at_5_max value: 51.76710880330806 - type: nauc_ndcg_at_5_std 
value: -16.071901882035945 - type: nauc_precision_at_1000_diff1 value: .nan - type: nauc_precision_at_1000_max value: .nan - type: nauc_precision_at_1000_std value: .nan - type: nauc_precision_at_100_diff1 value: 77.41596638655459 - type: nauc_precision_at_100_max value: 22.572362278246565 - type: nauc_precision_at_100_std value: 26.890756302525716 - type: nauc_precision_at_10_diff1 value: 77.82112845138009 - type: nauc_precision_at_10_max value: 54.2550353474723 - type: nauc_precision_at_10_std value: -7.492997198879646 - type: nauc_precision_at_1_diff1 value: 82.36966971144186 - type: nauc_precision_at_1_max value: 52.988877039509916 - type: nauc_precision_at_1_std value: -15.145824639495546 - type: nauc_precision_at_20_diff1 value: 75.89091192032318 - type: nauc_precision_at_20_max value: 52.03275754746293 - type: nauc_precision_at_20_std value: -7.8411920323686175 - type: nauc_precision_at_3_diff1 value: 78.0256020644638 - type: nauc_precision_at_3_max value: 47.80353641248523 - type: nauc_precision_at_3_std value: -18.181625255723503 - type: nauc_precision_at_5_diff1 value: 75.21583976056174 - type: nauc_precision_at_5_max value: 53.716281032960765 - type: nauc_precision_at_5_std value: -14.411700753360812 - type: nauc_recall_at_1000_diff1 value: .nan - type: nauc_recall_at_1000_max value: .nan - type: nauc_recall_at_1000_std value: .nan - type: nauc_recall_at_100_diff1 value: 77.4159663865523 - type: nauc_recall_at_100_max value: 22.57236227824646 - type: nauc_recall_at_100_std value: 26.89075630252133 - type: nauc_recall_at_10_diff1 value: 77.82112845138037 - type: nauc_recall_at_10_max value: 54.25503534747204 - type: nauc_recall_at_10_std value: -7.492997198879666 - type: nauc_recall_at_1_diff1 value: 82.36966971144186 - type: nauc_recall_at_1_max value: 52.988877039509916 - type: nauc_recall_at_1_std value: -15.145824639495546 - type: nauc_recall_at_20_diff1 value: 75.89091192032362 - type: nauc_recall_at_20_max value: 52.032757547463184 - type: nauc_recall_at_20_std value: -7.84119203236888 - type: nauc_recall_at_3_diff1 value: 78.02560206446354 - type: nauc_recall_at_3_max value: 47.80353641248526 - type: nauc_recall_at_3_std value: -18.181625255723656 - type: nauc_recall_at_5_diff1 value: 75.21583976056185 - type: nauc_recall_at_5_max value: 53.71628103296118 - type: nauc_recall_at_5_std value: -14.411700753360634 - type: ndcg_at_1 value: 78.0 - type: ndcg_at_10 value: 86.615 - type: ndcg_at_100 value: 87.558 - type: ndcg_at_1000 value: 87.613 - type: ndcg_at_20 value: 87.128 - type: ndcg_at_3 value: 83.639 - type: ndcg_at_5 value: 85.475 - type: precision_at_1 value: 78.0 - type: precision_at_10 value: 9.533 - type: precision_at_100 value: 0.996 - type: precision_at_1000 value: 0.1 - type: precision_at_20 value: 4.867 - type: precision_at_3 value: 29.148000000000003 - type: precision_at_5 value: 18.378 - type: recall_at_1 value: 78.0 - type: recall_at_10 value: 95.333 - type: recall_at_100 value: 99.556 - type: recall_at_1000 value: 100.0 - type: recall_at_20 value: 97.333 - type: recall_at_3 value: 87.444 - type: recall_at_5 value: 91.889 - task: type: Retrieval dataset: name: MTEB BelebeleRetrieval (eng_Latn-rus_Cyrl) type: facebook/belebele config: eng_Latn-rus_Cyrl split: test revision: 75b399394a9803252cfec289d103de462763db7c metrics: - type: main_score value: 82.748 - type: map_at_1 value: 73.444 - type: map_at_10 value: 79.857 - type: map_at_100 value: 80.219 - type: map_at_1000 value: 80.22500000000001 - type: map_at_20 value: 80.10300000000001 - type: map_at_3 value: 
78.593 - type: map_at_5 value: 79.515 - type: mrr_at_1 value: 73.44444444444444 - type: mrr_at_10 value: 79.85705467372136 - type: mrr_at_100 value: 80.21942320422542 - type: mrr_at_1000 value: 80.2245364027152 - type: mrr_at_20 value: 80.10273201266493 - type: mrr_at_3 value: 78.59259259259258 - type: mrr_at_5 value: 79.51481481481483 - type: nauc_map_at_1000_diff1 value: 83.69682652271125 - type: nauc_map_at_1000_max value: 61.70131708044767 - type: nauc_map_at_1000_std value: 9.345825405274955 - type: nauc_map_at_100_diff1 value: 83.68924820523492 - type: nauc_map_at_100_max value: 61.6965735573098 - type: nauc_map_at_100_std value: 9.366132859525775 - type: nauc_map_at_10_diff1 value: 83.61802964269985 - type: nauc_map_at_10_max value: 61.74274476167882 - type: nauc_map_at_10_std value: 9.504060995819101 - type: nauc_map_at_1_diff1 value: 86.37079221403225 - type: nauc_map_at_1_max value: 61.856861655370686 - type: nauc_map_at_1_std value: 4.708911881992707 - type: nauc_map_at_20_diff1 value: 83.62920965453047 - type: nauc_map_at_20_max value: 61.761029350326965 - type: nauc_map_at_20_std value: 9.572978651118351 - type: nauc_map_at_3_diff1 value: 83.66665673154306 - type: nauc_map_at_3_max value: 61.13597610587937 - type: nauc_map_at_3_std value: 9.309596395240598 - type: nauc_map_at_5_diff1 value: 83.52307226455358 - type: nauc_map_at_5_max value: 61.59405758027573 - type: nauc_map_at_5_std value: 9.320025423287671 - type: nauc_mrr_at_1000_diff1 value: 83.69682652271125 - type: nauc_mrr_at_1000_max value: 61.70131708044767 - type: nauc_mrr_at_1000_std value: 9.345825405274955 - type: nauc_mrr_at_100_diff1 value: 83.68924820523492 - type: nauc_mrr_at_100_max value: 61.6965735573098 - type: nauc_mrr_at_100_std value: 9.366132859525775 - type: nauc_mrr_at_10_diff1 value: 83.61802964269985 - type: nauc_mrr_at_10_max value: 61.74274476167882 - type: nauc_mrr_at_10_std value: 9.504060995819101 - type: nauc_mrr_at_1_diff1 value: 86.37079221403225 - type: nauc_mrr_at_1_max value: 61.856861655370686 - type: nauc_mrr_at_1_std value: 4.708911881992707 - type: nauc_mrr_at_20_diff1 value: 83.62920965453047 - type: nauc_mrr_at_20_max value: 61.761029350326965 - type: nauc_mrr_at_20_std value: 9.572978651118351 - type: nauc_mrr_at_3_diff1 value: 83.66665673154306 - type: nauc_mrr_at_3_max value: 61.13597610587937 - type: nauc_mrr_at_3_std value: 9.309596395240598 - type: nauc_mrr_at_5_diff1 value: 83.52307226455358 - type: nauc_mrr_at_5_max value: 61.59405758027573 - type: nauc_mrr_at_5_std value: 9.320025423287671 - type: nauc_ndcg_at_1000_diff1 value: 83.24213186482201 - type: nauc_ndcg_at_1000_max value: 61.77629841787496 - type: nauc_ndcg_at_1000_std value: 10.332527869705851 - type: nauc_ndcg_at_100_diff1 value: 83.06815820441027 - type: nauc_ndcg_at_100_max value: 61.6947181864579 - type: nauc_ndcg_at_100_std value: 10.888922975877316 - type: nauc_ndcg_at_10_diff1 value: 82.58238431386295 - type: nauc_ndcg_at_10_max value: 62.10333663935709 - type: nauc_ndcg_at_10_std value: 11.746030330958174 - type: nauc_ndcg_at_1_diff1 value: 86.37079221403225 - type: nauc_ndcg_at_1_max value: 61.856861655370686 - type: nauc_ndcg_at_1_std value: 4.708911881992707 - type: nauc_ndcg_at_20_diff1 value: 82.67888324480154 - type: nauc_ndcg_at_20_max value: 62.28124917486516 - type: nauc_ndcg_at_20_std value: 12.343058917563914 - type: nauc_ndcg_at_3_diff1 value: 82.71277373710663 - type: nauc_ndcg_at_3_max value: 60.66677922989939 - type: nauc_ndcg_at_3_std value: 10.843633736296528 - type: nauc_ndcg_at_5_diff1 
value: 82.34691124846786 - type: nauc_ndcg_at_5_max value: 61.605961382062716 - type: nauc_ndcg_at_5_std value: 11.129011077702602 - type: nauc_precision_at_1000_diff1 value: .nan - type: nauc_precision_at_1000_max value: .nan - type: nauc_precision_at_1000_std value: .nan - type: nauc_precision_at_100_diff1 value: 60.93103908230194 - type: nauc_precision_at_100_max value: 52.621048419370695 - type: nauc_precision_at_100_std value: 85.60090702947922 - type: nauc_precision_at_10_diff1 value: 76.26517273576093 - type: nauc_precision_at_10_max value: 65.2013694366636 - type: nauc_precision_at_10_std value: 26.50357920946173 - type: nauc_precision_at_1_diff1 value: 86.37079221403225 - type: nauc_precision_at_1_max value: 61.856861655370686 - type: nauc_precision_at_1_std value: 4.708911881992707 - type: nauc_precision_at_20_diff1 value: 73.47946930710295 - type: nauc_precision_at_20_max value: 70.19520986689217 - type: nauc_precision_at_20_std value: 45.93186111653967 - type: nauc_precision_at_3_diff1 value: 79.02026879450186 - type: nauc_precision_at_3_max value: 58.75074624692399 - type: nauc_precision_at_3_std value: 16.740684654251037 - type: nauc_precision_at_5_diff1 value: 76.47585662281637 - type: nauc_precision_at_5_max value: 61.86270922013127 - type: nauc_precision_at_5_std value: 20.1833625455035 - type: nauc_recall_at_1000_diff1 value: .nan - type: nauc_recall_at_1000_max value: .nan - type: nauc_recall_at_1000_std value: .nan - type: nauc_recall_at_100_diff1 value: 60.93103908229921 - type: nauc_recall_at_100_max value: 52.62104841936668 - type: nauc_recall_at_100_std value: 85.60090702947748 - type: nauc_recall_at_10_diff1 value: 76.26517273576097 - type: nauc_recall_at_10_max value: 65.20136943666347 - type: nauc_recall_at_10_std value: 26.50357920946174 - type: nauc_recall_at_1_diff1 value: 86.37079221403225 - type: nauc_recall_at_1_max value: 61.856861655370686 - type: nauc_recall_at_1_std value: 4.708911881992707 - type: nauc_recall_at_20_diff1 value: 73.47946930710269 - type: nauc_recall_at_20_max value: 70.19520986689254 - type: nauc_recall_at_20_std value: 45.93186111653943 - type: nauc_recall_at_3_diff1 value: 79.02026879450173 - type: nauc_recall_at_3_max value: 58.750746246923924 - type: nauc_recall_at_3_std value: 16.740684654251076 - type: nauc_recall_at_5_diff1 value: 76.4758566228162 - type: nauc_recall_at_5_max value: 61.862709220131386 - type: nauc_recall_at_5_std value: 20.18336254550361 - type: ndcg_at_1 value: 73.444 - type: ndcg_at_10 value: 82.748 - type: ndcg_at_100 value: 84.416 - type: ndcg_at_1000 value: 84.52300000000001 - type: ndcg_at_20 value: 83.646 - type: ndcg_at_3 value: 80.267 - type: ndcg_at_5 value: 81.922 - type: precision_at_1 value: 73.444 - type: precision_at_10 value: 9.167 - type: precision_at_100 value: 0.992 - type: precision_at_1000 value: 0.1 - type: precision_at_20 value: 4.761 - type: precision_at_3 value: 28.37 - type: precision_at_5 value: 17.822 - type: recall_at_1 value: 73.444 - type: recall_at_10 value: 91.667 - type: recall_at_100 value: 99.222 - type: recall_at_1000 value: 100.0 - type: recall_at_20 value: 95.222 - type: recall_at_3 value: 85.111 - type: recall_at_5 value: 89.11099999999999 - task: type: BitextMining dataset: name: MTEB BibleNLPBitextMining (eng_Latn-rus_Cyrl) type: davidstap/biblenlp-corpus-mmteb config: eng_Latn-rus_Cyrl split: train revision: 264a18480c529d9e922483839b4b9758e690b762 metrics: - type: accuracy value: 96.875 - type: f1 value: 95.83333333333333 - type: main_score value: 95.83333333333333 - 
type: precision value: 95.3125 - type: recall value: 96.875 - task: type: BitextMining dataset: name: MTEB BibleNLPBitextMining (rus_Cyrl-eng_Latn) type: davidstap/biblenlp-corpus-mmteb config: rus_Cyrl-eng_Latn split: train revision: 264a18480c529d9e922483839b4b9758e690b762 metrics: - type: accuracy value: 88.671875 - type: f1 value: 85.3515625 - type: main_score value: 85.3515625 - type: precision value: 83.85416666666667 - type: recall value: 88.671875 - task: type: MultilabelClassification dataset: name: MTEB CEDRClassification (default) type: ai-forever/cedr-classification config: default split: test revision: c0ba03d058e3e1b2f3fd20518875a4563dd12db4 metrics: - type: accuracy value: 40.06907545164719 - type: f1 value: 26.285000550712407 - type: lrap value: 64.4280021253997 - type: main_score value: 40.06907545164719 - task: type: Classification dataset: name: MTEB CyrillicTurkicLangClassification (default) type: tatiana-merz/cyrillic_turkic_langs config: default split: test revision: e42d330f33d65b7b72dfd408883daf1661f06f18 metrics: - type: accuracy value: 43.3447265625 - type: f1 value: 40.08400146827895 - type: f1_weighted value: 40.08499428040896 - type: main_score value: 43.3447265625 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (ace_Arab-rus_Cyrl) type: mteb/flores config: ace_Arab-rus_Cyrl split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 6.225296442687747 - type: f1 value: 5.5190958860075 - type: main_score value: 5.5190958860075 - type: precision value: 5.3752643758000005 - type: recall value: 6.225296442687747 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (bam_Latn-rus_Cyrl) type: mteb/flores config: bam_Latn-rus_Cyrl split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 68.37944664031622 - type: f1 value: 64.54819836666252 - type: main_score value: 64.54819836666252 - type: precision value: 63.07479233454916 - type: recall value: 68.37944664031622 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (dzo_Tibt-rus_Cyrl) type: mteb/flores config: dzo_Tibt-rus_Cyrl split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 0.09881422924901186 - type: f1 value: 0.00019509225912934226 - type: main_score value: 0.00019509225912934226 - type: precision value: 9.76425190207627e-05 - type: recall value: 0.09881422924901186 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (hin_Deva-rus_Cyrl) type: mteb/flores config: hin_Deva-rus_Cyrl split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 99.60474308300395 - type: f1 value: 99.47299077733861 - type: main_score value: 99.47299077733861 - type: precision value: 99.40711462450594 - type: recall value: 99.60474308300395 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (khm_Khmr-rus_Cyrl) type: mteb/flores config: khm_Khmr-rus_Cyrl split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 88.83399209486166 - type: f1 value: 87.71151056318254 - type: main_score value: 87.71151056318254 - type: precision value: 87.32012500709193 - type: recall value: 88.83399209486166 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (mag_Deva-rus_Cyrl) type: mteb/flores config: mag_Deva-rus_Cyrl split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 98.02371541501977 - type: f1 value: 
97.7239789196311 - type: main_score value: 97.7239789196311 - type: precision value: 97.61904761904762 - type: recall value: 98.02371541501977 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (pap_Latn-rus_Cyrl) type: mteb/flores config: pap_Latn-rus_Cyrl split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 94.0711462450593 - type: f1 value: 93.68187806922984 - type: main_score value: 93.68187806922984 - type: precision value: 93.58925452707051 - type: recall value: 94.0711462450593 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (sot_Latn-rus_Cyrl) type: mteb/flores config: sot_Latn-rus_Cyrl split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 90.9090909090909 - type: f1 value: 89.23171936758892 - type: main_score value: 89.23171936758892 - type: precision value: 88.51790014083866 - type: recall value: 90.9090909090909 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (tur_Latn-rus_Cyrl) type: mteb/flores config: tur_Latn-rus_Cyrl split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 99.2094861660079 - type: f1 value: 98.9459815546772 - type: main_score value: 98.9459815546772 - type: precision value: 98.81422924901186 - type: recall value: 99.2094861660079 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (ace_Latn-rus_Cyrl) type: mteb/flores config: ace_Latn-rus_Cyrl split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 66.10671936758892 - type: f1 value: 63.81888256297873 - type: main_score value: 63.81888256297873 - type: precision value: 63.01614067933451 - type: recall value: 66.10671936758892 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (ban_Latn-rus_Cyrl) type: mteb/flores config: ban_Latn-rus_Cyrl split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 79.44664031620553 - type: f1 value: 77.6311962082713 - type: main_score value: 77.6311962082713 - type: precision value: 76.93977931929739 - type: recall value: 79.44664031620553 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (ell_Grek-rus_Cyrl) type: mteb/flores config: ell_Grek-rus_Cyrl split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 99.40711462450594 - type: f1 value: 99.2094861660079 - type: main_score value: 99.2094861660079 - type: precision value: 99.1106719367589 - type: recall value: 99.40711462450594 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (hne_Deva-rus_Cyrl) type: mteb/flores config: hne_Deva-rus_Cyrl split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 96.83794466403161 - type: f1 value: 96.25352907961603 - type: main_score value: 96.25352907961603 - type: precision value: 96.02155091285526 - type: recall value: 96.83794466403161 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (kik_Latn-rus_Cyrl) type: mteb/flores config: kik_Latn-rus_Cyrl split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 76.28458498023716 - type: f1 value: 73.5596919895859 - type: main_score value: 73.5596919895859 - type: precision value: 72.40900759055246 - type: recall value: 76.28458498023716 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (mai_Deva-rus_Cyrl) type: mteb/flores config: mai_Deva-rus_Cyrl split: devtest 
revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 97.72727272727273 - type: f1 value: 97.37812911725956 - type: main_score value: 97.37812911725956 - type: precision value: 97.26002258610953 - type: recall value: 97.72727272727273 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (pbt_Arab-rus_Cyrl) type: mteb/flores config: pbt_Arab-rus_Cyrl split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 94.0711462450593 - type: f1 value: 93.34700387331966 - type: main_score value: 93.34700387331966 - type: precision value: 93.06920556920556 - type: recall value: 94.0711462450593 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (spa_Latn-rus_Cyrl) type: mteb/flores config: spa_Latn-rus_Cyrl split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 99.2094861660079 - type: f1 value: 98.9459815546772 - type: main_score value: 98.9459815546772 - type: precision value: 98.81422924901186 - type: recall value: 99.2094861660079 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (twi_Latn-rus_Cyrl) type: mteb/flores config: twi_Latn-rus_Cyrl split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 80.73122529644269 - type: f1 value: 77.77434363246721 - type: main_score value: 77.77434363246721 - type: precision value: 76.54444287596462 - type: recall value: 80.73122529644269 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (acm_Arab-rus_Cyrl) type: mteb/flores config: acm_Arab-rus_Cyrl split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 94.56521739130434 - type: f1 value: 92.92490118577075 - type: main_score value: 92.92490118577075 - type: precision value: 92.16897233201581 - type: recall value: 94.56521739130434 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (bel_Cyrl-rus_Cyrl) type: mteb/flores config: bel_Cyrl-rus_Cyrl split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 99.2094861660079 - type: f1 value: 98.98550724637681 - type: main_score value: 98.98550724637681 - type: precision value: 98.88833992094862 - type: recall value: 99.2094861660079 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (eng_Latn-rus_Cyrl) type: mteb/flores config: eng_Latn-rus_Cyrl split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 99.60474308300395 - type: f1 value: 99.4729907773386 - type: main_score value: 99.4729907773386 - type: precision value: 99.40711462450594 - type: recall value: 99.60474308300395 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (hrv_Latn-rus_Cyrl) type: mteb/flores config: hrv_Latn-rus_Cyrl split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 99.2094861660079 - type: f1 value: 99.05138339920948 - type: main_score value: 99.05138339920948 - type: precision value: 99.00691699604744 - type: recall value: 99.2094861660079 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (kin_Latn-rus_Cyrl) type: mteb/flores config: kin_Latn-rus_Cyrl split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 88.2411067193676 - type: f1 value: 86.5485246227658 - type: main_score value: 86.5485246227658 - type: precision value: 85.90652101521667 - type: recall value: 88.2411067193676 - task: type: 
BitextMining dataset: name: MTEB FloresBitextMining (mal_Mlym-rus_Cyrl) type: mteb/flores config: mal_Mlym-rus_Cyrl split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 98.51778656126481 - type: f1 value: 98.07971014492753 - type: main_score value: 98.07971014492753 - type: precision value: 97.88372859025033 - type: recall value: 98.51778656126481 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (pes_Arab-rus_Cyrl) type: mteb/flores config: pes_Arab-rus_Cyrl split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 98.51778656126481 - type: f1 value: 98.0566534914361 - type: main_score value: 98.0566534914361 - type: precision value: 97.82608695652173 - type: recall value: 98.51778656126481 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (srd_Latn-rus_Cyrl) type: mteb/flores config: srd_Latn-rus_Cyrl split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 82.6086956521739 - type: f1 value: 80.9173470979821 - type: main_score value: 80.9173470979821 - type: precision value: 80.24468672882627 - type: recall value: 82.6086956521739 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (tzm_Tfng-rus_Cyrl) type: mteb/flores config: tzm_Tfng-rus_Cyrl split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 7.41106719367589 - type: f1 value: 6.363562740945329 - type: main_score value: 6.363562740945329 - type: precision value: 6.090373175353411 - type: recall value: 7.41106719367589 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (acq_Arab-rus_Cyrl) type: mteb/flores config: acq_Arab-rus_Cyrl split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 95.25691699604744 - type: f1 value: 93.81422924901187 - type: main_score value: 93.81422924901187 - type: precision value: 93.14064558629775 - type: recall value: 95.25691699604744 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (bem_Latn-rus_Cyrl) type: mteb/flores config: bem_Latn-rus_Cyrl split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 68.08300395256917 - type: f1 value: 65.01368772860867 - type: main_score value: 65.01368772860867 - type: precision value: 63.91052337510628 - type: recall value: 68.08300395256917 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (epo_Latn-rus_Cyrl) type: mteb/flores config: epo_Latn-rus_Cyrl split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 98.41897233201581 - type: f1 value: 98.17193675889328 - type: main_score value: 98.17193675889328 - type: precision value: 98.08210564139418 - type: recall value: 98.41897233201581 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (hun_Latn-rus_Cyrl) type: mteb/flores config: hun_Latn-rus_Cyrl split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 99.30830039525692 - type: f1 value: 99.1106719367589 - type: main_score value: 99.1106719367589 - type: precision value: 99.01185770750988 - type: recall value: 99.30830039525692 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (kir_Cyrl-rus_Cyrl) type: mteb/flores config: kir_Cyrl-rus_Cyrl split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 97.5296442687747 - type: f1 value: 97.07549806364035 - type: 
main_score value: 97.07549806364035 - type: precision value: 96.90958498023716 - type: recall value: 97.5296442687747 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (mar_Deva-rus_Cyrl) type: mteb/flores config: mar_Deva-rus_Cyrl split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 97.82608695652173 - type: f1 value: 97.44400527009222 - type: main_score value: 97.44400527009222 - type: precision value: 97.28966685488425 - type: recall value: 97.82608695652173 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (plt_Latn-rus_Cyrl) type: mteb/flores config: plt_Latn-rus_Cyrl split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 79.9407114624506 - type: f1 value: 78.3154177760691 - type: main_score value: 78.3154177760691 - type: precision value: 77.69877344877344 - type: recall value: 79.9407114624506 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (srp_Cyrl-rus_Cyrl) type: mteb/flores config: srp_Cyrl-rus_Cyrl split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 99.70355731225297 - type: f1 value: 99.60474308300395 - type: main_score value: 99.60474308300395 - type: precision value: 99.55533596837944 - type: recall value: 99.70355731225297 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (uig_Arab-rus_Cyrl) type: mteb/flores config: uig_Arab-rus_Cyrl split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 83.20158102766798 - type: f1 value: 81.44381923034585 - type: main_score value: 81.44381923034585 - type: precision value: 80.78813411582477 - type: recall value: 83.20158102766798 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (aeb_Arab-rus_Cyrl) type: mteb/flores config: aeb_Arab-rus_Cyrl split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 91.20553359683794 - type: f1 value: 88.75352907961603 - type: main_score value: 88.75352907961603 - type: precision value: 87.64328063241106 - type: recall value: 91.20553359683794 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (ben_Beng-rus_Cyrl) type: mteb/flores config: ben_Beng-rus_Cyrl split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 98.91304347826086 - type: f1 value: 98.60671936758894 - type: main_score value: 98.60671936758894 - type: precision value: 98.4766139657444 - type: recall value: 98.91304347826086 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (est_Latn-rus_Cyrl) type: mteb/flores config: est_Latn-rus_Cyrl split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 96.24505928853755 - type: f1 value: 95.27417027417027 - type: main_score value: 95.27417027417027 - type: precision value: 94.84107378129117 - type: recall value: 96.24505928853755 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (hye_Armn-rus_Cyrl) type: mteb/flores config: hye_Armn-rus_Cyrl split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 98.02371541501977 - type: f1 value: 97.67786561264822 - type: main_score value: 97.67786561264822 - type: precision value: 97.55839022637441 - type: recall value: 98.02371541501977 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (kmb_Latn-rus_Cyrl) type: mteb/flores config: kmb_Latn-rus_Cyrl split: devtest revision: 
e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 46.047430830039524 - type: f1 value: 42.94464804804471 - type: main_score value: 42.94464804804471 - type: precision value: 41.9851895607238 - type: recall value: 46.047430830039524 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (min_Arab-rus_Cyrl) type: mteb/flores config: min_Arab-rus_Cyrl split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 3.9525691699604746 - type: f1 value: 3.402665192725756 - type: main_score value: 3.402665192725756 - type: precision value: 3.303787557740127 - type: recall value: 3.9525691699604746 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (pol_Latn-rus_Cyrl) type: mteb/flores config: pol_Latn-rus_Cyrl split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 99.60474308300395 - type: f1 value: 99.4729907773386 - type: main_score value: 99.4729907773386 - type: precision value: 99.40711462450594 - type: recall value: 99.60474308300395 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (ssw_Latn-rus_Cyrl) type: mteb/flores config: ssw_Latn-rus_Cyrl split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 73.22134387351778 - type: f1 value: 70.43086049508975 - type: main_score value: 70.43086049508975 - type: precision value: 69.35312022355656 - type: recall value: 73.22134387351778 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (ukr_Cyrl-rus_Cyrl) type: mteb/flores config: ukr_Cyrl-rus_Cyrl split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 99.90118577075098 - type: f1 value: 99.86824769433464 - type: main_score value: 99.86824769433464 - type: precision value: 99.85177865612648 - type: recall value: 99.90118577075098 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (afr_Latn-rus_Cyrl) type: mteb/flores config: afr_Latn-rus_Cyrl split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 99.2094861660079 - type: f1 value: 98.9459815546772 - type: main_score value: 98.9459815546772 - type: precision value: 98.81422924901186 - type: recall value: 99.2094861660079 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (bho_Deva-rus_Cyrl) type: mteb/flores config: bho_Deva-rus_Cyrl split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 94.0711462450593 - type: f1 value: 93.12182382834557 - type: main_score value: 93.12182382834557 - type: precision value: 92.7523453232338 - type: recall value: 94.0711462450593 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (eus_Latn-rus_Cyrl) type: mteb/flores config: eus_Latn-rus_Cyrl split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 92.19367588932806 - type: f1 value: 91.23604975587072 - type: main_score value: 91.23604975587072 - type: precision value: 90.86697443588663 - type: recall value: 92.19367588932806 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (ibo_Latn-rus_Cyrl) type: mteb/flores config: ibo_Latn-rus_Cyrl split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 82.21343873517787 - type: f1 value: 80.17901604858126 - type: main_score value: 80.17901604858126 - type: precision value: 79.3792284780028 - type: recall value: 82.21343873517787 - task: type: BitextMining 
dataset: name: MTEB FloresBitextMining (kmr_Latn-rus_Cyrl) type: mteb/flores config: kmr_Latn-rus_Cyrl split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 68.67588932806325 - type: f1 value: 66.72311714750278 - type: main_score value: 66.72311714750278 - type: precision value: 66.00178401554004 - type: recall value: 68.67588932806325 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (min_Latn-rus_Cyrl) type: mteb/flores config: min_Latn-rus_Cyrl split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 78.65612648221344 - type: f1 value: 76.26592719972166 - type: main_score value: 76.26592719972166 - type: precision value: 75.39980459997484 - type: recall value: 78.65612648221344 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (por_Latn-rus_Cyrl) type: mteb/flores config: por_Latn-rus_Cyrl split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 96.83794466403161 - type: f1 value: 95.9669678147939 - type: main_score value: 95.9669678147939 - type: precision value: 95.59453227931488 - type: recall value: 96.83794466403161 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (sun_Latn-rus_Cyrl) type: mteb/flores config: sun_Latn-rus_Cyrl split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 92.4901185770751 - type: f1 value: 91.66553983773662 - type: main_score value: 91.66553983773662 - type: precision value: 91.34530928009188 - type: recall value: 92.4901185770751 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (umb_Latn-rus_Cyrl) type: mteb/flores config: umb_Latn-rus_Cyrl split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 41.00790513833992 - type: f1 value: 38.21319326004483 - type: main_score value: 38.21319326004483 - type: precision value: 37.200655467675546 - type: recall value: 41.00790513833992 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (ajp_Arab-rus_Cyrl) type: mteb/flores config: ajp_Arab-rus_Cyrl split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 95.35573122529645 - type: f1 value: 93.97233201581028 - type: main_score value: 93.97233201581028 - type: precision value: 93.33333333333333 - type: recall value: 95.35573122529645 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (bjn_Arab-rus_Cyrl) type: mteb/flores config: bjn_Arab-rus_Cyrl split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 3.6561264822134385 - type: f1 value: 3.1071978056336484 - type: main_score value: 3.1071978056336484 - type: precision value: 3.0039741229718215 - type: recall value: 3.6561264822134385 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (ewe_Latn-rus_Cyrl) type: mteb/flores config: ewe_Latn-rus_Cyrl split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 62.845849802371546 - type: f1 value: 59.82201175670472 - type: main_score value: 59.82201175670472 - type: precision value: 58.72629236362003 - type: recall value: 62.845849802371546 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (ilo_Latn-rus_Cyrl) type: mteb/flores config: ilo_Latn-rus_Cyrl split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 83.10276679841897 - type: f1 value: 80.75065288987582 - type: 
main_score value: 80.75065288987582 - type: precision value: 79.80726451662179 - type: recall value: 83.10276679841897 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (knc_Arab-rus_Cyrl) type: mteb/flores config: knc_Arab-rus_Cyrl split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 10.079051383399209 - type: f1 value: 8.759282456080921 - type: main_score value: 8.759282456080921 - type: precision value: 8.474735138956142 - type: recall value: 10.079051383399209 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (mkd_Cyrl-rus_Cyrl) type: mteb/flores config: mkd_Cyrl-rus_Cyrl split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 98.91304347826086 - type: f1 value: 98.55072463768116 - type: main_score value: 98.55072463768116 - type: precision value: 98.36956521739131 - type: recall value: 98.91304347826086 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (prs_Arab-rus_Cyrl) type: mteb/flores config: prs_Arab-rus_Cyrl split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 99.01185770750988 - type: f1 value: 98.68247694334651 - type: main_score value: 98.68247694334651 - type: precision value: 98.51778656126481 - type: recall value: 99.01185770750988 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (swe_Latn-rus_Cyrl) type: mteb/flores config: swe_Latn-rus_Cyrl split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 99.40711462450594 - type: f1 value: 99.22595520421606 - type: main_score value: 99.22595520421606 - type: precision value: 99.14361001317523 - type: recall value: 99.40711462450594 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (urd_Arab-rus_Cyrl) type: mteb/flores config: urd_Arab-rus_Cyrl split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 97.82608695652173 - type: f1 value: 97.25625823451911 - type: main_score value: 97.25625823451911 - type: precision value: 97.03063241106719 - type: recall value: 97.82608695652173 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (aka_Latn-rus_Cyrl) type: mteb/flores config: aka_Latn-rus_Cyrl split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 81.22529644268775 - type: f1 value: 77.94307687941227 - type: main_score value: 77.94307687941227 - type: precision value: 76.58782793293665 - type: recall value: 81.22529644268775 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (bjn_Latn-rus_Cyrl) type: mteb/flores config: bjn_Latn-rus_Cyrl split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 85.27667984189723 - type: f1 value: 83.6869192829922 - type: main_score value: 83.6869192829922 - type: precision value: 83.08670670691656 - type: recall value: 85.27667984189723 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (fao_Latn-rus_Cyrl) type: mteb/flores config: fao_Latn-rus_Cyrl split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 80.9288537549407 - type: f1 value: 79.29806087454745 - type: main_score value: 79.29806087454745 - type: precision value: 78.71445871526987 - type: recall value: 80.9288537549407 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (ind_Latn-rus_Cyrl) type: mteb/flores config: ind_Latn-rus_Cyrl split: devtest revision: 
e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 98.12252964426878 - type: f1 value: 97.5296442687747 - type: main_score value: 97.5296442687747 - type: precision value: 97.23320158102767 - type: recall value: 98.12252964426878 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (knc_Latn-rus_Cyrl) type: mteb/flores config: knc_Latn-rus_Cyrl split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 33.49802371541502 - type: f1 value: 32.02378215033989 - type: main_score value: 32.02378215033989 - type: precision value: 31.511356103747406 - type: recall value: 33.49802371541502 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (mlt_Latn-rus_Cyrl) type: mteb/flores config: mlt_Latn-rus_Cyrl split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 91.40316205533597 - type: f1 value: 90.35317684386006 - type: main_score value: 90.35317684386006 - type: precision value: 89.94845939633488 - type: recall value: 91.40316205533597 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (quy_Latn-rus_Cyrl) type: mteb/flores config: quy_Latn-rus_Cyrl split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 40.612648221343875 - type: f1 value: 38.74337544712602 - type: main_score value: 38.74337544712602 - type: precision value: 38.133716022178575 - type: recall value: 40.612648221343875 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (swh_Latn-rus_Cyrl) type: mteb/flores config: swh_Latn-rus_Cyrl split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 97.13438735177866 - type: f1 value: 96.47435897435898 - type: main_score value: 96.47435897435898 - type: precision value: 96.18741765480895 - type: recall value: 97.13438735177866 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (uzn_Latn-rus_Cyrl) type: mteb/flores config: uzn_Latn-rus_Cyrl split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 96.83794466403161 - type: f1 value: 96.26355528529442 - type: main_score value: 96.26355528529442 - type: precision value: 96.0501756697409 - type: recall value: 96.83794466403161 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (als_Latn-rus_Cyrl) type: mteb/flores config: als_Latn-rus_Cyrl split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 98.91304347826086 - type: f1 value: 98.6907114624506 - type: main_score value: 98.6907114624506 - type: precision value: 98.6142480707698 - type: recall value: 98.91304347826086 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (bod_Tibt-rus_Cyrl) type: mteb/flores config: bod_Tibt-rus_Cyrl split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 1.0869565217391304 - type: f1 value: 0.9224649610442628 - type: main_score value: 0.9224649610442628 - type: precision value: 0.8894275740459898 - type: recall value: 1.0869565217391304 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (fij_Latn-rus_Cyrl) type: mteb/flores config: fij_Latn-rus_Cyrl split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 63.24110671936759 - type: f1 value: 60.373189068189525 - type: main_score value: 60.373189068189525 - type: precision value: 59.32326368115546 - type: recall value: 63.24110671936759 - task: type: 
BitextMining dataset: name: MTEB FloresBitextMining (isl_Latn-rus_Cyrl) type: mteb/flores config: isl_Latn-rus_Cyrl split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 89.03162055335969 - type: f1 value: 87.3102634715907 - type: main_score value: 87.3102634715907 - type: precision value: 86.65991814698712 - type: recall value: 89.03162055335969 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (kon_Latn-rus_Cyrl) type: mteb/flores config: kon_Latn-rus_Cyrl split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 73.91304347826086 - type: f1 value: 71.518235523573 - type: main_score value: 71.518235523573 - type: precision value: 70.58714102449801 - type: recall value: 73.91304347826086 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (mni_Beng-rus_Cyrl) type: mteb/flores config: mni_Beng-rus_Cyrl split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 29.545454545454547 - type: f1 value: 27.59513619889114 - type: main_score value: 27.59513619889114 - type: precision value: 26.983849851025344 - type: recall value: 29.545454545454547 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (ron_Latn-rus_Cyrl) type: mteb/flores config: ron_Latn-rus_Cyrl split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 99.40711462450594 - type: f1 value: 99.2094861660079 - type: main_score value: 99.2094861660079 - type: precision value: 99.1106719367589 - type: recall value: 99.40711462450594 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (szl_Latn-rus_Cyrl) type: mteb/flores config: szl_Latn-rus_Cyrl split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 86.26482213438736 - type: f1 value: 85.18912031587512 - type: main_score value: 85.18912031587512 - type: precision value: 84.77199409959775 - type: recall value: 86.26482213438736 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (vec_Latn-rus_Cyrl) type: mteb/flores config: vec_Latn-rus_Cyrl split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 85.67193675889328 - type: f1 value: 84.62529734716581 - type: main_score value: 84.62529734716581 - type: precision value: 84.2611422440705 - type: recall value: 85.67193675889328 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (amh_Ethi-rus_Cyrl) type: mteb/flores config: amh_Ethi-rus_Cyrl split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 94.76284584980237 - type: f1 value: 93.91735076517685 - type: main_score value: 93.91735076517685 - type: precision value: 93.57553798858147 - type: recall value: 94.76284584980237 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (bos_Latn-rus_Cyrl) type: mteb/flores config: bos_Latn-rus_Cyrl split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 99.2094861660079 - type: f1 value: 99.05655938264634 - type: main_score value: 99.05655938264634 - type: precision value: 99.01185770750988 - type: recall value: 99.2094861660079 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (fin_Latn-rus_Cyrl) type: mteb/flores config: fin_Latn-rus_Cyrl split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 98.02371541501977 - type: f1 value: 97.43741765480895 - type: 
main_score value: 97.43741765480895 - type: precision value: 97.1590909090909 - type: recall value: 98.02371541501977 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (ita_Latn-rus_Cyrl) type: mteb/flores config: ita_Latn-rus_Cyrl split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 99.70355731225297 - type: f1 value: 99.60474308300395 - type: main_score value: 99.60474308300395 - type: precision value: 99.55533596837944 - type: recall value: 99.70355731225297 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (kor_Hang-rus_Cyrl) type: mteb/flores config: kor_Hang-rus_Cyrl split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 97.33201581027669 - type: f1 value: 96.49868247694334 - type: main_score value: 96.49868247694334 - type: precision value: 96.10507246376811 - type: recall value: 97.33201581027669 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (mos_Latn-rus_Cyrl) type: mteb/flores config: mos_Latn-rus_Cyrl split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 34.683794466403164 - type: f1 value: 32.766819308009076 - type: main_score value: 32.766819308009076 - type: precision value: 32.1637493670237 - type: recall value: 34.683794466403164 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (run_Latn-rus_Cyrl) type: mteb/flores config: run_Latn-rus_Cyrl split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 83.399209486166 - type: f1 value: 81.10578750604326 - type: main_score value: 81.10578750604326 - type: precision value: 80.16763162673529 - type: recall value: 83.399209486166 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (tam_Taml-rus_Cyrl) type: mteb/flores config: tam_Taml-rus_Cyrl split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 98.41897233201581 - type: f1 value: 98.01548089591567 - type: main_score value: 98.01548089591567 - type: precision value: 97.84020327498588 - type: recall value: 98.41897233201581 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (vie_Latn-rus_Cyrl) type: mteb/flores config: vie_Latn-rus_Cyrl split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 99.1106719367589 - type: f1 value: 98.81422924901186 - type: main_score value: 98.81422924901186 - type: precision value: 98.66600790513834 - type: recall value: 99.1106719367589 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (apc_Arab-rus_Cyrl) type: mteb/flores config: apc_Arab-rus_Cyrl split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 93.87351778656127 - type: f1 value: 92.10803689064558 - type: main_score value: 92.10803689064558 - type: precision value: 91.30434782608695 - type: recall value: 93.87351778656127 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (bug_Latn-rus_Cyrl) type: mteb/flores config: bug_Latn-rus_Cyrl split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 57.608695652173914 - type: f1 value: 54.95878654927162 - type: main_score value: 54.95878654927162 - type: precision value: 54.067987427805654 - type: recall value: 57.608695652173914 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (fon_Latn-rus_Cyrl) type: mteb/flores config: fon_Latn-rus_Cyrl split: devtest revision: 
e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 61.95652173913043 - type: f1 value: 58.06537275812945 - type: main_score value: 58.06537275812945 - type: precision value: 56.554057596959204 - type: recall value: 61.95652173913043 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (jav_Latn-rus_Cyrl) type: mteb/flores config: jav_Latn-rus_Cyrl split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 93.47826086956522 - type: f1 value: 92.4784405318002 - type: main_score value: 92.4784405318002 - type: precision value: 92.09168143201127 - type: recall value: 93.47826086956522 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (lao_Laoo-rus_Cyrl) type: mteb/flores config: lao_Laoo-rus_Cyrl split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 91.10671936758892 - type: f1 value: 89.76104922745239 - type: main_score value: 89.76104922745239 - type: precision value: 89.24754593232855 - type: recall value: 91.10671936758892 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (mri_Latn-rus_Cyrl) type: mteb/flores config: mri_Latn-rus_Cyrl split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 71.14624505928853 - type: f1 value: 68.26947125119062 - type: main_score value: 68.26947125119062 - type: precision value: 67.15942311051006 - type: recall value: 71.14624505928853 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (rus_Cyrl-ace_Arab) type: mteb/flores config: rus_Cyrl-ace_Arab split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 19.565217391304348 - type: f1 value: 16.321465000323805 - type: main_score value: 16.321465000323805 - type: precision value: 15.478527409347508 - type: recall value: 19.565217391304348 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (rus_Cyrl-bam_Latn) type: mteb/flores config: rus_Cyrl-bam_Latn split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 73.41897233201581 - type: f1 value: 68.77366228182746 - type: main_score value: 68.77366228182746 - type: precision value: 66.96012924273795 - type: recall value: 73.41897233201581 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (rus_Cyrl-dzo_Tibt) type: mteb/flores config: rus_Cyrl-dzo_Tibt split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 0.592885375494071 - type: f1 value: 0.02458062426370458 - type: main_score value: 0.02458062426370458 - type: precision value: 0.012824114724683876 - type: recall value: 0.592885375494071 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (rus_Cyrl-hin_Deva) type: mteb/flores config: rus_Cyrl-hin_Deva split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 99.90118577075098 - type: f1 value: 99.86824769433464 - type: main_score value: 99.86824769433464 - type: precision value: 99.85177865612648 - type: recall value: 99.90118577075098 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (rus_Cyrl-khm_Khmr) type: mteb/flores config: rus_Cyrl-khm_Khmr split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 97.13438735177866 - type: f1 value: 96.24505928853755 - type: main_score value: 96.24505928853755 - type: precision value: 95.81686429512516 - type: recall value: 97.13438735177866 - task: 
type: BitextMining dataset: name: MTEB FloresBitextMining (rus_Cyrl-mag_Deva) type: mteb/flores config: rus_Cyrl-mag_Deva split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 99.50592885375494 - type: f1 value: 99.35770750988142 - type: main_score value: 99.35770750988142 - type: precision value: 99.29183135704875 - type: recall value: 99.50592885375494 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (rus_Cyrl-pap_Latn) type: mteb/flores config: rus_Cyrl-pap_Latn split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 96.93675889328063 - type: f1 value: 96.05072463768116 - type: main_score value: 96.05072463768116 - type: precision value: 95.66040843214758 - type: recall value: 96.93675889328063 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (rus_Cyrl-sot_Latn) type: mteb/flores config: rus_Cyrl-sot_Latn split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 93.67588932806325 - type: f1 value: 91.7786561264822 - type: main_score value: 91.7786561264822 - type: precision value: 90.91238471673255 - type: recall value: 93.67588932806325 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (rus_Cyrl-tur_Latn) type: mteb/flores config: rus_Cyrl-tur_Latn split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 99.01185770750988 - type: f1 value: 98.68247694334651 - type: main_score value: 98.68247694334651 - type: precision value: 98.51778656126481 - type: recall value: 99.01185770750988 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (rus_Cyrl-ace_Latn) type: mteb/flores config: rus_Cyrl-ace_Latn split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 74.1106719367589 - type: f1 value: 70.21737923911836 - type: main_score value: 70.21737923911836 - type: precision value: 68.7068791410511 - type: recall value: 74.1106719367589 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (rus_Cyrl-ban_Latn) type: mteb/flores config: rus_Cyrl-ban_Latn split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 81.7193675889328 - type: f1 value: 78.76470334510617 - type: main_score value: 78.76470334510617 - type: precision value: 77.76208475761422 - type: recall value: 81.7193675889328 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (rus_Cyrl-ell_Grek) type: mteb/flores config: rus_Cyrl-ell_Grek split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 98.3201581027668 - type: f1 value: 97.76021080368908 - type: main_score value: 97.76021080368908 - type: precision value: 97.48023715415019 - type: recall value: 98.3201581027668 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (rus_Cyrl-hne_Deva) type: mteb/flores config: rus_Cyrl-hne_Deva split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 98.51778656126481 - type: f1 value: 98.0566534914361 - type: main_score value: 98.0566534914361 - type: precision value: 97.82608695652173 - type: recall value: 98.51778656126481 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (rus_Cyrl-kik_Latn) type: mteb/flores config: rus_Cyrl-kik_Latn split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 80.73122529644269 - type: f1 value: 76.42689244220864 - type: 
main_score value: 76.42689244220864 - type: precision value: 74.63877909530083 - type: recall value: 80.73122529644269 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (rus_Cyrl-mai_Deva) type: mteb/flores config: rus_Cyrl-mai_Deva split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 98.91304347826086 - type: f1 value: 98.56719367588933 - type: main_score value: 98.56719367588933 - type: precision value: 98.40250329380763 - type: recall value: 98.91304347826086 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (rus_Cyrl-pbt_Arab) type: mteb/flores config: rus_Cyrl-pbt_Arab split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 97.5296442687747 - type: f1 value: 96.73913043478261 - type: main_score value: 96.73913043478261 - type: precision value: 96.36034255599473 - type: recall value: 97.5296442687747 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (rus_Cyrl-spa_Latn) type: mteb/flores config: rus_Cyrl-spa_Latn split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 99.40711462450594 - type: f1 value: 99.20948616600789 - type: main_score value: 99.20948616600789 - type: precision value: 99.1106719367589 - type: recall value: 99.40711462450594 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (rus_Cyrl-twi_Latn) type: mteb/flores config: rus_Cyrl-twi_Latn split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 82.01581027667984 - type: f1 value: 78.064787822953 - type: main_score value: 78.064787822953 - type: precision value: 76.43272186750448 - type: recall value: 82.01581027667984 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (rus_Cyrl-acm_Arab) type: mteb/flores config: rus_Cyrl-acm_Arab split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 98.3201581027668 - type: f1 value: 97.76021080368908 - type: main_score value: 97.76021080368908 - type: precision value: 97.48023715415019 - type: recall value: 98.3201581027668 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (rus_Cyrl-bel_Cyrl) type: mteb/flores config: rus_Cyrl-bel_Cyrl split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 98.22134387351778 - type: f1 value: 97.67786561264822 - type: main_score value: 97.67786561264822 - type: precision value: 97.4308300395257 - type: recall value: 98.22134387351778 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (rus_Cyrl-eng_Latn) type: mteb/flores config: rus_Cyrl-eng_Latn split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 99.70355731225297 - type: f1 value: 99.60474308300395 - type: main_score value: 99.60474308300395 - type: precision value: 99.55533596837944 - type: recall value: 99.70355731225297 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (rus_Cyrl-hrv_Latn) type: mteb/flores config: rus_Cyrl-hrv_Latn split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 99.1106719367589 - type: f1 value: 98.83069828722002 - type: main_score value: 98.83069828722002 - type: precision value: 98.69894598155466 - type: recall value: 99.1106719367589 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (rus_Cyrl-kin_Latn) type: mteb/flores config: rus_Cyrl-kin_Latn split: devtest revision: 
e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 93.37944664031622 - type: f1 value: 91.53162055335969 - type: main_score value: 91.53162055335969 - type: precision value: 90.71475625823452 - type: recall value: 93.37944664031622 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (rus_Cyrl-mal_Mlym) type: mteb/flores config: rus_Cyrl-mal_Mlym split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 99.30830039525692 - type: f1 value: 99.07773386034255 - type: main_score value: 99.07773386034255 - type: precision value: 98.96245059288538 - type: recall value: 99.30830039525692 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (rus_Cyrl-pes_Arab) type: mteb/flores config: rus_Cyrl-pes_Arab split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 98.71541501976284 - type: f1 value: 98.30368906455863 - type: main_score value: 98.30368906455863 - type: precision value: 98.10606060606061 - type: recall value: 98.71541501976284 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (rus_Cyrl-srd_Latn) type: mteb/flores config: rus_Cyrl-srd_Latn split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 89.03162055335969 - type: f1 value: 86.11048371917937 - type: main_score value: 86.11048371917937 - type: precision value: 84.86001317523056 - type: recall value: 89.03162055335969 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (rus_Cyrl-tzm_Tfng) type: mteb/flores config: rus_Cyrl-tzm_Tfng split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 12.351778656126482 - type: f1 value: 10.112177999067715 - type: main_score value: 10.112177999067715 - type: precision value: 9.53495885438645 - type: recall value: 12.351778656126482 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (rus_Cyrl-acq_Arab) type: mteb/flores config: rus_Cyrl-acq_Arab split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 98.91304347826086 - type: f1 value: 98.55072463768116 - type: main_score value: 98.55072463768116 - type: precision value: 98.36956521739131 - type: recall value: 98.91304347826086 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (rus_Cyrl-bem_Latn) type: mteb/flores config: rus_Cyrl-bem_Latn split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 73.22134387351778 - type: f1 value: 68.30479412989295 - type: main_score value: 68.30479412989295 - type: precision value: 66.40073447632736 - type: recall value: 73.22134387351778 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (rus_Cyrl-epo_Latn) type: mteb/flores config: rus_Cyrl-epo_Latn split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 99.1106719367589 - type: f1 value: 98.81422924901186 - type: main_score value: 98.81422924901186 - type: precision value: 98.66600790513834 - type: recall value: 99.1106719367589 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (rus_Cyrl-hun_Latn) type: mteb/flores config: rus_Cyrl-hun_Latn split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 96.83794466403161 - type: f1 value: 95.88274044795784 - type: main_score value: 95.88274044795784 - type: precision value: 95.45454545454545 - type: recall value: 96.83794466403161 - task: type: 
BitextMining dataset: name: MTEB FloresBitextMining (rus_Cyrl-kir_Cyrl) type: mteb/flores config: rus_Cyrl-kir_Cyrl split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 96.34387351778656 - type: f1 value: 95.49280429715212 - type: main_score value: 95.49280429715212 - type: precision value: 95.14163372859026 - type: recall value: 96.34387351778656 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (rus_Cyrl-mar_Deva) type: mteb/flores config: rus_Cyrl-mar_Deva split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 98.71541501976284 - type: f1 value: 98.28722002635047 - type: main_score value: 98.28722002635047 - type: precision value: 98.07312252964427 - type: recall value: 98.71541501976284 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (rus_Cyrl-plt_Latn) type: mteb/flores config: rus_Cyrl-plt_Latn split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 88.04347826086956 - type: f1 value: 85.14328063241106 - type: main_score value: 85.14328063241106 - type: precision value: 83.96339168078298 - type: recall value: 88.04347826086956 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (rus_Cyrl-srp_Cyrl) type: mteb/flores config: rus_Cyrl-srp_Cyrl split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 99.40711462450594 - type: f1 value: 99.2094861660079 - type: main_score value: 99.2094861660079 - type: precision value: 99.1106719367589 - type: recall value: 99.40711462450594 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (rus_Cyrl-uig_Arab) type: mteb/flores config: rus_Cyrl-uig_Arab split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 92.19367588932806 - type: f1 value: 89.98541313758706 - type: main_score value: 89.98541313758706 - type: precision value: 89.01021080368906 - type: recall value: 92.19367588932806 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (rus_Cyrl-aeb_Arab) type: mteb/flores config: rus_Cyrl-aeb_Arab split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 95.8498023715415 - type: f1 value: 94.63109354413703 - type: main_score value: 94.63109354413703 - type: precision value: 94.05467720685111 - type: recall value: 95.8498023715415 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (rus_Cyrl-ben_Beng) type: mteb/flores config: rus_Cyrl-ben_Beng split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 99.40711462450594 - type: f1 value: 99.2094861660079 - type: main_score value: 99.2094861660079 - type: precision value: 99.1106719367589 - type: recall value: 99.40711462450594 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (rus_Cyrl-est_Latn) type: mteb/flores config: rus_Cyrl-est_Latn split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 95.55335968379447 - type: f1 value: 94.2588932806324 - type: main_score value: 94.2588932806324 - type: precision value: 93.65118577075098 - type: recall value: 95.55335968379447 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (rus_Cyrl-hye_Armn) type: mteb/flores config: rus_Cyrl-hye_Armn split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 98.71541501976284 - type: f1 value: 98.28722002635045 - type: 
main_score value: 98.28722002635045 - type: precision value: 98.07312252964427 - type: recall value: 98.71541501976284 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (rus_Cyrl-kmb_Latn) type: mteb/flores config: rus_Cyrl-kmb_Latn split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 54.24901185770751 - type: f1 value: 49.46146674116913 - type: main_score value: 49.46146674116913 - type: precision value: 47.81033799314432 - type: recall value: 54.24901185770751 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (rus_Cyrl-min_Arab) type: mteb/flores config: rus_Cyrl-min_Arab split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 15.810276679841898 - type: f1 value: 13.271207641419332 - type: main_score value: 13.271207641419332 - type: precision value: 12.510673148766033 - type: recall value: 15.810276679841898 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (rus_Cyrl-pol_Latn) type: mteb/flores config: rus_Cyrl-pol_Latn split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 98.71541501976284 - type: f1 value: 98.32674571805006 - type: main_score value: 98.32674571805006 - type: precision value: 98.14723320158103 - type: recall value: 98.71541501976284 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (rus_Cyrl-ssw_Latn) type: mteb/flores config: rus_Cyrl-ssw_Latn split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 80.8300395256917 - type: f1 value: 76.51717847370023 - type: main_score value: 76.51717847370023 - type: precision value: 74.74143610013175 - type: recall value: 80.8300395256917 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (rus_Cyrl-ukr_Cyrl) type: mteb/flores config: rus_Cyrl-ukr_Cyrl split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 99.60474308300395 - type: f1 value: 99.4729907773386 - type: main_score value: 99.4729907773386 - type: precision value: 99.40711462450594 - type: recall value: 99.60474308300395 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (rus_Cyrl-afr_Latn) type: mteb/flores config: rus_Cyrl-afr_Latn split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 99.1106719367589 - type: f1 value: 98.81422924901186 - type: main_score value: 98.81422924901186 - type: precision value: 98.66600790513834 - type: recall value: 99.1106719367589 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (rus_Cyrl-bho_Deva) type: mteb/flores config: rus_Cyrl-bho_Deva split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 96.6403162055336 - type: f1 value: 95.56982872200265 - type: main_score value: 95.56982872200265 - type: precision value: 95.0592885375494 - type: recall value: 96.6403162055336 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (rus_Cyrl-eus_Latn) type: mteb/flores config: rus_Cyrl-eus_Latn split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 97.62845849802372 - type: f1 value: 96.9038208168643 - type: main_score value: 96.9038208168643 - type: precision value: 96.55797101449275 - type: recall value: 97.62845849802372 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (rus_Cyrl-ibo_Latn) type: mteb/flores config: rus_Cyrl-ibo_Latn split: devtest revision: 
e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 89.2292490118577 - type: f1 value: 86.35234330886506 - type: main_score value: 86.35234330886506 - type: precision value: 85.09881422924902 - type: recall value: 89.2292490118577 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (rus_Cyrl-kmr_Latn) type: mteb/flores config: rus_Cyrl-kmr_Latn split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 83.49802371541502 - type: f1 value: 79.23630717108978 - type: main_score value: 79.23630717108978 - type: precision value: 77.48188405797102 - type: recall value: 83.49802371541502 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (rus_Cyrl-min_Latn) type: mteb/flores config: rus_Cyrl-min_Latn split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 79.34782608695652 - type: f1 value: 75.31689928429059 - type: main_score value: 75.31689928429059 - type: precision value: 73.91519410541149 - type: recall value: 79.34782608695652 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (rus_Cyrl-por_Latn) type: mteb/flores config: rus_Cyrl-por_Latn split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 96.54150197628458 - type: f1 value: 95.53218520609825 - type: main_score value: 95.53218520609825 - type: precision value: 95.07575757575756 - type: recall value: 96.54150197628458 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (rus_Cyrl-sun_Latn) type: mteb/flores config: rus_Cyrl-sun_Latn split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 93.2806324110672 - type: f1 value: 91.56973461321287 - type: main_score value: 91.56973461321287 - type: precision value: 90.84396334890405 - type: recall value: 93.2806324110672 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (rus_Cyrl-umb_Latn) type: mteb/flores config: rus_Cyrl-umb_Latn split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 51.87747035573123 - type: f1 value: 46.36591778884269 - type: main_score value: 46.36591778884269 - type: precision value: 44.57730391234227 - type: recall value: 51.87747035573123 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (rus_Cyrl-ajp_Arab) type: mteb/flores config: rus_Cyrl-ajp_Arab split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 98.71541501976284 - type: f1 value: 98.30368906455863 - type: main_score value: 98.30368906455863 - type: precision value: 98.10606060606061 - type: recall value: 98.71541501976284 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (rus_Cyrl-bjn_Arab) type: mteb/flores config: rus_Cyrl-bjn_Arab split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 14.82213438735178 - type: f1 value: 12.365434276616856 - type: main_score value: 12.365434276616856 - type: precision value: 11.802079517180589 - type: recall value: 14.82213438735178 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (rus_Cyrl-ewe_Latn) type: mteb/flores config: rus_Cyrl-ewe_Latn split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 71.44268774703558 - type: f1 value: 66.74603174603175 - type: main_score value: 66.74603174603175 - type: precision value: 64.99933339607253 - type: recall value: 71.44268774703558 - task: type: 
BitextMining dataset: name: MTEB FloresBitextMining (rus_Cyrl-ilo_Latn) type: mteb/flores config: rus_Cyrl-ilo_Latn split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 85.86956521739131 - type: f1 value: 83.00139015960917 - type: main_score value: 83.00139015960917 - type: precision value: 81.91411396574439 - type: recall value: 85.86956521739131 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (rus_Cyrl-knc_Arab) type: mteb/flores config: rus_Cyrl-knc_Arab split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 14.525691699604742 - type: f1 value: 12.618283715726806 - type: main_score value: 12.618283715726806 - type: precision value: 12.048458493742352 - type: recall value: 14.525691699604742 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (rus_Cyrl-mkd_Cyrl) type: mteb/flores config: rus_Cyrl-mkd_Cyrl split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 99.40711462450594 - type: f1 value: 99.22595520421606 - type: main_score value: 99.22595520421606 - type: precision value: 99.14361001317523 - type: recall value: 99.40711462450594 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (rus_Cyrl-prs_Arab) type: mteb/flores config: rus_Cyrl-prs_Arab split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 99.30830039525692 - type: f1 value: 99.07773386034255 - type: main_score value: 99.07773386034255 - type: precision value: 98.96245059288538 - type: recall value: 99.30830039525692 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (rus_Cyrl-swe_Latn) type: mteb/flores config: rus_Cyrl-swe_Latn split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 99.30830039525692 - type: f1 value: 99.07773386034256 - type: main_score value: 99.07773386034256 - type: precision value: 98.96245059288538 - type: recall value: 99.30830039525692 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (rus_Cyrl-urd_Arab) type: mteb/flores config: rus_Cyrl-urd_Arab split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 98.61660079051383 - type: f1 value: 98.15546772068511 - type: main_score value: 98.15546772068511 - type: precision value: 97.92490118577075 - type: recall value: 98.61660079051383 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (rus_Cyrl-aka_Latn) type: mteb/flores config: rus_Cyrl-aka_Latn split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 81.02766798418972 - type: f1 value: 76.73277809147375 - type: main_score value: 76.73277809147375 - type: precision value: 74.97404165882426 - type: recall value: 81.02766798418972 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (rus_Cyrl-bjn_Latn) type: mteb/flores config: rus_Cyrl-bjn_Latn split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 86.7588932806324 - type: f1 value: 83.92064566965753 - type: main_score value: 83.92064566965753 - type: precision value: 82.83734079929732 - type: recall value: 86.7588932806324 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (rus_Cyrl-fao_Latn) type: mteb/flores config: rus_Cyrl-fao_Latn split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 88.43873517786561 - type: f1 value: 85.48136645962732 - 
type: main_score value: 85.48136645962732 - type: precision value: 84.23418972332016 - type: recall value: 88.43873517786561 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (rus_Cyrl-ind_Latn) type: mteb/flores config: rus_Cyrl-ind_Latn split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 99.01185770750988 - type: f1 value: 98.68247694334651 - type: main_score value: 98.68247694334651 - type: precision value: 98.51778656126481 - type: recall value: 99.01185770750988 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (rus_Cyrl-knc_Latn) type: mteb/flores config: rus_Cyrl-knc_Latn split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 45.8498023715415 - type: f1 value: 40.112030865489366 - type: main_score value: 40.112030865489366 - type: precision value: 38.28262440050776 - type: recall value: 45.8498023715415 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (rus_Cyrl-mlt_Latn) type: mteb/flores config: rus_Cyrl-mlt_Latn split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 93.18181818181817 - type: f1 value: 91.30787690570298 - type: main_score value: 91.30787690570298 - type: precision value: 90.4983060417843 - type: recall value: 93.18181818181817 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (rus_Cyrl-quy_Latn) type: mteb/flores config: rus_Cyrl-quy_Latn split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 62.450592885375485 - type: f1 value: 57.28742975628178 - type: main_score value: 57.28742975628178 - type: precision value: 55.56854987623269 - type: recall value: 62.450592885375485 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (rus_Cyrl-swh_Latn) type: mteb/flores config: rus_Cyrl-swh_Latn split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 98.3201581027668 - type: f1 value: 97.77667984189723 - type: main_score value: 97.77667984189723 - type: precision value: 97.51317523056655 - type: recall value: 98.3201581027668 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (rus_Cyrl-uzn_Latn) type: mteb/flores config: rus_Cyrl-uzn_Latn split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 98.12252964426878 - type: f1 value: 97.59081498211933 - type: main_score value: 97.59081498211933 - type: precision value: 97.34848484848484 - type: recall value: 98.12252964426878 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (rus_Cyrl-als_Latn) type: mteb/flores config: rus_Cyrl-als_Latn split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 99.30830039525692 - type: f1 value: 99.09420289855073 - type: main_score value: 99.09420289855073 - type: precision value: 98.99538866930172 - type: recall value: 99.30830039525692 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (rus_Cyrl-bod_Tibt) type: mteb/flores config: rus_Cyrl-bod_Tibt split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 11.561264822134387 - type: f1 value: 8.121312045385636 - type: main_score value: 8.121312045385636 - type: precision value: 7.350577020893972 - type: recall value: 11.561264822134387 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (rus_Cyrl-fij_Latn) type: mteb/flores config: rus_Cyrl-fij_Latn split: devtest 
revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 72.23320158102767 - type: f1 value: 67.21000233846082 - type: main_score value: 67.21000233846082 - type: precision value: 65.3869439739005 - type: recall value: 72.23320158102767 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (rus_Cyrl-isl_Latn) type: mteb/flores config: rus_Cyrl-isl_Latn split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 91.99604743083005 - type: f1 value: 89.75955204216073 - type: main_score value: 89.75955204216073 - type: precision value: 88.7598814229249 - type: recall value: 91.99604743083005 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (rus_Cyrl-kon_Latn) type: mteb/flores config: rus_Cyrl-kon_Latn split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 81.81818181818183 - type: f1 value: 77.77800098452272 - type: main_score value: 77.77800098452272 - type: precision value: 76.1521268586486 - type: recall value: 81.81818181818183 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (rus_Cyrl-mni_Beng) type: mteb/flores config: rus_Cyrl-mni_Beng split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 54.74308300395256 - type: f1 value: 48.97285299254615 - type: main_score value: 48.97285299254615 - type: precision value: 46.95125742968299 - type: recall value: 54.74308300395256 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (rus_Cyrl-ron_Latn) type: mteb/flores config: rus_Cyrl-ron_Latn split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 98.22134387351778 - type: f1 value: 97.64492753623189 - type: main_score value: 97.64492753623189 - type: precision value: 97.36495388669302 - type: recall value: 98.22134387351778 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (rus_Cyrl-szl_Latn) type: mteb/flores config: rus_Cyrl-szl_Latn split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 92.09486166007905 - type: f1 value: 90.10375494071147 - type: main_score value: 90.10375494071147 - type: precision value: 89.29606625258798 - type: recall value: 92.09486166007905 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (rus_Cyrl-vec_Latn) type: mteb/flores config: rus_Cyrl-vec_Latn split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 92.4901185770751 - type: f1 value: 90.51430453604365 - type: main_score value: 90.51430453604365 - type: precision value: 89.69367588932808 - type: recall value: 92.4901185770751 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (rus_Cyrl-amh_Ethi) type: mteb/flores config: rus_Cyrl-amh_Ethi split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 97.82608695652173 - type: f1 value: 97.11791831357048 - type: main_score value: 97.11791831357048 - type: precision value: 96.77206851119894 - type: recall value: 97.82608695652173 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (rus_Cyrl-bos_Latn) type: mteb/flores config: rus_Cyrl-bos_Latn split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 98.91304347826086 - type: f1 value: 98.55072463768116 - type: main_score value: 98.55072463768116 - type: precision value: 98.36956521739131 - type: recall value: 98.91304347826086 - task: type: 
BitextMining dataset: name: MTEB FloresBitextMining (rus_Cyrl-fin_Latn) type: mteb/flores config: rus_Cyrl-fin_Latn split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 95.65217391304348 - type: f1 value: 94.4235836627141 - type: main_score value: 94.4235836627141 - type: precision value: 93.84881422924902 - type: recall value: 95.65217391304348 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (rus_Cyrl-ita_Latn) type: mteb/flores config: rus_Cyrl-ita_Latn split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 98.91304347826086 - type: f1 value: 98.55072463768117 - type: main_score value: 98.55072463768117 - type: precision value: 98.36956521739131 - type: recall value: 98.91304347826086 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (rus_Cyrl-kor_Hang) type: mteb/flores config: rus_Cyrl-kor_Hang split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 95.55335968379447 - type: f1 value: 94.15349143610013 - type: main_score value: 94.15349143610013 - type: precision value: 93.49472990777339 - type: recall value: 95.55335968379447 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (rus_Cyrl-mos_Latn) type: mteb/flores config: rus_Cyrl-mos_Latn split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 43.67588932806324 - type: f1 value: 38.84849721190082 - type: main_score value: 38.84849721190082 - type: precision value: 37.43294462099682 - type: recall value: 43.67588932806324 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (rus_Cyrl-run_Latn) type: mteb/flores config: rus_Cyrl-run_Latn split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 90.21739130434783 - type: f1 value: 87.37483530961792 - type: main_score value: 87.37483530961792 - type: precision value: 86.07872200263506 - type: recall value: 90.21739130434783 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (rus_Cyrl-tam_Taml) type: mteb/flores config: rus_Cyrl-tam_Taml split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 99.40711462450594 - type: f1 value: 99.2094861660079 - type: main_score value: 99.2094861660079 - type: precision value: 99.1106719367589 - type: recall value: 99.40711462450594 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (rus_Cyrl-vie_Latn) type: mteb/flores config: rus_Cyrl-vie_Latn split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 97.03557312252964 - type: f1 value: 96.13636363636364 - type: main_score value: 96.13636363636364 - type: precision value: 95.70981554677206 - type: recall value: 97.03557312252964 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (rus_Cyrl-apc_Arab) type: mteb/flores config: rus_Cyrl-apc_Arab split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 98.12252964426878 - type: f1 value: 97.49670619235836 - type: main_score value: 97.49670619235836 - type: precision value: 97.18379446640316 - type: recall value: 98.12252964426878 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (rus_Cyrl-bug_Latn) type: mteb/flores config: rus_Cyrl-bug_Latn split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 67.29249011857708 - type: f1 value: 62.09268717667927 - type: 
main_score value: 62.09268717667927 - type: precision value: 60.28554009748714 - type: recall value: 67.29249011857708 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (rus_Cyrl-fon_Latn) type: mteb/flores config: rus_Cyrl-fon_Latn split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 63.43873517786561 - type: f1 value: 57.66660107569199 - type: main_score value: 57.66660107569199 - type: precision value: 55.66676396919363 - type: recall value: 63.43873517786561 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (rus_Cyrl-jav_Latn) type: mteb/flores config: rus_Cyrl-jav_Latn split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 94.46640316205533 - type: f1 value: 92.89384528514964 - type: main_score value: 92.89384528514964 - type: precision value: 92.19367588932806 - type: recall value: 94.46640316205533 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (rus_Cyrl-lao_Laoo) type: mteb/flores config: rus_Cyrl-lao_Laoo split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 97.23320158102767 - type: f1 value: 96.40974967061922 - type: main_score value: 96.40974967061922 - type: precision value: 96.034255599473 - type: recall value: 97.23320158102767 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (rus_Cyrl-mri_Latn) type: mteb/flores config: rus_Cyrl-mri_Latn split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 76.77865612648222 - type: f1 value: 73.11286539547409 - type: main_score value: 73.11286539547409 - type: precision value: 71.78177214337046 - type: recall value: 76.77865612648222 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (rus_Cyrl-taq_Latn) type: mteb/flores config: rus_Cyrl-taq_Latn split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 41.99604743083004 - type: f1 value: 37.25127063318763 - type: main_score value: 37.25127063318763 - type: precision value: 35.718929186985726 - type: recall value: 41.99604743083004 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (rus_Cyrl-war_Latn) type: mteb/flores config: rus_Cyrl-war_Latn split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 95.55335968379447 - type: f1 value: 94.1699604743083 - type: main_score value: 94.1699604743083 - type: precision value: 93.52766798418972 - type: recall value: 95.55335968379447 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (rus_Cyrl-arb_Arab) type: mteb/flores config: rus_Cyrl-arb_Arab split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 99.60474308300395 - type: f1 value: 99.4729907773386 - type: main_score value: 99.4729907773386 - type: precision value: 99.40711462450594 - type: recall value: 99.60474308300395 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (rus_Cyrl-bul_Cyrl) type: mteb/flores config: rus_Cyrl-bul_Cyrl split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 99.70355731225297 - type: f1 value: 99.60474308300395 - type: main_score value: 99.60474308300395 - type: precision value: 99.55533596837944 - type: recall value: 99.70355731225297 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (rus_Cyrl-fra_Latn) type: mteb/flores config: rus_Cyrl-fra_Latn split: devtest revision: 
e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 99.60474308300395 - type: f1 value: 99.47299077733861 - type: main_score value: 99.47299077733861 - type: precision value: 99.40711462450594 - type: recall value: 99.60474308300395 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (rus_Cyrl-jpn_Jpan) type: mteb/flores config: rus_Cyrl-jpn_Jpan split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 96.44268774703558 - type: f1 value: 95.30632411067194 - type: main_score value: 95.30632411067194 - type: precision value: 94.76284584980237 - type: recall value: 96.44268774703558 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (rus_Cyrl-lij_Latn) type: mteb/flores config: rus_Cyrl-lij_Latn split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 90.21739130434783 - type: f1 value: 87.4703557312253 - type: main_score value: 87.4703557312253 - type: precision value: 86.29611330698287 - type: recall value: 90.21739130434783 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (rus_Cyrl-mya_Mymr) type: mteb/flores config: rus_Cyrl-mya_Mymr split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 98.02371541501977 - type: f1 value: 97.364953886693 - type: main_score value: 97.364953886693 - type: precision value: 97.03557312252964 - type: recall value: 98.02371541501977 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (rus_Cyrl-sag_Latn) type: mteb/flores config: rus_Cyrl-sag_Latn split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 54.841897233201585 - type: f1 value: 49.61882037503349 - type: main_score value: 49.61882037503349 - type: precision value: 47.831968755881796 - type: recall value: 54.841897233201585 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (rus_Cyrl-taq_Tfng) type: mteb/flores config: rus_Cyrl-taq_Tfng split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 15.316205533596838 - type: f1 value: 11.614836360389717 - type: main_score value: 11.614836360389717 - type: precision value: 10.741446193235223 - type: recall value: 15.316205533596838 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (rus_Cyrl-wol_Latn) type: mteb/flores config: rus_Cyrl-wol_Latn split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 67.88537549407114 - type: f1 value: 62.2536417249856 - type: main_score value: 62.2536417249856 - type: precision value: 60.27629128666678 - type: recall value: 67.88537549407114 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (rus_Cyrl-arb_Latn) type: mteb/flores config: rus_Cyrl-arb_Latn split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 27.766798418972332 - type: f1 value: 23.39674889624077 - type: main_score value: 23.39674889624077 - type: precision value: 22.28521155585345 - type: recall value: 27.766798418972332 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (rus_Cyrl-cat_Latn) type: mteb/flores config: rus_Cyrl-cat_Latn split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 97.23320158102767 - type: f1 value: 96.42151326933936 - type: main_score value: 96.42151326933936 - type: precision value: 96.04743083003953 - type: recall value: 97.23320158102767 - task: type: 
BitextMining dataset: name: MTEB FloresBitextMining (rus_Cyrl-fur_Latn) type: mteb/flores config: rus_Cyrl-fur_Latn split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 88.63636363636364 - type: f1 value: 85.80792396009788 - type: main_score value: 85.80792396009788 - type: precision value: 84.61508901726293 - type: recall value: 88.63636363636364 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (rus_Cyrl-kab_Latn) type: mteb/flores config: rus_Cyrl-kab_Latn split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 48.12252964426877 - type: f1 value: 43.05387582971066 - type: main_score value: 43.05387582971066 - type: precision value: 41.44165117538212 - type: recall value: 48.12252964426877 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (rus_Cyrl-lim_Latn) type: mteb/flores config: rus_Cyrl-lim_Latn split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 81.81818181818183 - type: f1 value: 77.81676163099087 - type: main_score value: 77.81676163099087 - type: precision value: 76.19565217391305 - type: recall value: 81.81818181818183 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (rus_Cyrl-nld_Latn) type: mteb/flores config: rus_Cyrl-nld_Latn split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 97.33201581027669 - type: f1 value: 96.4756258234519 - type: main_score value: 96.4756258234519 - type: precision value: 96.06389986824769 - type: recall value: 97.33201581027669 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (rus_Cyrl-san_Deva) type: mteb/flores config: rus_Cyrl-san_Deva split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 93.47826086956522 - type: f1 value: 91.70289855072463 - type: main_score value: 91.70289855072463 - type: precision value: 90.9370882740448 - type: recall value: 93.47826086956522 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (rus_Cyrl-tat_Cyrl) type: mteb/flores config: rus_Cyrl-tat_Cyrl split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 97.72727272727273 - type: f1 value: 97.00263504611331 - type: main_score value: 97.00263504611331 - type: precision value: 96.65678524374177 - type: recall value: 97.72727272727273 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (rus_Cyrl-xho_Latn) type: mteb/flores config: rus_Cyrl-xho_Latn split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 93.08300395256917 - type: f1 value: 91.12977602108036 - type: main_score value: 91.12977602108036 - type: precision value: 90.22562582345192 - type: recall value: 93.08300395256917 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (rus_Cyrl-ars_Arab) type: mteb/flores config: rus_Cyrl-ars_Arab split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 99.40711462450594 - type: f1 value: 99.2094861660079 - type: main_score value: 99.2094861660079 - type: precision value: 99.1106719367589 - type: recall value: 99.40711462450594 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (rus_Cyrl-ceb_Latn) type: mteb/flores config: rus_Cyrl-ceb_Latn split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 95.65217391304348 - type: f1 value: 94.3544137022398 - type: 
main_score value: 94.3544137022398 - type: precision value: 93.76646903820817 - type: recall value: 95.65217391304348 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (rus_Cyrl-fuv_Latn) type: mteb/flores config: rus_Cyrl-fuv_Latn split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 51.18577075098815 - type: f1 value: 44.5990252610806 - type: main_score value: 44.5990252610806 - type: precision value: 42.34331599450177 - type: recall value: 51.18577075098815 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (rus_Cyrl-kac_Latn) type: mteb/flores config: rus_Cyrl-kac_Latn split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 46.93675889328063 - type: f1 value: 41.79004018701787 - type: main_score value: 41.79004018701787 - type: precision value: 40.243355662392624 - type: recall value: 46.93675889328063 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (rus_Cyrl-lin_Latn) type: mteb/flores config: rus_Cyrl-lin_Latn split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 91.50197628458498 - type: f1 value: 89.1205533596838 - type: main_score value: 89.1205533596838 - type: precision value: 88.07147562582345 - type: recall value: 91.50197628458498 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (rus_Cyrl-nno_Latn) type: mteb/flores config: rus_Cyrl-nno_Latn split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 98.81422924901186 - type: f1 value: 98.41897233201581 - type: main_score value: 98.41897233201581 - type: precision value: 98.22134387351778 - type: recall value: 98.81422924901186 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (rus_Cyrl-sat_Olck) type: mteb/flores config: rus_Cyrl-sat_Olck split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 2.371541501976284 - type: f1 value: 1.0726274943087382 - type: main_score value: 1.0726274943087382 - type: precision value: 0.875279634748803 - type: recall value: 2.371541501976284 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (rus_Cyrl-tel_Telu) type: mteb/flores config: rus_Cyrl-tel_Telu split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 99.01185770750988 - type: f1 value: 98.68247694334651 - type: main_score value: 98.68247694334651 - type: precision value: 98.51778656126481 - type: recall value: 99.01185770750988 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (rus_Cyrl-ydd_Hebr) type: mteb/flores config: rus_Cyrl-ydd_Hebr split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 89.42687747035573 - type: f1 value: 86.47609636740073 - type: main_score value: 86.47609636740073 - type: precision value: 85.13669301712781 - type: recall value: 89.42687747035573 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (rus_Cyrl-ary_Arab) type: mteb/flores config: rus_Cyrl-ary_Arab split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 89.82213438735178 - type: f1 value: 87.04545454545456 - type: main_score value: 87.04545454545456 - type: precision value: 85.76910408432148 - type: recall value: 89.82213438735178 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (rus_Cyrl-ces_Latn) type: mteb/flores config: rus_Cyrl-ces_Latn split: devtest revision: 
e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 99.2094861660079 - type: f1 value: 98.9459815546772 - type: main_score value: 98.9459815546772 - type: precision value: 98.81422924901186 - type: recall value: 99.2094861660079 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (rus_Cyrl-gaz_Latn) type: mteb/flores config: rus_Cyrl-gaz_Latn split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 64.9209486166008 - type: f1 value: 58.697458119394874 - type: main_score value: 58.697458119394874 - type: precision value: 56.43402189597842 - type: recall value: 64.9209486166008 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (rus_Cyrl-kam_Latn) type: mteb/flores config: rus_Cyrl-kam_Latn split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 59.18972332015811 - type: f1 value: 53.19031511966295 - type: main_score value: 53.19031511966295 - type: precision value: 51.08128357343655 - type: recall value: 59.18972332015811 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (rus_Cyrl-lit_Latn) type: mteb/flores config: rus_Cyrl-lit_Latn split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 96.54150197628458 - type: f1 value: 95.5368906455863 - type: main_score value: 95.5368906455863 - type: precision value: 95.0592885375494 - type: recall value: 96.54150197628458 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (rus_Cyrl-nob_Latn) type: mteb/flores config: rus_Cyrl-nob_Latn split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 98.12252964426878 - type: f1 value: 97.51317523056655 - type: main_score value: 97.51317523056655 - type: precision value: 97.2167325428195 - type: recall value: 98.12252964426878 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (rus_Cyrl-scn_Latn) type: mteb/flores config: rus_Cyrl-scn_Latn split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 84.0909090909091 - type: f1 value: 80.37000439174352 - type: main_score value: 80.37000439174352 - type: precision value: 78.83994628559846 - type: recall value: 84.0909090909091 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (rus_Cyrl-tgk_Cyrl) type: mteb/flores config: rus_Cyrl-tgk_Cyrl split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 92.68774703557312 - type: f1 value: 90.86344814605684 - type: main_score value: 90.86344814605684 - type: precision value: 90.12516469038208 - type: recall value: 92.68774703557312 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (rus_Cyrl-yor_Latn) type: mteb/flores config: rus_Cyrl-yor_Latn split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 72.13438735177866 - type: f1 value: 66.78759646150951 - type: main_score value: 66.78759646150951 - type: precision value: 64.85080192096002 - type: recall value: 72.13438735177866 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (rus_Cyrl-arz_Arab) type: mteb/flores config: rus_Cyrl-arz_Arab split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 98.02371541501977 - type: f1 value: 97.364953886693 - type: main_score value: 97.364953886693 - type: precision value: 97.03557312252964 - type: recall value: 98.02371541501977 - task: type: BitextMining 
dataset: name: MTEB FloresBitextMining (rus_Cyrl-cjk_Latn) type: mteb/flores config: rus_Cyrl-cjk_Latn split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 51.976284584980235 - type: f1 value: 46.468762353149714 - type: main_score value: 46.468762353149714 - type: precision value: 44.64073366247278 - type: recall value: 51.976284584980235 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (rus_Cyrl-gla_Latn) type: mteb/flores config: rus_Cyrl-gla_Latn split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 79.74308300395256 - type: f1 value: 75.55611165294958 - type: main_score value: 75.55611165294958 - type: precision value: 73.95033408620365 - type: recall value: 79.74308300395256 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (rus_Cyrl-kan_Knda) type: mteb/flores config: rus_Cyrl-kan_Knda split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 99.2094861660079 - type: f1 value: 98.96245059288538 - type: main_score value: 98.96245059288538 - type: precision value: 98.84716732542819 - type: recall value: 99.2094861660079 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (rus_Cyrl-lmo_Latn) type: mteb/flores config: rus_Cyrl-lmo_Latn split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 82.41106719367589 - type: f1 value: 78.56413514022209 - type: main_score value: 78.56413514022209 - type: precision value: 77.15313068573938 - type: recall value: 82.41106719367589 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (rus_Cyrl-npi_Deva) type: mteb/flores config: rus_Cyrl-npi_Deva split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 98.71541501976284 - type: f1 value: 98.3201581027668 - type: main_score value: 98.3201581027668 - type: precision value: 98.12252964426878 - type: recall value: 98.71541501976284 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (rus_Cyrl-shn_Mymr) type: mteb/flores config: rus_Cyrl-shn_Mymr split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 57.11462450592886 - type: f1 value: 51.51361369197337 - type: main_score value: 51.51361369197337 - type: precision value: 49.71860043649573 - type: recall value: 57.11462450592886 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (rus_Cyrl-tgl_Latn) type: mteb/flores config: rus_Cyrl-tgl_Latn split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 97.82608695652173 - type: f1 value: 97.18379446640316 - type: main_score value: 97.18379446640316 - type: precision value: 96.88735177865613 - type: recall value: 97.82608695652173 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (rus_Cyrl-yue_Hant) type: mteb/flores config: rus_Cyrl-yue_Hant split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 99.30830039525692 - type: f1 value: 99.09420289855072 - type: main_score value: 99.09420289855072 - type: precision value: 98.9953886693017 - type: recall value: 99.30830039525692 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (rus_Cyrl-asm_Beng) type: mteb/flores config: rus_Cyrl-asm_Beng split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 95.55335968379447 - type: f1 value: 94.16007905138339 - type: main_score 
value: 94.16007905138339 - type: precision value: 93.50296442687747 - type: recall value: 95.55335968379447 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (rus_Cyrl-ckb_Arab) type: mteb/flores config: rus_Cyrl-ckb_Arab split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 92.88537549407114 - type: f1 value: 90.76745718050066 - type: main_score value: 90.76745718050066 - type: precision value: 89.80072463768116 - type: recall value: 92.88537549407114 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (rus_Cyrl-gle_Latn) type: mteb/flores config: rus_Cyrl-gle_Latn split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 91.699604743083 - type: f1 value: 89.40899680030115 - type: main_score value: 89.40899680030115 - type: precision value: 88.40085638998683 - type: recall value: 91.699604743083 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (rus_Cyrl-kas_Arab) type: mteb/flores config: rus_Cyrl-kas_Arab split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 88.3399209486166 - type: f1 value: 85.14351590438548 - type: main_score value: 85.14351590438548 - type: precision value: 83.72364953886692 - type: recall value: 88.3399209486166 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (rus_Cyrl-ltg_Latn) type: mteb/flores config: rus_Cyrl-ltg_Latn split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 83.399209486166 - type: f1 value: 79.88408934061107 - type: main_score value: 79.88408934061107 - type: precision value: 78.53794509179885 - type: recall value: 83.399209486166 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (rus_Cyrl-nso_Latn) type: mteb/flores config: rus_Cyrl-nso_Latn split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 91.20553359683794 - type: f1 value: 88.95406635525212 - type: main_score value: 88.95406635525212 - type: precision value: 88.01548089591567 - type: recall value: 91.20553359683794 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (rus_Cyrl-sin_Sinh) type: mteb/flores config: rus_Cyrl-sin_Sinh split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 98.91304347826086 - type: f1 value: 98.56719367588933 - type: main_score value: 98.56719367588933 - type: precision value: 98.40250329380763 - type: recall value: 98.91304347826086 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (rus_Cyrl-tha_Thai) type: mteb/flores config: rus_Cyrl-tha_Thai split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 95.94861660079052 - type: f1 value: 94.66403162055336 - type: main_score value: 94.66403162055336 - type: precision value: 94.03820816864295 - type: recall value: 95.94861660079052 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (rus_Cyrl-zho_Hans) type: mteb/flores config: rus_Cyrl-zho_Hans split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 97.4308300395257 - type: f1 value: 96.5909090909091 - type: main_score value: 96.5909090909091 - type: precision value: 96.17918313570487 - type: recall value: 97.4308300395257 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (rus_Cyrl-ast_Latn) type: mteb/flores config: rus_Cyrl-ast_Latn split: devtest revision: 
e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 94.46640316205533 - type: f1 value: 92.86890645586297 - type: main_score value: 92.86890645586297 - type: precision value: 92.14756258234519 - type: recall value: 94.46640316205533 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (rus_Cyrl-crh_Latn) type: mteb/flores config: rus_Cyrl-crh_Latn split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 94.66403162055336 - type: f1 value: 93.2663592446201 - type: main_score value: 93.2663592446201 - type: precision value: 92.66716073781292 - type: recall value: 94.66403162055336 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (rus_Cyrl-glg_Latn) type: mteb/flores config: rus_Cyrl-glg_Latn split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 98.81422924901186 - type: f1 value: 98.46837944664031 - type: main_score value: 98.46837944664031 - type: precision value: 98.3201581027668 - type: recall value: 98.81422924901186 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (rus_Cyrl-kas_Deva) type: mteb/flores config: rus_Cyrl-kas_Deva split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 69.1699604743083 - type: f1 value: 63.05505292906477 - type: main_score value: 63.05505292906477 - type: precision value: 60.62594108789761 - type: recall value: 69.1699604743083 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (rus_Cyrl-ltz_Latn) type: mteb/flores config: rus_Cyrl-ltz_Latn split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 91.40316205533597 - type: f1 value: 89.26571616789009 - type: main_score value: 89.26571616789009 - type: precision value: 88.40179747788443 - type: recall value: 91.40316205533597 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (rus_Cyrl-nus_Latn) type: mteb/flores config: rus_Cyrl-nus_Latn split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 38.93280632411067 - type: f1 value: 33.98513032905371 - type: main_score value: 33.98513032905371 - type: precision value: 32.56257884802308 - type: recall value: 38.93280632411067 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (rus_Cyrl-slk_Latn) type: mteb/flores config: rus_Cyrl-slk_Latn split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 98.02371541501977 - type: f1 value: 97.42094861660078 - type: main_score value: 97.42094861660078 - type: precision value: 97.14262187088273 - type: recall value: 98.02371541501977 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (rus_Cyrl-tir_Ethi) type: mteb/flores config: rus_Cyrl-tir_Ethi split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 91.30434782608695 - type: f1 value: 88.78129117259552 - type: main_score value: 88.78129117259552 - type: precision value: 87.61528326745717 - type: recall value: 91.30434782608695 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (rus_Cyrl-zho_Hant) type: mteb/flores config: rus_Cyrl-zho_Hant split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 99.1106719367589 - type: f1 value: 98.81422924901186 - type: main_score value: 98.81422924901186 - type: precision value: 98.66600790513834 - type: recall value: 99.1106719367589 - task: type: BitextMining 
dataset: name: MTEB FloresBitextMining (rus_Cyrl-awa_Deva) type: mteb/flores config: rus_Cyrl-awa_Deva split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 98.12252964426878 - type: f1 value: 97.70092226613966 - type: main_score value: 97.70092226613966 - type: precision value: 97.50494071146245 - type: recall value: 98.12252964426878 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (rus_Cyrl-cym_Latn) type: mteb/flores config: rus_Cyrl-cym_Latn split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 95.94861660079052 - type: f1 value: 94.74308300395256 - type: main_score value: 94.74308300395256 - type: precision value: 94.20289855072464 - type: recall value: 95.94861660079052 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (rus_Cyrl-grn_Latn) type: mteb/flores config: rus_Cyrl-grn_Latn split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 77.96442687747036 - type: f1 value: 73.64286789187975 - type: main_score value: 73.64286789187975 - type: precision value: 71.99324893260821 - type: recall value: 77.96442687747036 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (rus_Cyrl-kat_Geor) type: mteb/flores config: rus_Cyrl-kat_Geor split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 98.91304347826086 - type: f1 value: 98.56719367588933 - type: main_score value: 98.56719367588933 - type: precision value: 98.40250329380764 - type: recall value: 98.91304347826086 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (rus_Cyrl-lua_Latn) type: mteb/flores config: rus_Cyrl-lua_Latn split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 72.03557312252964 - type: f1 value: 67.23928163404449 - type: main_score value: 67.23928163404449 - type: precision value: 65.30797101449275 - type: recall value: 72.03557312252964 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (rus_Cyrl-nya_Latn) type: mteb/flores config: rus_Cyrl-nya_Latn split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 92.29249011857708 - type: f1 value: 90.0494071146245 - type: main_score value: 90.0494071146245 - type: precision value: 89.04808959156786 - type: recall value: 92.29249011857708 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (rus_Cyrl-slv_Latn) type: mteb/flores config: rus_Cyrl-slv_Latn split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 98.71541501976284 - type: f1 value: 98.30368906455863 - type: main_score value: 98.30368906455863 - type: precision value: 98.10606060606061 - type: recall value: 98.71541501976284 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (rus_Cyrl-tpi_Latn) type: mteb/flores config: rus_Cyrl-tpi_Latn split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 80.53359683794467 - type: f1 value: 76.59481822525301 - type: main_score value: 76.59481822525301 - type: precision value: 75.12913223140497 - type: recall value: 80.53359683794467 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (rus_Cyrl-zsm_Latn) type: mteb/flores config: rus_Cyrl-zsm_Latn split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 97.33201581027669 - type: f1 value: 96.58620365142104 - type: main_score 
value: 96.58620365142104 - type: precision value: 96.26152832674572 - type: recall value: 97.33201581027669 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (rus_Cyrl-ayr_Latn) type: mteb/flores config: rus_Cyrl-ayr_Latn split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 45.55335968379446 - type: f1 value: 40.13076578531388 - type: main_score value: 40.13076578531388 - type: precision value: 38.398064362362355 - type: recall value: 45.55335968379446 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (rus_Cyrl-dan_Latn) type: mteb/flores config: rus_Cyrl-dan_Latn split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 99.01185770750988 - type: f1 value: 98.68247694334651 - type: main_score value: 98.68247694334651 - type: precision value: 98.51778656126481 - type: recall value: 99.01185770750988 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (rus_Cyrl-guj_Gujr) type: mteb/flores config: rus_Cyrl-guj_Gujr split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 99.01185770750988 - type: f1 value: 98.68247694334651 - type: main_score value: 98.68247694334651 - type: precision value: 98.51778656126481 - type: recall value: 99.01185770750988 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (rus_Cyrl-kaz_Cyrl) type: mteb/flores config: rus_Cyrl-kaz_Cyrl split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 98.81422924901186 - type: f1 value: 98.43544137022398 - type: main_score value: 98.43544137022398 - type: precision value: 98.25428194993412 - type: recall value: 98.81422924901186 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (rus_Cyrl-lug_Latn) type: mteb/flores config: rus_Cyrl-lug_Latn split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 82.21343873517787 - type: f1 value: 77.97485726833554 - type: main_score value: 77.97485726833554 - type: precision value: 76.22376717485415 - type: recall value: 82.21343873517787 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (rus_Cyrl-oci_Latn) type: mteb/flores config: rus_Cyrl-oci_Latn split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 93.87351778656127 - type: f1 value: 92.25319969885187 - type: main_score value: 92.25319969885187 - type: precision value: 91.5638528138528 - type: recall value: 93.87351778656127 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (rus_Cyrl-smo_Latn) type: mteb/flores config: rus_Cyrl-smo_Latn split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 84.88142292490119 - type: f1 value: 81.24364765669114 - type: main_score value: 81.24364765669114 - type: precision value: 79.69991416137661 - type: recall value: 84.88142292490119 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (rus_Cyrl-tsn_Latn) type: mteb/flores config: rus_Cyrl-tsn_Latn split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 87.05533596837944 - type: f1 value: 83.90645586297761 - type: main_score value: 83.90645586297761 - type: precision value: 82.56752305665349 - type: recall value: 87.05533596837944 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (rus_Cyrl-zul_Latn) type: mteb/flores config: rus_Cyrl-zul_Latn split: devtest revision: 
e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 95.15810276679841 - type: f1 value: 93.77140974967062 - type: main_score value: 93.77140974967062 - type: precision value: 93.16534914361002 - type: recall value: 95.15810276679841 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (rus_Cyrl-azb_Arab) type: mteb/flores config: rus_Cyrl-azb_Arab split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 81.91699604743083 - type: f1 value: 77.18050065876152 - type: main_score value: 77.18050065876152 - type: precision value: 75.21519543258673 - type: recall value: 81.91699604743083 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (rus_Cyrl-deu_Latn) type: mteb/flores config: rus_Cyrl-deu_Latn split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 99.50592885375494 - type: f1 value: 99.34123847167325 - type: main_score value: 99.34123847167325 - type: precision value: 99.2588932806324 - type: recall value: 99.50592885375494 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (rus_Cyrl-hat_Latn) type: mteb/flores config: rus_Cyrl-hat_Latn split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 91.00790513833992 - type: f1 value: 88.69126043039086 - type: main_score value: 88.69126043039086 - type: precision value: 87.75774044795784 - type: recall value: 91.00790513833992 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (rus_Cyrl-kbp_Latn) type: mteb/flores config: rus_Cyrl-kbp_Latn split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 47.233201581027664 - type: f1 value: 43.01118618096943 - type: main_score value: 43.01118618096943 - type: precision value: 41.739069205043556 - type: recall value: 47.233201581027664 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (rus_Cyrl-luo_Latn) type: mteb/flores config: rus_Cyrl-luo_Latn split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 60.47430830039525 - type: f1 value: 54.83210565429816 - type: main_score value: 54.83210565429816 - type: precision value: 52.81630744284779 - type: recall value: 60.47430830039525 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (rus_Cyrl-ory_Orya) type: mteb/flores config: rus_Cyrl-ory_Orya split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 99.1106719367589 - type: f1 value: 98.83069828722003 - type: main_score value: 98.83069828722003 - type: precision value: 98.69894598155467 - type: recall value: 99.1106719367589 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (rus_Cyrl-sna_Latn) type: mteb/flores config: rus_Cyrl-sna_Latn split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 89.72332015810277 - type: f1 value: 87.30013645774514 - type: main_score value: 87.30013645774514 - type: precision value: 86.25329380764163 - type: recall value: 89.72332015810277 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (rus_Cyrl-tso_Latn) type: mteb/flores config: rus_Cyrl-tso_Latn split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 84.38735177865613 - type: f1 value: 80.70424744337788 - type: main_score value: 80.70424744337788 - type: precision value: 79.18560606060606 - type: recall value: 84.38735177865613 - task: type: 
BitextMining dataset: name: MTEB FloresBitextMining (rus_Cyrl-azj_Latn) type: mteb/flores config: rus_Cyrl-azj_Latn split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 97.33201581027669 - type: f1 value: 96.56455862977602 - type: main_score value: 96.56455862977602 - type: precision value: 96.23682476943345 - type: recall value: 97.33201581027669 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (rus_Cyrl-dik_Latn) type: mteb/flores config: rus_Cyrl-dik_Latn split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 46.047430830039524 - type: f1 value: 40.05513069495283 - type: main_score value: 40.05513069495283 - type: precision value: 38.072590197096126 - type: recall value: 46.047430830039524 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (rus_Cyrl-hau_Latn) type: mteb/flores config: rus_Cyrl-hau_Latn split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 87.94466403162056 - type: f1 value: 84.76943346508563 - type: main_score value: 84.76943346508563 - type: precision value: 83.34486166007905 - type: recall value: 87.94466403162056 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (rus_Cyrl-kea_Latn) type: mteb/flores config: rus_Cyrl-kea_Latn split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 89.42687747035573 - type: f1 value: 86.83803021747684 - type: main_score value: 86.83803021747684 - type: precision value: 85.78416149068323 - type: recall value: 89.42687747035573 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (rus_Cyrl-lus_Latn) type: mteb/flores config: rus_Cyrl-lus_Latn split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 68.97233201581028 - type: f1 value: 64.05480726292745 - type: main_score value: 64.05480726292745 - type: precision value: 62.42670749487858 - type: recall value: 68.97233201581028 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (rus_Cyrl-pag_Latn) type: mteb/flores config: rus_Cyrl-pag_Latn split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 78.75494071146245 - type: f1 value: 74.58573558401933 - type: main_score value: 74.58573558401933 - type: precision value: 73.05532028358115 - type: recall value: 78.75494071146245 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (rus_Cyrl-snd_Arab) type: mteb/flores config: rus_Cyrl-snd_Arab split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 95.8498023715415 - type: f1 value: 94.56521739130434 - type: main_score value: 94.56521739130434 - type: precision value: 93.97233201581028 - type: recall value: 95.8498023715415 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (rus_Cyrl-tuk_Latn) type: mteb/flores config: rus_Cyrl-tuk_Latn split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 68.08300395256917 - type: f1 value: 62.93565240205557 - type: main_score value: 62.93565240205557 - type: precision value: 61.191590257043934 - type: recall value: 68.08300395256917 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (rus_Cyrl-bak_Cyrl) type: mteb/flores config: rus_Cyrl-bak_Cyrl split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 96.04743083003953 - type: f1 value: 94.86824769433464 - 
type: main_score value: 94.86824769433464 - type: precision value: 94.34288537549406 - type: recall value: 96.04743083003953 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (rus_Cyrl-dyu_Latn) type: mteb/flores config: rus_Cyrl-dyu_Latn split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 37.45059288537549 - type: f1 value: 31.670482312800807 - type: main_score value: 31.670482312800807 - type: precision value: 29.99928568357422 - type: recall value: 37.45059288537549 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (rus_Cyrl-heb_Hebr) type: mteb/flores config: rus_Cyrl-heb_Hebr split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 97.23320158102767 - type: f1 value: 96.38998682476942 - type: main_score value: 96.38998682476942 - type: precision value: 95.99802371541502 - type: recall value: 97.23320158102767 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (rus_Cyrl-khk_Cyrl) type: mteb/flores config: rus_Cyrl-khk_Cyrl split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 98.41897233201581 - type: f1 value: 98.00724637681158 - type: main_score value: 98.00724637681158 - type: precision value: 97.82938076416336 - type: recall value: 98.41897233201581 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (rus_Cyrl-lvs_Latn) type: mteb/flores config: rus_Cyrl-lvs_Latn split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 97.4308300395257 - type: f1 value: 96.61396574440053 - type: main_score value: 96.61396574440053 - type: precision value: 96.2203557312253 - type: recall value: 97.4308300395257 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (rus_Cyrl-pan_Guru) type: mteb/flores config: rus_Cyrl-pan_Guru split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 99.30830039525692 - type: f1 value: 99.07773386034256 - type: main_score value: 99.07773386034256 - type: precision value: 98.96245059288538 - type: recall value: 99.30830039525692 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (rus_Cyrl-som_Latn) type: mteb/flores config: rus_Cyrl-som_Latn split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 87.74703557312253 - type: f1 value: 84.52898550724638 - type: main_score value: 84.52898550724638 - type: precision value: 83.09288537549409 - type: recall value: 87.74703557312253 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (rus_Cyrl-tum_Latn) type: mteb/flores config: rus_Cyrl-tum_Latn split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 87.15415019762845 - type: f1 value: 83.85069640504425 - type: main_score value: 83.85069640504425 - type: precision value: 82.43671183888576 - type: recall value: 87.15415019762845 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (taq_Latn-rus_Cyrl) type: mteb/flores config: taq_Latn-rus_Cyrl split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 28.55731225296443 - type: f1 value: 26.810726360049568 - type: main_score value: 26.810726360049568 - type: precision value: 26.260342858265577 - type: recall value: 28.55731225296443 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (war_Latn-rus_Cyrl) type: mteb/flores config: war_Latn-rus_Cyrl split: devtest 
revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 94.86166007905138 - type: f1 value: 94.03147083483051 - type: main_score value: 94.03147083483051 - type: precision value: 93.70653606003322 - type: recall value: 94.86166007905138 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (arb_Arab-rus_Cyrl) type: mteb/flores config: arb_Arab-rus_Cyrl split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 96.34387351778656 - type: f1 value: 95.23056653491436 - type: main_score value: 95.23056653491436 - type: precision value: 94.70520421607378 - type: recall value: 96.34387351778656 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (bul_Cyrl-rus_Cyrl) type: mteb/flores config: bul_Cyrl-rus_Cyrl split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 99.90118577075098 - type: f1 value: 99.86824769433464 - type: main_score value: 99.86824769433464 - type: precision value: 99.85177865612648 - type: recall value: 99.90118577075098 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (fra_Latn-rus_Cyrl) type: mteb/flores config: fra_Latn-rus_Cyrl split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 99.2094861660079 - type: f1 value: 98.9459815546772 - type: main_score value: 98.9459815546772 - type: precision value: 98.81422924901186 - type: recall value: 99.2094861660079 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (jpn_Jpan-rus_Cyrl) type: mteb/flores config: jpn_Jpan-rus_Cyrl split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 98.3201581027668 - type: f1 value: 97.76021080368905 - type: main_score value: 97.76021080368905 - type: precision value: 97.48023715415019 - type: recall value: 98.3201581027668 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (lij_Latn-rus_Cyrl) type: mteb/flores config: lij_Latn-rus_Cyrl split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 83.49802371541502 - type: f1 value: 81.64800059239636 - type: main_score value: 81.64800059239636 - type: precision value: 80.9443055878478 - type: recall value: 83.49802371541502 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (mya_Mymr-rus_Cyrl) type: mteb/flores config: mya_Mymr-rus_Cyrl split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 90.21739130434783 - type: f1 value: 88.76776366313682 - type: main_score value: 88.76776366313682 - type: precision value: 88.18370446119435 - type: recall value: 90.21739130434783 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (sag_Latn-rus_Cyrl) type: mteb/flores config: sag_Latn-rus_Cyrl split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 41.699604743083 - type: f1 value: 39.53066322643847 - type: main_score value: 39.53066322643847 - type: precision value: 38.822876239229274 - type: recall value: 41.699604743083 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (taq_Tfng-rus_Cyrl) type: mteb/flores config: taq_Tfng-rus_Cyrl split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 10.67193675889328 - type: f1 value: 9.205744965817951 - type: main_score value: 9.205744965817951 - type: precision value: 8.85195219073817 - type: recall value: 10.67193675889328 - task: type: 
BitextMining dataset: name: MTEB FloresBitextMining (wol_Latn-rus_Cyrl) type: mteb/flores config: wol_Latn-rus_Cyrl split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 63.537549407114625 - type: f1 value: 60.65190727391827 - type: main_score value: 60.65190727391827 - type: precision value: 59.61144833427442 - type: recall value: 63.537549407114625 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (arb_Latn-rus_Cyrl) type: mteb/flores config: arb_Latn-rus_Cyrl split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 13.142292490118576 - type: f1 value: 12.372910318176764 - type: main_score value: 12.372910318176764 - type: precision value: 12.197580895919188 - type: recall value: 13.142292490118576 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (cat_Latn-rus_Cyrl) type: mteb/flores config: cat_Latn-rus_Cyrl split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 99.01185770750988 - type: f1 value: 98.80599472990777 - type: main_score value: 98.80599472990777 - type: precision value: 98.72953133822698 - type: recall value: 99.01185770750988 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (fur_Latn-rus_Cyrl) type: mteb/flores config: fur_Latn-rus_Cyrl split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 81.02766798418972 - type: f1 value: 79.36184294084613 - type: main_score value: 79.36184294084613 - type: precision value: 78.69187826527705 - type: recall value: 81.02766798418972 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (kab_Latn-rus_Cyrl) type: mteb/flores config: kab_Latn-rus_Cyrl split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 34.387351778656125 - type: f1 value: 32.02306921576947 - type: main_score value: 32.02306921576947 - type: precision value: 31.246670347137467 - type: recall value: 34.387351778656125 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (lim_Latn-rus_Cyrl) type: mteb/flores config: lim_Latn-rus_Cyrl split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 78.26086956521739 - type: f1 value: 75.90239449214359 - type: main_score value: 75.90239449214359 - type: precision value: 75.02211430745493 - type: recall value: 78.26086956521739 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (nld_Latn-rus_Cyrl) type: mteb/flores config: nld_Latn-rus_Cyrl split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 99.2094861660079 - type: f1 value: 98.9459815546772 - type: main_score value: 98.9459815546772 - type: precision value: 98.81422924901186 - type: recall value: 99.2094861660079 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (san_Deva-rus_Cyrl) type: mteb/flores config: san_Deva-rus_Cyrl split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 87.94466403162056 - type: f1 value: 86.68928897189767 - type: main_score value: 86.68928897189767 - type: precision value: 86.23822997079216 - type: recall value: 87.94466403162056 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (tat_Cyrl-rus_Cyrl) type: mteb/flores config: tat_Cyrl-rus_Cyrl split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 97.03557312252964 - type: f1 value: 96.4167365353136 
- type: main_score value: 96.4167365353136 - type: precision value: 96.16847826086958 - type: recall value: 97.03557312252964 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (xho_Latn-rus_Cyrl) type: mteb/flores config: xho_Latn-rus_Cyrl split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 86.95652173913044 - type: f1 value: 85.5506497283435 - type: main_score value: 85.5506497283435 - type: precision value: 84.95270479733395 - type: recall value: 86.95652173913044 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (ars_Arab-rus_Cyrl) type: mteb/flores config: ars_Arab-rus_Cyrl split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 96.6403162055336 - type: f1 value: 95.60935441370223 - type: main_score value: 95.60935441370223 - type: precision value: 95.13339920948617 - type: recall value: 96.6403162055336 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (ceb_Latn-rus_Cyrl) type: mteb/flores config: ceb_Latn-rus_Cyrl split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 95.7509881422925 - type: f1 value: 95.05209198303827 - type: main_score value: 95.05209198303827 - type: precision value: 94.77662283368805 - type: recall value: 95.7509881422925 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (fuv_Latn-rus_Cyrl) type: mteb/flores config: fuv_Latn-rus_Cyrl split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 45.25691699604743 - type: f1 value: 42.285666666742365 - type: main_score value: 42.285666666742365 - type: precision value: 41.21979853402283 - type: recall value: 45.25691699604743 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (kac_Latn-rus_Cyrl) type: mteb/flores config: kac_Latn-rus_Cyrl split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 34.683794466403164 - type: f1 value: 33.3235346229031 - type: main_score value: 33.3235346229031 - type: precision value: 32.94673924616852 - type: recall value: 34.683794466403164 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (lin_Latn-rus_Cyrl) type: mteb/flores config: lin_Latn-rus_Cyrl split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 86.85770750988142 - type: f1 value: 85.1867110799439 - type: main_score value: 85.1867110799439 - type: precision value: 84.53038212173273 - type: recall value: 86.85770750988142 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (nno_Latn-rus_Cyrl) type: mteb/flores config: nno_Latn-rus_Cyrl split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 97.4308300395257 - type: f1 value: 96.78383210991906 - type: main_score value: 96.78383210991906 - type: precision value: 96.51185770750989 - type: recall value: 97.4308300395257 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (sat_Olck-rus_Cyrl) type: mteb/flores config: sat_Olck-rus_Cyrl split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 1.185770750988142 - type: f1 value: 1.0279253129117258 - type: main_score value: 1.0279253129117258 - type: precision value: 1.0129746819135175 - type: recall value: 1.185770750988142 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (tel_Telu-rus_Cyrl) type: mteb/flores config: tel_Telu-rus_Cyrl split: devtest 
revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 98.12252964426878 - type: f1 value: 97.61198945981555 - type: main_score value: 97.61198945981555 - type: precision value: 97.401185770751 - type: recall value: 98.12252964426878 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (ydd_Hebr-rus_Cyrl) type: mteb/flores config: ydd_Hebr-rus_Cyrl split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 75.8893280632411 - type: f1 value: 74.00244008018511 - type: main_score value: 74.00244008018511 - type: precision value: 73.25683020960382 - type: recall value: 75.8893280632411 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (ary_Arab-rus_Cyrl) type: mteb/flores config: ary_Arab-rus_Cyrl split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 86.56126482213439 - type: f1 value: 83.72796285839765 - type: main_score value: 83.72796285839765 - type: precision value: 82.65014273166447 - type: recall value: 86.56126482213439 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (ces_Latn-rus_Cyrl) type: mteb/flores config: ces_Latn-rus_Cyrl split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 99.60474308300395 - type: f1 value: 99.4729907773386 - type: main_score value: 99.4729907773386 - type: precision value: 99.40711462450594 - type: recall value: 99.60474308300395 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (gaz_Latn-rus_Cyrl) type: mteb/flores config: gaz_Latn-rus_Cyrl split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 42.58893280632411 - type: f1 value: 40.75832866805978 - type: main_score value: 40.75832866805978 - type: precision value: 40.14285046917723 - type: recall value: 42.58893280632411 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (kam_Latn-rus_Cyrl) type: mteb/flores config: kam_Latn-rus_Cyrl split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 45.25691699604743 - type: f1 value: 42.6975518029456 - type: main_score value: 42.6975518029456 - type: precision value: 41.87472710984596 - type: recall value: 45.25691699604743 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (lit_Latn-rus_Cyrl) type: mteb/flores config: lit_Latn-rus_Cyrl split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 97.33201581027669 - type: f1 value: 96.62384716732542 - type: main_score value: 96.62384716732542 - type: precision value: 96.3175230566535 - type: recall value: 97.33201581027669 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (nob_Latn-rus_Cyrl) type: mteb/flores config: nob_Latn-rus_Cyrl split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 98.71541501976284 - type: f1 value: 98.30368906455863 - type: main_score value: 98.30368906455863 - type: precision value: 98.10606060606061 - type: recall value: 98.71541501976284 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (scn_Latn-rus_Cyrl) type: mteb/flores config: scn_Latn-rus_Cyrl split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 70.45454545454545 - type: f1 value: 68.62561022640075 - type: main_score value: 68.62561022640075 - type: precision value: 67.95229103411222 - type: recall value: 70.45454545454545 - task: type: 
BitextMining dataset: name: MTEB FloresBitextMining (tgk_Cyrl-rus_Cyrl) type: mteb/flores config: tgk_Cyrl-rus_Cyrl split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 92.4901185770751 - type: f1 value: 91.58514492753623 - type: main_score value: 91.58514492753623 - type: precision value: 91.24759298672342 - type: recall value: 92.4901185770751 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (yor_Latn-rus_Cyrl) type: mteb/flores config: yor_Latn-rus_Cyrl split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 67.98418972332016 - type: f1 value: 64.72874247330768 - type: main_score value: 64.72874247330768 - type: precision value: 63.450823399938685 - type: recall value: 67.98418972332016 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (arz_Arab-rus_Cyrl) type: mteb/flores config: arz_Arab-rus_Cyrl split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 94.56521739130434 - type: f1 value: 93.07971014492755 - type: main_score value: 93.07971014492755 - type: precision value: 92.42753623188406 - type: recall value: 94.56521739130434 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (cjk_Latn-rus_Cyrl) type: mteb/flores config: cjk_Latn-rus_Cyrl split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 38.63636363636363 - type: f1 value: 36.25747140862938 - type: main_score value: 36.25747140862938 - type: precision value: 35.49101355074723 - type: recall value: 38.63636363636363 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (gla_Latn-rus_Cyrl) type: mteb/flores config: gla_Latn-rus_Cyrl split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 69.26877470355731 - type: f1 value: 66.11797423328613 - type: main_score value: 66.11797423328613 - type: precision value: 64.89369649409694 - type: recall value: 69.26877470355731 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (kan_Knda-rus_Cyrl) type: mteb/flores config: kan_Knda-rus_Cyrl split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 98.02371541501977 - type: f1 value: 97.51505740636176 - type: main_score value: 97.51505740636176 - type: precision value: 97.30731225296442 - type: recall value: 98.02371541501977 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (lmo_Latn-rus_Cyrl) type: mteb/flores config: lmo_Latn-rus_Cyrl split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 73.3201581027668 - type: f1 value: 71.06371608677273 - type: main_score value: 71.06371608677273 - type: precision value: 70.26320288266223 - type: recall value: 73.3201581027668 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (npi_Deva-rus_Cyrl) type: mteb/flores config: npi_Deva-rus_Cyrl split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 97.82608695652173 - type: f1 value: 97.36645107198466 - type: main_score value: 97.36645107198466 - type: precision value: 97.1772068511199 - type: recall value: 97.82608695652173 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (shn_Mymr-rus_Cyrl) type: mteb/flores config: shn_Mymr-rus_Cyrl split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 39.426877470355734 - type: f1 value: 37.16728785513024 - type: 
main_score value: 37.16728785513024 - type: precision value: 36.56918548278505 - type: recall value: 39.426877470355734 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (tgl_Latn-rus_Cyrl) type: mteb/flores config: tgl_Latn-rus_Cyrl split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 97.92490118577075 - type: f1 value: 97.6378693769998 - type: main_score value: 97.6378693769998 - type: precision value: 97.55371440154047 - type: recall value: 97.92490118577075 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (yue_Hant-rus_Cyrl) type: mteb/flores config: yue_Hant-rus_Cyrl split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 97.92490118577075 - type: f1 value: 97.3833051006964 - type: main_score value: 97.3833051006964 - type: precision value: 97.1590909090909 - type: recall value: 97.92490118577075 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (asm_Beng-rus_Cyrl) type: mteb/flores config: asm_Beng-rus_Cyrl split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 92.78656126482213 - type: f1 value: 91.76917395296842 - type: main_score value: 91.76917395296842 - type: precision value: 91.38292866553736 - type: recall value: 92.78656126482213 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (ckb_Arab-rus_Cyrl) type: mteb/flores config: ckb_Arab-rus_Cyrl split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 80.8300395256917 - type: f1 value: 79.17664345468799 - type: main_score value: 79.17664345468799 - type: precision value: 78.5622171683459 - type: recall value: 80.8300395256917 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (gle_Latn-rus_Cyrl) type: mteb/flores config: gle_Latn-rus_Cyrl split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 85.86956521739131 - type: f1 value: 84.45408265372492 - type: main_score value: 84.45408265372492 - type: precision value: 83.8774340026703 - type: recall value: 85.86956521739131 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (kas_Arab-rus_Cyrl) type: mteb/flores config: kas_Arab-rus_Cyrl split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 76.28458498023716 - type: f1 value: 74.11216313578267 - type: main_score value: 74.11216313578267 - type: precision value: 73.2491277759584 - type: recall value: 76.28458498023716 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (ltg_Latn-rus_Cyrl) type: mteb/flores config: ltg_Latn-rus_Cyrl split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 71.14624505928853 - type: f1 value: 68.69245357723618 - type: main_score value: 68.69245357723618 - type: precision value: 67.8135329666459 - type: recall value: 71.14624505928853 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (nso_Latn-rus_Cyrl) type: mteb/flores config: nso_Latn-rus_Cyrl split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 87.64822134387352 - type: f1 value: 85.98419219986725 - type: main_score value: 85.98419219986725 - type: precision value: 85.32513873917036 - type: recall value: 87.64822134387352 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (sin_Sinh-rus_Cyrl) type: mteb/flores config: sin_Sinh-rus_Cyrl split: devtest revision: 
e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 97.62845849802372 - type: f1 value: 97.10144927536231 - type: main_score value: 97.10144927536231 - type: precision value: 96.87986585219788 - type: recall value: 97.62845849802372 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (tha_Thai-rus_Cyrl) type: mteb/flores config: tha_Thai-rus_Cyrl split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 98.71541501976284 - type: f1 value: 98.28722002635045 - type: main_score value: 98.28722002635045 - type: precision value: 98.07312252964427 - type: recall value: 98.71541501976284 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (zho_Hans-rus_Cyrl) type: mteb/flores config: zho_Hans-rus_Cyrl split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 99.01185770750988 - type: f1 value: 98.68247694334651 - type: main_score value: 98.68247694334651 - type: precision value: 98.51778656126481 - type: recall value: 99.01185770750988 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (ast_Latn-rus_Cyrl) type: mteb/flores config: ast_Latn-rus_Cyrl split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 95.65217391304348 - type: f1 value: 94.90649683857505 - type: main_score value: 94.90649683857505 - type: precision value: 94.61352657004831 - type: recall value: 95.65217391304348 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (crh_Latn-rus_Cyrl) type: mteb/flores config: crh_Latn-rus_Cyrl split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 93.08300395256917 - type: f1 value: 92.20988998886428 - type: main_score value: 92.20988998886428 - type: precision value: 91.85631013694254 - type: recall value: 93.08300395256917 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (glg_Latn-rus_Cyrl) type: mteb/flores config: glg_Latn-rus_Cyrl split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 95.55335968379447 - type: f1 value: 95.18006148440931 - type: main_score value: 95.18006148440931 - type: precision value: 95.06540560888386 - type: recall value: 95.55335968379447 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (kas_Deva-rus_Cyrl) type: mteb/flores config: kas_Deva-rus_Cyrl split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 55.03952569169961 - type: f1 value: 52.19871938895554 - type: main_score value: 52.19871938895554 - type: precision value: 51.17660971469557 - type: recall value: 55.03952569169961 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (ltz_Latn-rus_Cyrl) type: mteb/flores config: ltz_Latn-rus_Cyrl split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 87.64822134387352 - type: f1 value: 86.64179841897234 - type: main_score value: 86.64179841897234 - type: precision value: 86.30023235431587 - type: recall value: 87.64822134387352 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (nus_Latn-rus_Cyrl) type: mteb/flores config: nus_Latn-rus_Cyrl split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 27.4703557312253 - type: f1 value: 25.703014277858088 - type: main_score value: 25.703014277858088 - type: precision value: 25.194105476917315 - type: recall value: 27.4703557312253 - task: type: 
BitextMining dataset: name: MTEB FloresBitextMining (slk_Latn-rus_Cyrl) type: mteb/flores config: slk_Latn-rus_Cyrl split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 99.30830039525692 - type: f1 value: 99.1106719367589 - type: main_score value: 99.1106719367589 - type: precision value: 99.02832674571805 - type: recall value: 99.30830039525692 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (tir_Ethi-rus_Cyrl) type: mteb/flores config: tir_Ethi-rus_Cyrl split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 80.73122529644269 - type: f1 value: 78.66903754775608 - type: main_score value: 78.66903754775608 - type: precision value: 77.86431694163612 - type: recall value: 80.73122529644269 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (zho_Hant-rus_Cyrl) type: mteb/flores config: zho_Hant-rus_Cyrl split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 98.22134387351778 - type: f1 value: 97.66798418972333 - type: main_score value: 97.66798418972333 - type: precision value: 97.40612648221344 - type: recall value: 98.22134387351778 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (awa_Deva-rus_Cyrl) type: mteb/flores config: awa_Deva-rus_Cyrl split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 97.5296442687747 - type: f1 value: 96.94224857268335 - type: main_score value: 96.94224857268335 - type: precision value: 96.68560606060606 - type: recall value: 97.5296442687747 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (cym_Latn-rus_Cyrl) type: mteb/flores config: cym_Latn-rus_Cyrl split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 92.68774703557312 - type: f1 value: 91.69854302097961 - type: main_score value: 91.69854302097961 - type: precision value: 91.31236846157795 - type: recall value: 92.68774703557312 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (grn_Latn-rus_Cyrl) type: mteb/flores config: grn_Latn-rus_Cyrl split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 64.13043478260869 - type: f1 value: 61.850586118740004 - type: main_score value: 61.850586118740004 - type: precision value: 61.0049495186209 - type: recall value: 64.13043478260869 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (kat_Geor-rus_Cyrl) type: mteb/flores config: kat_Geor-rus_Cyrl split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 98.02371541501977 - type: f1 value: 97.59881422924902 - type: main_score value: 97.59881422924902 - type: precision value: 97.42534036012296 - type: recall value: 98.02371541501977 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (lua_Latn-rus_Cyrl) type: mteb/flores config: lua_Latn-rus_Cyrl split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 63.63636363636363 - type: f1 value: 60.9709122526128 - type: main_score value: 60.9709122526128 - type: precision value: 60.03915902282226 - type: recall value: 63.63636363636363 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (nya_Latn-rus_Cyrl) type: mteb/flores config: nya_Latn-rus_Cyrl split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 89.2292490118577 - type: f1 value: 87.59723824473149 - type: 
main_score value: 87.59723824473149 - type: precision value: 86.90172707867349 - type: recall value: 89.2292490118577 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (slv_Latn-rus_Cyrl) type: mteb/flores config: slv_Latn-rus_Cyrl split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 99.01185770750988 - type: f1 value: 98.74835309617917 - type: main_score value: 98.74835309617917 - type: precision value: 98.63636363636364 - type: recall value: 99.01185770750988 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (tpi_Latn-rus_Cyrl) type: mteb/flores config: tpi_Latn-rus_Cyrl split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 77.37154150197628 - type: f1 value: 75.44251611276084 - type: main_score value: 75.44251611276084 - type: precision value: 74.78103665109595 - type: recall value: 77.37154150197628 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (zsm_Latn-rus_Cyrl) type: mteb/flores config: zsm_Latn-rus_Cyrl split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 99.2094861660079 - type: f1 value: 98.96245059288538 - type: main_score value: 98.96245059288538 - type: precision value: 98.8471673254282 - type: recall value: 99.2094861660079 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (ayr_Latn-rus_Cyrl) type: mteb/flores config: ayr_Latn-rus_Cyrl split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 27.766798418972332 - type: f1 value: 26.439103195281312 - type: main_score value: 26.439103195281312 - type: precision value: 26.052655604573964 - type: recall value: 27.766798418972332 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (dan_Latn-rus_Cyrl) type: mteb/flores config: dan_Latn-rus_Cyrl split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 99.30830039525692 - type: f1 value: 99.07773386034255 - type: main_score value: 99.07773386034255 - type: precision value: 98.96245059288538 - type: recall value: 99.30830039525692 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (guj_Gujr-rus_Cyrl) type: mteb/flores config: guj_Gujr-rus_Cyrl split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 97.82608695652173 - type: f1 value: 97.26449275362317 - type: main_score value: 97.26449275362317 - type: precision value: 97.02498588368154 - type: recall value: 97.82608695652173 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (kaz_Cyrl-rus_Cyrl) type: mteb/flores config: kaz_Cyrl-rus_Cyrl split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 97.5296442687747 - type: f1 value: 97.03557312252964 - type: main_score value: 97.03557312252964 - type: precision value: 96.85022158342316 - type: recall value: 97.5296442687747 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (lug_Latn-rus_Cyrl) type: mteb/flores config: lug_Latn-rus_Cyrl split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 68.57707509881423 - type: f1 value: 65.93361605820395 - type: main_score value: 65.93361605820395 - type: precision value: 64.90348248593789 - type: recall value: 68.57707509881423 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (oci_Latn-rus_Cyrl) type: mteb/flores config: oci_Latn-rus_Cyrl split: devtest revision: 
e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 86.26482213438736 - type: f1 value: 85.33176417155623 - type: main_score value: 85.33176417155623 - type: precision value: 85.00208833384637 - type: recall value: 86.26482213438736 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (smo_Latn-rus_Cyrl) type: mteb/flores config: smo_Latn-rus_Cyrl split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 77.96442687747036 - type: f1 value: 75.70960450188885 - type: main_score value: 75.70960450188885 - type: precision value: 74.8312632736777 - type: recall value: 77.96442687747036 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (tsn_Latn-rus_Cyrl) type: mteb/flores config: tsn_Latn-rus_Cyrl split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 84.38735177865613 - type: f1 value: 82.13656376349225 - type: main_score value: 82.13656376349225 - type: precision value: 81.16794543904518 - type: recall value: 84.38735177865613 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (zul_Latn-rus_Cyrl) type: mteb/flores config: zul_Latn-rus_Cyrl split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 90.21739130434783 - type: f1 value: 88.77570602050753 - type: main_score value: 88.77570602050753 - type: precision value: 88.15978104021582 - type: recall value: 90.21739130434783 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (azb_Arab-rus_Cyrl) type: mteb/flores config: azb_Arab-rus_Cyrl split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 65.71146245059289 - type: f1 value: 64.18825390221271 - type: main_score value: 64.18825390221271 - type: precision value: 63.66811154793568 - type: recall value: 65.71146245059289 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (deu_Latn-rus_Cyrl) type: mteb/flores config: deu_Latn-rus_Cyrl split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 99.70355731225297 - type: f1 value: 99.60474308300395 - type: main_score value: 99.60474308300395 - type: precision value: 99.55533596837944 - type: recall value: 99.70355731225297 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (hat_Latn-rus_Cyrl) type: mteb/flores config: hat_Latn-rus_Cyrl split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 86.7588932806324 - type: f1 value: 85.86738623695146 - type: main_score value: 85.86738623695146 - type: precision value: 85.55235467420822 - type: recall value: 86.7588932806324 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (kbp_Latn-rus_Cyrl) type: mteb/flores config: kbp_Latn-rus_Cyrl split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 34.88142292490119 - type: f1 value: 32.16511669463015 - type: main_score value: 32.16511669463015 - type: precision value: 31.432098549546318 - type: recall value: 34.88142292490119 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (luo_Latn-rus_Cyrl) type: mteb/flores config: luo_Latn-rus_Cyrl split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 52.27272727272727 - type: f1 value: 49.60489626836975 - type: main_score value: 49.60489626836975 - type: precision value: 48.69639631803339 - type: recall value: 52.27272727272727 - task: type: 
BitextMining dataset: name: MTEB FloresBitextMining (ory_Orya-rus_Cyrl) type: mteb/flores config: ory_Orya-rus_Cyrl split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 97.82608695652173 - type: f1 value: 97.27437417654808 - type: main_score value: 97.27437417654808 - type: precision value: 97.04968944099377 - type: recall value: 97.82608695652173 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (sna_Latn-rus_Cyrl) type: mteb/flores config: sna_Latn-rus_Cyrl split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 85.37549407114624 - type: f1 value: 83.09911316305177 - type: main_score value: 83.09911316305177 - type: precision value: 82.1284950958864 - type: recall value: 85.37549407114624 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (tso_Latn-rus_Cyrl) type: mteb/flores config: tso_Latn-rus_Cyrl split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 82.90513833992095 - type: f1 value: 80.28290385503824 - type: main_score value: 80.28290385503824 - type: precision value: 79.23672543237761 - type: recall value: 82.90513833992095 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (azj_Latn-rus_Cyrl) type: mteb/flores config: azj_Latn-rus_Cyrl split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 98.02371541501977 - type: f1 value: 97.49200075287031 - type: main_score value: 97.49200075287031 - type: precision value: 97.266139657444 - type: recall value: 98.02371541501977 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (dik_Latn-rus_Cyrl) type: mteb/flores config: dik_Latn-rus_Cyrl split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 38.43873517786561 - type: f1 value: 35.78152442955223 - type: main_score value: 35.78152442955223 - type: precision value: 34.82424325078237 - type: recall value: 38.43873517786561 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (hau_Latn-rus_Cyrl) type: mteb/flores config: hau_Latn-rus_Cyrl split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 81.42292490118577 - type: f1 value: 79.24612283124593 - type: main_score value: 79.24612283124593 - type: precision value: 78.34736070751448 - type: recall value: 81.42292490118577 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (kea_Latn-rus_Cyrl) type: mteb/flores config: kea_Latn-rus_Cyrl split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 81.62055335968378 - type: f1 value: 80.47015182884748 - type: main_score value: 80.47015182884748 - type: precision value: 80.02671028885862 - type: recall value: 81.62055335968378 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (lus_Latn-rus_Cyrl) type: mteb/flores config: lus_Latn-rus_Cyrl split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 62.74703557312253 - type: f1 value: 60.53900079111122 - type: main_score value: 60.53900079111122 - type: precision value: 59.80024202850289 - type: recall value: 62.74703557312253 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (pag_Latn-rus_Cyrl) type: mteb/flores config: pag_Latn-rus_Cyrl split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 74.01185770750988 - type: f1 value: 72.57280648279529 - type: 
main_score value: 72.57280648279529 - type: precision value: 71.99952968456789 - type: recall value: 74.01185770750988 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (snd_Arab-rus_Cyrl) type: mteb/flores config: snd_Arab-rus_Cyrl split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 91.30434782608695 - type: f1 value: 90.24653499445358 - type: main_score value: 90.24653499445358 - type: precision value: 89.83134068200232 - type: recall value: 91.30434782608695 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (tuk_Latn-rus_Cyrl) type: mteb/flores config: tuk_Latn-rus_Cyrl split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 47.62845849802372 - type: f1 value: 45.812928836644254 - type: main_score value: 45.812928836644254 - type: precision value: 45.23713833170355 - type: recall value: 47.62845849802372 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (bak_Cyrl-rus_Cyrl) type: mteb/flores config: bak_Cyrl-rus_Cyrl split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 95.8498023715415 - type: f1 value: 95.18904459615922 - type: main_score value: 95.18904459615922 - type: precision value: 94.92812441182006 - type: recall value: 95.8498023715415 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (dyu_Latn-rus_Cyrl) type: mteb/flores config: dyu_Latn-rus_Cyrl split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 29.64426877470356 - type: f1 value: 27.287335193938166 - type: main_score value: 27.287335193938166 - type: precision value: 26.583996026587492 - type: recall value: 29.64426877470356 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (heb_Hebr-rus_Cyrl) type: mteb/flores config: heb_Hebr-rus_Cyrl split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 98.91304347826086 - type: f1 value: 98.55072463768116 - type: main_score value: 98.55072463768116 - type: precision value: 98.36956521739131 - type: recall value: 98.91304347826086 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (khk_Cyrl-rus_Cyrl) type: mteb/flores config: khk_Cyrl-rus_Cyrl split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 95.15810276679841 - type: f1 value: 94.44009547764487 - type: main_score value: 94.44009547764487 - type: precision value: 94.16579797014579 - type: recall value: 95.15810276679841 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (lvs_Latn-rus_Cyrl) type: mteb/flores config: lvs_Latn-rus_Cyrl split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 97.92490118577075 - type: f1 value: 97.51467241585817 - type: main_score value: 97.51467241585817 - type: precision value: 97.36166007905138 - type: recall value: 97.92490118577075 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (pan_Guru-rus_Cyrl) type: mteb/flores config: pan_Guru-rus_Cyrl split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 97.92490118577075 - type: f1 value: 97.42918313570486 - type: main_score value: 97.42918313570486 - type: precision value: 97.22261434217955 - type: recall value: 97.92490118577075 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (som_Latn-rus_Cyrl) type: mteb/flores config: som_Latn-rus_Cyrl split: devtest 
revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 75.69169960474308 - type: f1 value: 73.7211667065916 - type: main_score value: 73.7211667065916 - type: precision value: 72.95842401892384 - type: recall value: 75.69169960474308 - task: type: BitextMining dataset: name: MTEB FloresBitextMining (tum_Latn-rus_Cyrl) type: mteb/flores config: tum_Latn-rus_Cyrl split: devtest revision: e6b647fcb6299a2f686f742f4d4c023e553ea67e metrics: - type: accuracy value: 85.67193675889328 - type: f1 value: 82.9296066252588 - type: main_score value: 82.9296066252588 - type: precision value: 81.77330225447936 - type: recall value: 85.67193675889328 - task: type: Classification dataset: name: MTEB GeoreviewClassification (default) type: ai-forever/georeview-classification config: default split: test revision: 3765c0d1de6b7d264bc459433c45e5a75513839c metrics: - type: accuracy value: 44.6630859375 - type: f1 value: 42.607425073610536 - type: f1_weighted value: 42.60639474586065 - type: main_score value: 44.6630859375 - task: type: Clustering dataset: name: MTEB GeoreviewClusteringP2P (default) type: ai-forever/georeview-clustering-p2p config: default split: test revision: 97a313c8fc85b47f13f33e7e9a95c1ad888c7fec metrics: - type: main_score value: 58.15951247070825 - type: v_measure value: 58.15951247070825 - type: v_measure_std value: 0.6739615788288809 - task: type: Classification dataset: name: MTEB HeadlineClassification (default) type: ai-forever/headline-classification config: default split: test revision: 2fe05ee6b5832cda29f2ef7aaad7b7fe6a3609eb metrics: - type: accuracy value: 73.935546875 - type: f1 value: 73.8654872186846 - type: f1_weighted value: 73.86733122685095 - type: main_score value: 73.935546875 - task: type: Classification dataset: name: MTEB InappropriatenessClassification (default) type: ai-forever/inappropriateness-classification config: default split: test revision: 601651fdc45ef243751676e62dd7a19f491c0285 metrics: - type: accuracy value: 59.16015624999999 - type: ap value: 55.52276605836938 - type: ap_weighted value: 55.52276605836938 - type: f1 value: 58.614248199637956 - type: f1_weighted value: 58.614248199637956 - type: main_score value: 59.16015624999999 - task: type: Classification dataset: name: MTEB KinopoiskClassification (default) type: ai-forever/kinopoisk-sentiment-classification config: default split: test revision: 5911f26666ac11af46cb9c6849d0dc80a378af24 metrics: - type: accuracy value: 49.959999999999994 - type: f1 value: 48.4900332316098 - type: f1_weighted value: 48.4900332316098 - type: main_score value: 49.959999999999994 - task: type: Classification dataset: name: MTEB LanguageClassification (default) type: papluca/language-identification config: default split: test revision: aa56583bf2bc52b0565770607d6fc3faebecf9e2 metrics: - type: accuracy value: 71.005859375 - type: f1 value: 69.63481100303348 - type: f1_weighted value: 69.64640413409529 - type: main_score value: 71.005859375 - task: type: Clustering dataset: name: MTEB MLSUMClusteringP2P (ru) type: reciTAL/mlsum config: ru split: test revision: b5d54f8f3b61ae17845046286940f03c6bc79bc7 metrics: - type: main_score value: 42.11280087032343 - type: v_measure value: 42.11280087032343 - type: v_measure_std value: 6.7619971723605135 - type: main_score value: 43.00112546945811 - type: v_measure value: 43.00112546945811 - type: v_measure_std value: 1.4740560414835675 - type: main_score value: 39.81446080575161 - type: v_measure value: 39.81446080575161 - type: v_measure_std value: 
7.125661320308298 - type: main_score value: 39.29659668980239 - type: v_measure value: 39.29659668980239 - type: v_measure_std value: 2.6570502923023094 - task: type: Retrieval dataset: name: MTEB MultiLongDocRetrieval (ru) type: Shitao/MLDR config: ru split: dev revision: d67138e705d963e346253a80e59676ddb418810a metrics: - type: main_score value: 38.671 - type: map_at_1 value: 30.0 - type: map_at_10 value: 36.123 - type: map_at_100 value: 36.754999999999995 - type: map_at_1000 value: 36.806 - type: map_at_20 value: 36.464 - type: map_at_3 value: 35.25 - type: map_at_5 value: 35.8 - type: mrr_at_1 value: 30.0 - type: mrr_at_10 value: 36.122817460317464 - type: mrr_at_100 value: 36.75467016625293 - type: mrr_at_1000 value: 36.80612724920882 - type: mrr_at_20 value: 36.46359681984682 - type: mrr_at_3 value: 35.25 - type: mrr_at_5 value: 35.800000000000004 - type: nauc_map_at_1000_diff1 value: 55.61987610843598 - type: nauc_map_at_1000_max value: 52.506795017152186 - type: nauc_map_at_1000_std value: 2.95487192066911 - type: nauc_map_at_100_diff1 value: 55.598419532054734 - type: nauc_map_at_100_max value: 52.48192017040307 - type: nauc_map_at_100_std value: 2.930120252521189 - type: nauc_map_at_10_diff1 value: 56.02309155375198 - type: nauc_map_at_10_max value: 52.739573233234424 - type: nauc_map_at_10_std value: 2.4073432421641545 - type: nauc_map_at_1_diff1 value: 52.57059856776112 - type: nauc_map_at_1_max value: 50.55668152952304 - type: nauc_map_at_1_std value: 1.6572084853398048 - type: nauc_map_at_20_diff1 value: 55.75769029917031 - type: nauc_map_at_20_max value: 52.53663737242853 - type: nauc_map_at_20_std value: 2.8489192879814 - type: nauc_map_at_3_diff1 value: 56.90294128342709 - type: nauc_map_at_3_max value: 53.10608389782041 - type: nauc_map_at_3_std value: 1.4909731657889491 - type: nauc_map_at_5_diff1 value: 56.1258315436073 - type: nauc_map_at_5_max value: 52.398078357541564 - type: nauc_map_at_5_std value: 1.8256862015101467 - type: nauc_mrr_at_1000_diff1 value: 55.61987610843598 - type: nauc_mrr_at_1000_max value: 52.506795017152186 - type: nauc_mrr_at_1000_std value: 2.95487192066911 - type: nauc_mrr_at_100_diff1 value: 55.598419532054734 - type: nauc_mrr_at_100_max value: 52.48192017040307 - type: nauc_mrr_at_100_std value: 2.930120252521189 - type: nauc_mrr_at_10_diff1 value: 56.02309155375198 - type: nauc_mrr_at_10_max value: 52.739573233234424 - type: nauc_mrr_at_10_std value: 2.4073432421641545 - type: nauc_mrr_at_1_diff1 value: 52.57059856776112 - type: nauc_mrr_at_1_max value: 50.55668152952304 - type: nauc_mrr_at_1_std value: 1.6572084853398048 - type: nauc_mrr_at_20_diff1 value: 55.75769029917031 - type: nauc_mrr_at_20_max value: 52.53663737242853 - type: nauc_mrr_at_20_std value: 2.8489192879814 - type: nauc_mrr_at_3_diff1 value: 56.90294128342709 - type: nauc_mrr_at_3_max value: 53.10608389782041 - type: nauc_mrr_at_3_std value: 1.4909731657889491 - type: nauc_mrr_at_5_diff1 value: 56.1258315436073 - type: nauc_mrr_at_5_max value: 52.398078357541564 - type: nauc_mrr_at_5_std value: 1.8256862015101467 - type: nauc_ndcg_at_1000_diff1 value: 55.30733548408918 - type: nauc_ndcg_at_1000_max value: 53.51143366189318 - type: nauc_ndcg_at_1000_std value: 7.133789405525702 - type: nauc_ndcg_at_100_diff1 value: 54.32209039488095 - type: nauc_ndcg_at_100_max value: 52.67499334461009 - type: nauc_ndcg_at_100_std value: 6.878823275077807 - type: nauc_ndcg_at_10_diff1 value: 56.266780806997716 - type: nauc_ndcg_at_10_max value: 53.52837255793743 - type: nauc_ndcg_at_10_std 
value: 3.756832592964262 - type: nauc_ndcg_at_1_diff1 value: 52.57059856776112 - type: nauc_ndcg_at_1_max value: 50.55668152952304 - type: nauc_ndcg_at_1_std value: 1.6572084853398048 - type: nauc_ndcg_at_20_diff1 value: 55.39255420432796 - type: nauc_ndcg_at_20_max value: 52.946114684072235 - type: nauc_ndcg_at_20_std value: 5.414933414031693 - type: nauc_ndcg_at_3_diff1 value: 57.92826624996289 - type: nauc_ndcg_at_3_max value: 53.89907760306972 - type: nauc_ndcg_at_3_std value: 1.6661401245309218 - type: nauc_ndcg_at_5_diff1 value: 56.47508936029308 - type: nauc_ndcg_at_5_max value: 52.66800998045517 - type: nauc_ndcg_at_5_std value: 2.4127296184140423 - type: nauc_precision_at_1000_diff1 value: 57.25924020238401 - type: nauc_precision_at_1000_max value: 65.1132590931922 - type: nauc_precision_at_1000_std value: 40.60788709618145 - type: nauc_precision_at_100_diff1 value: 46.49620002554606 - type: nauc_precision_at_100_max value: 53.02960148167071 - type: nauc_precision_at_100_std value: 28.206028867032863 - type: nauc_precision_at_10_diff1 value: 56.562744749606765 - type: nauc_precision_at_10_max value: 56.00594967783547 - type: nauc_precision_at_10_std value: 8.368379831645163 - type: nauc_precision_at_1_diff1 value: 52.57059856776112 - type: nauc_precision_at_1_max value: 50.55668152952304 - type: nauc_precision_at_1_std value: 1.6572084853398048 - type: nauc_precision_at_20_diff1 value: 53.25915754614111 - type: nauc_precision_at_20_max value: 54.03255118937036 - type: nauc_precision_at_20_std value: 15.161611674272718 - type: nauc_precision_at_3_diff1 value: 60.726785748943854 - type: nauc_precision_at_3_max value: 56.139896875869354 - type: nauc_precision_at_3_std value: 2.2306901035769893 - type: nauc_precision_at_5_diff1 value: 57.1201127525187 - type: nauc_precision_at_5_max value: 53.28665761862506 - type: nauc_precision_at_5_std value: 4.358720050112237 - type: nauc_recall_at_1000_diff1 value: 57.259240202383964 - type: nauc_recall_at_1000_max value: 65.11325909319218 - type: nauc_recall_at_1000_std value: 40.60788709618142 - type: nauc_recall_at_100_diff1 value: 46.49620002554603 - type: nauc_recall_at_100_max value: 53.02960148167071 - type: nauc_recall_at_100_std value: 28.206028867032835 - type: nauc_recall_at_10_diff1 value: 56.562744749606765 - type: nauc_recall_at_10_max value: 56.00594967783549 - type: nauc_recall_at_10_std value: 8.368379831645147 - type: nauc_recall_at_1_diff1 value: 52.57059856776112 - type: nauc_recall_at_1_max value: 50.55668152952304 - type: nauc_recall_at_1_std value: 1.6572084853398048 - type: nauc_recall_at_20_diff1 value: 53.259157546141154 - type: nauc_recall_at_20_max value: 54.03255118937038 - type: nauc_recall_at_20_std value: 15.16161167427274 - type: nauc_recall_at_3_diff1 value: 60.72678574894387 - type: nauc_recall_at_3_max value: 56.13989687586933 - type: nauc_recall_at_3_std value: 2.2306901035770066 - type: nauc_recall_at_5_diff1 value: 57.12011275251864 - type: nauc_recall_at_5_max value: 53.28665761862502 - type: nauc_recall_at_5_std value: 4.3587200501122245 - type: ndcg_at_1 value: 30.0 - type: ndcg_at_10 value: 38.671 - type: ndcg_at_100 value: 42.173 - type: ndcg_at_1000 value: 44.016 - type: ndcg_at_20 value: 39.845000000000006 - type: ndcg_at_3 value: 36.863 - type: ndcg_at_5 value: 37.874 - type: precision_at_1 value: 30.0 - type: precision_at_10 value: 4.65 - type: precision_at_100 value: 0.64 - type: precision_at_1000 value: 0.08 - type: precision_at_20 value: 2.55 - type: precision_at_3 value: 13.833 - type: 
precision_at_5 value: 8.799999999999999 - type: recall_at_1 value: 30.0 - type: recall_at_10 value: 46.5 - type: recall_at_100 value: 64.0 - type: recall_at_1000 value: 79.5 - type: recall_at_20 value: 51.0 - type: recall_at_3 value: 41.5 - type: recall_at_5 value: 44.0 - task: type: Classification dataset: name: MTEB MultilingualSentimentClassification (rus) type: mteb/multilingual-sentiment-classification config: rus split: test revision: 2b9b4d10fc589af67794141fe8cbd3739de1eb33 metrics: - type: accuracy value: 79.52710495963092 - type: ap value: 84.5713457178972 - type: ap_weighted value: 84.5713457178972 - type: f1 value: 77.88661181524105 - type: f1_weighted value: 79.87563079922718 - type: main_score value: 79.52710495963092 - task: type: BitextMining dataset: name: MTEB NTREXBitextMining (arb_Arab-rus_Cyrl) type: mteb/NTREX config: arb_Arab-rus_Cyrl split: test revision: ed9a4403ed4adbfaf4aab56d5b2709e9f6c3ba33 metrics: - type: accuracy value: 86.47971957936905 - type: f1 value: 82.79864240805654 - type: main_score value: 82.79864240805654 - type: precision value: 81.21485800128767 - type: recall value: 86.47971957936905 - task: type: BitextMining dataset: name: MTEB NTREXBitextMining (bel_Cyrl-rus_Cyrl) type: mteb/NTREX config: bel_Cyrl-rus_Cyrl split: test revision: ed9a4403ed4adbfaf4aab56d5b2709e9f6c3ba33 metrics: - type: accuracy value: 94.84226339509264 - type: f1 value: 93.56399067465667 - type: main_score value: 93.56399067465667 - type: precision value: 93.01619095309631 - type: recall value: 94.84226339509264 - task: type: BitextMining dataset: name: MTEB NTREXBitextMining (ben_Beng-rus_Cyrl) type: mteb/NTREX config: ben_Beng-rus_Cyrl split: test revision: ed9a4403ed4adbfaf4aab56d5b2709e9f6c3ba33 metrics: - type: accuracy value: 92.18828242363544 - type: f1 value: 90.42393889620612 - type: main_score value: 90.42393889620612 - type: precision value: 89.67904925153297 - type: recall value: 92.18828242363544 - task: type: BitextMining dataset: name: MTEB NTREXBitextMining (bos_Latn-rus_Cyrl) type: mteb/NTREX config: bos_Latn-rus_Cyrl split: test revision: ed9a4403ed4adbfaf4aab56d5b2709e9f6c3ba33 metrics: - type: accuracy value: 94.69203805708563 - type: f1 value: 93.37172425304624 - type: main_score value: 93.37172425304624 - type: precision value: 92.79204521067315 - type: recall value: 94.69203805708563 - task: type: BitextMining dataset: name: MTEB NTREXBitextMining (bul_Cyrl-rus_Cyrl) type: mteb/NTREX config: bul_Cyrl-rus_Cyrl split: test revision: ed9a4403ed4adbfaf4aab56d5b2709e9f6c3ba33 metrics: - type: accuracy value: 96.99549323985978 - type: f1 value: 96.13086296110833 - type: main_score value: 96.13086296110833 - type: precision value: 95.72441996327827 - type: recall value: 96.99549323985978 - task: type: BitextMining dataset: name: MTEB NTREXBitextMining (ces_Latn-rus_Cyrl) type: mteb/NTREX config: ces_Latn-rus_Cyrl split: test revision: ed9a4403ed4adbfaf4aab56d5b2709e9f6c3ba33 metrics: - type: accuracy value: 95.94391587381071 - type: f1 value: 94.90680465142157 - type: main_score value: 94.90680465142157 - type: precision value: 94.44541812719079 - type: recall value: 95.94391587381071 - task: type: BitextMining dataset: name: MTEB NTREXBitextMining (deu_Latn-rus_Cyrl) type: mteb/NTREX config: deu_Latn-rus_Cyrl split: test revision: ed9a4403ed4adbfaf4aab56d5b2709e9f6c3ba33 metrics: - type: accuracy value: 96.09414121181773 - type: f1 value: 94.94408279085295 - type: main_score value: 94.94408279085295 - type: precision value: 94.41245201135037 - type: recall 
value: 96.09414121181773 - task: type: BitextMining dataset: name: MTEB NTREXBitextMining (ell_Grek-rus_Cyrl) type: mteb/NTREX config: ell_Grek-rus_Cyrl split: test revision: ed9a4403ed4adbfaf4aab56d5b2709e9f6c3ba33 metrics: - type: accuracy value: 96.19429143715573 - type: f1 value: 95.12101485561676 - type: main_score value: 95.12101485561676 - type: precision value: 94.60440660991488 - type: recall value: 96.19429143715573 - task: type: BitextMining dataset: name: MTEB NTREXBitextMining (eng_Latn-rus_Cyrl) type: mteb/NTREX config: eng_Latn-rus_Cyrl split: test revision: ed9a4403ed4adbfaf4aab56d5b2709e9f6c3ba33 metrics: - type: accuracy value: 96.49474211316975 - type: f1 value: 95.46581777428045 - type: main_score value: 95.46581777428045 - type: precision value: 94.98414288098814 - type: recall value: 96.49474211316975 - task: type: BitextMining dataset: name: MTEB NTREXBitextMining (fas_Arab-rus_Cyrl) type: mteb/NTREX config: fas_Arab-rus_Cyrl split: test revision: ed9a4403ed4adbfaf4aab56d5b2709e9f6c3ba33 metrics: - type: accuracy value: 94.44166249374061 - type: f1 value: 92.92383018972905 - type: main_score value: 92.92383018972905 - type: precision value: 92.21957936905358 - type: recall value: 94.44166249374061 - task: type: BitextMining dataset: name: MTEB NTREXBitextMining (fin_Latn-rus_Cyrl) type: mteb/NTREX config: fin_Latn-rus_Cyrl split: test revision: ed9a4403ed4adbfaf4aab56d5b2709e9f6c3ba33 metrics: - type: accuracy value: 92.18828242363544 - type: f1 value: 90.2980661468393 - type: main_score value: 90.2980661468393 - type: precision value: 89.42580537472877 - type: recall value: 92.18828242363544 - task: type: BitextMining dataset: name: MTEB NTREXBitextMining (fra_Latn-rus_Cyrl) type: mteb/NTREX config: fra_Latn-rus_Cyrl split: test revision: ed9a4403ed4adbfaf4aab56d5b2709e9f6c3ba33 metrics: - type: accuracy value: 95.84376564847271 - type: f1 value: 94.81054915706895 - type: main_score value: 94.81054915706895 - type: precision value: 94.31369276136427 - type: recall value: 95.84376564847271 - task: type: BitextMining dataset: name: MTEB NTREXBitextMining (heb_Hebr-rus_Cyrl) type: mteb/NTREX config: heb_Hebr-rus_Cyrl split: test revision: ed9a4403ed4adbfaf4aab56d5b2709e9f6c3ba33 metrics: - type: accuracy value: 94.89233850776164 - type: f1 value: 93.42513770655985 - type: main_score value: 93.42513770655985 - type: precision value: 92.73493573693875 - type: recall value: 94.89233850776164 - task: type: BitextMining dataset: name: MTEB NTREXBitextMining (hin_Deva-rus_Cyrl) type: mteb/NTREX config: hin_Deva-rus_Cyrl split: test revision: ed9a4403ed4adbfaf4aab56d5b2709e9f6c3ba33 metrics: - type: accuracy value: 93.23985978968453 - type: f1 value: 91.52816526376867 - type: main_score value: 91.52816526376867 - type: precision value: 90.76745946425466 - type: recall value: 93.23985978968453 - task: type: BitextMining dataset: name: MTEB NTREXBitextMining (hrv_Latn-rus_Cyrl) type: mteb/NTREX config: hrv_Latn-rus_Cyrl split: test revision: ed9a4403ed4adbfaf4aab56d5b2709e9f6c3ba33 metrics: - type: accuracy value: 93.99098647971958 - type: f1 value: 92.36354531797697 - type: main_score value: 92.36354531797697 - type: precision value: 91.63228970439788 - type: recall value: 93.99098647971958 - task: type: BitextMining dataset: name: MTEB NTREXBitextMining (hun_Latn-rus_Cyrl) type: mteb/NTREX config: hun_Latn-rus_Cyrl split: test revision: ed9a4403ed4adbfaf4aab56d5b2709e9f6c3ba33 metrics: - type: accuracy value: 93.64046069103655 - type: f1 value: 92.05224503421799 - type: 
main_score value: 92.05224503421799 - type: precision value: 91.33998616973079 - type: recall value: 93.64046069103655 - task: type: BitextMining dataset: name: MTEB NTREXBitextMining (ind_Latn-rus_Cyrl) type: mteb/NTREX config: ind_Latn-rus_Cyrl split: test revision: ed9a4403ed4adbfaf4aab56d5b2709e9f6c3ba33 metrics: - type: accuracy value: 91.68753129694541 - type: f1 value: 89.26222667334335 - type: main_score value: 89.26222667334335 - type: precision value: 88.14638624603572 - type: recall value: 91.68753129694541 - task: type: BitextMining dataset: name: MTEB NTREXBitextMining (jpn_Jpan-rus_Cyrl) type: mteb/NTREX config: jpn_Jpan-rus_Cyrl split: test revision: ed9a4403ed4adbfaf4aab56d5b2709e9f6c3ba33 metrics: - type: accuracy value: 91.28693039559339 - type: f1 value: 89.21161763348957 - type: main_score value: 89.21161763348957 - type: precision value: 88.31188340952988 - type: recall value: 91.28693039559339 - task: type: BitextMining dataset: name: MTEB NTREXBitextMining (kor_Hang-rus_Cyrl) type: mteb/NTREX config: kor_Hang-rus_Cyrl split: test revision: ed9a4403ed4adbfaf4aab56d5b2709e9f6c3ba33 metrics: - type: accuracy value: 89.53430145217827 - type: f1 value: 86.88322165788365 - type: main_score value: 86.88322165788365 - type: precision value: 85.73950211030831 - type: recall value: 89.53430145217827 - task: type: BitextMining dataset: name: MTEB NTREXBitextMining (lit_Latn-rus_Cyrl) type: mteb/NTREX config: lit_Latn-rus_Cyrl split: test revision: ed9a4403ed4adbfaf4aab56d5b2709e9f6c3ba33 metrics: - type: accuracy value: 90.28542814221332 - type: f1 value: 88.10249103814452 - type: main_score value: 88.10249103814452 - type: precision value: 87.17689323973752 - type: recall value: 90.28542814221332 - task: type: BitextMining dataset: name: MTEB NTREXBitextMining (mkd_Cyrl-rus_Cyrl) type: mteb/NTREX config: mkd_Cyrl-rus_Cyrl split: test revision: ed9a4403ed4adbfaf4aab56d5b2709e9f6c3ba33 metrics: - type: accuracy value: 95.04256384576865 - type: f1 value: 93.65643703650713 - type: main_score value: 93.65643703650713 - type: precision value: 93.02036387915207 - type: recall value: 95.04256384576865 - task: type: BitextMining dataset: name: MTEB NTREXBitextMining (nld_Latn-rus_Cyrl) type: mteb/NTREX config: nld_Latn-rus_Cyrl split: test revision: ed9a4403ed4adbfaf4aab56d5b2709e9f6c3ba33 metrics: - type: accuracy value: 95.39308963445168 - type: f1 value: 94.16207644800535 - type: main_score value: 94.16207644800535 - type: precision value: 93.582516632091 - type: recall value: 95.39308963445168 - task: type: BitextMining dataset: name: MTEB NTREXBitextMining (pol_Latn-rus_Cyrl) type: mteb/NTREX config: pol_Latn-rus_Cyrl split: test revision: ed9a4403ed4adbfaf4aab56d5b2709e9f6c3ba33 metrics: - type: accuracy value: 95.7436154231347 - type: f1 value: 94.5067601402103 - type: main_score value: 94.5067601402103 - type: precision value: 93.91587381071608 - type: recall value: 95.7436154231347 - task: type: BitextMining dataset: name: MTEB NTREXBitextMining (por_Latn-rus_Cyrl) type: mteb/NTREX config: por_Latn-rus_Cyrl split: test revision: ed9a4403ed4adbfaf4aab56d5b2709e9f6c3ba33 metrics: - type: accuracy value: 65.89884827240861 - type: f1 value: 64.61805459419219 - type: main_score value: 64.61805459419219 - type: precision value: 64.07119451106485 - type: recall value: 65.89884827240861 - task: type: BitextMining dataset: name: MTEB NTREXBitextMining (rus_Cyrl-arb_Arab) type: mteb/NTREX config: rus_Cyrl-arb_Arab split: test revision: ed9a4403ed4adbfaf4aab56d5b2709e9f6c3ba33 metrics: - 
type: accuracy value: 94.2413620430646 - type: f1 value: 92.67663399861698 - type: main_score value: 92.67663399861698 - type: precision value: 91.94625271240193 - type: recall value: 94.2413620430646 - task: type: BitextMining dataset: name: MTEB NTREXBitextMining (rus_Cyrl-bel_Cyrl) type: mteb/NTREX config: rus_Cyrl-bel_Cyrl split: test revision: ed9a4403ed4adbfaf4aab56d5b2709e9f6c3ba33 metrics: - type: accuracy value: 94.89233850776164 - type: f1 value: 93.40343849106993 - type: main_score value: 93.40343849106993 - type: precision value: 92.74077783341679 - type: recall value: 94.89233850776164 - task: type: BitextMining dataset: name: MTEB NTREXBitextMining (rus_Cyrl-ben_Beng) type: mteb/NTREX config: rus_Cyrl-ben_Beng split: test revision: ed9a4403ed4adbfaf4aab56d5b2709e9f6c3ba33 metrics: - type: accuracy value: 94.2914371557336 - type: f1 value: 92.62226673343348 - type: main_score value: 92.62226673343348 - type: precision value: 91.84610248706393 - type: recall value: 94.2914371557336 - task: type: BitextMining dataset: name: MTEB NTREXBitextMining (rus_Cyrl-bos_Latn) type: mteb/NTREX config: rus_Cyrl-bos_Latn split: test revision: ed9a4403ed4adbfaf4aab56d5b2709e9f6c3ba33 metrics: - type: accuracy value: 95.69354031046569 - type: f1 value: 94.50418051319403 - type: main_score value: 94.50418051319403 - type: precision value: 93.95843765648473 - type: recall value: 95.69354031046569 - task: type: BitextMining dataset: name: MTEB NTREXBitextMining (rus_Cyrl-bul_Cyrl) type: mteb/NTREX config: rus_Cyrl-bul_Cyrl split: test revision: ed9a4403ed4adbfaf4aab56d5b2709e9f6c3ba33 metrics: - type: accuracy value: 95.89384076114172 - type: f1 value: 94.66199298948423 - type: main_score value: 94.66199298948423 - type: precision value: 94.08028709731263 - type: recall value: 95.89384076114172 - task: type: BitextMining dataset: name: MTEB NTREXBitextMining (rus_Cyrl-ces_Latn) type: mteb/NTREX config: rus_Cyrl-ces_Latn split: test revision: ed9a4403ed4adbfaf4aab56d5b2709e9f6c3ba33 metrics: - type: accuracy value: 93.94091136705057 - type: f1 value: 92.3746731207923 - type: main_score value: 92.3746731207923 - type: precision value: 91.66207644800535 - type: recall value: 93.94091136705057 - task: type: BitextMining dataset: name: MTEB NTREXBitextMining (rus_Cyrl-deu_Latn) type: mteb/NTREX config: rus_Cyrl-deu_Latn split: test revision: ed9a4403ed4adbfaf4aab56d5b2709e9f6c3ba33 metrics: - type: accuracy value: 95.94391587381071 - type: f1 value: 94.76214321482223 - type: main_score value: 94.76214321482223 - type: precision value: 94.20380570856285 - type: recall value: 95.94391587381071 - task: type: BitextMining dataset: name: MTEB NTREXBitextMining (rus_Cyrl-ell_Grek) type: mteb/NTREX config: rus_Cyrl-ell_Grek split: test revision: ed9a4403ed4adbfaf4aab56d5b2709e9f6c3ba33 metrics: - type: accuracy value: 95.44316474712068 - type: f1 value: 94.14788849941579 - type: main_score value: 94.14788849941579 - type: precision value: 93.54197963612084 - type: recall value: 95.44316474712068 - task: type: BitextMining dataset: name: MTEB NTREXBitextMining (rus_Cyrl-eng_Latn) type: mteb/NTREX config: rus_Cyrl-eng_Latn split: test revision: ed9a4403ed4adbfaf4aab56d5b2709e9f6c3ba33 metrics: - type: accuracy value: 98.14722083124687 - type: f1 value: 97.57135703555333 - type: main_score value: 97.57135703555333 - type: precision value: 97.2959439158738 - type: recall value: 98.14722083124687 - task: type: BitextMining dataset: name: MTEB NTREXBitextMining (rus_Cyrl-fas_Arab) type: mteb/NTREX config: 
rus_Cyrl-fas_Arab split: test revision: ed9a4403ed4adbfaf4aab56d5b2709e9f6c3ba33 metrics: - type: accuracy value: 94.64196294441662 - type: f1 value: 93.24653647137372 - type: main_score value: 93.24653647137372 - type: precision value: 92.60724419963279 - type: recall value: 94.64196294441662 - task: type: BitextMining dataset: name: MTEB NTREXBitextMining (rus_Cyrl-fin_Latn) type: mteb/NTREX config: rus_Cyrl-fin_Latn split: test revision: ed9a4403ed4adbfaf4aab56d5b2709e9f6c3ba33 metrics: - type: accuracy value: 87.98197295943916 - type: f1 value: 85.23368385912201 - type: main_score value: 85.23368385912201 - type: precision value: 84.08159858835873 - type: recall value: 87.98197295943916 - task: type: BitextMining dataset: name: MTEB NTREXBitextMining (rus_Cyrl-fra_Latn) type: mteb/NTREX config: rus_Cyrl-fra_Latn split: test revision: ed9a4403ed4adbfaf4aab56d5b2709e9f6c3ba33 metrics: - type: accuracy value: 96.24436654982473 - type: f1 value: 95.07093974294774 - type: main_score value: 95.07093974294774 - type: precision value: 94.49591053246536 - type: recall value: 96.24436654982473 - task: type: BitextMining dataset: name: MTEB NTREXBitextMining (rus_Cyrl-heb_Hebr) type: mteb/NTREX config: rus_Cyrl-heb_Hebr split: test revision: ed9a4403ed4adbfaf4aab56d5b2709e9f6c3ba33 metrics: - type: accuracy value: 91.08662994491738 - type: f1 value: 88.5161074945752 - type: main_score value: 88.5161074945752 - type: precision value: 87.36187614755467 - type: recall value: 91.08662994491738 - task: type: BitextMining dataset: name: MTEB NTREXBitextMining (rus_Cyrl-hin_Deva) type: mteb/NTREX config: rus_Cyrl-hin_Deva split: test revision: ed9a4403ed4adbfaf4aab56d5b2709e9f6c3ba33 metrics: - type: accuracy value: 95.04256384576865 - type: f1 value: 93.66382907694876 - type: main_score value: 93.66382907694876 - type: precision value: 93.05291270238692 - type: recall value: 95.04256384576865 - task: type: BitextMining dataset: name: MTEB NTREXBitextMining (rus_Cyrl-hrv_Latn) type: mteb/NTREX config: rus_Cyrl-hrv_Latn split: test revision: ed9a4403ed4adbfaf4aab56d5b2709e9f6c3ba33 metrics: - type: accuracy value: 95.14271407110667 - type: f1 value: 93.7481221832749 - type: main_score value: 93.7481221832749 - type: precision value: 93.10930681736892 - type: recall value: 95.14271407110667 - task: type: BitextMining dataset: name: MTEB NTREXBitextMining (rus_Cyrl-hun_Latn) type: mteb/NTREX config: rus_Cyrl-hun_Latn split: test revision: ed9a4403ed4adbfaf4aab56d5b2709e9f6c3ba33 metrics: - type: accuracy value: 90.18527791687532 - type: f1 value: 87.61415933423946 - type: main_score value: 87.61415933423946 - type: precision value: 86.5166400394242 - type: recall value: 90.18527791687532 - task: type: BitextMining dataset: name: MTEB NTREXBitextMining (rus_Cyrl-ind_Latn) type: mteb/NTREX config: rus_Cyrl-ind_Latn split: test revision: ed9a4403ed4adbfaf4aab56d5b2709e9f6c3ba33 metrics: - type: accuracy value: 93.69053580370556 - type: f1 value: 91.83608746453012 - type: main_score value: 91.83608746453012 - type: precision value: 90.97145718577868 - type: recall value: 93.69053580370556 - task: type: BitextMining dataset: name: MTEB NTREXBitextMining (rus_Cyrl-jpn_Jpan) type: mteb/NTREX config: rus_Cyrl-jpn_Jpan split: test revision: ed9a4403ed4adbfaf4aab56d5b2709e9f6c3ba33 metrics: - type: accuracy value: 89.48422633950926 - type: f1 value: 86.91271033534429 - type: main_score value: 86.91271033534429 - type: precision value: 85.82671626487351 - type: recall value: 89.48422633950926 - task: type: 
BitextMining dataset: name: MTEB NTREXBitextMining (rus_Cyrl-kor_Hang) type: mteb/NTREX config: rus_Cyrl-kor_Hang split: test revision: ed9a4403ed4adbfaf4aab56d5b2709e9f6c3ba33 metrics: - type: accuracy value: 88.4827240861292 - type: f1 value: 85.35080398375342 - type: main_score value: 85.35080398375342 - type: precision value: 83.9588549490903 - type: recall value: 88.4827240861292 - task: type: BitextMining dataset: name: MTEB NTREXBitextMining (rus_Cyrl-lit_Latn) type: mteb/NTREX config: rus_Cyrl-lit_Latn split: test revision: ed9a4403ed4adbfaf4aab56d5b2709e9f6c3ba33 metrics: - type: accuracy value: 90.33550325488233 - type: f1 value: 87.68831819157307 - type: main_score value: 87.68831819157307 - type: precision value: 86.51524906407231 - type: recall value: 90.33550325488233 - task: type: BitextMining dataset: name: MTEB NTREXBitextMining (rus_Cyrl-mkd_Cyrl) type: mteb/NTREX config: rus_Cyrl-mkd_Cyrl split: test revision: ed9a4403ed4adbfaf4aab56d5b2709e9f6c3ba33 metrics: - type: accuracy value: 95.94391587381071 - type: f1 value: 94.90402270071775 - type: main_score value: 94.90402270071775 - type: precision value: 94.43915873810715 - type: recall value: 95.94391587381071 - task: type: BitextMining dataset: name: MTEB NTREXBitextMining (rus_Cyrl-nld_Latn) type: mteb/NTREX config: rus_Cyrl-nld_Latn split: test revision: ed9a4403ed4adbfaf4aab56d5b2709e9f6c3ba33 metrics: - type: accuracy value: 92.98948422633951 - type: f1 value: 91.04323151393756 - type: main_score value: 91.04323151393756 - type: precision value: 90.14688699716241 - type: recall value: 92.98948422633951 - task: type: BitextMining dataset: name: MTEB NTREXBitextMining (rus_Cyrl-pol_Latn) type: mteb/NTREX config: rus_Cyrl-pol_Latn split: test revision: ed9a4403ed4adbfaf4aab56d5b2709e9f6c3ba33 metrics: - type: accuracy value: 94.34151226840261 - type: f1 value: 92.8726422967785 - type: main_score value: 92.8726422967785 - type: precision value: 92.19829744616925 - type: recall value: 94.34151226840261 - task: type: BitextMining dataset: name: MTEB NTREXBitextMining (rus_Cyrl-por_Latn) type: mteb/NTREX config: rus_Cyrl-por_Latn split: test revision: ed9a4403ed4adbfaf4aab56d5b2709e9f6c3ba33 metrics: - type: accuracy value: 86.17926890335504 - type: f1 value: 82.7304882287356 - type: main_score value: 82.7304882287356 - type: precision value: 81.28162481817964 - type: recall value: 86.17926890335504 - task: type: BitextMining dataset: name: MTEB NTREXBitextMining (rus_Cyrl-slk_Latn) type: mteb/NTREX config: rus_Cyrl-slk_Latn split: test revision: ed9a4403ed4adbfaf4aab56d5b2709e9f6c3ba33 metrics: - type: accuracy value: 92.7391086629945 - type: f1 value: 90.75112669003506 - type: main_score value: 90.75112669003506 - type: precision value: 89.8564513436822 - type: recall value: 92.7391086629945 - task: type: BitextMining dataset: name: MTEB NTREXBitextMining (rus_Cyrl-slv_Latn) type: mteb/NTREX config: rus_Cyrl-slv_Latn split: test revision: ed9a4403ed4adbfaf4aab56d5b2709e9f6c3ba33 metrics: - type: accuracy value: 92.8893340010015 - type: f1 value: 91.05992321816058 - type: main_score value: 91.05992321816058 - type: precision value: 90.22589439715128 - type: recall value: 92.8893340010015 - task: type: BitextMining dataset: name: MTEB NTREXBitextMining (rus_Cyrl-spa_Latn) type: mteb/NTREX config: rus_Cyrl-spa_Latn split: test revision: ed9a4403ed4adbfaf4aab56d5b2709e9f6c3ba33 metrics: - type: accuracy value: 96.49474211316975 - type: f1 value: 95.4715406442998 - type: main_score value: 95.4715406442998 - type: precision 
value: 94.9799699549324 - type: recall value: 96.49474211316975 - task: type: BitextMining dataset: name: MTEB NTREXBitextMining (rus_Cyrl-srp_Cyrl) type: mteb/NTREX config: rus_Cyrl-srp_Cyrl split: test revision: ed9a4403ed4adbfaf4aab56d5b2709e9f6c3ba33 metrics: - type: accuracy value: 81.07160741111667 - type: f1 value: 76.55687285507015 - type: main_score value: 76.55687285507015 - type: precision value: 74.71886401030116 - type: recall value: 81.07160741111667 - task: type: BitextMining dataset: name: MTEB NTREXBitextMining (rus_Cyrl-srp_Latn) type: mteb/NTREX config: rus_Cyrl-srp_Latn split: test revision: ed9a4403ed4adbfaf4aab56d5b2709e9f6c3ba33 metrics: - type: accuracy value: 95.14271407110667 - type: f1 value: 93.73302377809138 - type: main_score value: 93.73302377809138 - type: precision value: 93.06960440660991 - type: recall value: 95.14271407110667 - task: type: BitextMining dataset: name: MTEB NTREXBitextMining (rus_Cyrl-swa_Latn) type: mteb/NTREX config: rus_Cyrl-swa_Latn split: test revision: ed9a4403ed4adbfaf4aab56d5b2709e9f6c3ba33 metrics: - type: accuracy value: 94.79218828242364 - type: f1 value: 93.25988983475212 - type: main_score value: 93.25988983475212 - type: precision value: 92.53463528626273 - type: recall value: 94.79218828242364 - task: type: BitextMining dataset: name: MTEB NTREXBitextMining (rus_Cyrl-swe_Latn) type: mteb/NTREX config: rus_Cyrl-swe_Latn split: test revision: ed9a4403ed4adbfaf4aab56d5b2709e9f6c3ba33 metrics: - type: accuracy value: 95.04256384576865 - type: f1 value: 93.58704723752295 - type: main_score value: 93.58704723752295 - type: precision value: 92.91437155733601 - type: recall value: 95.04256384576865 - task: type: BitextMining dataset: name: MTEB NTREXBitextMining (rus_Cyrl-tam_Taml) type: mteb/NTREX config: rus_Cyrl-tam_Taml split: test revision: ed9a4403ed4adbfaf4aab56d5b2709e9f6c3ba33 metrics: - type: accuracy value: 93.28993490235354 - type: f1 value: 91.63912535469872 - type: main_score value: 91.63912535469872 - type: precision value: 90.87738750983617 - type: recall value: 93.28993490235354 - task: type: BitextMining dataset: name: MTEB NTREXBitextMining (rus_Cyrl-tur_Latn) type: mteb/NTREX config: rus_Cyrl-tur_Latn split: test revision: ed9a4403ed4adbfaf4aab56d5b2709e9f6c3ba33 metrics: - type: accuracy value: 93.74061091637456 - type: f1 value: 91.96628275746953 - type: main_score value: 91.96628275746953 - type: precision value: 91.15923885828742 - type: recall value: 93.74061091637456 - task: type: BitextMining dataset: name: MTEB NTREXBitextMining (rus_Cyrl-ukr_Cyrl) type: mteb/NTREX config: rus_Cyrl-ukr_Cyrl split: test revision: ed9a4403ed4adbfaf4aab56d5b2709e9f6c3ba33 metrics: - type: accuracy value: 95.99399098647972 - type: f1 value: 94.89567684860624 - type: main_score value: 94.89567684860624 - type: precision value: 94.37072275079286 - type: recall value: 95.99399098647972 - task: type: BitextMining dataset: name: MTEB NTREXBitextMining (rus_Cyrl-vie_Latn) type: mteb/NTREX config: rus_Cyrl-vie_Latn split: test revision: ed9a4403ed4adbfaf4aab56d5b2709e9f6c3ba33 metrics: - type: accuracy value: 91.4371557336004 - type: f1 value: 88.98681355366382 - type: main_score value: 88.98681355366382 - type: precision value: 87.89183775663496 - type: recall value: 91.4371557336004 - task: type: BitextMining dataset: name: MTEB NTREXBitextMining (rus_Cyrl-zho_Hant) type: mteb/NTREX config: rus_Cyrl-zho_Hant split: test revision: ed9a4403ed4adbfaf4aab56d5b2709e9f6c3ba33 metrics: - type: accuracy value: 92.7891837756635 - type: f1 
value: 90.79047142141783 - type: main_score value: 90.79047142141783 - type: precision value: 89.86980470706058 - type: recall value: 92.7891837756635 - task: type: BitextMining dataset: name: MTEB NTREXBitextMining (rus_Cyrl-zul_Latn) type: mteb/NTREX config: rus_Cyrl-zul_Latn split: test revision: ed9a4403ed4adbfaf4aab56d5b2709e9f6c3ba33 metrics: - type: accuracy value: 87.43114672008012 - type: f1 value: 84.04618833011422 - type: main_score value: 84.04618833011422 - type: precision value: 82.52259341393041 - type: recall value: 87.43114672008012 - task: type: BitextMining dataset: name: MTEB NTREXBitextMining (slk_Latn-rus_Cyrl) type: mteb/NTREX config: slk_Latn-rus_Cyrl split: test revision: ed9a4403ed4adbfaf4aab56d5b2709e9f6c3ba33 metrics: - type: accuracy value: 95.34301452178268 - type: f1 value: 94.20392493502158 - type: main_score value: 94.20392493502158 - type: precision value: 93.67384409948257 - type: recall value: 95.34301452178268 - task: type: BitextMining dataset: name: MTEB NTREXBitextMining (slv_Latn-rus_Cyrl) type: mteb/NTREX config: slv_Latn-rus_Cyrl split: test revision: ed9a4403ed4adbfaf4aab56d5b2709e9f6c3ba33 metrics: - type: accuracy value: 92.23835753630446 - type: f1 value: 90.5061759305625 - type: main_score value: 90.5061759305625 - type: precision value: 89.74231188051918 - type: recall value: 92.23835753630446 - task: type: BitextMining dataset: name: MTEB NTREXBitextMining (spa_Latn-rus_Cyrl) type: mteb/NTREX config: spa_Latn-rus_Cyrl split: test revision: ed9a4403ed4adbfaf4aab56d5b2709e9f6c3ba33 metrics: - type: accuracy value: 96.54481722583876 - type: f1 value: 95.54665331330328 - type: main_score value: 95.54665331330328 - type: precision value: 95.06342847604739 - type: recall value: 96.54481722583876 - task: type: BitextMining dataset: name: MTEB NTREXBitextMining (srp_Cyrl-rus_Cyrl) type: mteb/NTREX config: srp_Cyrl-rus_Cyrl split: test revision: ed9a4403ed4adbfaf4aab56d5b2709e9f6c3ba33 metrics: - type: accuracy value: 83.62543815723585 - type: f1 value: 80.77095672699816 - type: main_score value: 80.77095672699816 - type: precision value: 79.74674313056886 - type: recall value: 83.62543815723585 - task: type: BitextMining dataset: name: MTEB NTREXBitextMining (srp_Latn-rus_Cyrl) type: mteb/NTREX config: srp_Latn-rus_Cyrl split: test revision: ed9a4403ed4adbfaf4aab56d5b2709e9f6c3ba33 metrics: - type: accuracy value: 94.44166249374061 - type: f1 value: 93.00733206591994 - type: main_score value: 93.00733206591994 - type: precision value: 92.37203026762366 - type: recall value: 94.44166249374061 - task: type: BitextMining dataset: name: MTEB NTREXBitextMining (swa_Latn-rus_Cyrl) type: mteb/NTREX config: swa_Latn-rus_Cyrl split: test revision: ed9a4403ed4adbfaf4aab56d5b2709e9f6c3ba33 metrics: - type: accuracy value: 90.23535302954431 - type: f1 value: 87.89596482636041 - type: main_score value: 87.89596482636041 - type: precision value: 86.87060227370694 - type: recall value: 90.23535302954431 - task: type: BitextMining dataset: name: MTEB NTREXBitextMining (swe_Latn-rus_Cyrl) type: mteb/NTREX config: swe_Latn-rus_Cyrl split: test revision: ed9a4403ed4adbfaf4aab56d5b2709e9f6c3ba33 metrics: - type: accuracy value: 95.44316474712068 - type: f1 value: 94.1896177599733 - type: main_score value: 94.1896177599733 - type: precision value: 93.61542313470206 - type: recall value: 95.44316474712068 - task: type: BitextMining dataset: name: MTEB NTREXBitextMining (tam_Taml-rus_Cyrl) type: mteb/NTREX config: tam_Taml-rus_Cyrl split: test revision: 
ed9a4403ed4adbfaf4aab56d5b2709e9f6c3ba33 metrics: - type: accuracy value: 89.68452679018529 - type: f1 value: 87.37341160650037 - type: main_score value: 87.37341160650037 - type: precision value: 86.38389402285247 - type: recall value: 89.68452679018529 - task: type: BitextMining dataset: name: MTEB NTREXBitextMining (tur_Latn-rus_Cyrl) type: mteb/NTREX config: tur_Latn-rus_Cyrl split: test revision: ed9a4403ed4adbfaf4aab56d5b2709e9f6c3ba33 metrics: - type: accuracy value: 93.89083625438157 - type: f1 value: 92.33892505424804 - type: main_score value: 92.33892505424804 - type: precision value: 91.63125640842216 - type: recall value: 93.89083625438157 - task: type: BitextMining dataset: name: MTEB NTREXBitextMining (ukr_Cyrl-rus_Cyrl) type: mteb/NTREX config: ukr_Cyrl-rus_Cyrl split: test revision: ed9a4403ed4adbfaf4aab56d5b2709e9f6c3ba33 metrics: - type: accuracy value: 96.14421632448673 - type: f1 value: 95.11028447433054 - type: main_score value: 95.11028447433054 - type: precision value: 94.62944416624937 - type: recall value: 96.14421632448673 - task: type: BitextMining dataset: name: MTEB NTREXBitextMining (vie_Latn-rus_Cyrl) type: mteb/NTREX config: vie_Latn-rus_Cyrl split: test revision: ed9a4403ed4adbfaf4aab56d5b2709e9f6c3ba33 metrics: - type: accuracy value: 93.79068602904357 - type: f1 value: 92.14989150392256 - type: main_score value: 92.14989150392256 - type: precision value: 91.39292271740945 - type: recall value: 93.79068602904357 - task: type: BitextMining dataset: name: MTEB NTREXBitextMining (zho_Hant-rus_Cyrl) type: mteb/NTREX config: zho_Hant-rus_Cyrl split: test revision: ed9a4403ed4adbfaf4aab56d5b2709e9f6c3ba33 metrics: - type: accuracy value: 89.13370055082625 - type: f1 value: 86.51514618639217 - type: main_score value: 86.51514618639217 - type: precision value: 85.383920035898 - type: recall value: 89.13370055082625 - task: type: BitextMining dataset: name: MTEB NTREXBitextMining (zul_Latn-rus_Cyrl) type: mteb/NTREX config: zul_Latn-rus_Cyrl split: test revision: ed9a4403ed4adbfaf4aab56d5b2709e9f6c3ba33 metrics: - type: accuracy value: 81.17175763645467 - type: f1 value: 77.72331766047338 - type: main_score value: 77.72331766047338 - type: precision value: 76.24629555848075 - type: recall value: 81.17175763645467 - task: type: PairClassification dataset: name: MTEB OpusparcusPC (ru) type: GEM/opusparcus config: ru split: test.full revision: 9e9b1f8ef51616073f47f306f7f47dd91663f86a metrics: - type: cosine_accuracy value: 73.09136420525657 - type: cosine_accuracy_threshold value: 87.70400881767273 - type: cosine_ap value: 86.51938550599533 - type: cosine_f1 value: 80.84358523725834 - type: cosine_f1_threshold value: 86.90648078918457 - type: cosine_precision value: 73.24840764331209 - type: cosine_recall value: 90.19607843137256 - type: dot_accuracy value: 73.09136420525657 - type: dot_accuracy_threshold value: 87.7040147781372 - type: dot_ap value: 86.51934769946833 - type: dot_f1 value: 80.84358523725834 - type: dot_f1_threshold value: 86.90648078918457 - type: dot_precision value: 73.24840764331209 - type: dot_recall value: 90.19607843137256 - type: euclidean_accuracy value: 73.09136420525657 - type: euclidean_accuracy_threshold value: 49.590304493904114 - type: euclidean_ap value: 86.51934769946833 - type: euclidean_f1 value: 80.84358523725834 - type: euclidean_f1_threshold value: 51.173269748687744 - type: euclidean_precision value: 73.24840764331209 - type: euclidean_recall value: 90.19607843137256 - type: main_score value: 86.51976811057995 - type: 
manhattan_accuracy value: 73.40425531914893 - type: manhattan_accuracy_threshold value: 757.8278541564941 - type: manhattan_ap value: 86.51976811057995 - type: manhattan_f1 value: 80.92898615453328 - type: manhattan_f1_threshold value: 778.3821105957031 - type: manhattan_precision value: 74.32321575061526 - type: manhattan_recall value: 88.8235294117647 - type: max_ap value: 86.51976811057995 - type: max_f1 value: 80.92898615453328 - type: max_precision value: 74.32321575061526 - type: max_recall value: 90.19607843137256 - type: similarity_accuracy value: 73.09136420525657 - type: similarity_accuracy_threshold value: 87.70400881767273 - type: similarity_ap value: 86.51938550599533 - type: similarity_f1 value: 80.84358523725834 - type: similarity_f1_threshold value: 86.90648078918457 - type: similarity_precision value: 73.24840764331209 - type: similarity_recall value: 90.19607843137256 - task: type: Retrieval dataset: name: MTEB PublicHealthQA (russian) type: xhluca/publichealth-qa config: russian split: test revision: main metrics: - type: main_score value: 79.303 - type: map_at_1 value: 61.538000000000004 - type: map_at_10 value: 74.449 - type: map_at_100 value: 74.687 - type: map_at_1000 value: 74.687 - type: map_at_20 value: 74.589 - type: map_at_3 value: 73.333 - type: map_at_5 value: 74.256 - type: mrr_at_1 value: 61.53846153846154 - type: mrr_at_10 value: 74.44871794871794 - type: mrr_at_100 value: 74.68730304304074 - type: mrr_at_1000 value: 74.68730304304074 - type: mrr_at_20 value: 74.58857808857809 - type: mrr_at_3 value: 73.33333333333333 - type: mrr_at_5 value: 74.25641025641025 - type: nauc_map_at_1000_diff1 value: 61.375798048778506 - type: nauc_map_at_1000_max value: 51.37093181241067 - type: nauc_map_at_1000_std value: 41.735794471409015 - type: nauc_map_at_100_diff1 value: 61.375798048778506 - type: nauc_map_at_100_max value: 51.37093181241067 - type: nauc_map_at_100_std value: 41.735794471409015 - type: nauc_map_at_10_diff1 value: 61.12796039757213 - type: nauc_map_at_10_max value: 51.843445267118014 - type: nauc_map_at_10_std value: 42.243121474939365 - type: nauc_map_at_1_diff1 value: 66.39100974909151 - type: nauc_map_at_1_max value: 44.77165601342703 - type: nauc_map_at_1_std value: 32.38542979413408 - type: nauc_map_at_20_diff1 value: 61.16611123434347 - type: nauc_map_at_20_max value: 51.52605092407306 - type: nauc_map_at_20_std value: 41.94787773313971 - type: nauc_map_at_3_diff1 value: 61.40157474408937 - type: nauc_map_at_3_max value: 51.47230077853947 - type: nauc_map_at_3_std value: 42.63540269440141 - type: nauc_map_at_5_diff1 value: 61.07631147583098 - type: nauc_map_at_5_max value: 52.02626939341523 - type: nauc_map_at_5_std value: 42.511607332150334 - type: nauc_mrr_at_1000_diff1 value: 61.375798048778506 - type: nauc_mrr_at_1000_max value: 51.37093181241067 - type: nauc_mrr_at_1000_std value: 41.735794471409015 - type: nauc_mrr_at_100_diff1 value: 61.375798048778506 - type: nauc_mrr_at_100_max value: 51.37093181241067 - type: nauc_mrr_at_100_std value: 41.735794471409015 - type: nauc_mrr_at_10_diff1 value: 61.12796039757213 - type: nauc_mrr_at_10_max value: 51.843445267118014 - type: nauc_mrr_at_10_std value: 42.243121474939365 - type: nauc_mrr_at_1_diff1 value: 66.39100974909151 - type: nauc_mrr_at_1_max value: 44.77165601342703 - type: nauc_mrr_at_1_std value: 32.38542979413408 - type: nauc_mrr_at_20_diff1 value: 61.16611123434347 - type: nauc_mrr_at_20_max value: 51.52605092407306 - type: nauc_mrr_at_20_std value: 41.94787773313971 - type: 
nauc_mrr_at_3_diff1 value: 61.40157474408937 - type: nauc_mrr_at_3_max value: 51.47230077853947 - type: nauc_mrr_at_3_std value: 42.63540269440141 - type: nauc_mrr_at_5_diff1 value: 61.07631147583098 - type: nauc_mrr_at_5_max value: 52.02626939341523 - type: nauc_mrr_at_5_std value: 42.511607332150334 - type: nauc_ndcg_at_1000_diff1 value: 60.54821630436157 - type: nauc_ndcg_at_1000_max value: 52.584328363863634 - type: nauc_ndcg_at_1000_std value: 43.306961101645946 - type: nauc_ndcg_at_100_diff1 value: 60.54821630436157 - type: nauc_ndcg_at_100_max value: 52.584328363863634 - type: nauc_ndcg_at_100_std value: 43.306961101645946 - type: nauc_ndcg_at_10_diff1 value: 58.800340278109886 - type: nauc_ndcg_at_10_max value: 55.31050771670664 - type: nauc_ndcg_at_10_std value: 46.40931672942848 - type: nauc_ndcg_at_1_diff1 value: 66.39100974909151 - type: nauc_ndcg_at_1_max value: 44.77165601342703 - type: nauc_ndcg_at_1_std value: 32.38542979413408 - type: nauc_ndcg_at_20_diff1 value: 58.88690479697946 - type: nauc_ndcg_at_20_max value: 54.19269661177923 - type: nauc_ndcg_at_20_std value: 45.39305589413174 - type: nauc_ndcg_at_3_diff1 value: 59.61866351451574 - type: nauc_ndcg_at_3_max value: 54.23992718744033 - type: nauc_ndcg_at_3_std value: 46.997379274101 - type: nauc_ndcg_at_5_diff1 value: 58.70739588066225 - type: nauc_ndcg_at_5_max value: 55.76766902539152 - type: nauc_ndcg_at_5_std value: 47.10553115762958 - type: nauc_precision_at_1000_diff1 value: 100.0 - type: nauc_precision_at_1000_max value: 100.0 - type: nauc_precision_at_1000_std value: 100.0 - type: nauc_precision_at_100_diff1 value: .nan - type: nauc_precision_at_100_max value: .nan - type: nauc_precision_at_100_std value: .nan - type: nauc_precision_at_10_diff1 value: 35.72622112397501 - type: nauc_precision_at_10_max value: 89.84297108673948 - type: nauc_precision_at_10_std value: 86.60269192422707 - type: nauc_precision_at_1_diff1 value: 66.39100974909151 - type: nauc_precision_at_1_max value: 44.77165601342703 - type: nauc_precision_at_1_std value: 32.38542979413408 - type: nauc_precision_at_20_diff1 value: 29.188449183726433 - type: nauc_precision_at_20_max value: 86.45729478231968 - type: nauc_precision_at_20_std value: 86.45729478231968 - type: nauc_precision_at_3_diff1 value: 50.294126629236224 - type: nauc_precision_at_3_max value: 68.98223127174579 - type: nauc_precision_at_3_std value: 70.31195520376356 - type: nauc_precision_at_5_diff1 value: 39.648884288124385 - type: nauc_precision_at_5_max value: 86.3409770687935 - type: nauc_precision_at_5_std value: 83.74875373878356 - type: nauc_recall_at_1000_diff1 value: .nan - type: nauc_recall_at_1000_max value: .nan - type: nauc_recall_at_1000_std value: .nan - type: nauc_recall_at_100_diff1 value: .nan - type: nauc_recall_at_100_max value: .nan - type: nauc_recall_at_100_std value: .nan - type: nauc_recall_at_10_diff1 value: 35.72622112397516 - type: nauc_recall_at_10_max value: 89.84297108673968 - type: nauc_recall_at_10_std value: 86.60269192422749 - type: nauc_recall_at_1_diff1 value: 66.39100974909151 - type: nauc_recall_at_1_max value: 44.77165601342703 - type: nauc_recall_at_1_std value: 32.38542979413408 - type: nauc_recall_at_20_diff1 value: 29.188449183726323 - type: nauc_recall_at_20_max value: 86.45729478231985 - type: nauc_recall_at_20_std value: 86.45729478231985 - type: nauc_recall_at_3_diff1 value: 50.29412662923603 - type: nauc_recall_at_3_max value: 68.98223127174562 - type: nauc_recall_at_3_std value: 70.31195520376346 - type: nauc_recall_at_5_diff1 
value: 39.64888428812445 - type: nauc_recall_at_5_max value: 86.34097706879359 - type: nauc_recall_at_5_std value: 83.74875373878366 - type: ndcg_at_1 value: 61.538000000000004 - type: ndcg_at_10 value: 79.303 - type: ndcg_at_100 value: 80.557 - type: ndcg_at_1000 value: 80.557 - type: ndcg_at_20 value: 79.732 - type: ndcg_at_3 value: 77.033 - type: ndcg_at_5 value: 78.818 - type: precision_at_1 value: 61.538000000000004 - type: precision_at_10 value: 9.385 - type: precision_at_100 value: 1.0 - type: precision_at_1000 value: 0.1 - type: precision_at_20 value: 4.769 - type: precision_at_3 value: 29.231 - type: precision_at_5 value: 18.462 - type: recall_at_1 value: 61.538000000000004 - type: recall_at_10 value: 93.84599999999999 - type: recall_at_100 value: 100.0 - type: recall_at_1000 value: 100.0 - type: recall_at_20 value: 95.38499999999999 - type: recall_at_3 value: 87.69200000000001 - type: recall_at_5 value: 92.308 - task: type: STS dataset: name: MTEB RUParaPhraserSTS (default) type: merionum/ru_paraphraser config: default split: test revision: 43265056790b8f7c59e0139acb4be0a8dad2c8f4 metrics: - type: cosine_pearson value: 64.73554596215753 - type: cosine_spearman value: 70.45849652271855 - type: euclidean_pearson value: 68.08069844834267 - type: euclidean_spearman value: 70.45854872959124 - type: main_score value: 70.45849652271855 - type: manhattan_pearson value: 67.88325986519624 - type: manhattan_spearman value: 70.21131896834542 - type: pearson value: 64.73554596215753 - type: spearman value: 70.45849652271855 - task: type: Retrieval dataset: name: MTEB RiaNewsRetrieval (default) type: ai-forever/ria-news-retrieval config: default split: test revision: 82374b0bbacda6114f39ff9c5b925fa1512ca5d7 metrics: - type: main_score value: 70.00999999999999 - type: map_at_1 value: 55.97 - type: map_at_10 value: 65.59700000000001 - type: map_at_100 value: 66.057 - type: map_at_1000 value: 66.074 - type: map_at_20 value: 65.892 - type: map_at_3 value: 63.74999999999999 - type: map_at_5 value: 64.84299999999999 - type: mrr_at_1 value: 55.88999999999999 - type: mrr_at_10 value: 65.55873015872977 - type: mrr_at_100 value: 66.01891495129716 - type: mrr_at_1000 value: 66.03538391493299 - type: mrr_at_20 value: 65.85351193431555 - type: mrr_at_3 value: 63.7133333333329 - type: mrr_at_5 value: 64.80483333333268 - type: nauc_map_at_1000_diff1 value: 65.95332946436318 - type: nauc_map_at_1000_max value: 28.21204156197811 - type: nauc_map_at_1000_std value: -13.139245767083743 - type: nauc_map_at_100_diff1 value: 65.94763105024367 - type: nauc_map_at_100_max value: 28.212832170078205 - type: nauc_map_at_100_std value: -13.131425849370665 - type: nauc_map_at_10_diff1 value: 65.88455089448388 - type: nauc_map_at_10_max value: 28.13555838776792 - type: nauc_map_at_10_std value: -13.326989827081023 - type: nauc_map_at_1_diff1 value: 69.31275711813979 - type: nauc_map_at_1_max value: 26.386708520283758 - type: nauc_map_at_1_std value: -14.434616447245464 - type: nauc_map_at_20_diff1 value: 65.91227032605677 - type: nauc_map_at_20_max value: 28.20538655600886 - type: nauc_map_at_20_std value: -13.191148834410274 - type: nauc_map_at_3_diff1 value: 66.0051677952641 - type: nauc_map_at_3_max value: 28.25443420019022 - type: nauc_map_at_3_std value: -13.893284109029558 - type: nauc_map_at_5_diff1 value: 65.89784348297898 - type: nauc_map_at_5_max value: 28.26449765184183 - type: nauc_map_at_5_std value: -13.506692912805008 - type: nauc_mrr_at_1000_diff1 value: 66.06599513750889 - type: nauc_mrr_at_1000_max value: 
28.191556650722287 - type: nauc_mrr_at_1000_std value: -13.098487982930276 - type: nauc_mrr_at_100_diff1 value: 66.0602307977725 - type: nauc_mrr_at_100_max value: 28.19235936624514 - type: nauc_mrr_at_100_std value: -13.09069677716269 - type: nauc_mrr_at_10_diff1 value: 65.99546819079403 - type: nauc_mrr_at_10_max value: 28.11556170120022 - type: nauc_mrr_at_10_std value: -13.286711073897553 - type: nauc_mrr_at_1_diff1 value: 69.49541040517995 - type: nauc_mrr_at_1_max value: 26.354622707276153 - type: nauc_mrr_at_1_std value: -14.358839778104695 - type: nauc_mrr_at_20_diff1 value: 66.02427154257936 - type: nauc_mrr_at_20_max value: 28.18509383563462 - type: nauc_mrr_at_20_std value: -13.150543398429 - type: nauc_mrr_at_3_diff1 value: 66.11258119082618 - type: nauc_mrr_at_3_max value: 28.239510722224004 - type: nauc_mrr_at_3_std value: -13.857249251136269 - type: nauc_mrr_at_5_diff1 value: 66.00633786765626 - type: nauc_mrr_at_5_max value: 28.244875152193032 - type: nauc_mrr_at_5_std value: -13.467206028704434 - type: nauc_ndcg_at_1000_diff1 value: 65.02876183314446 - type: nauc_ndcg_at_1000_max value: 29.109368390197194 - type: nauc_ndcg_at_1000_std value: -11.56514359821697 - type: nauc_ndcg_at_100_diff1 value: 64.85837726893713 - type: nauc_ndcg_at_100_max value: 29.19990133137256 - type: nauc_ndcg_at_100_std value: -11.17450348161257 - type: nauc_ndcg_at_10_diff1 value: 64.53842705024796 - type: nauc_ndcg_at_10_max value: 28.748734006088526 - type: nauc_ndcg_at_10_std value: -12.331395505957063 - type: nauc_ndcg_at_1_diff1 value: 69.31275711813979 - type: nauc_ndcg_at_1_max value: 26.386708520283758 - type: nauc_ndcg_at_1_std value: -14.434616447245464 - type: nauc_ndcg_at_20_diff1 value: 64.59017606740504 - type: nauc_ndcg_at_20_max value: 29.047332048898017 - type: nauc_ndcg_at_20_std value: -11.746548770195954 - type: nauc_ndcg_at_3_diff1 value: 64.87900935713822 - type: nauc_ndcg_at_3_max value: 28.953157521204403 - type: nauc_ndcg_at_3_std value: -13.639947228880942 - type: nauc_ndcg_at_5_diff1 value: 64.61466953479034 - type: nauc_ndcg_at_5_max value: 29.01899321868392 - type: nauc_ndcg_at_5_std value: -12.85356404799802 - type: nauc_precision_at_1000_diff1 value: 48.85481417002382 - type: nauc_precision_at_1000_max value: 57.129837326696375 - type: nauc_precision_at_1000_std value: 37.889524999906435 - type: nauc_precision_at_100_diff1 value: 53.374672326788264 - type: nauc_precision_at_100_max value: 43.819333062207974 - type: nauc_precision_at_100_std value: 21.387064885769362 - type: nauc_precision_at_10_diff1 value: 57.66571169774445 - type: nauc_precision_at_10_max value: 31.779694837242033 - type: nauc_precision_at_10_std value: -6.6248399147180255 - type: nauc_precision_at_1_diff1 value: 69.31275711813979 - type: nauc_precision_at_1_max value: 26.386708520283758 - type: nauc_precision_at_1_std value: -14.434616447245464 - type: nauc_precision_at_20_diff1 value: 55.93570036001682 - type: nauc_precision_at_20_max value: 34.98640173388743 - type: nauc_precision_at_20_std value: -0.36518465159326174 - type: nauc_precision_at_3_diff1 value: 60.94100093991508 - type: nauc_precision_at_3_max value: 31.422239034357673 - type: nauc_precision_at_3_std value: -12.72576556537896 - type: nauc_precision_at_5_diff1 value: 59.450505195434054 - type: nauc_precision_at_5_max value: 32.07638712418377 - type: nauc_precision_at_5_std value: -10.024459103498598 - type: nauc_recall_at_1000_diff1 value: 48.854814170024184 - type: nauc_recall_at_1000_max value: 57.129837326697164 - type: 
nauc_recall_at_1000_std value: 37.88952499990672 - type: nauc_recall_at_100_diff1 value: 53.37467232678822 - type: nauc_recall_at_100_max value: 43.8193330622079 - type: nauc_recall_at_100_std value: 21.387064885769398 - type: nauc_recall_at_10_diff1 value: 57.66571169774447 - type: nauc_recall_at_10_max value: 31.779694837242133 - type: nauc_recall_at_10_std value: -6.62483991471789 - type: nauc_recall_at_1_diff1 value: 69.31275711813979 - type: nauc_recall_at_1_max value: 26.386708520283758 - type: nauc_recall_at_1_std value: -14.434616447245464 - type: nauc_recall_at_20_diff1 value: 55.93570036001682 - type: nauc_recall_at_20_max value: 34.986401733887554 - type: nauc_recall_at_20_std value: -0.3651846515931506 - type: nauc_recall_at_3_diff1 value: 60.94100093991499 - type: nauc_recall_at_3_max value: 31.422239034357606 - type: nauc_recall_at_3_std value: -12.725765565378966 - type: nauc_recall_at_5_diff1 value: 59.450505195434125 - type: nauc_recall_at_5_max value: 32.07638712418387 - type: nauc_recall_at_5_std value: -10.024459103498472 - type: ndcg_at_1 value: 55.97 - type: ndcg_at_10 value: 70.00999999999999 - type: ndcg_at_100 value: 72.20100000000001 - type: ndcg_at_1000 value: 72.65599999999999 - type: ndcg_at_20 value: 71.068 - type: ndcg_at_3 value: 66.228 - type: ndcg_at_5 value: 68.191 - type: precision_at_1 value: 55.97 - type: precision_at_10 value: 8.373999999999999 - type: precision_at_100 value: 0.9390000000000001 - type: precision_at_1000 value: 0.097 - type: precision_at_20 value: 4.3950000000000005 - type: precision_at_3 value: 24.46 - type: precision_at_5 value: 15.626000000000001 - type: recall_at_1 value: 55.97 - type: recall_at_10 value: 83.74000000000001 - type: recall_at_100 value: 93.87 - type: recall_at_1000 value: 97.49 - type: recall_at_20 value: 87.89 - type: recall_at_3 value: 73.38 - type: recall_at_5 value: 78.13 - task: type: Reranking dataset: name: MTEB RuBQReranking (default) type: ai-forever/rubq-reranking config: default split: test revision: 2e96b8f098fa4b0950fc58eacadeb31c0d0c7fa2 metrics: - type: main_score value: 71.44929565043827 - type: map value: 71.44929565043827 - type: mrr value: 77.78391820945014 - type: nAUC_map_diff1 value: 38.140840668080244 - type: nAUC_map_max value: 27.54328688105381 - type: nAUC_map_std value: 16.81572082284672 - type: nAUC_mrr_diff1 value: 44.51350415961509 - type: nAUC_mrr_max value: 36.491182016669754 - type: nAUC_mrr_std value: 22.47139593052269 - task: type: Retrieval dataset: name: MTEB RuBQRetrieval (default) type: ai-forever/rubq-retrieval config: default split: test revision: e19b6ffa60b3bc248e0b41f4cc37c26a55c2a67b metrics: - type: main_score value: 68.529 - type: map_at_1 value: 42.529 - type: map_at_10 value: 60.864 - type: map_at_100 value: 61.868 - type: map_at_1000 value: 61.907000000000004 - type: map_at_20 value: 61.596 - type: map_at_3 value: 55.701 - type: map_at_5 value: 58.78 - type: mrr_at_1 value: 60.57919621749409 - type: mrr_at_10 value: 70.55614188149649 - type: mrr_at_100 value: 70.88383816664494 - type: mrr_at_1000 value: 70.89719252668833 - type: mrr_at_20 value: 70.79839750105347 - type: mrr_at_3 value: 68.4594168636722 - type: mrr_at_5 value: 69.67100078802214 - type: nauc_map_at_1000_diff1 value: 40.67438785660885 - type: nauc_map_at_1000_max value: 32.79981738507424 - type: nauc_map_at_1000_std value: -6.873402600044831 - type: nauc_map_at_100_diff1 value: 40.65643664443284 - type: nauc_map_at_100_max value: 32.81594799919249 - type: nauc_map_at_100_std value: -6.8473246794498195 - 
type: nauc_map_at_10_diff1 value: 40.39048268484908 - type: nauc_map_at_10_max value: 32.403242161479525 - type: nauc_map_at_10_std value: -7.344413799841244 - type: nauc_map_at_1_diff1 value: 44.36306892906905 - type: nauc_map_at_1_max value: 25.61348630699028 - type: nauc_map_at_1_std value: -8.713074613333902 - type: nauc_map_at_20_diff1 value: 40.530326570124615 - type: nauc_map_at_20_max value: 32.74028319323205 - type: nauc_map_at_20_std value: -7.008180779820569 - type: nauc_map_at_3_diff1 value: 40.764924859364044 - type: nauc_map_at_3_max value: 29.809671682025336 - type: nauc_map_at_3_std value: -9.205620202725564 - type: nauc_map_at_5_diff1 value: 40.88599496021476 - type: nauc_map_at_5_max value: 32.1701894666848 - type: nauc_map_at_5_std value: -7.801251849010623 - type: nauc_mrr_at_1000_diff1 value: 48.64181373540728 - type: nauc_mrr_at_1000_max value: 40.136947990653546 - type: nauc_mrr_at_1000_std value: -7.250260497468805 - type: nauc_mrr_at_100_diff1 value: 48.63349902496212 - type: nauc_mrr_at_100_max value: 40.14510559704008 - type: nauc_mrr_at_100_std value: -7.228702374801103 - type: nauc_mrr_at_10_diff1 value: 48.58580560194813 - type: nauc_mrr_at_10_max value: 40.15075599433366 - type: nauc_mrr_at_10_std value: -7.267928771548688 - type: nauc_mrr_at_1_diff1 value: 51.47535097164919 - type: nauc_mrr_at_1_max value: 38.23579750430856 - type: nauc_mrr_at_1_std value: -9.187785187137633 - type: nauc_mrr_at_20_diff1 value: 48.58688378336222 - type: nauc_mrr_at_20_max value: 40.13408744088299 - type: nauc_mrr_at_20_std value: -7.283132775160146 - type: nauc_mrr_at_3_diff1 value: 48.66833005454742 - type: nauc_mrr_at_3_max value: 40.07987333638038 - type: nauc_mrr_at_3_std value: -7.738819947521418 - type: nauc_mrr_at_5_diff1 value: 48.76536305941537 - type: nauc_mrr_at_5_max value: 40.381929739522185 - type: nauc_mrr_at_5_std value: -7.592858318378928 - type: nauc_ndcg_at_1000_diff1 value: 41.67304442004693 - type: nauc_ndcg_at_1000_max value: 35.84126926253235 - type: nauc_ndcg_at_1000_std value: -4.78971011604655 - type: nauc_ndcg_at_100_diff1 value: 41.16918850185783 - type: nauc_ndcg_at_100_max value: 36.082461962326505 - type: nauc_ndcg_at_100_std value: -4.092442251697269 - type: nauc_ndcg_at_10_diff1 value: 40.300065598615205 - type: nauc_ndcg_at_10_max value: 34.87866296788365 - type: nauc_ndcg_at_10_std value: -5.866529277842453 - type: nauc_ndcg_at_1_diff1 value: 51.74612915209495 - type: nauc_ndcg_at_1_max value: 37.71907067970078 - type: nauc_ndcg_at_1_std value: -9.064124266098696 - type: nauc_ndcg_at_20_diff1 value: 40.493949850214584 - type: nauc_ndcg_at_20_max value: 35.69331503650286 - type: nauc_ndcg_at_20_std value: -4.995310342975443 - type: nauc_ndcg_at_3_diff1 value: 41.269443212112364 - type: nauc_ndcg_at_3_max value: 32.572844460953334 - type: nauc_ndcg_at_3_std value: -9.063015396458791 - type: nauc_ndcg_at_5_diff1 value: 41.37039652522888 - type: nauc_ndcg_at_5_max value: 34.67416011393571 - type: nauc_ndcg_at_5_std value: -7.106845569862319 - type: nauc_precision_at_1000_diff1 value: -9.571769961090155 - type: nauc_precision_at_1000_max value: 5.574782583417188 - type: nauc_precision_at_1000_std value: 7.28333847923847 - type: nauc_precision_at_100_diff1 value: -7.7405012003383735 - type: nauc_precision_at_100_max value: 9.67745355070353 - type: nauc_precision_at_100_std value: 9.327890294080992 - type: nauc_precision_at_10_diff1 value: -1.006879647532931 - type: nauc_precision_at_10_max value: 15.899825481231064 - type: nauc_precision_at_10_std 
value: 4.2284084852153105 - type: nauc_precision_at_1_diff1 value: 51.74612915209495 - type: nauc_precision_at_1_max value: 37.71907067970078 - type: nauc_precision_at_1_std value: -9.064124266098696 - type: nauc_precision_at_20_diff1 value: -4.982301544401409 - type: nauc_precision_at_20_max value: 13.241674471380568 - type: nauc_precision_at_20_std value: 7.052280133821539 - type: nauc_precision_at_3_diff1 value: 15.442614376387374 - type: nauc_precision_at_3_max value: 25.12695418083 - type: nauc_precision_at_3_std value: -3.1150066697920638 - type: nauc_precision_at_5_diff1 value: 8.381026072692444 - type: nauc_precision_at_5_max value: 22.839056540604822 - type: nauc_precision_at_5_std value: 1.5126905486524331 - type: nauc_recall_at_1000_diff1 value: -0.8869709920433502 - type: nauc_recall_at_1000_max value: 45.092324433377264 - type: nauc_recall_at_1000_std value: 62.21264093315108 - type: nauc_recall_at_100_diff1 value: 16.036715011075714 - type: nauc_recall_at_100_max value: 39.79963411771158 - type: nauc_recall_at_100_std value: 28.41850069503361 - type: nauc_recall_at_10_diff1 value: 25.189622794479998 - type: nauc_recall_at_10_max value: 30.82355277039427 - type: nauc_recall_at_10_std value: 0.0964544736531047 - type: nauc_recall_at_1_diff1 value: 44.36306892906905 - type: nauc_recall_at_1_max value: 25.61348630699028 - type: nauc_recall_at_1_std value: -8.713074613333902 - type: nauc_recall_at_20_diff1 value: 20.43424504746087 - type: nauc_recall_at_20_max value: 33.96010554649377 - type: nauc_recall_at_20_std value: 6.900984030301936 - type: nauc_recall_at_3_diff1 value: 33.86531858793492 - type: nauc_recall_at_3_max value: 27.725692256711188 - type: nauc_recall_at_3_std value: -8.533124289305709 - type: nauc_recall_at_5_diff1 value: 32.006964557701686 - type: nauc_recall_at_5_max value: 31.493370659289806 - type: nauc_recall_at_5_std value: -4.8639793547793255 - type: ndcg_at_1 value: 60.461 - type: ndcg_at_10 value: 68.529 - type: ndcg_at_100 value: 71.664 - type: ndcg_at_1000 value: 72.396 - type: ndcg_at_20 value: 70.344 - type: ndcg_at_3 value: 61.550000000000004 - type: ndcg_at_5 value: 64.948 - type: precision_at_1 value: 60.461 - type: precision_at_10 value: 13.28 - type: precision_at_100 value: 1.555 - type: precision_at_1000 value: 0.164 - type: precision_at_20 value: 7.216 - type: precision_at_3 value: 33.077 - type: precision_at_5 value: 23.014000000000003 - type: recall_at_1 value: 42.529 - type: recall_at_10 value: 81.169 - type: recall_at_100 value: 93.154 - type: recall_at_1000 value: 98.18299999999999 - type: recall_at_20 value: 87.132 - type: recall_at_3 value: 63.905 - type: recall_at_5 value: 71.967 - task: type: Classification dataset: name: MTEB RuReviewsClassification (default) type: ai-forever/ru-reviews-classification config: default split: test revision: f6d2c31f4dc6b88f468552750bfec05b4b41b05a metrics: - type: accuracy value: 61.17675781250001 - type: f1 value: 60.354535346041374 - type: f1_weighted value: 60.35437313166116 - type: main_score value: 61.17675781250001 - task: type: STS dataset: name: MTEB RuSTSBenchmarkSTS (default) type: ai-forever/ru-stsbenchmark-sts config: default split: test revision: 7cf24f325c6da6195df55bef3d86b5e0616f3018 metrics: - type: cosine_pearson value: 78.1301041727274 - type: cosine_spearman value: 78.08238025421747 - type: euclidean_pearson value: 77.35224254583635 - type: euclidean_spearman value: 78.08235336582496 - type: main_score value: 78.08238025421747 - type: manhattan_pearson value: 77.24138550052075 - 
type: manhattan_spearman value: 77.98199107904142 - type: pearson value: 78.1301041727274 - type: spearman value: 78.08238025421747 - task: type: Classification dataset: name: MTEB RuSciBenchGRNTIClassification (default) type: ai-forever/ru-scibench-grnti-classification config: default split: test revision: 673a610d6d3dd91a547a0d57ae1b56f37ebbf6a1 metrics: - type: accuracy value: 54.990234375 - type: f1 value: 53.537019057131374 - type: f1_weighted value: 53.552745354520766 - type: main_score value: 54.990234375 - task: type: Clustering dataset: name: MTEB RuSciBenchGRNTIClusteringP2P (default) type: ai-forever/ru-scibench-grnti-classification config: default split: test revision: 673a610d6d3dd91a547a0d57ae1b56f37ebbf6a1 metrics: - type: main_score value: 50.775228895355106 - type: v_measure value: 50.775228895355106 - type: v_measure_std value: 0.9533571150165796 - task: type: Classification dataset: name: MTEB RuSciBenchOECDClassification (default) type: ai-forever/ru-scibench-oecd-classification config: default split: test revision: 26c88e99dcaba32bb45d0e1bfc21902337f6d471 metrics: - type: accuracy value: 41.71875 - type: f1 value: 39.289100975858304 - type: f1_weighted value: 39.29257829217775 - type: main_score value: 41.71875 - task: type: Clustering dataset: name: MTEB RuSciBenchOECDClusteringP2P (default) type: ai-forever/ru-scibench-oecd-classification config: default split: test revision: 26c88e99dcaba32bb45d0e1bfc21902337f6d471 metrics: - type: main_score value: 45.10904808834516 - type: v_measure value: 45.10904808834516 - type: v_measure_std value: 1.0572643410157534 - task: type: Classification dataset: name: MTEB SIB200Classification (rus_Cyrl) type: mteb/sib200 config: rus_Cyrl split: test revision: a74d7350ea12af010cfb1c21e34f1f81fd2e615b metrics: - type: accuracy value: 66.36363636363637 - type: f1 value: 64.6940336621617 - type: f1_weighted value: 66.43317771876966 - type: main_score value: 66.36363636363637 - task: type: Clustering dataset: name: MTEB SIB200ClusteringS2S (rus_Cyrl) type: mteb/sib200 config: rus_Cyrl split: test revision: a74d7350ea12af010cfb1c21e34f1f81fd2e615b metrics: - type: main_score value: 33.99178497314711 - type: v_measure value: 33.99178497314711 - type: v_measure_std value: 4.036337464043786 - task: type: STS dataset: name: MTEB STS22.v2 (ru) type: mteb/sts22-crosslingual-sts config: ru split: test revision: d31f33a128469b20e357535c39b82fb3c3f6f2bd metrics: - type: cosine_pearson value: 50.724322379215934 - type: cosine_spearman value: 59.90449732164651 - type: euclidean_pearson value: 50.227545226784024 - type: euclidean_spearman value: 59.898906527601085 - type: main_score value: 59.90449732164651 - type: manhattan_pearson value: 50.21762139819405 - type: manhattan_spearman value: 59.761039813759 - type: pearson value: 50.724322379215934 - type: spearman value: 59.90449732164651 - task: type: STS dataset: name: MTEB STSBenchmarkMultilingualSTS (ru) type: mteb/stsb_multi_mt config: ru split: dev revision: 29afa2569dcedaaa2fe6a3dcfebab33d28b82e8c metrics: - type: cosine_pearson value: 78.43928769569945 - type: cosine_spearman value: 78.23961768018884 - type: euclidean_pearson value: 77.4718694027985 - type: euclidean_spearman value: 78.23887044760475 - type: main_score value: 78.23961768018884 - type: manhattan_pearson value: 77.34517128089547 - type: manhattan_spearman value: 78.1146477340426 - type: pearson value: 78.43928769569945 - type: spearman value: 78.23961768018884 - task: type: MultilabelClassification dataset: name: MTEB 
SensitiveTopicsClassification (default) type: ai-forever/sensitive-topics-classification config: default split: test revision: 416b34a802308eac30e4192afc0ff99bb8dcc7f2 metrics: - type: accuracy value: 22.8125 - type: f1 value: 17.31969589593409 - type: lrap value: 33.82412380642287 - type: main_score value: 22.8125 - task: type: PairClassification dataset: name: MTEB TERRa (default) type: ai-forever/terra-pairclassification config: default split: dev revision: 7b58f24536063837d644aab9a023c62199b2a612 metrics: - type: cosine_accuracy value: 57.32899022801303 - type: cosine_accuracy_threshold value: 85.32201051712036 - type: cosine_ap value: 55.14264553720072 - type: cosine_f1 value: 66.83544303797468 - type: cosine_f1_threshold value: 85.32201051712036 - type: cosine_precision value: 54.54545454545454 - type: cosine_recall value: 86.27450980392157 - type: dot_accuracy value: 57.32899022801303 - type: dot_accuracy_threshold value: 85.32201051712036 - type: dot_ap value: 55.14264553720072 - type: dot_f1 value: 66.83544303797468 - type: dot_f1_threshold value: 85.32201051712036 - type: dot_precision value: 54.54545454545454 - type: dot_recall value: 86.27450980392157 - type: euclidean_accuracy value: 57.32899022801303 - type: euclidean_accuracy_threshold value: 54.18117046356201 - type: euclidean_ap value: 55.14264553720072 - type: euclidean_f1 value: 66.83544303797468 - type: euclidean_f1_threshold value: 54.18117046356201 - type: euclidean_precision value: 54.54545454545454 - type: euclidean_recall value: 86.27450980392157 - type: main_score value: 55.14264553720072 - type: manhattan_accuracy value: 57.32899022801303 - type: manhattan_accuracy_threshold value: 828.8480758666992 - type: manhattan_ap value: 55.077974053622555 - type: manhattan_f1 value: 66.82352941176471 - type: manhattan_f1_threshold value: 885.6784820556641 - type: manhattan_precision value: 52.20588235294118 - type: manhattan_recall value: 92.81045751633987 - type: max_ap value: 55.14264553720072 - type: max_f1 value: 66.83544303797468 - type: max_precision value: 54.54545454545454 - type: max_recall value: 92.81045751633987 - type: similarity_accuracy value: 57.32899022801303 - type: similarity_accuracy_threshold value: 85.32201051712036 - type: similarity_ap value: 55.14264553720072 - type: similarity_f1 value: 66.83544303797468 - type: similarity_f1_threshold value: 85.32201051712036 - type: similarity_precision value: 54.54545454545454 - type: similarity_recall value: 86.27450980392157 - task: type: PairClassification dataset: name: MTEB XNLI (ru) type: mteb/xnli config: ru split: test revision: 09698e0180d87dc247ca447d3a1248b931ac0cdb metrics: - type: cosine_accuracy value: 67.6923076923077 - type: cosine_accuracy_threshold value: 87.6681923866272 - type: cosine_ap value: 73.18693800863593 - type: cosine_f1 value: 70.40641099026904 - type: cosine_f1_threshold value: 85.09706258773804 - type: cosine_precision value: 57.74647887323944 - type: cosine_recall value: 90.17595307917888 - type: dot_accuracy value: 67.6923076923077 - type: dot_accuracy_threshold value: 87.66818642616272 - type: dot_ap value: 73.18693800863593 - type: dot_f1 value: 70.40641099026904 - type: dot_f1_threshold value: 85.09706258773804 - type: dot_precision value: 57.74647887323944 - type: dot_recall value: 90.17595307917888 - type: euclidean_accuracy value: 67.6923076923077 - type: euclidean_accuracy_threshold value: 49.662476778030396 - type: euclidean_ap value: 73.18693800863593 - type: euclidean_f1 value: 70.40641099026904 - type: 
euclidean_f1_threshold value: 54.59475517272949 - type: euclidean_precision value: 57.74647887323944 - type: euclidean_recall value: 90.17595307917888 - type: main_score value: 73.18693800863593 - type: manhattan_accuracy value: 67.54578754578755 - type: manhattan_accuracy_threshold value: 777.1001815795898 - type: manhattan_ap value: 72.98861474758783 - type: manhattan_f1 value: 70.6842435655995 - type: manhattan_f1_threshold value: 810.3782653808594 - type: manhattan_precision value: 61.80021953896817 - type: manhattan_recall value: 82.55131964809385 - type: max_ap value: 73.18693800863593 - type: max_f1 value: 70.6842435655995 - type: max_precision value: 61.80021953896817 - type: max_recall value: 90.17595307917888 - type: similarity_accuracy value: 67.6923076923077 - type: similarity_accuracy_threshold value: 87.6681923866272 - type: similarity_ap value: 73.18693800863593 - type: similarity_f1 value: 70.40641099026904 - type: similarity_f1_threshold value: 85.09706258773804 - type: similarity_precision value: 57.74647887323944 - type: similarity_recall value: 90.17595307917888 - task: type: PairClassification dataset: name: MTEB XNLIV2 (russian) type: mteb/xnli2.0-multi-pair config: russian split: test revision: 5b7d477a8c62cdd18e2fed7e015497c20b4371ad metrics: - type: cosine_accuracy value: 68.35164835164835 - type: cosine_accuracy_threshold value: 88.48621845245361 - type: cosine_ap value: 73.10205506215699 - type: cosine_f1 value: 71.28712871287128 - type: cosine_f1_threshold value: 87.00399398803711 - type: cosine_precision value: 61.67023554603854 - type: cosine_recall value: 84.4574780058651 - type: dot_accuracy value: 68.35164835164835 - type: dot_accuracy_threshold value: 88.48622441291809 - type: dot_ap value: 73.10191110714706 - type: dot_f1 value: 71.28712871287128 - type: dot_f1_threshold value: 87.00399398803711 - type: dot_precision value: 61.67023554603854 - type: dot_recall value: 84.4574780058651 - type: euclidean_accuracy value: 68.35164835164835 - type: euclidean_accuracy_threshold value: 47.98704385757446 - type: euclidean_ap value: 73.10205506215699 - type: euclidean_f1 value: 71.28712871287128 - type: euclidean_f1_threshold value: 50.982362031936646 - type: euclidean_precision value: 61.67023554603854 - type: euclidean_recall value: 84.4574780058651 - type: main_score value: 73.10205506215699 - type: manhattan_accuracy value: 67.91208791208791 - type: manhattan_accuracy_threshold value: 746.1360931396484 - type: manhattan_ap value: 72.8954736175069 - type: manhattan_f1 value: 71.1297071129707 - type: manhattan_f1_threshold value: 808.0789566040039 - type: manhattan_precision value: 60.04036326942482 - type: manhattan_recall value: 87.2434017595308 - type: max_ap value: 73.10205506215699 - type: max_f1 value: 71.28712871287128 - type: max_precision value: 61.67023554603854 - type: max_recall value: 87.2434017595308 - type: similarity_accuracy value: 68.35164835164835 - type: similarity_accuracy_threshold value: 88.48621845245361 - type: similarity_ap value: 73.10205506215699 - type: similarity_f1 value: 71.28712871287128 - type: similarity_f1_threshold value: 87.00399398803711 - type: similarity_precision value: 61.67023554603854 - type: similarity_recall value: 84.4574780058651 - task: type: Retrieval dataset: name: MTEB XQuADRetrieval (ru) type: google/xquad config: ru split: validation revision: 51adfef1c1287aab1d2d91b5bead9bcfb9c68583 metrics: - type: main_score value: 95.705 - type: map_at_1 value: 90.802 - type: map_at_10 value: 94.427 - type: map_at_100 
value: 94.451 - type: map_at_1000 value: 94.451 - type: map_at_20 value: 94.446 - type: map_at_3 value: 94.121 - type: map_at_5 value: 94.34 - type: mrr_at_1 value: 90.80168776371308 - type: mrr_at_10 value: 94.42659567343111 - type: mrr_at_100 value: 94.45099347521871 - type: mrr_at_1000 value: 94.45099347521871 - type: mrr_at_20 value: 94.44574530017569 - type: mrr_at_3 value: 94.12095639943743 - type: mrr_at_5 value: 94.34036568213786 - type: nauc_map_at_1000_diff1 value: 87.40573202946949 - type: nauc_map_at_1000_max value: 65.56220344468791 - type: nauc_map_at_1000_std value: 8.865583291735863 - type: nauc_map_at_100_diff1 value: 87.40573202946949 - type: nauc_map_at_100_max value: 65.56220344468791 - type: nauc_map_at_100_std value: 8.865583291735863 - type: nauc_map_at_10_diff1 value: 87.43657080570291 - type: nauc_map_at_10_max value: 65.71295628534446 - type: nauc_map_at_10_std value: 9.055399339099655 - type: nauc_map_at_1_diff1 value: 88.08395824560428 - type: nauc_map_at_1_max value: 62.92813192908893 - type: nauc_map_at_1_std value: 6.738987385482432 - type: nauc_map_at_20_diff1 value: 87.40979818966589 - type: nauc_map_at_20_max value: 65.59474346926105 - type: nauc_map_at_20_std value: 8.944420599300914 - type: nauc_map_at_3_diff1 value: 86.97771892161035 - type: nauc_map_at_3_max value: 66.14330030122467 - type: nauc_map_at_3_std value: 8.62516327793521 - type: nauc_map_at_5_diff1 value: 87.30273362211798 - type: nauc_map_at_5_max value: 66.1522476584607 - type: nauc_map_at_5_std value: 9.780940862679724 - type: nauc_mrr_at_1000_diff1 value: 87.40573202946949 - type: nauc_mrr_at_1000_max value: 65.56220344468791 - type: nauc_mrr_at_1000_std value: 8.865583291735863 - type: nauc_mrr_at_100_diff1 value: 87.40573202946949 - type: nauc_mrr_at_100_max value: 65.56220344468791 - type: nauc_mrr_at_100_std value: 8.865583291735863 - type: nauc_mrr_at_10_diff1 value: 87.43657080570291 - type: nauc_mrr_at_10_max value: 65.71295628534446 - type: nauc_mrr_at_10_std value: 9.055399339099655 - type: nauc_mrr_at_1_diff1 value: 88.08395824560428 - type: nauc_mrr_at_1_max value: 62.92813192908893 - type: nauc_mrr_at_1_std value: 6.738987385482432 - type: nauc_mrr_at_20_diff1 value: 87.40979818966589 - type: nauc_mrr_at_20_max value: 65.59474346926105 - type: nauc_mrr_at_20_std value: 8.944420599300914 - type: nauc_mrr_at_3_diff1 value: 86.97771892161035 - type: nauc_mrr_at_3_max value: 66.14330030122467 - type: nauc_mrr_at_3_std value: 8.62516327793521 - type: nauc_mrr_at_5_diff1 value: 87.30273362211798 - type: nauc_mrr_at_5_max value: 66.1522476584607 - type: nauc_mrr_at_5_std value: 9.780940862679724 - type: nauc_ndcg_at_1000_diff1 value: 87.37823158814116 - type: nauc_ndcg_at_1000_max value: 66.00874244792789 - type: nauc_ndcg_at_1000_std value: 9.479929342875067 - type: nauc_ndcg_at_100_diff1 value: 87.37823158814116 - type: nauc_ndcg_at_100_max value: 66.00874244792789 - type: nauc_ndcg_at_100_std value: 9.479929342875067 - type: nauc_ndcg_at_10_diff1 value: 87.54508467181488 - type: nauc_ndcg_at_10_max value: 66.88756470312894 - type: nauc_ndcg_at_10_std value: 10.812624405397022 - type: nauc_ndcg_at_1_diff1 value: 88.08395824560428 - type: nauc_ndcg_at_1_max value: 62.92813192908893 - type: nauc_ndcg_at_1_std value: 6.738987385482432 - type: nauc_ndcg_at_20_diff1 value: 87.42097894104597 - type: nauc_ndcg_at_20_max value: 66.37031898778943 - type: nauc_ndcg_at_20_std value: 10.34862538094813 - type: nauc_ndcg_at_3_diff1 value: 86.50039907157999 - type: nauc_ndcg_at_3_max value: 
67.97798288917929 - type: nauc_ndcg_at_3_std value: 10.162410286746852 - type: nauc_ndcg_at_5_diff1 value: 87.13322094568531 - type: nauc_ndcg_at_5_max value: 68.08576118683821 - type: nauc_ndcg_at_5_std value: 12.639637379592855 - type: nauc_precision_at_1000_diff1 value: 100.0 - type: nauc_precision_at_1000_max value: 100.0 - type: nauc_precision_at_1000_std value: 100.0 - type: nauc_precision_at_100_diff1 value: 100.0 - type: nauc_precision_at_100_max value: 100.0 - type: nauc_precision_at_100_std value: 100.0 - type: nauc_precision_at_10_diff1 value: 93.46711505595813 - type: nauc_precision_at_10_max value: 100.0 - type: nauc_precision_at_10_std value: 65.42573557179935 - type: nauc_precision_at_1_diff1 value: 88.08395824560428 - type: nauc_precision_at_1_max value: 62.92813192908893 - type: nauc_precision_at_1_std value: 6.738987385482432 - type: nauc_precision_at_20_diff1 value: 91.28948674127133 - type: nauc_precision_at_20_max value: 100.0 - type: nauc_precision_at_20_std value: 90.74278258632364 - type: nauc_precision_at_3_diff1 value: 82.64606115071832 - type: nauc_precision_at_3_max value: 83.26201582412921 - type: nauc_precision_at_3_std value: 23.334013491433762 - type: nauc_precision_at_5_diff1 value: 85.0867539350284 - type: nauc_precision_at_5_max value: 96.57011448655484 - type: nauc_precision_at_5_std value: 56.46869543426768 - type: nauc_recall_at_1000_diff1 value: .nan - type: nauc_recall_at_1000_max value: .nan - type: nauc_recall_at_1000_std value: .nan - type: nauc_recall_at_100_diff1 value: .nan - type: nauc_recall_at_100_max value: .nan - type: nauc_recall_at_100_std value: .nan - type: nauc_recall_at_10_diff1 value: 93.46711505595623 - type: nauc_recall_at_10_max value: 100.0 - type: nauc_recall_at_10_std value: 65.42573557180279 - type: nauc_recall_at_1_diff1 value: 88.08395824560428 - type: nauc_recall_at_1_max value: 62.92813192908893 - type: nauc_recall_at_1_std value: 6.738987385482432 - type: nauc_recall_at_20_diff1 value: 91.28948674127474 - type: nauc_recall_at_20_max value: 100.0 - type: nauc_recall_at_20_std value: 90.74278258632704 - type: nauc_recall_at_3_diff1 value: 82.64606115071967 - type: nauc_recall_at_3_max value: 83.26201582413023 - type: nauc_recall_at_3_std value: 23.334013491434007 - type: nauc_recall_at_5_diff1 value: 85.08675393502854 - type: nauc_recall_at_5_max value: 96.57011448655487 - type: nauc_recall_at_5_std value: 56.46869543426658 - type: ndcg_at_1 value: 90.802 - type: ndcg_at_10 value: 95.705 - type: ndcg_at_100 value: 95.816 - type: ndcg_at_1000 value: 95.816 - type: ndcg_at_20 value: 95.771 - type: ndcg_at_3 value: 95.11699999999999 - type: ndcg_at_5 value: 95.506 - type: precision_at_1 value: 90.802 - type: precision_at_10 value: 9.949 - type: precision_at_100 value: 1.0 - type: precision_at_1000 value: 0.1 - type: precision_at_20 value: 4.987 - type: precision_at_3 value: 32.658 - type: precision_at_5 value: 19.781000000000002 - type: recall_at_1 value: 90.802 - type: recall_at_10 value: 99.494 - type: recall_at_100 value: 100.0 - type: recall_at_1000 value: 100.0 - type: recall_at_20 value: 99.747 - type: recall_at_3 value: 97.975 - type: recall_at_5 value: 98.90299999999999 --- ## Multilingual-E5-small [Multilingual E5 Text Embeddings: A Technical Report](https://arxiv.org/pdf/2402.05672). Liang Wang, Nan Yang, Xiaolong Huang, Linjun Yang, Rangan Majumder, Furu Wei, arXiv 2024 This model has 12 layers and the embedding size is 384. 
## Usage

Below is an example to encode queries and passages from the MS-MARCO passage ranking dataset.

```python
import torch.nn.functional as F

from torch import Tensor
from transformers import AutoTokenizer, AutoModel


def average_pool(last_hidden_states: Tensor,
                 attention_mask: Tensor) -> Tensor:
    last_hidden = last_hidden_states.masked_fill(~attention_mask[..., None].bool(), 0.0)
    return last_hidden.sum(dim=1) / attention_mask.sum(dim=1)[..., None]


# Each input text should start with "query: " or "passage: ", even for non-English texts.
# For tasks other than retrieval, you can simply use the "query: " prefix.
input_texts = ['query: how much protein should a female eat',
               'query: 南瓜的家常做法',
               "passage: As a general guideline, the CDC's average requirement of protein for women ages 19 to 70 is 46 grams per day. But, as you can see from this chart, you'll need to increase that if you're expecting or training for a marathon. Check out the chart below to see how much protein you should be eating each day.",
               "passage: 1.清炒南瓜丝 原料:嫩南瓜半个 调料:葱、盐、白糖、鸡精 做法: 1、南瓜用刀薄薄的削去表面一层皮,用勺子刮去瓤 2、擦成细丝(没有擦菜板就用刀慢慢切成细丝) 3、锅烧热放油,入葱花煸出香味 4、入南瓜丝快速翻炒一分钟左右,放盐、一点白糖和鸡精调味出锅 2.香葱炒南瓜 原料:南瓜1只 调料:香葱、蒜末、橄榄油、盐 做法: 1、将南瓜去皮,切成片 2、油锅8成热后,将蒜末放入爆香 3、爆香后,将南瓜片放入,翻炒 4、在翻炒的同时,可以不时地往锅里加水,但不要太多 5、放入盐,炒匀 6、南瓜差不多软和绵了之后,就可以关火 7、撒入香葱,即可出锅"]

tokenizer = AutoTokenizer.from_pretrained('intfloat/multilingual-e5-small')
model = AutoModel.from_pretrained('intfloat/multilingual-e5-small')

# Tokenize the input texts
batch_dict = tokenizer(input_texts, max_length=512, padding=True, truncation=True, return_tensors='pt')

outputs = model(**batch_dict)
embeddings = average_pool(outputs.last_hidden_state, batch_dict['attention_mask'])

# normalize embeddings
embeddings = F.normalize(embeddings, p=2, dim=1)
scores = (embeddings[:2] @ embeddings[2:].T) * 100
print(scores.tolist())
```

## Supported Languages

This model is initialized from [microsoft/Multilingual-MiniLM-L12-H384](https://huggingface.co/microsoft/Multilingual-MiniLM-L12-H384) and continually trained on a mixture of multilingual datasets. It supports 100 languages from xlm-roberta, but low-resource languages may see performance degradation.
## Training Details

**Initialization**: [microsoft/Multilingual-MiniLM-L12-H384](https://huggingface.co/microsoft/Multilingual-MiniLM-L12-H384)

**First stage**: contrastive pre-training with weak supervision

| Dataset | Weak supervision | # of text pairs |
|--------------------------------------------------------------------------------------------------------|---------------------------------------|-----------------|
| Filtered [mC4](https://huggingface.co/datasets/mc4) | (title, page content) | 1B |
| [CC News](https://huggingface.co/datasets/intfloat/multilingual_cc_news) | (title, news content) | 400M |
| [NLLB](https://huggingface.co/datasets/allenai/nllb) | translation pairs | 2.4B |
| [Wikipedia](https://huggingface.co/datasets/intfloat/wikipedia) | (hierarchical section title, passage) | 150M |
| Filtered [Reddit](https://www.reddit.com/) | (comment, response) | 800M |
| [S2ORC](https://github.com/allenai/s2orc) | (title, abstract) and citation pairs | 100M |
| [Stackexchange](https://stackexchange.com/) | (question, answer) | 50M |
| [xP3](https://huggingface.co/datasets/bigscience/xP3) | (input prompt, response) | 80M |
| [Miscellaneous unsupervised SBERT data](https://huggingface.co/sentence-transformers/all-MiniLM-L6-v2) | - | 10M |

**Second stage**: supervised fine-tuning

| Dataset | Language | # of text pairs |
|----------------------------------------------------------------------------------------|--------------|-----------------|
| [MS MARCO](https://microsoft.github.io/msmarco/) | English | 500k |
| [NQ](https://github.com/facebookresearch/DPR) | English | 70k |
| [Trivia QA](https://github.com/facebookresearch/DPR) | English | 60k |
| [NLI from SimCSE](https://github.com/princeton-nlp/SimCSE) | English | <300k |
| [ELI5](https://huggingface.co/datasets/eli5) | English | 500k |
| [DuReader Retrieval](https://github.com/baidu/DuReader/tree/master/DuReader-Retrieval) | Chinese | 86k |
| [KILT Fever](https://huggingface.co/datasets/kilt_tasks) | English | 70k |
| [KILT HotpotQA](https://huggingface.co/datasets/kilt_tasks) | English | 70k |
| [SQuAD](https://huggingface.co/datasets/squad) | English | 87k |
| [Quora](https://huggingface.co/datasets/quora) | English | 150k |
| [Mr. TyDi](https://huggingface.co/datasets/castorini/mr-tydi) | 11 languages | 50k |
| [MIRACL](https://huggingface.co/datasets/miracl/miracl) | 16 languages | 40k |

For all labeled datasets, we only use the training sets for fine-tuning.

For other training details, please refer to our paper at [https://arxiv.org/pdf/2402.05672](https://arxiv.org/pdf/2402.05672).
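As background for the contrastive pre-training stage, here is a minimal sketch of an InfoNCE loss with in-batch negatives, assuming normalized embeddings and the temperature of 0.01 mentioned in the FAQ further down. The function and variable names are illustrative; this is not the exact training code, so see the paper for the precise formulation.

```python
import torch
import torch.nn.functional as F


def info_nce_loss(query_emb: torch.Tensor, passage_emb: torch.Tensor,
                  temperature: float = 0.01) -> torch.Tensor:
    """Sketch of InfoNCE with in-batch negatives: row i of `query_emb` is
    paired with row i of `passage_emb`; all other rows act as negatives."""
    q = F.normalize(query_emb, p=2, dim=1)
    p = F.normalize(passage_emb, p=2, dim=1)
    logits = (q @ p.T) / temperature                    # cosine similarities, sharpened by the temperature
    labels = torch.arange(q.size(0), device=q.device)   # positives lie on the diagonal
    return F.cross_entropy(logits, labels)
```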
## Benchmark Results on [Mr. TyDi](https://arxiv.org/abs/2108.08787)

| Model | Avg MRR@10 | | ar | bn | en | fi | id | ja | ko | ru | sw | te | th |
|-----------------------|------------|-------|------|------|------|------|------|------|------|------|------|------|------|
| BM25 | 33.3 | | 36.7 | 41.3 | 15.1 | 28.8 | 38.2 | 21.7 | 28.1 | 32.9 | 39.6 | 42.4 | 41.7 |
| mDPR | 16.7 | | 26.0 | 25.8 | 16.2 | 11.3 | 14.6 | 18.1 | 21.9 | 18.5 | 7.3 | 10.6 | 13.5 |
| BM25 + mDPR | 41.7 | | 49.1 | 53.5 | 28.4 | 36.5 | 45.5 | 35.5 | 36.2 | 42.7 | 40.5 | 42.0 | 49.2 |
| | | |
| multilingual-e5-small | 64.4 | | 71.5 | 66.3 | 54.5 | 57.7 | 63.2 | 55.4 | 54.3 | 60.8 | 65.4 | 89.1 | 70.1 |
| multilingual-e5-base | 65.9 | | 72.3 | 65.0 | 58.5 | 60.8 | 64.9 | 56.6 | 55.8 | 62.7 | 69.0 | 86.6 | 72.7 |
| multilingual-e5-large | **70.5** | | 77.5 | 73.2 | 60.8 | 66.8 | 68.5 | 62.5 | 61.6 | 65.8 | 72.7 | 90.2 | 76.2 |

## MTEB Benchmark Evaluation

Check out [unilm/e5](https://github.com/microsoft/unilm/tree/master/e5) to reproduce evaluation results on the [BEIR](https://arxiv.org/abs/2104.08663) and [MTEB benchmark](https://arxiv.org/abs/2210.07316).

## Support for Sentence Transformers

Below is an example for usage with sentence_transformers.

```python
from sentence_transformers import SentenceTransformer

model = SentenceTransformer('intfloat/multilingual-e5-small')
input_texts = [
    'query: how much protein should a female eat',
    'query: 南瓜的家常做法',
    "passage: As a general guideline, the CDC's average requirement of protein for women ages 19 to 70 is 46 grams per day. But, as you can see from this chart, you'll need to increase that if you're expecting or training for a marathon. Check out the chart below to see how much protein you should be eating each day.",
    "passage: 1.清炒南瓜丝 原料:嫩南瓜半个 调料:葱、盐、白糖、鸡精 做法: 1、南瓜用刀薄薄的削去表面一层皮,用勺子刮去瓤 2、擦成细丝(没有擦菜板就用刀慢慢切成细丝) 3、锅烧热放油,入葱花煸出香味 4、入南瓜丝快速翻炒一分钟左右,放盐、一点白糖和鸡精调味出锅 2.香葱炒南瓜 原料:南瓜1只 调料:香葱、蒜末、橄榄油、盐 做法: 1、将南瓜去皮,切成片 2、油锅8成热后,将蒜末放入爆香 3、爆香后,将南瓜片放入,翻炒 4、在翻炒的同时,可以不时地往锅里加水,但不要太多 5、放入盐,炒匀 6、南瓜差不多软和绵了之后,就可以关火 7、撒入香葱,即可出锅"
]
embeddings = model.encode(input_texts, normalize_embeddings=True)
```

Package requirements:

`pip install sentence_transformers~=2.2.2`

Contributors: [michaelfeil](https://huggingface.co/michaelfeil)

## FAQ

**1. Do I need to add the prefix "query: " and "passage: " to input texts?**

Yes, this is how the model is trained; otherwise you will see a performance degradation.

Here are some rules of thumb:
- Use "query: " and "passage: " correspondingly for asymmetric tasks such as passage retrieval in open QA and ad-hoc information retrieval.
- Use the "query: " prefix for symmetric tasks such as semantic similarity, bitext mining, and paraphrase retrieval (see the short example at the end of this FAQ).
- Use the "query: " prefix if you want to use embeddings as features, such as for linear probing classification or clustering.

**2. Why are my reproduced results slightly different from those reported in the model card?**

Different versions of `transformers` and `pytorch` could cause negligible but non-zero performance differences.

**3. Why do the cosine similarity scores distribute around 0.7 to 1.0?**

This is a known and expected behavior, as we use a low temperature of 0.01 for the InfoNCE contrastive loss. For text embedding tasks like text retrieval or semantic similarity, what matters is the relative order of the scores rather than their absolute values, so this should not be an issue.
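To make the symmetric-task rule of thumb concrete, here is a minimal sketch that scores a semantic-similarity pair using only the "query: " prefix on both sides; the two sentences are invented for illustration.

```python
from sentence_transformers import SentenceTransformer

model = SentenceTransformer('intfloat/multilingual-e5-small')

# Symmetric task (semantic similarity): prefix *both* sides with "query: ".
sentences = [
    'query: how to cook pumpkin at home',
    'query: easy home-style pumpkin recipes',
]
embeddings = model.encode(sentences, normalize_embeddings=True)

# With normalized embeddings, the dot product equals the cosine similarity.
print(float(embeddings[0] @ embeddings[1]))
```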
## Citation

If you find our paper or models helpful, please consider citing as follows:

```
@article{wang2024multilingual,
  title={Multilingual E5 Text Embeddings: A Technical Report},
  author={Wang, Liang and Yang, Nan and Huang, Xiaolong and Yang, Linjun and Majumder, Rangan and Wei, Furu},
  journal={arXiv preprint arXiv:2402.05672},
  year={2024}
}
```

## Limitations

Long texts will be truncated to at most 512 tokens.
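If you are unsure whether an input exceeds this limit, one simple check (a sketch, not an official utility) is to tokenize the text without truncation and compare its length against 512:

```python
from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained('intfloat/multilingual-e5-small')

text = 'passage: ' + 'some long document text ' * 500  # placeholder long input
n_tokens = len(tokenizer(text)['input_ids'])           # may log a length warning; that is expected here
if n_tokens > 512:
    print(f'{n_tokens} tokens: everything beyond 512 will be truncated when encoding.')
```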
[ "BIOSSES", "SCIFACT" ]
intfloat/multilingual-e5-large
intfloat
feature-extraction
[ "sentence-transformers", "pytorch", "onnx", "safetensors", "openvino", "xlm-roberta", "mteb", "Sentence Transformers", "sentence-similarity", "feature-extraction", "multilingual", "af", "am", "ar", "as", "az", "be", "bg", "bn", "br", "bs", "ca", "cs", "cy", "da", "de", "el", "en", "eo", "es", "et", "eu", "fa", "fi", "fr", "fy", "ga", "gd", "gl", "gu", "ha", "he", "hi", "hr", "hu", "hy", "id", "is", "it", "ja", "jv", "ka", "kk", "km", "kn", "ko", "ku", "ky", "la", "lo", "lt", "lv", "mg", "mk", "ml", "mn", "mr", "ms", "my", "ne", "nl", "no", "om", "or", "pa", "pl", "ps", "pt", "ro", "ru", "sa", "sd", "si", "sk", "sl", "so", "sq", "sr", "su", "sv", "sw", "ta", "te", "th", "tl", "tr", "ug", "uk", "ur", "uz", "vi", "xh", "yi", "zh", "arxiv:2402.05672", "arxiv:2108.08787", "arxiv:2104.08663", "arxiv:2210.07316", "license:mit", "model-index", "autotrain_compatible", "text-embeddings-inference", "endpoints_compatible", "region:us" ]
"2023-06-30T07:38:19Z"
2025-02-17T03:50:15+00:00
2,239,718
894
--- language: - multilingual - af - am - ar - as - az - be - bg - bn - br - bs - ca - cs - cy - da - de - el - en - eo - es - et - eu - fa - fi - fr - fy - ga - gd - gl - gu - ha - he - hi - hr - hu - hy - id - is - it - ja - jv - ka - kk - km - kn - ko - ku - ky - la - lo - lt - lv - mg - mk - ml - mn - mr - ms - my - ne - nl - 'no' - om - or - pa - pl - ps - pt - ro - ru - sa - sd - si - sk - sl - so - sq - sr - su - sv - sw - ta - te - th - tl - tr - ug - uk - ur - uz - vi - xh - yi - zh license: mit tags: - mteb - Sentence Transformers - sentence-similarity - feature-extraction - sentence-transformers model-index: - name: multilingual-e5-large results: - task: type: Classification dataset: name: MTEB AmazonCounterfactualClassification (en) type: mteb/amazon_counterfactual config: en split: test revision: e8379541af4e31359cca9fbcf4b00f2671dba205 metrics: - type: accuracy value: 79.05970149253731 - type: ap value: 43.486574390835635 - type: f1 value: 73.32700092140148 - task: type: Classification dataset: name: MTEB AmazonCounterfactualClassification (de) type: mteb/amazon_counterfactual config: de split: test revision: e8379541af4e31359cca9fbcf4b00f2671dba205 metrics: - type: accuracy value: 71.22055674518201 - type: ap value: 81.55756710830498 - type: f1 value: 69.28271787752661 - task: type: Classification dataset: name: MTEB AmazonCounterfactualClassification (en-ext) type: mteb/amazon_counterfactual config: en-ext split: test revision: e8379541af4e31359cca9fbcf4b00f2671dba205 metrics: - type: accuracy value: 80.41979010494754 - type: ap value: 29.34879922376344 - type: f1 value: 67.62475449011278 - task: type: Classification dataset: name: MTEB AmazonCounterfactualClassification (ja) type: mteb/amazon_counterfactual config: ja split: test revision: e8379541af4e31359cca9fbcf4b00f2671dba205 metrics: - type: accuracy value: 77.8372591006424 - type: ap value: 26.557560591210738 - type: f1 value: 64.96619417368707 - task: type: Classification dataset: name: MTEB AmazonPolarityClassification type: mteb/amazon_polarity config: default split: test revision: e2d317d38cd51312af73b3d32a06d1a08b442046 metrics: - type: accuracy value: 93.489875 - type: ap value: 90.98758636917603 - type: f1 value: 93.48554819717332 - task: type: Classification dataset: name: MTEB AmazonReviewsClassification (en) type: mteb/amazon_reviews_multi config: en split: test revision: 1399c76144fd37290681b995c656ef9b2e06e26d metrics: - type: accuracy value: 47.564 - type: f1 value: 46.75122173518047 - task: type: Classification dataset: name: MTEB AmazonReviewsClassification (de) type: mteb/amazon_reviews_multi config: de split: test revision: 1399c76144fd37290681b995c656ef9b2e06e26d metrics: - type: accuracy value: 45.400000000000006 - type: f1 value: 44.17195682400632 - task: type: Classification dataset: name: MTEB AmazonReviewsClassification (es) type: mteb/amazon_reviews_multi config: es split: test revision: 1399c76144fd37290681b995c656ef9b2e06e26d metrics: - type: accuracy value: 43.068 - type: f1 value: 42.38155696855596 - task: type: Classification dataset: name: MTEB AmazonReviewsClassification (fr) type: mteb/amazon_reviews_multi config: fr split: test revision: 1399c76144fd37290681b995c656ef9b2e06e26d metrics: - type: accuracy value: 41.89 - type: f1 value: 40.84407321682663 - task: type: Classification dataset: name: MTEB AmazonReviewsClassification (ja) type: mteb/amazon_reviews_multi config: ja split: test revision: 1399c76144fd37290681b995c656ef9b2e06e26d metrics: - type: accuracy value: 
40.120000000000005 - type: f1 value: 39.522976223819114 - task: type: Classification dataset: name: MTEB AmazonReviewsClassification (zh) type: mteb/amazon_reviews_multi config: zh split: test revision: 1399c76144fd37290681b995c656ef9b2e06e26d metrics: - type: accuracy value: 38.832 - type: f1 value: 38.0392533394713 - task: type: Retrieval dataset: name: MTEB ArguAna type: arguana config: default split: test revision: None metrics: - type: map_at_1 value: 30.725 - type: map_at_10 value: 46.055 - type: map_at_100 value: 46.900999999999996 - type: map_at_1000 value: 46.911 - type: map_at_3 value: 41.548 - type: map_at_5 value: 44.297 - type: mrr_at_1 value: 31.152 - type: mrr_at_10 value: 46.231 - type: mrr_at_100 value: 47.07 - type: mrr_at_1000 value: 47.08 - type: mrr_at_3 value: 41.738 - type: mrr_at_5 value: 44.468999999999994 - type: ndcg_at_1 value: 30.725 - type: ndcg_at_10 value: 54.379999999999995 - type: ndcg_at_100 value: 58.138 - type: ndcg_at_1000 value: 58.389 - type: ndcg_at_3 value: 45.156 - type: ndcg_at_5 value: 50.123 - type: precision_at_1 value: 30.725 - type: precision_at_10 value: 8.087 - type: precision_at_100 value: 0.9769999999999999 - type: precision_at_1000 value: 0.1 - type: precision_at_3 value: 18.54 - type: precision_at_5 value: 13.542000000000002 - type: recall_at_1 value: 30.725 - type: recall_at_10 value: 80.868 - type: recall_at_100 value: 97.653 - type: recall_at_1000 value: 99.57300000000001 - type: recall_at_3 value: 55.619 - type: recall_at_5 value: 67.71000000000001 - task: type: Clustering dataset: name: MTEB ArxivClusteringP2P type: mteb/arxiv-clustering-p2p config: default split: test revision: a122ad7f3f0291bf49cc6f4d32aa80929df69d5d metrics: - type: v_measure value: 44.30960650674069 - task: type: Clustering dataset: name: MTEB ArxivClusteringS2S type: mteb/arxiv-clustering-s2s config: default split: test revision: f910caf1a6075f7329cdf8c1a6135696f37dbd53 metrics: - type: v_measure value: 38.427074197498996 - task: type: Reranking dataset: name: MTEB AskUbuntuDupQuestions type: mteb/askubuntudupquestions-reranking config: default split: test revision: 2000358ca161889fa9c082cb41daa8dcfb161a54 metrics: - type: map value: 60.28270056031872 - type: mrr value: 74.38332673789738 - task: type: STS dataset: name: MTEB BIOSSES type: mteb/biosses-sts config: default split: test revision: d3fb88f8f02e40887cd149695127462bbcf29b4a metrics: - type: cos_sim_pearson value: 84.05942144105269 - type: cos_sim_spearman value: 82.51212105850809 - type: euclidean_pearson value: 81.95639829909122 - type: euclidean_spearman value: 82.3717564144213 - type: manhattan_pearson value: 81.79273425468256 - type: manhattan_spearman value: 82.20066817871039 - task: type: BitextMining dataset: name: MTEB BUCC (de-en) type: mteb/bucc-bitext-mining config: de-en split: test revision: d51519689f32196a32af33b075a01d0e7c51e252 metrics: - type: accuracy value: 99.46764091858039 - type: f1 value: 99.37717466945023 - type: precision value: 99.33194154488518 - type: recall value: 99.46764091858039 - task: type: BitextMining dataset: name: MTEB BUCC (fr-en) type: mteb/bucc-bitext-mining config: fr-en split: test revision: d51519689f32196a32af33b075a01d0e7c51e252 metrics: - type: accuracy value: 98.29407880255337 - type: f1 value: 98.11248073959938 - type: precision value: 98.02443319392472 - type: recall value: 98.29407880255337 - task: type: BitextMining dataset: name: MTEB BUCC (ru-en) type: mteb/bucc-bitext-mining config: ru-en split: test revision: 
d51519689f32196a32af33b075a01d0e7c51e252 metrics: - type: accuracy value: 97.79009352268791 - type: f1 value: 97.5176076665512 - type: precision value: 97.38136473848286 - type: recall value: 97.79009352268791 - task: type: BitextMining dataset: name: MTEB BUCC (zh-en) type: mteb/bucc-bitext-mining config: zh-en split: test revision: d51519689f32196a32af33b075a01d0e7c51e252 metrics: - type: accuracy value: 99.26276987888363 - type: f1 value: 99.20133403545726 - type: precision value: 99.17500438827453 - type: recall value: 99.26276987888363 - task: type: Classification dataset: name: MTEB Banking77Classification type: mteb/banking77 config: default split: test revision: 0fd18e25b25c072e09e0d92ab615fda904d66300 metrics: - type: accuracy value: 84.72727272727273 - type: f1 value: 84.67672206031433 - task: type: Clustering dataset: name: MTEB BiorxivClusteringP2P type: mteb/biorxiv-clustering-p2p config: default split: test revision: 65b79d1d13f80053f67aca9498d9402c2d9f1f40 metrics: - type: v_measure value: 35.34220182511161 - task: type: Clustering dataset: name: MTEB BiorxivClusteringS2S type: mteb/biorxiv-clustering-s2s config: default split: test revision: 258694dd0231531bc1fd9de6ceb52a0853c6d908 metrics: - type: v_measure value: 33.4987096128766 - task: type: Retrieval dataset: name: MTEB CQADupstackRetrieval type: BeIR/cqadupstack config: default split: test revision: None metrics: - type: map_at_1 value: 25.558249999999997 - type: map_at_10 value: 34.44425000000001 - type: map_at_100 value: 35.59833333333333 - type: map_at_1000 value: 35.706916666666665 - type: map_at_3 value: 31.691749999999995 - type: map_at_5 value: 33.252916666666664 - type: mrr_at_1 value: 30.252666666666666 - type: mrr_at_10 value: 38.60675 - type: mrr_at_100 value: 39.42666666666666 - type: mrr_at_1000 value: 39.48408333333334 - type: mrr_at_3 value: 36.17441666666665 - type: mrr_at_5 value: 37.56275 - type: ndcg_at_1 value: 30.252666666666666 - type: ndcg_at_10 value: 39.683 - type: ndcg_at_100 value: 44.68541666666667 - type: ndcg_at_1000 value: 46.94316666666668 - type: ndcg_at_3 value: 34.961749999999995 - type: ndcg_at_5 value: 37.215666666666664 - type: precision_at_1 value: 30.252666666666666 - type: precision_at_10 value: 6.904166666666667 - type: precision_at_100 value: 1.0989999999999995 - type: precision_at_1000 value: 0.14733333333333334 - type: precision_at_3 value: 16.037666666666667 - type: precision_at_5 value: 11.413583333333333 - type: recall_at_1 value: 25.558249999999997 - type: recall_at_10 value: 51.13341666666666 - type: recall_at_100 value: 73.08366666666667 - type: recall_at_1000 value: 88.79483333333334 - type: recall_at_3 value: 37.989083333333326 - type: recall_at_5 value: 43.787833333333325 - task: type: Retrieval dataset: name: MTEB ClimateFEVER type: climate-fever config: default split: test revision: None metrics: - type: map_at_1 value: 10.338 - type: map_at_10 value: 18.360000000000003 - type: map_at_100 value: 19.942 - type: map_at_1000 value: 20.134 - type: map_at_3 value: 15.174000000000001 - type: map_at_5 value: 16.830000000000002 - type: mrr_at_1 value: 23.257 - type: mrr_at_10 value: 33.768 - type: mrr_at_100 value: 34.707 - type: mrr_at_1000 value: 34.766000000000005 - type: mrr_at_3 value: 30.977 - type: mrr_at_5 value: 32.528 - type: ndcg_at_1 value: 23.257 - type: ndcg_at_10 value: 25.733 - type: ndcg_at_100 value: 32.288 - type: ndcg_at_1000 value: 35.992000000000004 - type: ndcg_at_3 value: 20.866 - type: ndcg_at_5 value: 22.612 - type: precision_at_1 value: 23.257 
- type: precision_at_10 value: 8.124 - type: precision_at_100 value: 1.518 - type: precision_at_1000 value: 0.219 - type: precision_at_3 value: 15.679000000000002 - type: precision_at_5 value: 12.117 - type: recall_at_1 value: 10.338 - type: recall_at_10 value: 31.154 - type: recall_at_100 value: 54.161 - type: recall_at_1000 value: 75.21900000000001 - type: recall_at_3 value: 19.427 - type: recall_at_5 value: 24.214 - task: type: Retrieval dataset: name: MTEB DBPedia type: dbpedia-entity config: default split: test revision: None metrics: - type: map_at_1 value: 8.498 - type: map_at_10 value: 19.103 - type: map_at_100 value: 27.375 - type: map_at_1000 value: 28.981 - type: map_at_3 value: 13.764999999999999 - type: map_at_5 value: 15.950000000000001 - type: mrr_at_1 value: 65.5 - type: mrr_at_10 value: 74.53800000000001 - type: mrr_at_100 value: 74.71799999999999 - type: mrr_at_1000 value: 74.725 - type: mrr_at_3 value: 72.792 - type: mrr_at_5 value: 73.554 - type: ndcg_at_1 value: 53.37499999999999 - type: ndcg_at_10 value: 41.286 - type: ndcg_at_100 value: 45.972 - type: ndcg_at_1000 value: 53.123 - type: ndcg_at_3 value: 46.172999999999995 - type: ndcg_at_5 value: 43.033 - type: precision_at_1 value: 65.5 - type: precision_at_10 value: 32.725 - type: precision_at_100 value: 10.683 - type: precision_at_1000 value: 1.978 - type: precision_at_3 value: 50 - type: precision_at_5 value: 41.349999999999994 - type: recall_at_1 value: 8.498 - type: recall_at_10 value: 25.070999999999998 - type: recall_at_100 value: 52.383 - type: recall_at_1000 value: 74.91499999999999 - type: recall_at_3 value: 15.207999999999998 - type: recall_at_5 value: 18.563 - task: type: Classification dataset: name: MTEB EmotionClassification type: mteb/emotion config: default split: test revision: 4f58c6b202a23cf9a4da393831edf4f9183cad37 metrics: - type: accuracy value: 46.5 - type: f1 value: 41.93833713984145 - task: type: Retrieval dataset: name: MTEB FEVER type: fever config: default split: test revision: None metrics: - type: map_at_1 value: 67.914 - type: map_at_10 value: 78.10000000000001 - type: map_at_100 value: 78.333 - type: map_at_1000 value: 78.346 - type: map_at_3 value: 76.626 - type: map_at_5 value: 77.627 - type: mrr_at_1 value: 72.74199999999999 - type: mrr_at_10 value: 82.414 - type: mrr_at_100 value: 82.511 - type: mrr_at_1000 value: 82.513 - type: mrr_at_3 value: 81.231 - type: mrr_at_5 value: 82.065 - type: ndcg_at_1 value: 72.74199999999999 - type: ndcg_at_10 value: 82.806 - type: ndcg_at_100 value: 83.677 - type: ndcg_at_1000 value: 83.917 - type: ndcg_at_3 value: 80.305 - type: ndcg_at_5 value: 81.843 - type: precision_at_1 value: 72.74199999999999 - type: precision_at_10 value: 10.24 - type: precision_at_100 value: 1.089 - type: precision_at_1000 value: 0.11299999999999999 - type: precision_at_3 value: 31.268 - type: precision_at_5 value: 19.706000000000003 - type: recall_at_1 value: 67.914 - type: recall_at_10 value: 92.889 - type: recall_at_100 value: 96.42699999999999 - type: recall_at_1000 value: 97.92 - type: recall_at_3 value: 86.21 - type: recall_at_5 value: 90.036 - task: type: Retrieval dataset: name: MTEB FiQA2018 type: fiqa config: default split: test revision: None metrics: - type: map_at_1 value: 22.166 - type: map_at_10 value: 35.57 - type: map_at_100 value: 37.405 - type: map_at_1000 value: 37.564 - type: map_at_3 value: 30.379 - type: map_at_5 value: 33.324 - type: mrr_at_1 value: 43.519000000000005 - type: mrr_at_10 value: 51.556000000000004 - type: mrr_at_100 value: 52.344 - 
type: mrr_at_1000 value: 52.373999999999995 - type: mrr_at_3 value: 48.868 - type: mrr_at_5 value: 50.319 - type: ndcg_at_1 value: 43.519000000000005 - type: ndcg_at_10 value: 43.803 - type: ndcg_at_100 value: 50.468999999999994 - type: ndcg_at_1000 value: 53.111 - type: ndcg_at_3 value: 38.893 - type: ndcg_at_5 value: 40.653 - type: precision_at_1 value: 43.519000000000005 - type: precision_at_10 value: 12.253 - type: precision_at_100 value: 1.931 - type: precision_at_1000 value: 0.242 - type: precision_at_3 value: 25.617 - type: precision_at_5 value: 19.383 - type: recall_at_1 value: 22.166 - type: recall_at_10 value: 51.6 - type: recall_at_100 value: 76.574 - type: recall_at_1000 value: 92.192 - type: recall_at_3 value: 34.477999999999994 - type: recall_at_5 value: 41.835 - task: type: Retrieval dataset: name: MTEB HotpotQA type: hotpotqa config: default split: test revision: None metrics: - type: map_at_1 value: 39.041 - type: map_at_10 value: 62.961999999999996 - type: map_at_100 value: 63.79899999999999 - type: map_at_1000 value: 63.854 - type: map_at_3 value: 59.399 - type: map_at_5 value: 61.669 - type: mrr_at_1 value: 78.082 - type: mrr_at_10 value: 84.321 - type: mrr_at_100 value: 84.49600000000001 - type: mrr_at_1000 value: 84.502 - type: mrr_at_3 value: 83.421 - type: mrr_at_5 value: 83.977 - type: ndcg_at_1 value: 78.082 - type: ndcg_at_10 value: 71.229 - type: ndcg_at_100 value: 74.10900000000001 - type: ndcg_at_1000 value: 75.169 - type: ndcg_at_3 value: 66.28699999999999 - type: ndcg_at_5 value: 69.084 - type: precision_at_1 value: 78.082 - type: precision_at_10 value: 14.993 - type: precision_at_100 value: 1.7239999999999998 - type: precision_at_1000 value: 0.186 - type: precision_at_3 value: 42.737 - type: precision_at_5 value: 27.843 - type: recall_at_1 value: 39.041 - type: recall_at_10 value: 74.96300000000001 - type: recall_at_100 value: 86.199 - type: recall_at_1000 value: 93.228 - type: recall_at_3 value: 64.105 - type: recall_at_5 value: 69.608 - task: type: Classification dataset: name: MTEB ImdbClassification type: mteb/imdb config: default split: test revision: 3d86128a09e091d6018b6d26cad27f2739fc2db7 metrics: - type: accuracy value: 90.23160000000001 - type: ap value: 85.5674856808308 - type: f1 value: 90.18033354786317 - task: type: Retrieval dataset: name: MTEB MSMARCO type: msmarco config: default split: dev revision: None metrics: - type: map_at_1 value: 24.091 - type: map_at_10 value: 36.753 - type: map_at_100 value: 37.913000000000004 - type: map_at_1000 value: 37.958999999999996 - type: map_at_3 value: 32.818999999999996 - type: map_at_5 value: 35.171 - type: mrr_at_1 value: 24.742 - type: mrr_at_10 value: 37.285000000000004 - type: mrr_at_100 value: 38.391999999999996 - type: mrr_at_1000 value: 38.431 - type: mrr_at_3 value: 33.440999999999995 - type: mrr_at_5 value: 35.75 - type: ndcg_at_1 value: 24.742 - type: ndcg_at_10 value: 43.698 - type: ndcg_at_100 value: 49.145 - type: ndcg_at_1000 value: 50.23800000000001 - type: ndcg_at_3 value: 35.769 - type: ndcg_at_5 value: 39.961999999999996 - type: precision_at_1 value: 24.742 - type: precision_at_10 value: 6.7989999999999995 - type: precision_at_100 value: 0.95 - type: precision_at_1000 value: 0.104 - type: precision_at_3 value: 15.096000000000002 - type: precision_at_5 value: 11.183 - type: recall_at_1 value: 24.091 - type: recall_at_10 value: 65.068 - type: recall_at_100 value: 89.899 - type: recall_at_1000 value: 98.16 - type: recall_at_3 value: 43.68 - type: recall_at_5 value: 53.754999999999995 - 
task: type: Classification dataset: name: MTEB MTOPDomainClassification (en) type: mteb/mtop_domain config: en split: test revision: d80d48c1eb48d3562165c59d59d0034df9fff0bf metrics: - type: accuracy value: 93.66621067031465 - type: f1 value: 93.49622853272142 - task: type: Classification dataset: name: MTEB MTOPDomainClassification (de) type: mteb/mtop_domain config: de split: test revision: d80d48c1eb48d3562165c59d59d0034df9fff0bf metrics: - type: accuracy value: 91.94702733164272 - type: f1 value: 91.17043441745282 - task: type: Classification dataset: name: MTEB MTOPDomainClassification (es) type: mteb/mtop_domain config: es split: test revision: d80d48c1eb48d3562165c59d59d0034df9fff0bf metrics: - type: accuracy value: 92.20146764509674 - type: f1 value: 91.98359080555608 - task: type: Classification dataset: name: MTEB MTOPDomainClassification (fr) type: mteb/mtop_domain config: fr split: test revision: d80d48c1eb48d3562165c59d59d0034df9fff0bf metrics: - type: accuracy value: 88.99780770435328 - type: f1 value: 89.19746342724068 - task: type: Classification dataset: name: MTEB MTOPDomainClassification (hi) type: mteb/mtop_domain config: hi split: test revision: d80d48c1eb48d3562165c59d59d0034df9fff0bf metrics: - type: accuracy value: 89.78486912871998 - type: f1 value: 89.24578823628642 - task: type: Classification dataset: name: MTEB MTOPDomainClassification (th) type: mteb/mtop_domain config: th split: test revision: d80d48c1eb48d3562165c59d59d0034df9fff0bf metrics: - type: accuracy value: 88.74502712477394 - type: f1 value: 89.00297573881542 - task: type: Classification dataset: name: MTEB MTOPIntentClassification (en) type: mteb/mtop_intent config: en split: test revision: ae001d0e6b1228650b7bd1c2c65fb50ad11a8aba metrics: - type: accuracy value: 77.9046967624259 - type: f1 value: 59.36787125785957 - task: type: Classification dataset: name: MTEB MTOPIntentClassification (de) type: mteb/mtop_intent config: de split: test revision: ae001d0e6b1228650b7bd1c2c65fb50ad11a8aba metrics: - type: accuracy value: 74.5280360664976 - type: f1 value: 57.17723440888718 - task: type: Classification dataset: name: MTEB MTOPIntentClassification (es) type: mteb/mtop_intent config: es split: test revision: ae001d0e6b1228650b7bd1c2c65fb50ad11a8aba metrics: - type: accuracy value: 75.44029352901934 - type: f1 value: 54.052855531072964 - task: type: Classification dataset: name: MTEB MTOPIntentClassification (fr) type: mteb/mtop_intent config: fr split: test revision: ae001d0e6b1228650b7bd1c2c65fb50ad11a8aba metrics: - type: accuracy value: 70.5606013153774 - type: f1 value: 52.62215934386531 - task: type: Classification dataset: name: MTEB MTOPIntentClassification (hi) type: mteb/mtop_intent config: hi split: test revision: ae001d0e6b1228650b7bd1c2c65fb50ad11a8aba metrics: - type: accuracy value: 73.11581211903908 - type: f1 value: 52.341291845645465 - task: type: Classification dataset: name: MTEB MTOPIntentClassification (th) type: mteb/mtop_intent config: th split: test revision: ae001d0e6b1228650b7bd1c2c65fb50ad11a8aba metrics: - type: accuracy value: 74.28933092224233 - type: f1 value: 57.07918745504911 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (af) type: mteb/amazon_massive_intent config: af split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 62.38063214525892 - type: f1 value: 59.46463723443009 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (am) type: mteb/amazon_massive_intent config: am 
split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 56.06926698049766 - type: f1 value: 52.49084283283562 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (ar) type: mteb/amazon_massive_intent config: ar split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 60.74983187626093 - type: f1 value: 56.960640620165904 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (az) type: mteb/amazon_massive_intent config: az split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 64.86550100874243 - type: f1 value: 62.47370548140688 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (bn) type: mteb/amazon_massive_intent config: bn split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 63.971082716879636 - type: f1 value: 61.03812421957381 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (cy) type: mteb/amazon_massive_intent config: cy split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 54.98318762609282 - type: f1 value: 51.51207916008392 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (da) type: mteb/amazon_massive_intent config: da split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 69.45527908540686 - type: f1 value: 66.16631905400318 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (de) type: mteb/amazon_massive_intent config: de split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 69.32750504371216 - type: f1 value: 66.16755288646591 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (el) type: mteb/amazon_massive_intent config: el split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 69.09213180901143 - type: f1 value: 66.95654394661507 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (en) type: mteb/amazon_massive_intent config: en split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 73.75588433086752 - type: f1 value: 71.79973779656923 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (es) type: mteb/amazon_massive_intent config: es split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 70.49428379287154 - type: f1 value: 68.37494379215734 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (fa) type: mteb/amazon_massive_intent config: fa split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 69.90921318090115 - type: f1 value: 66.79517376481645 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (fi) type: mteb/amazon_massive_intent config: fi split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 70.12104909213181 - type: f1 value: 67.29448842879584 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (fr) type: mteb/amazon_massive_intent config: fr split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 69.34095494283793 - type: f1 value: 67.01134288992947 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (he) type: 
mteb/amazon_massive_intent config: he split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 67.61264290517822 - type: f1 value: 64.68730512660757 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (hi) type: mteb/amazon_massive_intent config: hi split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 67.79757901815738 - type: f1 value: 65.24938539425598 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (hu) type: mteb/amazon_massive_intent config: hu split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 69.68728984532616 - type: f1 value: 67.0487169762553 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (hy) type: mteb/amazon_massive_intent config: hy split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 62.07464694014795 - type: f1 value: 59.183532276789286 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (id) type: mteb/amazon_massive_intent config: id split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 70.04707464694015 - type: f1 value: 67.66829629003848 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (is) type: mteb/amazon_massive_intent config: is split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 62.42434431741762 - type: f1 value: 59.01617226544757 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (it) type: mteb/amazon_massive_intent config: it split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 70.53127101546738 - type: f1 value: 68.10033760906255 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (ja) type: mteb/amazon_massive_intent config: ja split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 72.50504371217215 - type: f1 value: 69.74931103158923 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (jv) type: mteb/amazon_massive_intent config: jv split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 57.91190316072628 - type: f1 value: 54.05551136648796 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (ka) type: mteb/amazon_massive_intent config: ka split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 51.78211163416275 - type: f1 value: 49.874888544058535 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (km) type: mteb/amazon_massive_intent config: km split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 47.017484868863484 - type: f1 value: 44.53364263352014 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (kn) type: mteb/amazon_massive_intent config: kn split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 62.16207128446537 - type: f1 value: 59.01185692320829 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (ko) type: mteb/amazon_massive_intent config: ko split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 69.42501681237391 - type: f1 value: 67.13169450166086 - task: type: Classification dataset: name: MTEB 
MassiveIntentClassification (lv) type: mteb/amazon_massive_intent config: lv split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 67.0780094149294 - type: f1 value: 64.41720167850707 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (ml) type: mteb/amazon_massive_intent config: ml split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 65.57162071284466 - type: f1 value: 62.414138683804424 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (mn) type: mteb/amazon_massive_intent config: mn split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 61.71149966375252 - type: f1 value: 58.594805125087234 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (ms) type: mteb/amazon_massive_intent config: ms split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 66.03900470746471 - type: f1 value: 63.87937257883887 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (my) type: mteb/amazon_massive_intent config: my split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 60.8776059179556 - type: f1 value: 57.48587618059131 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (nb) type: mteb/amazon_massive_intent config: nb split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 69.87895090786819 - type: f1 value: 66.8141299430347 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (nl) type: mteb/amazon_massive_intent config: nl split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 70.45057162071285 - type: f1 value: 67.46444039673516 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (pl) type: mteb/amazon_massive_intent config: pl split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 71.546738399462 - type: f1 value: 68.63640876702655 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (pt) type: mteb/amazon_massive_intent config: pt split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 70.72965702757229 - type: f1 value: 68.54119560379115 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (ro) type: mteb/amazon_massive_intent config: ro split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 68.35574983187625 - type: f1 value: 65.88844917691927 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (ru) type: mteb/amazon_massive_intent config: ru split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 71.70477471418964 - type: f1 value: 69.19665697061978 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (sl) type: mteb/amazon_massive_intent config: sl split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 67.0880968392737 - type: f1 value: 64.76962317666086 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (sq) type: mteb/amazon_massive_intent config: sq split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 65.18493611297916 - type: f1 value: 62.49984559035371 - task: type: 
Classification dataset: name: MTEB MassiveIntentClassification (sv) type: mteb/amazon_massive_intent config: sv split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 71.75857431069265 - type: f1 value: 69.20053687623418 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (sw) type: mteb/amazon_massive_intent config: sw split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 58.500336247478145 - type: f1 value: 55.2972398687929 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (ta) type: mteb/amazon_massive_intent config: ta split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 62.68997982515132 - type: f1 value: 59.36848202755348 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (te) type: mteb/amazon_massive_intent config: te split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 63.01950235373235 - type: f1 value: 60.09351954625423 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (th) type: mteb/amazon_massive_intent config: th split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 68.29186281102892 - type: f1 value: 67.57860496703447 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (tl) type: mteb/amazon_massive_intent config: tl split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 64.77471418964357 - type: f1 value: 61.913983147713836 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (tr) type: mteb/amazon_massive_intent config: tr split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 69.87222595830532 - type: f1 value: 66.03679033708141 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (ur) type: mteb/amazon_massive_intent config: ur split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 64.04505716207127 - type: f1 value: 61.28569169817908 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (vi) type: mteb/amazon_massive_intent config: vi split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 69.38466711499663 - type: f1 value: 67.20532357036844 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (zh-CN) type: mteb/amazon_massive_intent config: zh-CN split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 71.12306657700067 - type: f1 value: 68.91251226588182 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (zh-TW) type: mteb/amazon_massive_intent config: zh-TW split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 66.20040349697378 - type: f1 value: 66.02657347714175 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (af) type: mteb/amazon_massive_scenario config: af split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 68.73907195696032 - type: f1 value: 66.98484521791418 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (am) type: mteb/amazon_massive_scenario config: am split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 
60.58843308675185 - type: f1 value: 58.95591723092005 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (ar) type: mteb/amazon_massive_scenario config: ar split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 66.22730329522528 - type: f1 value: 66.0894499712115 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (az) type: mteb/amazon_massive_scenario config: az split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 66.48285137861465 - type: f1 value: 65.21963176785157 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (bn) type: mteb/amazon_massive_scenario config: bn split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 67.74714189643578 - type: f1 value: 66.8212192745412 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (cy) type: mteb/amazon_massive_scenario config: cy split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 59.09213180901143 - type: f1 value: 56.70735546356339 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (da) type: mteb/amazon_massive_scenario config: da split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 75.05716207128448 - type: f1 value: 74.8413712365364 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (de) type: mteb/amazon_massive_scenario config: de split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 74.69737726967047 - type: f1 value: 74.7664341963 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (el) type: mteb/amazon_massive_scenario config: el split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 73.90383322125084 - type: f1 value: 73.59201554448323 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (en) type: mteb/amazon_massive_scenario config: en split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 77.51176866173503 - type: f1 value: 77.46104434577758 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (es) type: mteb/amazon_massive_scenario config: es split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 74.31069266980496 - type: f1 value: 74.61048660675635 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (fa) type: mteb/amazon_massive_scenario config: fa split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 72.95225285810356 - type: f1 value: 72.33160006574627 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (fi) type: mteb/amazon_massive_scenario config: fi split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 73.12373907195696 - type: f1 value: 73.20921012557481 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (fr) type: mteb/amazon_massive_scenario config: fr split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 73.86684599865501 - type: f1 value: 73.82348774610831 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (he) type: mteb/amazon_massive_scenario config: he split: test 
revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 71.40215198386012 - type: f1 value: 71.11945183971858 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (hi) type: mteb/amazon_massive_scenario config: hi split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 72.12844653665098 - type: f1 value: 71.34450495911766 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (hu) type: mteb/amazon_massive_scenario config: hu split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 74.52252858103566 - type: f1 value: 73.98878711342999 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (hy) type: mteb/amazon_massive_scenario config: hy split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 64.93611297915265 - type: f1 value: 63.723200467653385 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (id) type: mteb/amazon_massive_scenario config: id split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 74.11903160726295 - type: f1 value: 73.82138439467096 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (is) type: mteb/amazon_massive_scenario config: is split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 67.15198386012105 - type: f1 value: 66.02172193802167 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (it) type: mteb/amazon_massive_scenario config: it split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 74.32414256893072 - type: f1 value: 74.30943421170574 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (ja) type: mteb/amazon_massive_scenario config: ja split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 77.46805648957633 - type: f1 value: 77.62808409298209 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (jv) type: mteb/amazon_massive_scenario config: jv split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 63.318762609280434 - type: f1 value: 62.094284066075076 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (ka) type: mteb/amazon_massive_scenario config: ka split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 58.34902488231338 - type: f1 value: 57.12893860987984 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (km) type: mteb/amazon_massive_scenario config: km split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 50.88433086751849 - type: f1 value: 48.2272350802058 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (kn) type: mteb/amazon_massive_scenario config: kn split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 66.4425016812374 - type: f1 value: 64.61463095996173 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (ko) type: mteb/amazon_massive_scenario config: ko split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 75.04707464694015 - type: f1 value: 75.05099199098998 - task: type: Classification dataset: name: MTEB 
MassiveScenarioClassification (lv) type: mteb/amazon_massive_scenario config: lv split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 70.50437121721586 - type: f1 value: 69.83397721096314 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (ml) type: mteb/amazon_massive_scenario config: ml split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 69.94283792871553 - type: f1 value: 68.8704663703913 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (mn) type: mteb/amazon_massive_scenario config: mn split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 64.79488903833222 - type: f1 value: 63.615424063345436 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (ms) type: mteb/amazon_massive_scenario config: ms split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 69.88231338264963 - type: f1 value: 68.57892302593237 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (my) type: mteb/amazon_massive_scenario config: my split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 63.248150638870214 - type: f1 value: 61.06680605338809 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (nb) type: mteb/amazon_massive_scenario config: nb split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 74.84196368527236 - type: f1 value: 74.52566464968763 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (nl) type: mteb/amazon_massive_scenario config: nl split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 74.8285137861466 - type: f1 value: 74.8853197608802 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (pl) type: mteb/amazon_massive_scenario config: pl split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 74.13248150638869 - type: f1 value: 74.3982040999179 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (pt) type: mteb/amazon_massive_scenario config: pt split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 73.49024882313383 - type: f1 value: 73.82153848368573 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (ro) type: mteb/amazon_massive_scenario config: ro split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 71.72158708809684 - type: f1 value: 71.85049433180541 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (ru) type: mteb/amazon_massive_scenario config: ru split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 75.137861466039 - type: f1 value: 75.37628348188467 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (sl) type: mteb/amazon_massive_scenario config: sl split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 71.86953597848016 - type: f1 value: 71.87537624521661 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (sq) type: mteb/amazon_massive_scenario config: sq split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 70.27572293207801 - 
type: f1 value: 68.80017302344231 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (sv) type: mteb/amazon_massive_scenario config: sv split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 76.09952925353059 - type: f1 value: 76.07992707688408 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (sw) type: mteb/amazon_massive_scenario config: sw split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 63.140551445864155 - type: f1 value: 61.73855010331415 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (ta) type: mteb/amazon_massive_scenario config: ta split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 66.27774041694687 - type: f1 value: 64.83664868894539 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (te) type: mteb/amazon_massive_scenario config: te split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 66.69468728984533 - type: f1 value: 64.76239666920868 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (th) type: mteb/amazon_massive_scenario config: th split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 73.44653665097512 - type: f1 value: 73.14646052013873 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (tl) type: mteb/amazon_massive_scenario config: tl split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 67.71351714862139 - type: f1 value: 66.67212180163382 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (tr) type: mteb/amazon_massive_scenario config: tr split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 73.9946200403497 - type: f1 value: 73.87348793725525 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (ur) type: mteb/amazon_massive_scenario config: ur split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 68.15400134498992 - type: f1 value: 67.09433241421094 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (vi) type: mteb/amazon_massive_scenario config: vi split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 73.11365164761264 - type: f1 value: 73.59502539433753 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (zh-CN) type: mteb/amazon_massive_scenario config: zh-CN split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 76.82582380632145 - type: f1 value: 76.89992945316313 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (zh-TW) type: mteb/amazon_massive_scenario config: zh-TW split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 71.81237390719569 - type: f1 value: 72.36499770986265 - task: type: Clustering dataset: name: MTEB MedrxivClusteringP2P type: mteb/medrxiv-clustering-p2p config: default split: test revision: e7a26af6f3ae46b30dde8737f02c07b1505bcc73 metrics: - type: v_measure value: 31.480506569594695 - task: type: Clustering dataset: name: MTEB MedrxivClusteringS2S type: mteb/medrxiv-clustering-s2s config: default split: test revision: 35191c8c0dca72d8ff3efcd72aa802307d469663 metrics: - type: 
v_measure value: 29.71252128004552 - task: type: Reranking dataset: name: MTEB MindSmallReranking type: mteb/mind_small config: default split: test revision: 3bdac13927fdc888b903db93b2ffdbd90b295a69 metrics: - type: map value: 31.421396787056548 - type: mrr value: 32.48155274872267 - task: type: Retrieval dataset: name: MTEB NFCorpus type: nfcorpus config: default split: test revision: None metrics: - type: map_at_1 value: 5.595 - type: map_at_10 value: 12.642000000000001 - type: map_at_100 value: 15.726 - type: map_at_1000 value: 17.061999999999998 - type: map_at_3 value: 9.125 - type: map_at_5 value: 10.866000000000001 - type: mrr_at_1 value: 43.344 - type: mrr_at_10 value: 52.227999999999994 - type: mrr_at_100 value: 52.898999999999994 - type: mrr_at_1000 value: 52.944 - type: mrr_at_3 value: 49.845 - type: mrr_at_5 value: 51.115 - type: ndcg_at_1 value: 41.949999999999996 - type: ndcg_at_10 value: 33.995 - type: ndcg_at_100 value: 30.869999999999997 - type: ndcg_at_1000 value: 39.487 - type: ndcg_at_3 value: 38.903999999999996 - type: ndcg_at_5 value: 37.236999999999995 - type: precision_at_1 value: 43.344 - type: precision_at_10 value: 25.480000000000004 - type: precision_at_100 value: 7.672 - type: precision_at_1000 value: 2.028 - type: precision_at_3 value: 36.636 - type: precision_at_5 value: 32.632 - type: recall_at_1 value: 5.595 - type: recall_at_10 value: 16.466 - type: recall_at_100 value: 31.226 - type: recall_at_1000 value: 62.778999999999996 - type: recall_at_3 value: 9.931 - type: recall_at_5 value: 12.884 - task: type: Retrieval dataset: name: MTEB NQ type: nq config: default split: test revision: None metrics: - type: map_at_1 value: 40.414 - type: map_at_10 value: 56.754000000000005 - type: map_at_100 value: 57.457 - type: map_at_1000 value: 57.477999999999994 - type: map_at_3 value: 52.873999999999995 - type: map_at_5 value: 55.175 - type: mrr_at_1 value: 45.278 - type: mrr_at_10 value: 59.192 - type: mrr_at_100 value: 59.650000000000006 - type: mrr_at_1000 value: 59.665 - type: mrr_at_3 value: 56.141 - type: mrr_at_5 value: 57.998000000000005 - type: ndcg_at_1 value: 45.278 - type: ndcg_at_10 value: 64.056 - type: ndcg_at_100 value: 66.89 - type: ndcg_at_1000 value: 67.364 - type: ndcg_at_3 value: 56.97 - type: ndcg_at_5 value: 60.719 - type: precision_at_1 value: 45.278 - type: precision_at_10 value: 9.994 - type: precision_at_100 value: 1.165 - type: precision_at_1000 value: 0.121 - type: precision_at_3 value: 25.512 - type: precision_at_5 value: 17.509 - type: recall_at_1 value: 40.414 - type: recall_at_10 value: 83.596 - type: recall_at_100 value: 95.72 - type: recall_at_1000 value: 99.24 - type: recall_at_3 value: 65.472 - type: recall_at_5 value: 74.039 - task: type: Retrieval dataset: name: MTEB QuoraRetrieval type: quora config: default split: test revision: None metrics: - type: map_at_1 value: 70.352 - type: map_at_10 value: 84.369 - type: map_at_100 value: 85.02499999999999 - type: map_at_1000 value: 85.04 - type: map_at_3 value: 81.42399999999999 - type: map_at_5 value: 83.279 - type: mrr_at_1 value: 81.05 - type: mrr_at_10 value: 87.401 - type: mrr_at_100 value: 87.504 - type: mrr_at_1000 value: 87.505 - type: mrr_at_3 value: 86.443 - type: mrr_at_5 value: 87.10799999999999 - type: ndcg_at_1 value: 81.04 - type: ndcg_at_10 value: 88.181 - type: ndcg_at_100 value: 89.411 - type: ndcg_at_1000 value: 89.507 - type: ndcg_at_3 value: 85.28099999999999 - type: ndcg_at_5 value: 86.888 - type: precision_at_1 value: 81.04 - type: precision_at_10 value: 13.406 - 
type: precision_at_100 value: 1.5350000000000001 - type: precision_at_1000 value: 0.157 - type: precision_at_3 value: 37.31 - type: precision_at_5 value: 24.54 - type: recall_at_1 value: 70.352 - type: recall_at_10 value: 95.358 - type: recall_at_100 value: 99.541 - type: recall_at_1000 value: 99.984 - type: recall_at_3 value: 87.111 - type: recall_at_5 value: 91.643 - task: type: Clustering dataset: name: MTEB RedditClustering type: mteb/reddit-clustering config: default split: test revision: 24640382cdbf8abc73003fb0fa6d111a705499eb metrics: - type: v_measure value: 46.54068723291946 - task: type: Clustering dataset: name: MTEB RedditClusteringP2P type: mteb/reddit-clustering-p2p config: default split: test revision: 282350215ef01743dc01b456c7f5241fa8937f16 metrics: - type: v_measure value: 63.216287629895994 - task: type: Retrieval dataset: name: MTEB SCIDOCS type: scidocs config: default split: test revision: None metrics: - type: map_at_1 value: 4.023000000000001 - type: map_at_10 value: 10.071 - type: map_at_100 value: 11.892 - type: map_at_1000 value: 12.196 - type: map_at_3 value: 7.234 - type: map_at_5 value: 8.613999999999999 - type: mrr_at_1 value: 19.900000000000002 - type: mrr_at_10 value: 30.516 - type: mrr_at_100 value: 31.656000000000002 - type: mrr_at_1000 value: 31.723000000000003 - type: mrr_at_3 value: 27.400000000000002 - type: mrr_at_5 value: 29.270000000000003 - type: ndcg_at_1 value: 19.900000000000002 - type: ndcg_at_10 value: 17.474 - type: ndcg_at_100 value: 25.020999999999997 - type: ndcg_at_1000 value: 30.728 - type: ndcg_at_3 value: 16.588 - type: ndcg_at_5 value: 14.498 - type: precision_at_1 value: 19.900000000000002 - type: precision_at_10 value: 9.139999999999999 - type: precision_at_100 value: 2.011 - type: precision_at_1000 value: 0.33899999999999997 - type: precision_at_3 value: 15.667 - type: precision_at_5 value: 12.839999999999998 - type: recall_at_1 value: 4.023000000000001 - type: recall_at_10 value: 18.497 - type: recall_at_100 value: 40.8 - type: recall_at_1000 value: 68.812 - type: recall_at_3 value: 9.508 - type: recall_at_5 value: 12.983 - task: type: STS dataset: name: MTEB SICK-R type: mteb/sickr-sts config: default split: test revision: a6ea5a8cab320b040a23452cc28066d9beae2cee metrics: - type: cos_sim_pearson value: 83.967008785134 - type: cos_sim_spearman value: 80.23142141101837 - type: euclidean_pearson value: 81.20166064704539 - type: euclidean_spearman value: 80.18961335654585 - type: manhattan_pearson value: 81.13925443187625 - type: manhattan_spearman value: 80.07948723044424 - task: type: STS dataset: name: MTEB STS12 type: mteb/sts12-sts config: default split: test revision: a0d554a64d88156834ff5ae9920b964011b16384 metrics: - type: cos_sim_pearson value: 86.94262461316023 - type: cos_sim_spearman value: 80.01596278563865 - type: euclidean_pearson value: 83.80799622922581 - type: euclidean_spearman value: 79.94984954947103 - type: manhattan_pearson value: 83.68473841756281 - type: manhattan_spearman value: 79.84990707951822 - task: type: STS dataset: name: MTEB STS13 type: mteb/sts13-sts config: default split: test revision: 7e90230a92c190f1bf69ae9002b8cea547a64cca metrics: - type: cos_sim_pearson value: 80.57346443146068 - type: cos_sim_spearman value: 81.54689837570866 - type: euclidean_pearson value: 81.10909881516007 - type: euclidean_spearman value: 81.56746243261762 - type: manhattan_pearson value: 80.87076036186582 - type: manhattan_spearman value: 81.33074987964402 - task: type: STS dataset: name: MTEB STS14 type: 
mteb/sts14-sts config: default split: test revision: 6031580fec1f6af667f0bd2da0a551cf4f0b2375 metrics: - type: cos_sim_pearson value: 79.54733787179849 - type: cos_sim_spearman value: 77.72202105610411 - type: euclidean_pearson value: 78.9043595478849 - type: euclidean_spearman value: 77.93422804309435 - type: manhattan_pearson value: 78.58115121621368 - type: manhattan_spearman value: 77.62508135122033 - task: type: STS dataset: name: MTEB STS15 type: mteb/sts15-sts config: default split: test revision: ae752c7c21bf194d8b67fd573edf7ae58183cbe3 metrics: - type: cos_sim_pearson value: 88.59880017237558 - type: cos_sim_spearman value: 89.31088630824758 - type: euclidean_pearson value: 88.47069261564656 - type: euclidean_spearman value: 89.33581971465233 - type: manhattan_pearson value: 88.40774264100956 - type: manhattan_spearman value: 89.28657485627835 - task: type: STS dataset: name: MTEB STS16 type: mteb/sts16-sts config: default split: test revision: 4d8694f8f0e0100860b497b999b3dbed754a0513 metrics: - type: cos_sim_pearson value: 84.08055117917084 - type: cos_sim_spearman value: 85.78491813080304 - type: euclidean_pearson value: 84.99329155500392 - type: euclidean_spearman value: 85.76728064677287 - type: manhattan_pearson value: 84.87947428989587 - type: manhattan_spearman value: 85.62429454917464 - task: type: STS dataset: name: MTEB STS17 (ko-ko) type: mteb/sts17-crosslingual-sts config: ko-ko split: test revision: af5e6fb845001ecf41f4c1e033ce921939a2a68d metrics: - type: cos_sim_pearson value: 82.14190939287384 - type: cos_sim_spearman value: 82.27331573306041 - type: euclidean_pearson value: 81.891896953716 - type: euclidean_spearman value: 82.37695542955998 - type: manhattan_pearson value: 81.73123869460504 - type: manhattan_spearman value: 82.19989168441421 - task: type: STS dataset: name: MTEB STS17 (ar-ar) type: mteb/sts17-crosslingual-sts config: ar-ar split: test revision: af5e6fb845001ecf41f4c1e033ce921939a2a68d metrics: - type: cos_sim_pearson value: 76.84695301843362 - type: cos_sim_spearman value: 77.87790986014461 - type: euclidean_pearson value: 76.91981583106315 - type: euclidean_spearman value: 77.88154772749589 - type: manhattan_pearson value: 76.94953277451093 - type: manhattan_spearman value: 77.80499230728604 - task: type: STS dataset: name: MTEB STS17 (en-ar) type: mteb/sts17-crosslingual-sts config: en-ar split: test revision: af5e6fb845001ecf41f4c1e033ce921939a2a68d metrics: - type: cos_sim_pearson value: 75.44657840482016 - type: cos_sim_spearman value: 75.05531095119674 - type: euclidean_pearson value: 75.88161755829299 - type: euclidean_spearman value: 74.73176238219332 - type: manhattan_pearson value: 75.63984765635362 - type: manhattan_spearman value: 74.86476440770737 - task: type: STS dataset: name: MTEB STS17 (en-de) type: mteb/sts17-crosslingual-sts config: en-de split: test revision: af5e6fb845001ecf41f4c1e033ce921939a2a68d metrics: - type: cos_sim_pearson value: 85.64700140524133 - type: cos_sim_spearman value: 86.16014210425672 - type: euclidean_pearson value: 86.49086860843221 - type: euclidean_spearman value: 86.09729326815614 - type: manhattan_pearson value: 86.43406265125513 - type: manhattan_spearman value: 86.17740150939994 - task: type: STS dataset: name: MTEB STS17 (en-en) type: mteb/sts17-crosslingual-sts config: en-en split: test revision: af5e6fb845001ecf41f4c1e033ce921939a2a68d metrics: - type: cos_sim_pearson value: 87.91170098764921 - type: cos_sim_spearman value: 88.12437004058931 - type: euclidean_pearson value: 88.81828254494437 - 
type: euclidean_spearman value: 88.14831794572122 - type: manhattan_pearson value: 88.93442183448961 - type: manhattan_spearman value: 88.15254630778304 - task: type: STS dataset: name: MTEB STS17 (en-tr) type: mteb/sts17-crosslingual-sts config: en-tr split: test revision: af5e6fb845001ecf41f4c1e033ce921939a2a68d metrics: - type: cos_sim_pearson value: 72.91390577997292 - type: cos_sim_spearman value: 71.22979457536074 - type: euclidean_pearson value: 74.40314008106749 - type: euclidean_spearman value: 72.54972136083246 - type: manhattan_pearson value: 73.85687539530218 - type: manhattan_spearman value: 72.09500771742637 - task: type: STS dataset: name: MTEB STS17 (es-en) type: mteb/sts17-crosslingual-sts config: es-en split: test revision: af5e6fb845001ecf41f4c1e033ce921939a2a68d metrics: - type: cos_sim_pearson value: 80.9301067983089 - type: cos_sim_spearman value: 80.74989828346473 - type: euclidean_pearson value: 81.36781301814257 - type: euclidean_spearman value: 80.9448819964426 - type: manhattan_pearson value: 81.0351322685609 - type: manhattan_spearman value: 80.70192121844177 - task: type: STS dataset: name: MTEB STS17 (es-es) type: mteb/sts17-crosslingual-sts config: es-es split: test revision: af5e6fb845001ecf41f4c1e033ce921939a2a68d metrics: - type: cos_sim_pearson value: 87.13820465980005 - type: cos_sim_spearman value: 86.73532498758757 - type: euclidean_pearson value: 87.21329451846637 - type: euclidean_spearman value: 86.57863198601002 - type: manhattan_pearson value: 87.06973713818554 - type: manhattan_spearman value: 86.47534918791499 - task: type: STS dataset: name: MTEB STS17 (fr-en) type: mteb/sts17-crosslingual-sts config: fr-en split: test revision: af5e6fb845001ecf41f4c1e033ce921939a2a68d metrics: - type: cos_sim_pearson value: 85.48720108904415 - type: cos_sim_spearman value: 85.62221757068387 - type: euclidean_pearson value: 86.1010129512749 - type: euclidean_spearman value: 85.86580966509942 - type: manhattan_pearson value: 86.26800938808971 - type: manhattan_spearman value: 85.88902721678429 - task: type: STS dataset: name: MTEB STS17 (it-en) type: mteb/sts17-crosslingual-sts config: it-en split: test revision: af5e6fb845001ecf41f4c1e033ce921939a2a68d metrics: - type: cos_sim_pearson value: 83.98021347333516 - type: cos_sim_spearman value: 84.53806553803501 - type: euclidean_pearson value: 84.61483347248364 - type: euclidean_spearman value: 85.14191408011702 - type: manhattan_pearson value: 84.75297588825967 - type: manhattan_spearman value: 85.33176753669242 - task: type: STS dataset: name: MTEB STS17 (nl-en) type: mteb/sts17-crosslingual-sts config: nl-en split: test revision: af5e6fb845001ecf41f4c1e033ce921939a2a68d metrics: - type: cos_sim_pearson value: 84.51856644893233 - type: cos_sim_spearman value: 85.27510748506413 - type: euclidean_pearson value: 85.09886861540977 - type: euclidean_spearman value: 85.62579245860887 - type: manhattan_pearson value: 84.93017860464607 - type: manhattan_spearman value: 85.5063988898453 - task: type: STS dataset: name: MTEB STS22 (en) type: mteb/sts22-crosslingual-sts config: en split: test revision: 6d1ba47164174a496b7fa5d3569dae26a6813b80 metrics: - type: cos_sim_pearson value: 62.581573200584195 - type: cos_sim_spearman value: 63.05503590247928 - type: euclidean_pearson value: 63.652564812602094 - type: euclidean_spearman value: 62.64811520876156 - type: manhattan_pearson value: 63.506842893061076 - type: manhattan_spearman value: 62.51289573046917 - task: type: STS dataset: name: MTEB STS22 (de) type: 
mteb/sts22-crosslingual-sts config: de split: test revision: 6d1ba47164174a496b7fa5d3569dae26a6813b80 metrics: - type: cos_sim_pearson value: 48.2248801729127 - type: cos_sim_spearman value: 56.5936604678561 - type: euclidean_pearson value: 43.98149464089 - type: euclidean_spearman value: 56.108561882423615 - type: manhattan_pearson value: 43.86880305903564 - type: manhattan_spearman value: 56.04671150510166 - task: type: STS dataset: name: MTEB STS22 (es) type: mteb/sts22-crosslingual-sts config: es split: test revision: 6d1ba47164174a496b7fa5d3569dae26a6813b80 metrics: - type: cos_sim_pearson value: 55.17564527009831 - type: cos_sim_spearman value: 64.57978560979488 - type: euclidean_pearson value: 58.8818330154583 - type: euclidean_spearman value: 64.99214839071281 - type: manhattan_pearson value: 58.72671436121381 - type: manhattan_spearman value: 65.10713416616109 - task: type: STS dataset: name: MTEB STS22 (pl) type: mteb/sts22-crosslingual-sts config: pl split: test revision: 6d1ba47164174a496b7fa5d3569dae26a6813b80 metrics: - type: cos_sim_pearson value: 26.772131864023297 - type: cos_sim_spearman value: 34.68200792408681 - type: euclidean_pearson value: 16.68082419005441 - type: euclidean_spearman value: 34.83099932652166 - type: manhattan_pearson value: 16.52605949659529 - type: manhattan_spearman value: 34.82075801399475 - task: type: STS dataset: name: MTEB STS22 (tr) type: mteb/sts22-crosslingual-sts config: tr split: test revision: 6d1ba47164174a496b7fa5d3569dae26a6813b80 metrics: - type: cos_sim_pearson value: 54.42415189043831 - type: cos_sim_spearman value: 63.54594264576758 - type: euclidean_pearson value: 57.36577498297745 - type: euclidean_spearman value: 63.111466379158074 - type: manhattan_pearson value: 57.584543715873885 - type: manhattan_spearman value: 63.22361054139183 - task: type: STS dataset: name: MTEB STS22 (ar) type: mteb/sts22-crosslingual-sts config: ar split: test revision: 6d1ba47164174a496b7fa5d3569dae26a6813b80 metrics: - type: cos_sim_pearson value: 47.55216762405518 - type: cos_sim_spearman value: 56.98670142896412 - type: euclidean_pearson value: 50.15318757562699 - type: euclidean_spearman value: 56.524941926541906 - type: manhattan_pearson value: 49.955618528674904 - type: manhattan_spearman value: 56.37102209240117 - task: type: STS dataset: name: MTEB STS22 (ru) type: mteb/sts22-crosslingual-sts config: ru split: test revision: 6d1ba47164174a496b7fa5d3569dae26a6813b80 metrics: - type: cos_sim_pearson value: 49.20540980338571 - type: cos_sim_spearman value: 59.9009453504406 - type: euclidean_pearson value: 49.557749853620535 - type: euclidean_spearman value: 59.76631621172456 - type: manhattan_pearson value: 49.62340591181147 - type: manhattan_spearman value: 59.94224880322436 - task: type: STS dataset: name: MTEB STS22 (zh) type: mteb/sts22-crosslingual-sts config: zh split: test revision: 6d1ba47164174a496b7fa5d3569dae26a6813b80 metrics: - type: cos_sim_pearson value: 51.508169956576985 - type: cos_sim_spearman value: 66.82461565306046 - type: euclidean_pearson value: 56.2274426480083 - type: euclidean_spearman value: 66.6775323848333 - type: manhattan_pearson value: 55.98277796300661 - type: manhattan_spearman value: 66.63669848497175 - task: type: STS dataset: name: MTEB STS22 (fr) type: mteb/sts22-crosslingual-sts config: fr split: test revision: 6d1ba47164174a496b7fa5d3569dae26a6813b80 metrics: - type: cos_sim_pearson value: 72.86478788045507 - type: cos_sim_spearman value: 76.7946552053193 - type: euclidean_pearson value: 
75.01598530490269 - type: euclidean_spearman value: 76.83618917858281 - type: manhattan_pearson value: 74.68337628304332 - type: manhattan_spearman value: 76.57480204017773 - task: type: STS dataset: name: MTEB STS22 (de-en) type: mteb/sts22-crosslingual-sts config: de-en split: test revision: 6d1ba47164174a496b7fa5d3569dae26a6813b80 metrics: - type: cos_sim_pearson value: 55.922619099401984 - type: cos_sim_spearman value: 56.599362477240774 - type: euclidean_pearson value: 56.68307052369783 - type: euclidean_spearman value: 54.28760436777401 - type: manhattan_pearson value: 56.67763566500681 - type: manhattan_spearman value: 53.94619541711359 - task: type: STS dataset: name: MTEB STS22 (es-en) type: mteb/sts22-crosslingual-sts config: es-en split: test revision: 6d1ba47164174a496b7fa5d3569dae26a6813b80 metrics: - type: cos_sim_pearson value: 66.74357206710913 - type: cos_sim_spearman value: 72.5208244925311 - type: euclidean_pearson value: 67.49254562186032 - type: euclidean_spearman value: 72.02469076238683 - type: manhattan_pearson value: 67.45251772238085 - type: manhattan_spearman value: 72.05538819984538 - task: type: STS dataset: name: MTEB STS22 (it) type: mteb/sts22-crosslingual-sts config: it split: test revision: 6d1ba47164174a496b7fa5d3569dae26a6813b80 metrics: - type: cos_sim_pearson value: 71.25734330033191 - type: cos_sim_spearman value: 76.98349083946823 - type: euclidean_pearson value: 73.71642838667736 - type: euclidean_spearman value: 77.01715504651384 - type: manhattan_pearson value: 73.61712711868105 - type: manhattan_spearman value: 77.01392571153896 - task: type: STS dataset: name: MTEB STS22 (pl-en) type: mteb/sts22-crosslingual-sts config: pl-en split: test revision: 6d1ba47164174a496b7fa5d3569dae26a6813b80 metrics: - type: cos_sim_pearson value: 63.18215462781212 - type: cos_sim_spearman value: 65.54373266117607 - type: euclidean_pearson value: 64.54126095439005 - type: euclidean_spearman value: 65.30410369102711 - type: manhattan_pearson value: 63.50332221148234 - type: manhattan_spearman value: 64.3455878104313 - task: type: STS dataset: name: MTEB STS22 (zh-en) type: mteb/sts22-crosslingual-sts config: zh-en split: test revision: 6d1ba47164174a496b7fa5d3569dae26a6813b80 metrics: - type: cos_sim_pearson value: 62.30509221440029 - type: cos_sim_spearman value: 65.99582704642478 - type: euclidean_pearson value: 63.43818859884195 - type: euclidean_spearman value: 66.83172582815764 - type: manhattan_pearson value: 63.055779168508764 - type: manhattan_spearman value: 65.49585020501449 - task: type: STS dataset: name: MTEB STS22 (es-it) type: mteb/sts22-crosslingual-sts config: es-it split: test revision: 6d1ba47164174a496b7fa5d3569dae26a6813b80 metrics: - type: cos_sim_pearson value: 59.587830825340404 - type: cos_sim_spearman value: 68.93467614588089 - type: euclidean_pearson value: 62.3073527367404 - type: euclidean_spearman value: 69.69758171553175 - type: manhattan_pearson value: 61.9074580815789 - type: manhattan_spearman value: 69.57696375597865 - task: type: STS dataset: name: MTEB STS22 (de-fr) type: mteb/sts22-crosslingual-sts config: de-fr split: test revision: 6d1ba47164174a496b7fa5d3569dae26a6813b80 metrics: - type: cos_sim_pearson value: 57.143220125577066 - type: cos_sim_spearman value: 67.78857859159226 - type: euclidean_pearson value: 55.58225107923733 - type: euclidean_spearman value: 67.80662907184563 - type: manhattan_pearson value: 56.24953502726514 - type: manhattan_spearman value: 67.98262125431616 - task: type: STS dataset: name: MTEB STS22 
(de-pl) type: mteb/sts22-crosslingual-sts config: de-pl split: test revision: 6d1ba47164174a496b7fa5d3569dae26a6813b80 metrics: - type: cos_sim_pearson value: 21.826928900322066 - type: cos_sim_spearman value: 49.578506634400405 - type: euclidean_pearson value: 27.939890138843214 - type: euclidean_spearman value: 52.71950519136242 - type: manhattan_pearson value: 26.39878683847546 - type: manhattan_spearman value: 47.54609580342499 - task: type: STS dataset: name: MTEB STS22 (fr-pl) type: mteb/sts22-crosslingual-sts config: fr-pl split: test revision: 6d1ba47164174a496b7fa5d3569dae26a6813b80 metrics: - type: cos_sim_pearson value: 57.27603854632001 - type: cos_sim_spearman value: 50.709255283710995 - type: euclidean_pearson value: 59.5419024445929 - type: euclidean_spearman value: 50.709255283710995 - type: manhattan_pearson value: 59.03256832438492 - type: manhattan_spearman value: 61.97797868009122 - task: type: STS dataset: name: MTEB STSBenchmark type: mteb/stsbenchmark-sts config: default split: test revision: b0fddb56ed78048fa8b90373c8a3cfc37b684831 metrics: - type: cos_sim_pearson value: 85.00757054859712 - type: cos_sim_spearman value: 87.29283629622222 - type: euclidean_pearson value: 86.54824171775536 - type: euclidean_spearman value: 87.24364730491402 - type: manhattan_pearson value: 86.5062156915074 - type: manhattan_spearman value: 87.15052170378574 - task: type: Reranking dataset: name: MTEB SciDocsRR type: mteb/scidocs-reranking config: default split: test revision: d3c5e1fc0b855ab6097bf1cda04dd73947d7caab metrics: - type: map value: 82.03549357197389 - type: mrr value: 95.05437645143527 - task: type: Retrieval dataset: name: MTEB SciFact type: scifact config: default split: test revision: None metrics: - type: map_at_1 value: 57.260999999999996 - type: map_at_10 value: 66.259 - type: map_at_100 value: 66.884 - type: map_at_1000 value: 66.912 - type: map_at_3 value: 63.685 - type: map_at_5 value: 65.35499999999999 - type: mrr_at_1 value: 60.333000000000006 - type: mrr_at_10 value: 67.5 - type: mrr_at_100 value: 68.013 - type: mrr_at_1000 value: 68.038 - type: mrr_at_3 value: 65.61099999999999 - type: mrr_at_5 value: 66.861 - type: ndcg_at_1 value: 60.333000000000006 - type: ndcg_at_10 value: 70.41 - type: ndcg_at_100 value: 73.10600000000001 - type: ndcg_at_1000 value: 73.846 - type: ndcg_at_3 value: 66.133 - type: ndcg_at_5 value: 68.499 - type: precision_at_1 value: 60.333000000000006 - type: precision_at_10 value: 9.232999999999999 - type: precision_at_100 value: 1.0630000000000002 - type: precision_at_1000 value: 0.11299999999999999 - type: precision_at_3 value: 25.667 - type: precision_at_5 value: 17.067 - type: recall_at_1 value: 57.260999999999996 - type: recall_at_10 value: 81.94399999999999 - type: recall_at_100 value: 93.867 - type: recall_at_1000 value: 99.667 - type: recall_at_3 value: 70.339 - type: recall_at_5 value: 76.25 - task: type: PairClassification dataset: name: MTEB SprintDuplicateQuestions type: mteb/sprintduplicatequestions-pairclassification config: default split: test revision: d66bd1f72af766a5cc4b0ca5e00c162f89e8cc46 metrics: - type: cos_sim_accuracy value: 99.74356435643564 - type: cos_sim_ap value: 93.13411948212683 - type: cos_sim_f1 value: 86.80521991300147 - type: cos_sim_precision value: 84.00374181478017 - type: cos_sim_recall value: 89.8 - type: dot_accuracy value: 99.67920792079208 - type: dot_ap value: 89.27277565444479 - type: dot_f1 value: 83.9276990718124 - type: dot_precision value: 82.04393505253104 - type: dot_recall value: 85.9 - 
type: euclidean_accuracy value: 99.74257425742574 - type: euclidean_ap value: 93.17993008259062 - type: euclidean_f1 value: 86.69396110542476 - type: euclidean_precision value: 88.78406708595388 - type: euclidean_recall value: 84.7 - type: manhattan_accuracy value: 99.74257425742574 - type: manhattan_ap value: 93.14413755550099 - type: manhattan_f1 value: 86.82483594144371 - type: manhattan_precision value: 87.66564729867483 - type: manhattan_recall value: 86 - type: max_accuracy value: 99.74356435643564 - type: max_ap value: 93.17993008259062 - type: max_f1 value: 86.82483594144371 - task: type: Clustering dataset: name: MTEB StackExchangeClustering type: mteb/stackexchange-clustering config: default split: test revision: 6cbc1f7b2bc0622f2e39d2c77fa502909748c259 metrics: - type: v_measure value: 57.525863806168566 - task: type: Clustering dataset: name: MTEB StackExchangeClusteringP2P type: mteb/stackexchange-clustering-p2p config: default split: test revision: 815ca46b2622cec33ccafc3735d572c266efdb44 metrics: - type: v_measure value: 32.68850574423839 - task: type: Reranking dataset: name: MTEB StackOverflowDupQuestions type: mteb/stackoverflowdupquestions-reranking config: default split: test revision: e185fbe320c72810689fc5848eb6114e1ef5ec69 metrics: - type: map value: 49.71580650644033 - type: mrr value: 50.50971903913081 - task: type: Summarization dataset: name: MTEB SummEval type: mteb/summeval config: default split: test revision: cda12ad7615edc362dbf25a00fdd61d3b1eaf93c metrics: - type: cos_sim_pearson value: 29.152190498799484 - type: cos_sim_spearman value: 29.686180371952727 - type: dot_pearson value: 27.248664793816342 - type: dot_spearman value: 28.37748983721745 - task: type: Retrieval dataset: name: MTEB TRECCOVID type: trec-covid config: default split: test revision: None metrics: - type: map_at_1 value: 0.20400000000000001 - type: map_at_10 value: 1.6209999999999998 - type: map_at_100 value: 9.690999999999999 - type: map_at_1000 value: 23.733 - type: map_at_3 value: 0.575 - type: map_at_5 value: 0.885 - type: mrr_at_1 value: 78 - type: mrr_at_10 value: 86.56700000000001 - type: mrr_at_100 value: 86.56700000000001 - type: mrr_at_1000 value: 86.56700000000001 - type: mrr_at_3 value: 85.667 - type: mrr_at_5 value: 86.56700000000001 - type: ndcg_at_1 value: 76 - type: ndcg_at_10 value: 71.326 - type: ndcg_at_100 value: 54.208999999999996 - type: ndcg_at_1000 value: 49.252 - type: ndcg_at_3 value: 74.235 - type: ndcg_at_5 value: 73.833 - type: precision_at_1 value: 78 - type: precision_at_10 value: 74.8 - type: precision_at_100 value: 55.50000000000001 - type: precision_at_1000 value: 21.836 - type: precision_at_3 value: 78 - type: precision_at_5 value: 78 - type: recall_at_1 value: 0.20400000000000001 - type: recall_at_10 value: 1.894 - type: recall_at_100 value: 13.245999999999999 - type: recall_at_1000 value: 46.373 - type: recall_at_3 value: 0.613 - type: recall_at_5 value: 0.991 - task: type: BitextMining dataset: name: MTEB Tatoeba (sqi-eng) type: mteb/tatoeba-bitext-mining config: sqi-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 95.89999999999999 - type: f1 value: 94.69999999999999 - type: precision value: 94.11666666666667 - type: recall value: 95.89999999999999 - task: type: BitextMining dataset: name: MTEB Tatoeba (fry-eng) type: mteb/tatoeba-bitext-mining config: fry-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 68.20809248554913 - type: f1 value: 
63.431048720066066 - type: precision value: 61.69143958161298 - type: recall value: 68.20809248554913 - task: type: BitextMining dataset: name: MTEB Tatoeba (kur-eng) type: mteb/tatoeba-bitext-mining config: kur-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 71.21951219512195 - type: f1 value: 66.82926829268293 - type: precision value: 65.1260162601626 - type: recall value: 71.21951219512195 - task: type: BitextMining dataset: name: MTEB Tatoeba (tur-eng) type: mteb/tatoeba-bitext-mining config: tur-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 97.2 - type: f1 value: 96.26666666666667 - type: precision value: 95.8 - type: recall value: 97.2 - task: type: BitextMining dataset: name: MTEB Tatoeba (deu-eng) type: mteb/tatoeba-bitext-mining config: deu-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 99.3 - type: f1 value: 99.06666666666666 - type: precision value: 98.95 - type: recall value: 99.3 - task: type: BitextMining dataset: name: MTEB Tatoeba (nld-eng) type: mteb/tatoeba-bitext-mining config: nld-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 97.39999999999999 - type: f1 value: 96.63333333333333 - type: precision value: 96.26666666666668 - type: recall value: 97.39999999999999 - task: type: BitextMining dataset: name: MTEB Tatoeba (ron-eng) type: mteb/tatoeba-bitext-mining config: ron-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 96 - type: f1 value: 94.86666666666666 - type: precision value: 94.31666666666668 - type: recall value: 96 - task: type: BitextMining dataset: name: MTEB Tatoeba (ang-eng) type: mteb/tatoeba-bitext-mining config: ang-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 47.01492537313433 - type: f1 value: 40.178867566927266 - type: precision value: 38.179295828549556 - type: recall value: 47.01492537313433 - task: type: BitextMining dataset: name: MTEB Tatoeba (ido-eng) type: mteb/tatoeba-bitext-mining config: ido-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 86.5 - type: f1 value: 83.62537480063796 - type: precision value: 82.44555555555554 - type: recall value: 86.5 - task: type: BitextMining dataset: name: MTEB Tatoeba (jav-eng) type: mteb/tatoeba-bitext-mining config: jav-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 80.48780487804879 - type: f1 value: 75.45644599303138 - type: precision value: 73.37398373983739 - type: recall value: 80.48780487804879 - task: type: BitextMining dataset: name: MTEB Tatoeba (isl-eng) type: mteb/tatoeba-bitext-mining config: isl-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 93.7 - type: f1 value: 91.95666666666666 - type: precision value: 91.125 - type: recall value: 93.7 - task: type: BitextMining dataset: name: MTEB Tatoeba (slv-eng) type: mteb/tatoeba-bitext-mining config: slv-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 91.73754556500607 - type: f1 value: 89.65168084244632 - type: precision value: 88.73025516403402 - type: recall value: 91.73754556500607 - task: type: BitextMining dataset: name: MTEB Tatoeba (cym-eng) type: mteb/tatoeba-bitext-mining config: cym-eng split: test revision: 
9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 81.04347826086956 - type: f1 value: 76.2128364389234 - type: precision value: 74.2 - type: recall value: 81.04347826086956 - task: type: BitextMining dataset: name: MTEB Tatoeba (kaz-eng) type: mteb/tatoeba-bitext-mining config: kaz-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 83.65217391304348 - type: f1 value: 79.4376811594203 - type: precision value: 77.65797101449274 - type: recall value: 83.65217391304348 - task: type: BitextMining dataset: name: MTEB Tatoeba (est-eng) type: mteb/tatoeba-bitext-mining config: est-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 87.5 - type: f1 value: 85.02690476190476 - type: precision value: 83.96261904761904 - type: recall value: 87.5 - task: type: BitextMining dataset: name: MTEB Tatoeba (heb-eng) type: mteb/tatoeba-bitext-mining config: heb-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 89.3 - type: f1 value: 86.52333333333333 - type: precision value: 85.22833333333332 - type: recall value: 89.3 - task: type: BitextMining dataset: name: MTEB Tatoeba (gla-eng) type: mteb/tatoeba-bitext-mining config: gla-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 65.01809408926418 - type: f1 value: 59.00594446432805 - type: precision value: 56.827215807915444 - type: recall value: 65.01809408926418 - task: type: BitextMining dataset: name: MTEB Tatoeba (mar-eng) type: mteb/tatoeba-bitext-mining config: mar-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 91.2 - type: f1 value: 88.58 - type: precision value: 87.33333333333334 - type: recall value: 91.2 - task: type: BitextMining dataset: name: MTEB Tatoeba (lat-eng) type: mteb/tatoeba-bitext-mining config: lat-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 59.199999999999996 - type: f1 value: 53.299166276284915 - type: precision value: 51.3383908045977 - type: recall value: 59.199999999999996 - task: type: BitextMining dataset: name: MTEB Tatoeba (bel-eng) type: mteb/tatoeba-bitext-mining config: bel-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 93.2 - type: f1 value: 91.2 - type: precision value: 90.25 - type: recall value: 93.2 - task: type: BitextMining dataset: name: MTEB Tatoeba (pms-eng) type: mteb/tatoeba-bitext-mining config: pms-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 64.76190476190476 - type: f1 value: 59.867110667110666 - type: precision value: 58.07390192653351 - type: recall value: 64.76190476190476 - task: type: BitextMining dataset: name: MTEB Tatoeba (gle-eng) type: mteb/tatoeba-bitext-mining config: gle-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 76.2 - type: f1 value: 71.48147546897547 - type: precision value: 69.65409090909091 - type: recall value: 76.2 - task: type: BitextMining dataset: name: MTEB Tatoeba (pes-eng) type: mteb/tatoeba-bitext-mining config: pes-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 93.8 - type: f1 value: 92.14 - type: precision value: 91.35833333333333 - type: recall value: 93.8 - task: type: BitextMining dataset: name: MTEB Tatoeba (nob-eng) type: 
mteb/tatoeba-bitext-mining config: nob-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 97.89999999999999 - type: f1 value: 97.2 - type: precision value: 96.85000000000001 - type: recall value: 97.89999999999999 - task: type: BitextMining dataset: name: MTEB Tatoeba (bul-eng) type: mteb/tatoeba-bitext-mining config: bul-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 94.6 - type: f1 value: 92.93333333333334 - type: precision value: 92.13333333333333 - type: recall value: 94.6 - task: type: BitextMining dataset: name: MTEB Tatoeba (cbk-eng) type: mteb/tatoeba-bitext-mining config: cbk-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 74.1 - type: f1 value: 69.14817460317461 - type: precision value: 67.2515873015873 - type: recall value: 74.1 - task: type: BitextMining dataset: name: MTEB Tatoeba (hun-eng) type: mteb/tatoeba-bitext-mining config: hun-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 95.19999999999999 - type: f1 value: 94.01333333333335 - type: precision value: 93.46666666666667 - type: recall value: 95.19999999999999 - task: type: BitextMining dataset: name: MTEB Tatoeba (uig-eng) type: mteb/tatoeba-bitext-mining config: uig-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 76.9 - type: f1 value: 72.07523809523809 - type: precision value: 70.19777777777779 - type: recall value: 76.9 - task: type: BitextMining dataset: name: MTEB Tatoeba (rus-eng) type: mteb/tatoeba-bitext-mining config: rus-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 94.1 - type: f1 value: 92.31666666666666 - type: precision value: 91.43333333333332 - type: recall value: 94.1 - task: type: BitextMining dataset: name: MTEB Tatoeba (spa-eng) type: mteb/tatoeba-bitext-mining config: spa-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 97.8 - type: f1 value: 97.1 - type: precision value: 96.76666666666668 - type: recall value: 97.8 - task: type: BitextMining dataset: name: MTEB Tatoeba (hye-eng) type: mteb/tatoeba-bitext-mining config: hye-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 92.85714285714286 - type: f1 value: 90.92093441150045 - type: precision value: 90.00449236298293 - type: recall value: 92.85714285714286 - task: type: BitextMining dataset: name: MTEB Tatoeba (tel-eng) type: mteb/tatoeba-bitext-mining config: tel-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 93.16239316239316 - type: f1 value: 91.33903133903132 - type: precision value: 90.56267806267806 - type: recall value: 93.16239316239316 - task: type: BitextMining dataset: name: MTEB Tatoeba (afr-eng) type: mteb/tatoeba-bitext-mining config: afr-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 92.4 - type: f1 value: 90.25666666666666 - type: precision value: 89.25833333333334 - type: recall value: 92.4 - task: type: BitextMining dataset: name: MTEB Tatoeba (mon-eng) type: mteb/tatoeba-bitext-mining config: mon-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 90.22727272727272 - type: f1 value: 87.53030303030303 - type: precision value: 86.37121212121211 - type: recall value: 
90.22727272727272 - task: type: BitextMining dataset: name: MTEB Tatoeba (arz-eng) type: mteb/tatoeba-bitext-mining config: arz-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 79.03563941299791 - type: f1 value: 74.7349505840072 - type: precision value: 72.9035639412998 - type: recall value: 79.03563941299791 - task: type: BitextMining dataset: name: MTEB Tatoeba (hrv-eng) type: mteb/tatoeba-bitext-mining config: hrv-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 97 - type: f1 value: 96.15 - type: precision value: 95.76666666666668 - type: recall value: 97 - task: type: BitextMining dataset: name: MTEB Tatoeba (nov-eng) type: mteb/tatoeba-bitext-mining config: nov-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 76.26459143968872 - type: f1 value: 71.55642023346303 - type: precision value: 69.7544932369835 - type: recall value: 76.26459143968872 - task: type: BitextMining dataset: name: MTEB Tatoeba (gsw-eng) type: mteb/tatoeba-bitext-mining config: gsw-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 58.119658119658126 - type: f1 value: 51.65242165242165 - type: precision value: 49.41768108434775 - type: recall value: 58.119658119658126 - task: type: BitextMining dataset: name: MTEB Tatoeba (nds-eng) type: mteb/tatoeba-bitext-mining config: nds-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 74.3 - type: f1 value: 69.52055555555555 - type: precision value: 67.7574938949939 - type: recall value: 74.3 - task: type: BitextMining dataset: name: MTEB Tatoeba (ukr-eng) type: mteb/tatoeba-bitext-mining config: ukr-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 94.8 - type: f1 value: 93.31666666666666 - type: precision value: 92.60000000000001 - type: recall value: 94.8 - task: type: BitextMining dataset: name: MTEB Tatoeba (uzb-eng) type: mteb/tatoeba-bitext-mining config: uzb-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 76.63551401869158 - type: f1 value: 72.35202492211837 - type: precision value: 70.60358255451713 - type: recall value: 76.63551401869158 - task: type: BitextMining dataset: name: MTEB Tatoeba (lit-eng) type: mteb/tatoeba-bitext-mining config: lit-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 90.4 - type: f1 value: 88.4811111111111 - type: precision value: 87.7452380952381 - type: recall value: 90.4 - task: type: BitextMining dataset: name: MTEB Tatoeba (ina-eng) type: mteb/tatoeba-bitext-mining config: ina-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 95 - type: f1 value: 93.60666666666667 - type: precision value: 92.975 - type: recall value: 95 - task: type: BitextMining dataset: name: MTEB Tatoeba (lfn-eng) type: mteb/tatoeba-bitext-mining config: lfn-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 67.2 - type: f1 value: 63.01595782872099 - type: precision value: 61.596587301587306 - type: recall value: 67.2 - task: type: BitextMining dataset: name: MTEB Tatoeba (zsm-eng) type: mteb/tatoeba-bitext-mining config: zsm-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 95.7 - type: f1 value: 94.52999999999999 - 
type: precision value: 94 - type: recall value: 95.7 - task: type: BitextMining dataset: name: MTEB Tatoeba (ita-eng) type: mteb/tatoeba-bitext-mining config: ita-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 94.6 - type: f1 value: 93.28999999999999 - type: precision value: 92.675 - type: recall value: 94.6 - task: type: BitextMining dataset: name: MTEB Tatoeba (cmn-eng) type: mteb/tatoeba-bitext-mining config: cmn-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 96.39999999999999 - type: f1 value: 95.28333333333333 - type: precision value: 94.75 - type: recall value: 96.39999999999999 - task: type: BitextMining dataset: name: MTEB Tatoeba (lvs-eng) type: mteb/tatoeba-bitext-mining config: lvs-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 91.9 - type: f1 value: 89.83 - type: precision value: 88.92 - type: recall value: 91.9 - task: type: BitextMining dataset: name: MTEB Tatoeba (glg-eng) type: mteb/tatoeba-bitext-mining config: glg-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 94.69999999999999 - type: f1 value: 93.34222222222223 - type: precision value: 92.75416666666668 - type: recall value: 94.69999999999999 - task: type: BitextMining dataset: name: MTEB Tatoeba (ceb-eng) type: mteb/tatoeba-bitext-mining config: ceb-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 60.333333333333336 - type: f1 value: 55.31203703703703 - type: precision value: 53.39971108326371 - type: recall value: 60.333333333333336 - task: type: BitextMining dataset: name: MTEB Tatoeba (bre-eng) type: mteb/tatoeba-bitext-mining config: bre-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 12.9 - type: f1 value: 11.099861903031458 - type: precision value: 10.589187932631877 - type: recall value: 12.9 - task: type: BitextMining dataset: name: MTEB Tatoeba (ben-eng) type: mteb/tatoeba-bitext-mining config: ben-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 86.7 - type: f1 value: 83.0152380952381 - type: precision value: 81.37833333333333 - type: recall value: 86.7 - task: type: BitextMining dataset: name: MTEB Tatoeba (swg-eng) type: mteb/tatoeba-bitext-mining config: swg-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 63.39285714285714 - type: f1 value: 56.832482993197274 - type: precision value: 54.56845238095237 - type: recall value: 63.39285714285714 - task: type: BitextMining dataset: name: MTEB Tatoeba (arq-eng) type: mteb/tatoeba-bitext-mining config: arq-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 48.73765093304062 - type: f1 value: 41.555736920720456 - type: precision value: 39.06874531737319 - type: recall value: 48.73765093304062 - task: type: BitextMining dataset: name: MTEB Tatoeba (kab-eng) type: mteb/tatoeba-bitext-mining config: kab-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 41.099999999999994 - type: f1 value: 36.540165945165946 - type: precision value: 35.05175685425686 - type: recall value: 41.099999999999994 - task: type: BitextMining dataset: name: MTEB Tatoeba (fra-eng) type: mteb/tatoeba-bitext-mining config: fra-eng split: test revision: 
9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 94.89999999999999 - type: f1 value: 93.42333333333333 - type: precision value: 92.75833333333333 - type: recall value: 94.89999999999999 - task: type: BitextMining dataset: name: MTEB Tatoeba (por-eng) type: mteb/tatoeba-bitext-mining config: por-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 94.89999999999999 - type: f1 value: 93.63333333333334 - type: precision value: 93.01666666666665 - type: recall value: 94.89999999999999 - task: type: BitextMining dataset: name: MTEB Tatoeba (tat-eng) type: mteb/tatoeba-bitext-mining config: tat-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 77.9 - type: f1 value: 73.64833333333334 - type: precision value: 71.90282106782105 - type: recall value: 77.9 - task: type: BitextMining dataset: name: MTEB Tatoeba (oci-eng) type: mteb/tatoeba-bitext-mining config: oci-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 59.4 - type: f1 value: 54.90521367521367 - type: precision value: 53.432840025471606 - type: recall value: 59.4 - task: type: BitextMining dataset: name: MTEB Tatoeba (pol-eng) type: mteb/tatoeba-bitext-mining config: pol-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 97.39999999999999 - type: f1 value: 96.6 - type: precision value: 96.2 - type: recall value: 97.39999999999999 - task: type: BitextMining dataset: name: MTEB Tatoeba (war-eng) type: mteb/tatoeba-bitext-mining config: war-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 67.2 - type: f1 value: 62.25926129426129 - type: precision value: 60.408376623376626 - type: recall value: 67.2 - task: type: BitextMining dataset: name: MTEB Tatoeba (aze-eng) type: mteb/tatoeba-bitext-mining config: aze-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 90.2 - type: f1 value: 87.60666666666667 - type: precision value: 86.45277777777778 - type: recall value: 90.2 - task: type: BitextMining dataset: name: MTEB Tatoeba (vie-eng) type: mteb/tatoeba-bitext-mining config: vie-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 97.7 - type: f1 value: 97 - type: precision value: 96.65 - type: recall value: 97.7 - task: type: BitextMining dataset: name: MTEB Tatoeba (nno-eng) type: mteb/tatoeba-bitext-mining config: nno-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 93.2 - type: f1 value: 91.39746031746031 - type: precision value: 90.6125 - type: recall value: 93.2 - task: type: BitextMining dataset: name: MTEB Tatoeba (cha-eng) type: mteb/tatoeba-bitext-mining config: cha-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 32.11678832116788 - type: f1 value: 27.210415386260234 - type: precision value: 26.20408990846947 - type: recall value: 32.11678832116788 - task: type: BitextMining dataset: name: MTEB Tatoeba (mhr-eng) type: mteb/tatoeba-bitext-mining config: mhr-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 8.5 - type: f1 value: 6.787319277832475 - type: precision value: 6.3452094433344435 - type: recall value: 8.5 - task: type: BitextMining dataset: name: MTEB Tatoeba (dan-eng) type: mteb/tatoeba-bitext-mining config: dan-eng 
split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 96.1 - type: f1 value: 95.08 - type: precision value: 94.61666666666667 - type: recall value: 96.1 - task: type: BitextMining dataset: name: MTEB Tatoeba (ell-eng) type: mteb/tatoeba-bitext-mining config: ell-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 95.3 - type: f1 value: 93.88333333333333 - type: precision value: 93.18333333333332 - type: recall value: 95.3 - task: type: BitextMining dataset: name: MTEB Tatoeba (amh-eng) type: mteb/tatoeba-bitext-mining config: amh-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 85.11904761904762 - type: f1 value: 80.69444444444444 - type: precision value: 78.72023809523809 - type: recall value: 85.11904761904762 - task: type: BitextMining dataset: name: MTEB Tatoeba (pam-eng) type: mteb/tatoeba-bitext-mining config: pam-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 11.1 - type: f1 value: 9.276381801735853 - type: precision value: 8.798174603174601 - type: recall value: 11.1 - task: type: BitextMining dataset: name: MTEB Tatoeba (hsb-eng) type: mteb/tatoeba-bitext-mining config: hsb-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 63.56107660455487 - type: f1 value: 58.70433569191332 - type: precision value: 56.896926581464015 - type: recall value: 63.56107660455487 - task: type: BitextMining dataset: name: MTEB Tatoeba (srp-eng) type: mteb/tatoeba-bitext-mining config: srp-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 94.69999999999999 - type: f1 value: 93.10000000000001 - type: precision value: 92.35 - type: recall value: 94.69999999999999 - task: type: BitextMining dataset: name: MTEB Tatoeba (epo-eng) type: mteb/tatoeba-bitext-mining config: epo-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 96.8 - type: f1 value: 96.01222222222222 - type: precision value: 95.67083333333332 - type: recall value: 96.8 - task: type: BitextMining dataset: name: MTEB Tatoeba (kzj-eng) type: mteb/tatoeba-bitext-mining config: kzj-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 9.2 - type: f1 value: 7.911555250305249 - type: precision value: 7.631246556216846 - type: recall value: 9.2 - task: type: BitextMining dataset: name: MTEB Tatoeba (awa-eng) type: mteb/tatoeba-bitext-mining config: awa-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 77.48917748917748 - type: f1 value: 72.27375798804371 - type: precision value: 70.14430014430013 - type: recall value: 77.48917748917748 - task: type: BitextMining dataset: name: MTEB Tatoeba (fao-eng) type: mteb/tatoeba-bitext-mining config: fao-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 77.09923664122137 - type: f1 value: 72.61541257724463 - type: precision value: 70.8998380754106 - type: recall value: 77.09923664122137 - task: type: BitextMining dataset: name: MTEB Tatoeba (mal-eng) type: mteb/tatoeba-bitext-mining config: mal-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 98.2532751091703 - type: f1 value: 97.69529354682193 - type: precision value: 97.42843279961184 - type: recall value: 98.2532751091703 - task: 
type: BitextMining dataset: name: MTEB Tatoeba (ile-eng) type: mteb/tatoeba-bitext-mining config: ile-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 82.8 - type: f1 value: 79.14672619047619 - type: precision value: 77.59489247311828 - type: recall value: 82.8 - task: type: BitextMining dataset: name: MTEB Tatoeba (bos-eng) type: mteb/tatoeba-bitext-mining config: bos-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 94.35028248587571 - type: f1 value: 92.86252354048965 - type: precision value: 92.2080979284369 - type: recall value: 94.35028248587571 - task: type: BitextMining dataset: name: MTEB Tatoeba (cor-eng) type: mteb/tatoeba-bitext-mining config: cor-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 8.5 - type: f1 value: 6.282429263935621 - type: precision value: 5.783274240739785 - type: recall value: 8.5 - task: type: BitextMining dataset: name: MTEB Tatoeba (cat-eng) type: mteb/tatoeba-bitext-mining config: cat-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 92.7 - type: f1 value: 91.025 - type: precision value: 90.30428571428571 - type: recall value: 92.7 - task: type: BitextMining dataset: name: MTEB Tatoeba (eus-eng) type: mteb/tatoeba-bitext-mining config: eus-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 81 - type: f1 value: 77.8232380952381 - type: precision value: 76.60194444444444 - type: recall value: 81 - task: type: BitextMining dataset: name: MTEB Tatoeba (yue-eng) type: mteb/tatoeba-bitext-mining config: yue-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 91 - type: f1 value: 88.70857142857142 - type: precision value: 87.7 - type: recall value: 91 - task: type: BitextMining dataset: name: MTEB Tatoeba (swe-eng) type: mteb/tatoeba-bitext-mining config: swe-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 96.39999999999999 - type: f1 value: 95.3 - type: precision value: 94.76666666666667 - type: recall value: 96.39999999999999 - task: type: BitextMining dataset: name: MTEB Tatoeba (dtp-eng) type: mteb/tatoeba-bitext-mining config: dtp-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 8.1 - type: f1 value: 7.001008218834307 - type: precision value: 6.708329562594269 - type: recall value: 8.1 - task: type: BitextMining dataset: name: MTEB Tatoeba (kat-eng) type: mteb/tatoeba-bitext-mining config: kat-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 87.1313672922252 - type: f1 value: 84.09070598748882 - type: precision value: 82.79171454104429 - type: recall value: 87.1313672922252 - task: type: BitextMining dataset: name: MTEB Tatoeba (jpn-eng) type: mteb/tatoeba-bitext-mining config: jpn-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 96.39999999999999 - type: f1 value: 95.28333333333333 - type: precision value: 94.73333333333332 - type: recall value: 96.39999999999999 - task: type: BitextMining dataset: name: MTEB Tatoeba (csb-eng) type: mteb/tatoeba-bitext-mining config: csb-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 42.29249011857708 - type: f1 value: 36.981018542283365 - type: precision value: 
35.415877813576024 - type: recall value: 42.29249011857708 - task: type: BitextMining dataset: name: MTEB Tatoeba (xho-eng) type: mteb/tatoeba-bitext-mining config: xho-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 83.80281690140845 - type: f1 value: 80.86854460093896 - type: precision value: 79.60093896713614 - type: recall value: 83.80281690140845 - task: type: BitextMining dataset: name: MTEB Tatoeba (orv-eng) type: mteb/tatoeba-bitext-mining config: orv-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 45.26946107784431 - type: f1 value: 39.80235464678088 - type: precision value: 38.14342660001342 - type: recall value: 45.26946107784431 - task: type: BitextMining dataset: name: MTEB Tatoeba (ind-eng) type: mteb/tatoeba-bitext-mining config: ind-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 94.3 - type: f1 value: 92.9 - type: precision value: 92.26666666666668 - type: recall value: 94.3 - task: type: BitextMining dataset: name: MTEB Tatoeba (tuk-eng) type: mteb/tatoeba-bitext-mining config: tuk-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 37.93103448275862 - type: f1 value: 33.15192743764172 - type: precision value: 31.57456528146183 - type: recall value: 37.93103448275862 - task: type: BitextMining dataset: name: MTEB Tatoeba (max-eng) type: mteb/tatoeba-bitext-mining config: max-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 69.01408450704226 - type: f1 value: 63.41549295774648 - type: precision value: 61.342778895595806 - type: recall value: 69.01408450704226 - task: type: BitextMining dataset: name: MTEB Tatoeba (swh-eng) type: mteb/tatoeba-bitext-mining config: swh-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 76.66666666666667 - type: f1 value: 71.60705960705961 - type: precision value: 69.60683760683762 - type: recall value: 76.66666666666667 - task: type: BitextMining dataset: name: MTEB Tatoeba (hin-eng) type: mteb/tatoeba-bitext-mining config: hin-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 95.8 - type: f1 value: 94.48333333333333 - type: precision value: 93.83333333333333 - type: recall value: 95.8 - task: type: BitextMining dataset: name: MTEB Tatoeba (dsb-eng) type: mteb/tatoeba-bitext-mining config: dsb-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 52.81837160751566 - type: f1 value: 48.435977731384824 - type: precision value: 47.11291973845539 - type: recall value: 52.81837160751566 - task: type: BitextMining dataset: name: MTEB Tatoeba (ber-eng) type: mteb/tatoeba-bitext-mining config: ber-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 44.9 - type: f1 value: 38.88962621607783 - type: precision value: 36.95936507936508 - type: recall value: 44.9 - task: type: BitextMining dataset: name: MTEB Tatoeba (tam-eng) type: mteb/tatoeba-bitext-mining config: tam-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 90.55374592833876 - type: f1 value: 88.22553125484721 - type: precision value: 87.26927252985884 - type: recall value: 90.55374592833876 - task: type: BitextMining dataset: name: MTEB Tatoeba (slk-eng) type: mteb/tatoeba-bitext-mining config: slk-eng 
split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 94.6 - type: f1 value: 93.13333333333333 - type: precision value: 92.45333333333333 - type: recall value: 94.6 - task: type: BitextMining dataset: name: MTEB Tatoeba (tgl-eng) type: mteb/tatoeba-bitext-mining config: tgl-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 93.7 - type: f1 value: 91.99666666666667 - type: precision value: 91.26666666666668 - type: recall value: 93.7 - task: type: BitextMining dataset: name: MTEB Tatoeba (ast-eng) type: mteb/tatoeba-bitext-mining config: ast-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 85.03937007874016 - type: f1 value: 81.75853018372703 - type: precision value: 80.34120734908137 - type: recall value: 85.03937007874016 - task: type: BitextMining dataset: name: MTEB Tatoeba (mkd-eng) type: mteb/tatoeba-bitext-mining config: mkd-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 88.3 - type: f1 value: 85.5 - type: precision value: 84.25833333333334 - type: recall value: 88.3 - task: type: BitextMining dataset: name: MTEB Tatoeba (khm-eng) type: mteb/tatoeba-bitext-mining config: khm-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 65.51246537396122 - type: f1 value: 60.02297410192148 - type: precision value: 58.133467727289236 - type: recall value: 65.51246537396122 - task: type: BitextMining dataset: name: MTEB Tatoeba (ces-eng) type: mteb/tatoeba-bitext-mining config: ces-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 96 - type: f1 value: 94.89 - type: precision value: 94.39166666666667 - type: recall value: 96 - task: type: BitextMining dataset: name: MTEB Tatoeba (tzl-eng) type: mteb/tatoeba-bitext-mining config: tzl-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 57.692307692307686 - type: f1 value: 53.162393162393165 - type: precision value: 51.70673076923077 - type: recall value: 57.692307692307686 - task: type: BitextMining dataset: name: MTEB Tatoeba (urd-eng) type: mteb/tatoeba-bitext-mining config: urd-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 91.60000000000001 - type: f1 value: 89.21190476190475 - type: precision value: 88.08666666666667 - type: recall value: 91.60000000000001 - task: type: BitextMining dataset: name: MTEB Tatoeba (ara-eng) type: mteb/tatoeba-bitext-mining config: ara-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 88 - type: f1 value: 85.47 - type: precision value: 84.43266233766234 - type: recall value: 88 - task: type: BitextMining dataset: name: MTEB Tatoeba (kor-eng) type: mteb/tatoeba-bitext-mining config: kor-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 92.7 - type: f1 value: 90.64999999999999 - type: precision value: 89.68333333333332 - type: recall value: 92.7 - task: type: BitextMining dataset: name: MTEB Tatoeba (yid-eng) type: mteb/tatoeba-bitext-mining config: yid-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 80.30660377358491 - type: f1 value: 76.33044137466307 - type: precision value: 74.78970125786164 - type: recall value: 80.30660377358491 - task: type: BitextMining dataset: name: MTEB 
Tatoeba (fin-eng) type: mteb/tatoeba-bitext-mining config: fin-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 96.39999999999999 - type: f1 value: 95.44 - type: precision value: 94.99166666666666 - type: recall value: 96.39999999999999 - task: type: BitextMining dataset: name: MTEB Tatoeba (tha-eng) type: mteb/tatoeba-bitext-mining config: tha-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 96.53284671532847 - type: f1 value: 95.37712895377129 - type: precision value: 94.7992700729927 - type: recall value: 96.53284671532847 - task: type: BitextMining dataset: name: MTEB Tatoeba (wuu-eng) type: mteb/tatoeba-bitext-mining config: wuu-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 89 - type: f1 value: 86.23190476190476 - type: precision value: 85.035 - type: recall value: 89 - task: type: Retrieval dataset: name: MTEB Touche2020 type: webis-touche2020 config: default split: test revision: None metrics: - type: map_at_1 value: 2.585 - type: map_at_10 value: 9.012 - type: map_at_100 value: 14.027000000000001 - type: map_at_1000 value: 15.565000000000001 - type: map_at_3 value: 5.032 - type: map_at_5 value: 6.657 - type: mrr_at_1 value: 28.571 - type: mrr_at_10 value: 45.377 - type: mrr_at_100 value: 46.119 - type: mrr_at_1000 value: 46.127 - type: mrr_at_3 value: 41.156 - type: mrr_at_5 value: 42.585 - type: ndcg_at_1 value: 27.551 - type: ndcg_at_10 value: 23.395 - type: ndcg_at_100 value: 33.342 - type: ndcg_at_1000 value: 45.523 - type: ndcg_at_3 value: 25.158 - type: ndcg_at_5 value: 23.427 - type: precision_at_1 value: 28.571 - type: precision_at_10 value: 21.429000000000002 - type: precision_at_100 value: 6.714 - type: precision_at_1000 value: 1.473 - type: precision_at_3 value: 27.211000000000002 - type: precision_at_5 value: 24.490000000000002 - type: recall_at_1 value: 2.585 - type: recall_at_10 value: 15.418999999999999 - type: recall_at_100 value: 42.485 - type: recall_at_1000 value: 79.536 - type: recall_at_3 value: 6.239999999999999 - type: recall_at_5 value: 8.996 - task: type: Classification dataset: name: MTEB ToxicConversationsClassification type: mteb/toxic_conversations_50k config: default split: test revision: d7c0de2777da35d6aae2200a62c6e0e5af397c4c metrics: - type: accuracy value: 71.3234 - type: ap value: 14.361688653847423 - type: f1 value: 54.819068624319044 - task: type: Classification dataset: name: MTEB TweetSentimentExtractionClassification type: mteb/tweet_sentiment_extraction config: default split: test revision: d604517c81ca91fe16a244d1248fc021f9ecee7a metrics: - type: accuracy value: 61.97792869269949 - type: f1 value: 62.28965628513728 - task: type: Clustering dataset: name: MTEB TwentyNewsgroupsClustering type: mteb/twentynewsgroups-clustering config: default split: test revision: 6125ec4e24fa026cec8a478383ee943acfbd5449 metrics: - type: v_measure value: 38.90540145385218 - task: type: PairClassification dataset: name: MTEB TwitterSemEval2015 type: mteb/twittersemeval2015-pairclassification config: default split: test revision: 70970daeab8776df92f5ea462b6173c0b46fd2d1 metrics: - type: cos_sim_accuracy value: 86.53513739047506 - type: cos_sim_ap value: 75.27741586677557 - type: cos_sim_f1 value: 69.18792902473774 - type: cos_sim_precision value: 67.94708725515136 - type: cos_sim_recall value: 70.47493403693932 - type: dot_accuracy value: 84.7052512368123 - type: dot_ap value: 69.36075482849378 - type: 
dot_f1 value: 64.44688376631296 - type: dot_precision value: 59.92288500793831 - type: dot_recall value: 69.70976253298153 - type: euclidean_accuracy value: 86.60666388508076 - type: euclidean_ap value: 75.47512772621097 - type: euclidean_f1 value: 69.413872536473 - type: euclidean_precision value: 67.39562624254472 - type: euclidean_recall value: 71.55672823218997 - type: manhattan_accuracy value: 86.52917684925792 - type: manhattan_ap value: 75.34000110496703 - type: manhattan_f1 value: 69.28489190226429 - type: manhattan_precision value: 67.24608889992551 - type: manhattan_recall value: 71.45118733509234 - type: max_accuracy value: 86.60666388508076 - type: max_ap value: 75.47512772621097 - type: max_f1 value: 69.413872536473 - task: type: PairClassification dataset: name: MTEB TwitterURLCorpus type: mteb/twitterurlcorpus-pairclassification config: default split: test revision: 8b6510b0b1fa4e4c4f879467980e9be563ec1cdf metrics: - type: cos_sim_accuracy value: 89.01695967710637 - type: cos_sim_ap value: 85.8298270742901 - type: cos_sim_f1 value: 78.46988128389272 - type: cos_sim_precision value: 74.86017897091722 - type: cos_sim_recall value: 82.44533415460425 - type: dot_accuracy value: 88.19420188613343 - type: dot_ap value: 83.82679165901324 - type: dot_f1 value: 76.55833777304208 - type: dot_precision value: 75.6884875846501 - type: dot_recall value: 77.44841392054204 - type: euclidean_accuracy value: 89.03054294252338 - type: euclidean_ap value: 85.89089555185325 - type: euclidean_f1 value: 78.62997658079624 - type: euclidean_precision value: 74.92329149232914 - type: euclidean_recall value: 82.72251308900523 - type: manhattan_accuracy value: 89.0266620095471 - type: manhattan_ap value: 85.86458997929147 - type: manhattan_f1 value: 78.50685331000291 - type: manhattan_precision value: 74.5499861534201 - type: manhattan_recall value: 82.90729904527257 - type: max_accuracy value: 89.03054294252338 - type: max_ap value: 85.89089555185325 - type: max_f1 value: 78.62997658079624 --- ## Multilingual-E5-large [Multilingual E5 Text Embeddings: A Technical Report](https://arxiv.org/pdf/2402.05672). Liang Wang, Nan Yang, Xiaolong Huang, Linjun Yang, Rangan Majumder, Furu Wei, arXiv 2024 This model has 24 layers and the embedding size is 1024. ## Usage Below is an example to encode queries and passages from the MS-MARCO passage ranking dataset. ```python import torch.nn.functional as F from torch import Tensor from transformers import AutoTokenizer, AutoModel def average_pool(last_hidden_states: Tensor, attention_mask: Tensor) -> Tensor: last_hidden = last_hidden_states.masked_fill(~attention_mask[..., None].bool(), 0.0) return last_hidden.sum(dim=1) / attention_mask.sum(dim=1)[..., None] # Each input text should start with "query: " or "passage: ", even for non-English texts. # For tasks other than retrieval, you can simply use the "query: " prefix. input_texts = ['query: how much protein should a female eat', 'query: 南瓜的家常做法', "passage: As a general guideline, the CDC's average requirement of protein for women ages 19 to 70 is 46 grams per day. But, as you can see from this chart, you'll need to increase that if you're expecting or training for a marathon. 
Check out the chart below to see how much protein you should be eating each day.", "passage: 1.清炒南瓜丝 原料:嫩南瓜半个 调料:葱、盐、白糖、鸡精 做法: 1、南瓜用刀薄薄的削去表面一层皮,用勺子刮去瓤 2、擦成细丝(没有擦菜板就用刀慢慢切成细丝) 3、锅烧热放油,入葱花煸出香味 4、入南瓜丝快速翻炒一分钟左右,放盐、一点白糖和鸡精调味出锅 2.香葱炒南瓜 原料:南瓜1只 调料:香葱、蒜末、橄榄油、盐 做法: 1、将南瓜去皮,切成片 2、油锅8成热后,将蒜末放入爆香 3、爆香后,将南瓜片放入,翻炒 4、在翻炒的同时,可以不时地往锅里加水,但不要太多 5、放入盐,炒匀 6、南瓜差不多软和绵了之后,就可以关火 7、撒入香葱,即可出锅"] tokenizer = AutoTokenizer.from_pretrained('intfloat/multilingual-e5-large') model = AutoModel.from_pretrained('intfloat/multilingual-e5-large') # Tokenize the input texts batch_dict = tokenizer(input_texts, max_length=512, padding=True, truncation=True, return_tensors='pt') outputs = model(**batch_dict) embeddings = average_pool(outputs.last_hidden_state, batch_dict['attention_mask']) # normalize embeddings embeddings = F.normalize(embeddings, p=2, dim=1) scores = (embeddings[:2] @ embeddings[2:].T) * 100 print(scores.tolist()) ``` ## Supported Languages This model is initialized from [xlm-roberta-large](https://huggingface.co/xlm-roberta-large) and continually trained on a mixture of multilingual datasets. It supports 100 languages from xlm-roberta, but low-resource languages may see performance degradation. ## Training Details **Initialization**: [xlm-roberta-large](https://huggingface.co/xlm-roberta-large) **First stage**: contrastive pre-training with weak supervision | Dataset | Weak supervision | # of text pairs | |--------------------------------------------------------------------------------------------------------|---------------------------------------|-----------------| | Filtered [mC4](https://huggingface.co/datasets/mc4) | (title, page content) | 1B | | [CC News](https://huggingface.co/datasets/intfloat/multilingual_cc_news) | (title, news content) | 400M | | [NLLB](https://huggingface.co/datasets/allenai/nllb) | translation pairs | 2.4B | | [Wikipedia](https://huggingface.co/datasets/intfloat/wikipedia) | (hierarchical section title, passage) | 150M | | Filtered [Reddit](https://www.reddit.com/) | (comment, response) | 800M | | [S2ORC](https://github.com/allenai/s2orc) | (title, abstract) and citation pairs | 100M | | [Stackexchange](https://stackexchange.com/) | (question, answer) | 50M | | [xP3](https://huggingface.co/datasets/bigscience/xP3) | (input prompt, response) | 80M | | [Miscellaneous unsupervised SBERT data](https://huggingface.co/sentence-transformers/all-MiniLM-L6-v2) | - | 10M | **Second stage**: supervised fine-tuning | Dataset | Language | # of text pairs | |----------------------------------------------------------------------------------------|--------------|-----------------| | [MS MARCO](https://microsoft.github.io/msmarco/) | English | 500k | | [NQ](https://github.com/facebookresearch/DPR) | English | 70k | | [Trivia QA](https://github.com/facebookresearch/DPR) | English | 60k | | [NLI from SimCSE](https://github.com/princeton-nlp/SimCSE) | English | <300k | | [ELI5](https://huggingface.co/datasets/eli5) | English | 500k | | [DuReader Retrieval](https://github.com/baidu/DuReader/tree/master/DuReader-Retrieval) | Chinese | 86k | | [KILT Fever](https://huggingface.co/datasets/kilt_tasks) | English | 70k | | [KILT HotpotQA](https://huggingface.co/datasets/kilt_tasks) | English | 70k | | [SQuAD](https://huggingface.co/datasets/squad) | English | 87k | | [Quora](https://huggingface.co/datasets/quora) | English | 150k | | [Mr. 
TyDi](https://huggingface.co/datasets/castorini/mr-tydi) | 11 languages | 50k | | [MIRACL](https://huggingface.co/datasets/miracl/miracl) | 16 languages | 40k | For all labeled datasets, we only use their training sets for fine-tuning. For other training details, please refer to our paper at [https://arxiv.org/pdf/2402.05672](https://arxiv.org/pdf/2402.05672). ## Benchmark Results on [Mr. TyDi](https://arxiv.org/abs/2108.08787) | Model | Avg MRR@10 | | ar | bn | en | fi | id | ja | ko | ru | sw | te | th | |-----------------------|------------|-------|------| --- | --- | --- | --- | --- | --- | --- |------| --- | --- | | BM25 | 33.3 | | 36.7 | 41.3 | 15.1 | 28.8 | 38.2 | 21.7 | 28.1 | 32.9 | 39.6 | 42.4 | 41.7 | | mDPR | 16.7 | | 26.0 | 25.8 | 16.2 | 11.3 | 14.6 | 18.1 | 21.9 | 18.5 | 7.3 | 10.6 | 13.5 | | BM25 + mDPR | 41.7 | | 49.1 | 53.5 | 28.4 | 36.5 | 45.5 | 35.5 | 36.2 | 42.7 | 40.5 | 42.0 | 49.2 | | | | | multilingual-e5-small | 64.4 | | 71.5 | 66.3 | 54.5 | 57.7 | 63.2 | 55.4 | 54.3 | 60.8 | 65.4 | 89.1 | 70.1 | | multilingual-e5-base | 65.9 | | 72.3 | 65.0 | 58.5 | 60.8 | 64.9 | 56.6 | 55.8 | 62.7 | 69.0 | 86.6 | 72.7 | | multilingual-e5-large | **70.5** | | 77.5 | 73.2 | 60.8 | 66.8 | 68.5 | 62.5 | 61.6 | 65.8 | 72.7 | 90.2 | 76.2 | ## MTEB Benchmark Evaluation Check out [unilm/e5](https://github.com/microsoft/unilm/tree/master/e5) to reproduce evaluation results on the [BEIR](https://arxiv.org/abs/2104.08663) and [MTEB benchmark](https://arxiv.org/abs/2210.07316). ## Support for Sentence Transformers Below is an example of usage with sentence_transformers. ```python from sentence_transformers import SentenceTransformer model = SentenceTransformer('intfloat/multilingual-e5-large') input_texts = [ 'query: how much protein should a female eat', 'query: 南瓜的家常做法', "passage: As a general guideline, the CDC's average requirement of protein for women ages 19 to 70 is 46 grams per day. But, as you can see from this chart, you'll need to increase that if you're expecting or training for a marathon. Check out the chart below to see how much protein you should be eating each day.", "passage: 1.清炒南瓜丝 原料:嫩南瓜半个 调料:葱、盐、白糖、鸡精 做法: 1、南瓜用刀薄薄的削去表面一层皮,用勺子刮去瓤 2、擦成细丝(没有擦菜板就用刀慢慢切成细丝) 3、锅烧热放油,入葱花煸出香味 4、入南瓜丝快速翻炒一分钟左右,放盐、一点白糖和鸡精调味出锅 2.香葱炒南瓜 原料:南瓜1只 调料:香葱、蒜末、橄榄油、盐 做法: 1、将南瓜去皮,切成片 2、油锅8成热后,将蒜末放入爆香 3、爆香后,将南瓜片放入,翻炒 4、在翻炒的同时,可以不时地往锅里加水,但不要太多 5、放入盐,炒匀 6、南瓜差不多软和绵了之后,就可以关火 7、撒入香葱,即可出锅" ] embeddings = model.encode(input_texts, normalize_embeddings=True) ``` Package requirements: `pip install sentence_transformers~=2.2.2` Contributors: [michaelfeil](https://huggingface.co/michaelfeil) ## FAQ **1. Do I need to add the prefix "query: " and "passage: " to input texts?** Yes, this is how the model is trained; otherwise you will see a performance degradation. Here are some rules of thumb: - Use "query: " and "passage: " correspondingly for asymmetric tasks such as passage retrieval in open QA, ad-hoc information retrieval. - Use "query: " prefix for symmetric tasks such as semantic similarity, bitext mining, paraphrase retrieval. - Use "query: " prefix if you want to use embeddings as features, such as linear probing classification, clustering. **2. Why are my reproduced results slightly different from those reported in the model card?** Different versions of `transformers` and `pytorch` could cause negligible but non-zero performance differences. **3. Why do the cosine similarity scores distribute around 0.7 to 1.0?** This is a known and expected behavior as we use a low temperature of 0.01 for InfoNCE contrastive loss. 
For text embedding tasks like text retrieval or semantic similarity, what matters is the relative order of the scores instead of the absolute values, so this should not be an issue. ## Citation If you find our paper or models helpful, please consider citing our work as follows: ``` @article{wang2024multilingual, title={Multilingual E5 Text Embeddings: A Technical Report}, author={Wang, Liang and Yang, Nan and Huang, Xiaolong and Yang, Linjun and Majumder, Rangan and Wei, Furu}, journal={arXiv preprint arXiv:2402.05672}, year={2024} } ``` ## Limitations Long texts will be truncated to at most 512 tokens.
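If your documents routinely exceed that limit, a common workaround is to split each text into overlapping chunks, embed every chunk with the usual prefix, and average the chunk embeddings into a single vector. This is not part of the official E5 pipeline; the snippet below is only a minimal sketch of that idea, and the helper name `embed_long_text` as well as the `max_tokens`/`stride` values are illustrative choices, not settings recommended by the authors.

```python
import torch
import torch.nn.functional as F
from torch import Tensor
from transformers import AutoTokenizer, AutoModel


def average_pool(last_hidden_states: Tensor, attention_mask: Tensor) -> Tensor:
    # Same mean pooling as in the usage example above.
    last_hidden = last_hidden_states.masked_fill(~attention_mask[..., None].bool(), 0.0)
    return last_hidden.sum(dim=1) / attention_mask.sum(dim=1)[..., None]


def embed_long_text(text: str, tokenizer, model, prefix: str = 'passage: ',
                    max_tokens: int = 512, stride: int = 384) -> Tensor:
    # Hypothetical helper: tokenize without special tokens, then cut the ids into
    # overlapping windows so every token is seen by at least one forward pass.
    # Decoding and re-encoding each window is a rough but simple way to chunk.
    ids = tokenizer(text, add_special_tokens=False)['input_ids']
    window = max_tokens - 2  # leave room for the special tokens added back later
    chunks = [tokenizer.decode(ids[i:i + window]) for i in range(0, max(len(ids), 1), stride)]
    batch = tokenizer([prefix + c for c in chunks], max_length=max_tokens,
                      padding=True, truncation=True, return_tensors='pt')
    with torch.no_grad():
        outputs = model(**batch)
    emb = average_pool(outputs.last_hidden_state, batch['attention_mask'])
    emb = F.normalize(emb, p=2, dim=1)
    # Average the per-chunk embeddings into one document vector and re-normalize.
    return F.normalize(emb.mean(dim=0, keepdim=True), p=2, dim=1)


tokenizer = AutoTokenizer.from_pretrained('intfloat/multilingual-e5-large')
model = AutoModel.from_pretrained('intfloat/multilingual-e5-large')
doc_embedding = embed_long_text('A very long document ...', tokenizer, model)
print(doc_embedding.shape)  # torch.Size([1, 1024])
```

Averaging chunk embeddings keeps the interface identical to the single-vector case, but it can blur topic boundaries in very heterogeneous documents; scoring each chunk separately and taking the maximum similarity per document is an equally reasonable alternative.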
[ "BIOSSES", "SCIFACT" ]
BAAI/bge-large-en-v1.5
BAAI
feature-extraction
[ "sentence-transformers", "pytorch", "onnx", "safetensors", "bert", "feature-extraction", "sentence-similarity", "transformers", "mteb", "en", "arxiv:2401.03462", "arxiv:2312.15503", "arxiv:2311.13534", "arxiv:2310.07554", "arxiv:2309.07597", "license:mit", "model-index", "autotrain_compatible", "text-embeddings-inference", "endpoints_compatible", "region:us" ]
"2023-09-12T05:20:08Z"
2024-02-21T02:51:44+00:00
2,014,014
495
--- language: - en license: mit tags: - sentence-transformers - feature-extraction - sentence-similarity - transformers - mteb model-index: - name: bge-large-en-v1.5 results: - task: type: Classification dataset: name: MTEB AmazonCounterfactualClassification (en) type: mteb/amazon_counterfactual config: en split: test revision: e8379541af4e31359cca9fbcf4b00f2671dba205 metrics: - type: accuracy value: 75.8507462686567 - type: ap value: 38.566457320228245 - type: f1 value: 69.69386648043475 - task: type: Classification dataset: name: MTEB AmazonPolarityClassification type: mteb/amazon_polarity config: default split: test revision: e2d317d38cd51312af73b3d32a06d1a08b442046 metrics: - type: accuracy value: 92.416675 - type: ap value: 89.1928861155922 - type: f1 value: 92.39477019574215 - task: type: Classification dataset: name: MTEB AmazonReviewsClassification (en) type: mteb/amazon_reviews_multi config: en split: test revision: 1399c76144fd37290681b995c656ef9b2e06e26d metrics: - type: accuracy value: 48.175999999999995 - type: f1 value: 47.80712792870253 - task: type: Retrieval dataset: name: MTEB ArguAna type: arguana config: default split: test revision: None metrics: - type: map_at_1 value: 40.184999999999995 - type: map_at_10 value: 55.654 - type: map_at_100 value: 56.25 - type: map_at_1000 value: 56.255 - type: map_at_3 value: 51.742999999999995 - type: map_at_5 value: 54.129000000000005 - type: mrr_at_1 value: 40.967 - type: mrr_at_10 value: 55.96 - type: mrr_at_100 value: 56.54900000000001 - type: mrr_at_1000 value: 56.554 - type: mrr_at_3 value: 51.980000000000004 - type: mrr_at_5 value: 54.44 - type: ndcg_at_1 value: 40.184999999999995 - type: ndcg_at_10 value: 63.542 - type: ndcg_at_100 value: 65.96499999999999 - type: ndcg_at_1000 value: 66.08699999999999 - type: ndcg_at_3 value: 55.582 - type: ndcg_at_5 value: 59.855000000000004 - type: precision_at_1 value: 40.184999999999995 - type: precision_at_10 value: 8.841000000000001 - type: precision_at_100 value: 0.987 - type: precision_at_1000 value: 0.1 - type: precision_at_3 value: 22.238 - type: precision_at_5 value: 15.405 - type: recall_at_1 value: 40.184999999999995 - type: recall_at_10 value: 88.407 - type: recall_at_100 value: 98.72 - type: recall_at_1000 value: 99.644 - type: recall_at_3 value: 66.714 - type: recall_at_5 value: 77.027 - task: type: Clustering dataset: name: MTEB ArxivClusteringP2P type: mteb/arxiv-clustering-p2p config: default split: test revision: a122ad7f3f0291bf49cc6f4d32aa80929df69d5d metrics: - type: v_measure value: 48.567077926750066 - task: type: Clustering dataset: name: MTEB ArxivClusteringS2S type: mteb/arxiv-clustering-s2s config: default split: test revision: f910caf1a6075f7329cdf8c1a6135696f37dbd53 metrics: - type: v_measure value: 43.19453389182364 - task: type: Reranking dataset: name: MTEB AskUbuntuDupQuestions type: mteb/askubuntudupquestions-reranking config: default split: test revision: 2000358ca161889fa9c082cb41daa8dcfb161a54 metrics: - type: map value: 64.46555939623092 - type: mrr value: 77.82361605768807 - task: type: STS dataset: name: MTEB BIOSSES type: mteb/biosses-sts config: default split: test revision: d3fb88f8f02e40887cd149695127462bbcf29b4a metrics: - type: cos_sim_pearson value: 84.9554128814735 - type: cos_sim_spearman value: 84.65373612172036 - type: euclidean_pearson value: 83.2905059954138 - type: euclidean_spearman value: 84.52240782811128 - type: manhattan_pearson value: 82.99533802997436 - type: manhattan_spearman value: 84.20673798475734 - task: type: Classification 
dataset: name: MTEB Banking77Classification type: mteb/banking77 config: default split: test revision: 0fd18e25b25c072e09e0d92ab615fda904d66300 metrics: - type: accuracy value: 87.78896103896103 - type: f1 value: 87.77189310964883 - task: type: Clustering dataset: name: MTEB BiorxivClusteringP2P type: mteb/biorxiv-clustering-p2p config: default split: test revision: 65b79d1d13f80053f67aca9498d9402c2d9f1f40 metrics: - type: v_measure value: 39.714538337650495 - task: type: Clustering dataset: name: MTEB BiorxivClusteringS2S type: mteb/biorxiv-clustering-s2s config: default split: test revision: 258694dd0231531bc1fd9de6ceb52a0853c6d908 metrics: - type: v_measure value: 36.90108349284447 - task: type: Retrieval dataset: name: MTEB CQADupstackAndroidRetrieval type: BeIR/cqadupstack config: default split: test revision: None metrics: - type: map_at_1 value: 32.795 - type: map_at_10 value: 43.669000000000004 - type: map_at_100 value: 45.151 - type: map_at_1000 value: 45.278 - type: map_at_3 value: 40.006 - type: map_at_5 value: 42.059999999999995 - type: mrr_at_1 value: 39.771 - type: mrr_at_10 value: 49.826 - type: mrr_at_100 value: 50.504000000000005 - type: mrr_at_1000 value: 50.549 - type: mrr_at_3 value: 47.115 - type: mrr_at_5 value: 48.832 - type: ndcg_at_1 value: 39.771 - type: ndcg_at_10 value: 50.217999999999996 - type: ndcg_at_100 value: 55.454 - type: ndcg_at_1000 value: 57.37 - type: ndcg_at_3 value: 44.885000000000005 - type: ndcg_at_5 value: 47.419 - type: precision_at_1 value: 39.771 - type: precision_at_10 value: 9.642000000000001 - type: precision_at_100 value: 1.538 - type: precision_at_1000 value: 0.198 - type: precision_at_3 value: 21.268 - type: precision_at_5 value: 15.536 - type: recall_at_1 value: 32.795 - type: recall_at_10 value: 62.580999999999996 - type: recall_at_100 value: 84.438 - type: recall_at_1000 value: 96.492 - type: recall_at_3 value: 47.071000000000005 - type: recall_at_5 value: 54.079 - type: map_at_1 value: 32.671 - type: map_at_10 value: 43.334 - type: map_at_100 value: 44.566 - type: map_at_1000 value: 44.702999999999996 - type: map_at_3 value: 40.343 - type: map_at_5 value: 41.983 - type: mrr_at_1 value: 40.764 - type: mrr_at_10 value: 49.382 - type: mrr_at_100 value: 49.988 - type: mrr_at_1000 value: 50.03300000000001 - type: mrr_at_3 value: 47.293 - type: mrr_at_5 value: 48.51 - type: ndcg_at_1 value: 40.764 - type: ndcg_at_10 value: 49.039 - type: ndcg_at_100 value: 53.259 - type: ndcg_at_1000 value: 55.253 - type: ndcg_at_3 value: 45.091 - type: ndcg_at_5 value: 46.839999999999996 - type: precision_at_1 value: 40.764 - type: precision_at_10 value: 9.191 - type: precision_at_100 value: 1.476 - type: precision_at_1000 value: 0.19499999999999998 - type: precision_at_3 value: 21.72 - type: precision_at_5 value: 15.299 - type: recall_at_1 value: 32.671 - type: recall_at_10 value: 58.816 - type: recall_at_100 value: 76.654 - type: recall_at_1000 value: 89.05999999999999 - type: recall_at_3 value: 46.743 - type: recall_at_5 value: 51.783 - type: map_at_1 value: 40.328 - type: map_at_10 value: 53.32599999999999 - type: map_at_100 value: 54.37499999999999 - type: map_at_1000 value: 54.429 - type: map_at_3 value: 49.902 - type: map_at_5 value: 52.002 - type: mrr_at_1 value: 46.332 - type: mrr_at_10 value: 56.858 - type: mrr_at_100 value: 57.522 - type: mrr_at_1000 value: 57.54899999999999 - type: mrr_at_3 value: 54.472 - type: mrr_at_5 value: 55.996 - type: ndcg_at_1 value: 46.332 - type: ndcg_at_10 value: 59.313 - type: ndcg_at_100 value: 
63.266999999999996 - type: ndcg_at_1000 value: 64.36 - type: ndcg_at_3 value: 53.815000000000005 - type: ndcg_at_5 value: 56.814 - type: precision_at_1 value: 46.332 - type: precision_at_10 value: 9.53 - type: precision_at_100 value: 1.238 - type: precision_at_1000 value: 0.13699999999999998 - type: precision_at_3 value: 24.054000000000002 - type: precision_at_5 value: 16.589000000000002 - type: recall_at_1 value: 40.328 - type: recall_at_10 value: 73.421 - type: recall_at_100 value: 90.059 - type: recall_at_1000 value: 97.81 - type: recall_at_3 value: 59.009 - type: recall_at_5 value: 66.352 - type: map_at_1 value: 27.424 - type: map_at_10 value: 36.332 - type: map_at_100 value: 37.347 - type: map_at_1000 value: 37.422 - type: map_at_3 value: 33.743 - type: map_at_5 value: 35.176 - type: mrr_at_1 value: 29.153000000000002 - type: mrr_at_10 value: 38.233 - type: mrr_at_100 value: 39.109 - type: mrr_at_1000 value: 39.164 - type: mrr_at_3 value: 35.876000000000005 - type: mrr_at_5 value: 37.169000000000004 - type: ndcg_at_1 value: 29.153000000000002 - type: ndcg_at_10 value: 41.439 - type: ndcg_at_100 value: 46.42 - type: ndcg_at_1000 value: 48.242000000000004 - type: ndcg_at_3 value: 36.362 - type: ndcg_at_5 value: 38.743 - type: precision_at_1 value: 29.153000000000002 - type: precision_at_10 value: 6.315999999999999 - type: precision_at_100 value: 0.927 - type: precision_at_1000 value: 0.11199999999999999 - type: precision_at_3 value: 15.443000000000001 - type: precision_at_5 value: 10.644 - type: recall_at_1 value: 27.424 - type: recall_at_10 value: 55.364000000000004 - type: recall_at_100 value: 78.211 - type: recall_at_1000 value: 91.74600000000001 - type: recall_at_3 value: 41.379 - type: recall_at_5 value: 47.14 - type: map_at_1 value: 19.601 - type: map_at_10 value: 27.826 - type: map_at_100 value: 29.017 - type: map_at_1000 value: 29.137 - type: map_at_3 value: 25.125999999999998 - type: map_at_5 value: 26.765 - type: mrr_at_1 value: 24.005000000000003 - type: mrr_at_10 value: 32.716 - type: mrr_at_100 value: 33.631 - type: mrr_at_1000 value: 33.694 - type: mrr_at_3 value: 29.934 - type: mrr_at_5 value: 31.630999999999997 - type: ndcg_at_1 value: 24.005000000000003 - type: ndcg_at_10 value: 33.158 - type: ndcg_at_100 value: 38.739000000000004 - type: ndcg_at_1000 value: 41.495 - type: ndcg_at_3 value: 28.185 - type: ndcg_at_5 value: 30.796 - type: precision_at_1 value: 24.005000000000003 - type: precision_at_10 value: 5.908 - type: precision_at_100 value: 1.005 - type: precision_at_1000 value: 0.13899999999999998 - type: precision_at_3 value: 13.391 - type: precision_at_5 value: 9.876 - type: recall_at_1 value: 19.601 - type: recall_at_10 value: 44.746 - type: recall_at_100 value: 68.82300000000001 - type: recall_at_1000 value: 88.215 - type: recall_at_3 value: 31.239 - type: recall_at_5 value: 37.695 - type: map_at_1 value: 30.130000000000003 - type: map_at_10 value: 40.96 - type: map_at_100 value: 42.282 - type: map_at_1000 value: 42.392 - type: map_at_3 value: 37.889 - type: map_at_5 value: 39.661 - type: mrr_at_1 value: 36.958999999999996 - type: mrr_at_10 value: 46.835 - type: mrr_at_100 value: 47.644 - type: mrr_at_1000 value: 47.688 - type: mrr_at_3 value: 44.562000000000005 - type: mrr_at_5 value: 45.938 - type: ndcg_at_1 value: 36.958999999999996 - type: ndcg_at_10 value: 47.06 - type: ndcg_at_100 value: 52.345 - type: ndcg_at_1000 value: 54.35 - type: ndcg_at_3 value: 42.301 - type: ndcg_at_5 value: 44.635999999999996 - type: precision_at_1 value: 36.958999999999996 - 
type: precision_at_10 value: 8.479000000000001 - type: precision_at_100 value: 1.284 - type: precision_at_1000 value: 0.163 - type: precision_at_3 value: 20.244 - type: precision_at_5 value: 14.224999999999998 - type: recall_at_1 value: 30.130000000000003 - type: recall_at_10 value: 59.27 - type: recall_at_100 value: 81.195 - type: recall_at_1000 value: 94.21199999999999 - type: recall_at_3 value: 45.885 - type: recall_at_5 value: 52.016 - type: map_at_1 value: 26.169999999999998 - type: map_at_10 value: 36.451 - type: map_at_100 value: 37.791000000000004 - type: map_at_1000 value: 37.897 - type: map_at_3 value: 33.109 - type: map_at_5 value: 34.937000000000005 - type: mrr_at_1 value: 32.877 - type: mrr_at_10 value: 42.368 - type: mrr_at_100 value: 43.201 - type: mrr_at_1000 value: 43.259 - type: mrr_at_3 value: 39.763999999999996 - type: mrr_at_5 value: 41.260000000000005 - type: ndcg_at_1 value: 32.877 - type: ndcg_at_10 value: 42.659000000000006 - type: ndcg_at_100 value: 48.161 - type: ndcg_at_1000 value: 50.345 - type: ndcg_at_3 value: 37.302 - type: ndcg_at_5 value: 39.722 - type: precision_at_1 value: 32.877 - type: precision_at_10 value: 7.9 - type: precision_at_100 value: 1.236 - type: precision_at_1000 value: 0.158 - type: precision_at_3 value: 17.846 - type: precision_at_5 value: 12.9 - type: recall_at_1 value: 26.169999999999998 - type: recall_at_10 value: 55.35 - type: recall_at_100 value: 78.755 - type: recall_at_1000 value: 93.518 - type: recall_at_3 value: 40.176 - type: recall_at_5 value: 46.589000000000006 - type: map_at_1 value: 27.15516666666667 - type: map_at_10 value: 36.65741666666667 - type: map_at_100 value: 37.84991666666666 - type: map_at_1000 value: 37.96316666666667 - type: map_at_3 value: 33.74974999999999 - type: map_at_5 value: 35.3765 - type: mrr_at_1 value: 32.08233333333334 - type: mrr_at_10 value: 41.033833333333334 - type: mrr_at_100 value: 41.84524999999999 - type: mrr_at_1000 value: 41.89983333333333 - type: mrr_at_3 value: 38.62008333333333 - type: mrr_at_5 value: 40.03441666666666 - type: ndcg_at_1 value: 32.08233333333334 - type: ndcg_at_10 value: 42.229 - type: ndcg_at_100 value: 47.26716666666667 - type: ndcg_at_1000 value: 49.43466666666667 - type: ndcg_at_3 value: 37.36408333333333 - type: ndcg_at_5 value: 39.6715 - type: precision_at_1 value: 32.08233333333334 - type: precision_at_10 value: 7.382583333333334 - type: precision_at_100 value: 1.16625 - type: precision_at_1000 value: 0.15408333333333332 - type: precision_at_3 value: 17.218 - type: precision_at_5 value: 12.21875 - type: recall_at_1 value: 27.15516666666667 - type: recall_at_10 value: 54.36683333333333 - type: recall_at_100 value: 76.37183333333333 - type: recall_at_1000 value: 91.26183333333333 - type: recall_at_3 value: 40.769916666666674 - type: recall_at_5 value: 46.702333333333335 - type: map_at_1 value: 25.749 - type: map_at_10 value: 33.001999999999995 - type: map_at_100 value: 33.891 - type: map_at_1000 value: 33.993 - type: map_at_3 value: 30.703999999999997 - type: map_at_5 value: 31.959 - type: mrr_at_1 value: 28.834 - type: mrr_at_10 value: 35.955 - type: mrr_at_100 value: 36.709 - type: mrr_at_1000 value: 36.779 - type: mrr_at_3 value: 33.947 - type: mrr_at_5 value: 35.089 - type: ndcg_at_1 value: 28.834 - type: ndcg_at_10 value: 37.329 - type: ndcg_at_100 value: 41.79 - type: ndcg_at_1000 value: 44.169000000000004 - type: ndcg_at_3 value: 33.184999999999995 - type: ndcg_at_5 value: 35.107 - type: precision_at_1 value: 28.834 - type: precision_at_10 value: 
5.7669999999999995 - type: precision_at_100 value: 0.876 - type: precision_at_1000 value: 0.11399999999999999 - type: precision_at_3 value: 14.213000000000001 - type: precision_at_5 value: 9.754999999999999 - type: recall_at_1 value: 25.749 - type: recall_at_10 value: 47.791 - type: recall_at_100 value: 68.255 - type: recall_at_1000 value: 85.749 - type: recall_at_3 value: 36.199 - type: recall_at_5 value: 41.071999999999996 - type: map_at_1 value: 17.777 - type: map_at_10 value: 25.201 - type: map_at_100 value: 26.423999999999996 - type: map_at_1000 value: 26.544 - type: map_at_3 value: 22.869 - type: map_at_5 value: 24.023 - type: mrr_at_1 value: 21.473 - type: mrr_at_10 value: 29.12 - type: mrr_at_100 value: 30.144 - type: mrr_at_1000 value: 30.215999999999998 - type: mrr_at_3 value: 26.933 - type: mrr_at_5 value: 28.051 - type: ndcg_at_1 value: 21.473 - type: ndcg_at_10 value: 30.003 - type: ndcg_at_100 value: 35.766 - type: ndcg_at_1000 value: 38.501000000000005 - type: ndcg_at_3 value: 25.773000000000003 - type: ndcg_at_5 value: 27.462999999999997 - type: precision_at_1 value: 21.473 - type: precision_at_10 value: 5.482 - type: precision_at_100 value: 0.975 - type: precision_at_1000 value: 0.13799999999999998 - type: precision_at_3 value: 12.205 - type: precision_at_5 value: 8.692 - type: recall_at_1 value: 17.777 - type: recall_at_10 value: 40.582 - type: recall_at_100 value: 66.305 - type: recall_at_1000 value: 85.636 - type: recall_at_3 value: 28.687 - type: recall_at_5 value: 33.089 - type: map_at_1 value: 26.677 - type: map_at_10 value: 36.309000000000005 - type: map_at_100 value: 37.403999999999996 - type: map_at_1000 value: 37.496 - type: map_at_3 value: 33.382 - type: map_at_5 value: 34.98 - type: mrr_at_1 value: 31.343 - type: mrr_at_10 value: 40.549 - type: mrr_at_100 value: 41.342 - type: mrr_at_1000 value: 41.397 - type: mrr_at_3 value: 38.029 - type: mrr_at_5 value: 39.451 - type: ndcg_at_1 value: 31.343 - type: ndcg_at_10 value: 42.1 - type: ndcg_at_100 value: 47.089999999999996 - type: ndcg_at_1000 value: 49.222 - type: ndcg_at_3 value: 36.836999999999996 - type: ndcg_at_5 value: 39.21 - type: precision_at_1 value: 31.343 - type: precision_at_10 value: 7.164 - type: precision_at_100 value: 1.0959999999999999 - type: precision_at_1000 value: 0.13899999999999998 - type: precision_at_3 value: 16.915 - type: precision_at_5 value: 11.940000000000001 - type: recall_at_1 value: 26.677 - type: recall_at_10 value: 55.54599999999999 - type: recall_at_100 value: 77.094 - type: recall_at_1000 value: 92.01 - type: recall_at_3 value: 41.191 - type: recall_at_5 value: 47.006 - type: map_at_1 value: 24.501 - type: map_at_10 value: 33.102 - type: map_at_100 value: 34.676 - type: map_at_1000 value: 34.888000000000005 - type: map_at_3 value: 29.944 - type: map_at_5 value: 31.613999999999997 - type: mrr_at_1 value: 29.447000000000003 - type: mrr_at_10 value: 37.996 - type: mrr_at_100 value: 38.946 - type: mrr_at_1000 value: 38.995000000000005 - type: mrr_at_3 value: 35.079 - type: mrr_at_5 value: 36.69 - type: ndcg_at_1 value: 29.447000000000003 - type: ndcg_at_10 value: 39.232 - type: ndcg_at_100 value: 45.247 - type: ndcg_at_1000 value: 47.613 - type: ndcg_at_3 value: 33.922999999999995 - type: ndcg_at_5 value: 36.284 - type: precision_at_1 value: 29.447000000000003 - type: precision_at_10 value: 7.648000000000001 - type: precision_at_100 value: 1.516 - type: precision_at_1000 value: 0.23900000000000002 - type: precision_at_3 value: 16.008 - type: precision_at_5 value: 11.779 - type: 
recall_at_1 value: 24.501 - type: recall_at_10 value: 51.18899999999999 - type: recall_at_100 value: 78.437 - type: recall_at_1000 value: 92.842 - type: recall_at_3 value: 35.808 - type: recall_at_5 value: 42.197 - type: map_at_1 value: 22.039 - type: map_at_10 value: 30.377 - type: map_at_100 value: 31.275 - type: map_at_1000 value: 31.379 - type: map_at_3 value: 27.98 - type: map_at_5 value: 29.358 - type: mrr_at_1 value: 24.03 - type: mrr_at_10 value: 32.568000000000005 - type: mrr_at_100 value: 33.403 - type: mrr_at_1000 value: 33.475 - type: mrr_at_3 value: 30.436999999999998 - type: mrr_at_5 value: 31.796000000000003 - type: ndcg_at_1 value: 24.03 - type: ndcg_at_10 value: 35.198 - type: ndcg_at_100 value: 39.668 - type: ndcg_at_1000 value: 42.296 - type: ndcg_at_3 value: 30.709999999999997 - type: ndcg_at_5 value: 33.024 - type: precision_at_1 value: 24.03 - type: precision_at_10 value: 5.564 - type: precision_at_100 value: 0.828 - type: precision_at_1000 value: 0.117 - type: precision_at_3 value: 13.309000000000001 - type: precision_at_5 value: 9.39 - type: recall_at_1 value: 22.039 - type: recall_at_10 value: 47.746 - type: recall_at_100 value: 68.23599999999999 - type: recall_at_1000 value: 87.852 - type: recall_at_3 value: 35.852000000000004 - type: recall_at_5 value: 41.410000000000004 - task: type: Retrieval dataset: name: MTEB ClimateFEVER type: climate-fever config: default split: test revision: None metrics: - type: map_at_1 value: 15.692999999999998 - type: map_at_10 value: 26.903 - type: map_at_100 value: 28.987000000000002 - type: map_at_1000 value: 29.176999999999996 - type: map_at_3 value: 22.137 - type: map_at_5 value: 24.758 - type: mrr_at_1 value: 35.57 - type: mrr_at_10 value: 47.821999999999996 - type: mrr_at_100 value: 48.608000000000004 - type: mrr_at_1000 value: 48.638999999999996 - type: mrr_at_3 value: 44.452000000000005 - type: mrr_at_5 value: 46.546 - type: ndcg_at_1 value: 35.57 - type: ndcg_at_10 value: 36.567 - type: ndcg_at_100 value: 44.085 - type: ndcg_at_1000 value: 47.24 - type: ndcg_at_3 value: 29.964000000000002 - type: ndcg_at_5 value: 32.511 - type: precision_at_1 value: 35.57 - type: precision_at_10 value: 11.485 - type: precision_at_100 value: 1.9619999999999997 - type: precision_at_1000 value: 0.256 - type: precision_at_3 value: 22.237000000000002 - type: precision_at_5 value: 17.471999999999998 - type: recall_at_1 value: 15.692999999999998 - type: recall_at_10 value: 43.056 - type: recall_at_100 value: 68.628 - type: recall_at_1000 value: 86.075 - type: recall_at_3 value: 26.918999999999997 - type: recall_at_5 value: 34.14 - task: type: Retrieval dataset: name: MTEB DBPedia type: dbpedia-entity config: default split: test revision: None metrics: - type: map_at_1 value: 9.53 - type: map_at_10 value: 20.951 - type: map_at_100 value: 30.136000000000003 - type: map_at_1000 value: 31.801000000000002 - type: map_at_3 value: 15.021 - type: map_at_5 value: 17.471999999999998 - type: mrr_at_1 value: 71.0 - type: mrr_at_10 value: 79.176 - type: mrr_at_100 value: 79.418 - type: mrr_at_1000 value: 79.426 - type: mrr_at_3 value: 78.125 - type: mrr_at_5 value: 78.61200000000001 - type: ndcg_at_1 value: 58.5 - type: ndcg_at_10 value: 44.106 - type: ndcg_at_100 value: 49.268 - type: ndcg_at_1000 value: 56.711999999999996 - type: ndcg_at_3 value: 48.934 - type: ndcg_at_5 value: 45.826 - type: precision_at_1 value: 71.0 - type: precision_at_10 value: 35.0 - type: precision_at_100 value: 11.360000000000001 - type: precision_at_1000 value: 2.046 - type: 
precision_at_3 value: 52.833 - type: precision_at_5 value: 44.15 - type: recall_at_1 value: 9.53 - type: recall_at_10 value: 26.811 - type: recall_at_100 value: 55.916999999999994 - type: recall_at_1000 value: 79.973 - type: recall_at_3 value: 16.413 - type: recall_at_5 value: 19.980999999999998 - task: type: Classification dataset: name: MTEB EmotionClassification type: mteb/emotion config: default split: test revision: 4f58c6b202a23cf9a4da393831edf4f9183cad37 metrics: - type: accuracy value: 51.519999999999996 - type: f1 value: 46.36601294761231 - task: type: Retrieval dataset: name: MTEB FEVER type: fever config: default split: test revision: None metrics: - type: map_at_1 value: 74.413 - type: map_at_10 value: 83.414 - type: map_at_100 value: 83.621 - type: map_at_1000 value: 83.635 - type: map_at_3 value: 82.337 - type: map_at_5 value: 83.039 - type: mrr_at_1 value: 80.19800000000001 - type: mrr_at_10 value: 87.715 - type: mrr_at_100 value: 87.778 - type: mrr_at_1000 value: 87.779 - type: mrr_at_3 value: 87.106 - type: mrr_at_5 value: 87.555 - type: ndcg_at_1 value: 80.19800000000001 - type: ndcg_at_10 value: 87.182 - type: ndcg_at_100 value: 87.90299999999999 - type: ndcg_at_1000 value: 88.143 - type: ndcg_at_3 value: 85.60600000000001 - type: ndcg_at_5 value: 86.541 - type: precision_at_1 value: 80.19800000000001 - type: precision_at_10 value: 10.531 - type: precision_at_100 value: 1.113 - type: precision_at_1000 value: 0.11499999999999999 - type: precision_at_3 value: 32.933 - type: precision_at_5 value: 20.429 - type: recall_at_1 value: 74.413 - type: recall_at_10 value: 94.363 - type: recall_at_100 value: 97.165 - type: recall_at_1000 value: 98.668 - type: recall_at_3 value: 90.108 - type: recall_at_5 value: 92.52 - task: type: Retrieval dataset: name: MTEB FiQA2018 type: fiqa config: default split: test revision: None metrics: - type: map_at_1 value: 22.701 - type: map_at_10 value: 37.122 - type: map_at_100 value: 39.178000000000004 - type: map_at_1000 value: 39.326 - type: map_at_3 value: 32.971000000000004 - type: map_at_5 value: 35.332 - type: mrr_at_1 value: 44.753 - type: mrr_at_10 value: 53.452 - type: mrr_at_100 value: 54.198 - type: mrr_at_1000 value: 54.225 - type: mrr_at_3 value: 50.952 - type: mrr_at_5 value: 52.464 - type: ndcg_at_1 value: 44.753 - type: ndcg_at_10 value: 45.021 - type: ndcg_at_100 value: 52.028 - type: ndcg_at_1000 value: 54.596000000000004 - type: ndcg_at_3 value: 41.622 - type: ndcg_at_5 value: 42.736000000000004 - type: precision_at_1 value: 44.753 - type: precision_at_10 value: 12.284 - type: precision_at_100 value: 1.955 - type: precision_at_1000 value: 0.243 - type: precision_at_3 value: 27.828999999999997 - type: precision_at_5 value: 20.061999999999998 - type: recall_at_1 value: 22.701 - type: recall_at_10 value: 51.432 - type: recall_at_100 value: 77.009 - type: recall_at_1000 value: 92.511 - type: recall_at_3 value: 37.919000000000004 - type: recall_at_5 value: 44.131 - task: type: Retrieval dataset: name: MTEB HotpotQA type: hotpotqa config: default split: test revision: None metrics: - type: map_at_1 value: 40.189 - type: map_at_10 value: 66.24600000000001 - type: map_at_100 value: 67.098 - type: map_at_1000 value: 67.149 - type: map_at_3 value: 62.684 - type: map_at_5 value: 64.974 - type: mrr_at_1 value: 80.378 - type: mrr_at_10 value: 86.127 - type: mrr_at_100 value: 86.29299999999999 - type: mrr_at_1000 value: 86.297 - type: mrr_at_3 value: 85.31400000000001 - type: mrr_at_5 value: 85.858 - type: ndcg_at_1 value: 80.378 - type: 
ndcg_at_10 value: 74.101 - type: ndcg_at_100 value: 76.993 - type: ndcg_at_1000 value: 77.948 - type: ndcg_at_3 value: 69.232 - type: ndcg_at_5 value: 72.04599999999999 - type: precision_at_1 value: 80.378 - type: precision_at_10 value: 15.595999999999998 - type: precision_at_100 value: 1.7840000000000003 - type: precision_at_1000 value: 0.191 - type: precision_at_3 value: 44.884 - type: precision_at_5 value: 29.145 - type: recall_at_1 value: 40.189 - type: recall_at_10 value: 77.981 - type: recall_at_100 value: 89.21 - type: recall_at_1000 value: 95.48299999999999 - type: recall_at_3 value: 67.326 - type: recall_at_5 value: 72.863 - task: type: Classification dataset: name: MTEB ImdbClassification type: mteb/imdb config: default split: test revision: 3d86128a09e091d6018b6d26cad27f2739fc2db7 metrics: - type: accuracy value: 92.84599999999999 - type: ap value: 89.4710787567357 - type: f1 value: 92.83752676932258 - task: type: Retrieval dataset: name: MTEB MSMARCO type: msmarco config: default split: dev revision: None metrics: - type: map_at_1 value: 23.132 - type: map_at_10 value: 35.543 - type: map_at_100 value: 36.702 - type: map_at_1000 value: 36.748999999999995 - type: map_at_3 value: 31.737 - type: map_at_5 value: 33.927 - type: mrr_at_1 value: 23.782 - type: mrr_at_10 value: 36.204 - type: mrr_at_100 value: 37.29 - type: mrr_at_1000 value: 37.330999999999996 - type: mrr_at_3 value: 32.458999999999996 - type: mrr_at_5 value: 34.631 - type: ndcg_at_1 value: 23.782 - type: ndcg_at_10 value: 42.492999999999995 - type: ndcg_at_100 value: 47.985 - type: ndcg_at_1000 value: 49.141 - type: ndcg_at_3 value: 34.748000000000005 - type: ndcg_at_5 value: 38.651 - type: precision_at_1 value: 23.782 - type: precision_at_10 value: 6.665 - type: precision_at_100 value: 0.941 - type: precision_at_1000 value: 0.104 - type: precision_at_3 value: 14.776 - type: precision_at_5 value: 10.84 - type: recall_at_1 value: 23.132 - type: recall_at_10 value: 63.794 - type: recall_at_100 value: 89.027 - type: recall_at_1000 value: 97.807 - type: recall_at_3 value: 42.765 - type: recall_at_5 value: 52.11 - task: type: Classification dataset: name: MTEB MTOPDomainClassification (en) type: mteb/mtop_domain config: en split: test revision: d80d48c1eb48d3562165c59d59d0034df9fff0bf metrics: - type: accuracy value: 94.59188326493388 - type: f1 value: 94.3842594786827 - task: type: Classification dataset: name: MTEB MTOPIntentClassification (en) type: mteb/mtop_intent config: en split: test revision: ae001d0e6b1228650b7bd1c2c65fb50ad11a8aba metrics: - type: accuracy value: 79.49384404924761 - type: f1 value: 59.7580539534629 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (en) type: mteb/amazon_massive_intent config: en split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 77.56220578345663 - type: f1 value: 75.27228165561478 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (en) type: mteb/amazon_massive_scenario config: en split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 80.53463349024884 - type: f1 value: 80.4893958236536 - task: type: Clustering dataset: name: MTEB MedrxivClusteringP2P type: mteb/medrxiv-clustering-p2p config: default split: test revision: e7a26af6f3ae46b30dde8737f02c07b1505bcc73 metrics: - type: v_measure value: 32.56100273484962 - task: type: Clustering dataset: name: MTEB MedrxivClusteringS2S type: mteb/medrxiv-clustering-s2s config: default split: test 
revision: 35191c8c0dca72d8ff3efcd72aa802307d469663 metrics: - type: v_measure value: 31.470380028839607 - task: type: Reranking dataset: name: MTEB MindSmallReranking type: mteb/mind_small config: default split: test revision: 3bdac13927fdc888b903db93b2ffdbd90b295a69 metrics: - type: map value: 32.06102792457849 - type: mrr value: 33.30709199672238 - task: type: Retrieval dataset: name: MTEB NFCorpus type: nfcorpus config: default split: test revision: None metrics: - type: map_at_1 value: 6.776999999999999 - type: map_at_10 value: 14.924000000000001 - type: map_at_100 value: 18.955 - type: map_at_1000 value: 20.538999999999998 - type: map_at_3 value: 10.982 - type: map_at_5 value: 12.679000000000002 - type: mrr_at_1 value: 47.988 - type: mrr_at_10 value: 57.232000000000006 - type: mrr_at_100 value: 57.818999999999996 - type: mrr_at_1000 value: 57.847 - type: mrr_at_3 value: 54.901999999999994 - type: mrr_at_5 value: 56.481 - type: ndcg_at_1 value: 46.594 - type: ndcg_at_10 value: 38.129000000000005 - type: ndcg_at_100 value: 35.54 - type: ndcg_at_1000 value: 44.172 - type: ndcg_at_3 value: 43.025999999999996 - type: ndcg_at_5 value: 41.052 - type: precision_at_1 value: 47.988 - type: precision_at_10 value: 28.111000000000004 - type: precision_at_100 value: 8.929 - type: precision_at_1000 value: 2.185 - type: precision_at_3 value: 40.144000000000005 - type: precision_at_5 value: 35.232 - type: recall_at_1 value: 6.776999999999999 - type: recall_at_10 value: 19.289 - type: recall_at_100 value: 36.359 - type: recall_at_1000 value: 67.54 - type: recall_at_3 value: 11.869 - type: recall_at_5 value: 14.999 - task: type: Retrieval dataset: name: MTEB NQ type: nq config: default split: test revision: None metrics: - type: map_at_1 value: 31.108000000000004 - type: map_at_10 value: 47.126000000000005 - type: map_at_100 value: 48.171 - type: map_at_1000 value: 48.199 - type: map_at_3 value: 42.734 - type: map_at_5 value: 45.362 - type: mrr_at_1 value: 34.936 - type: mrr_at_10 value: 49.571 - type: mrr_at_100 value: 50.345 - type: mrr_at_1000 value: 50.363 - type: mrr_at_3 value: 45.959 - type: mrr_at_5 value: 48.165 - type: ndcg_at_1 value: 34.936 - type: ndcg_at_10 value: 55.028999999999996 - type: ndcg_at_100 value: 59.244 - type: ndcg_at_1000 value: 59.861 - type: ndcg_at_3 value: 46.872 - type: ndcg_at_5 value: 51.217999999999996 - type: precision_at_1 value: 34.936 - type: precision_at_10 value: 9.099 - type: precision_at_100 value: 1.145 - type: precision_at_1000 value: 0.12 - type: precision_at_3 value: 21.456 - type: precision_at_5 value: 15.411 - type: recall_at_1 value: 31.108000000000004 - type: recall_at_10 value: 76.53999999999999 - type: recall_at_100 value: 94.39 - type: recall_at_1000 value: 98.947 - type: recall_at_3 value: 55.572 - type: recall_at_5 value: 65.525 - task: type: Retrieval dataset: name: MTEB QuoraRetrieval type: quora config: default split: test revision: None metrics: - type: map_at_1 value: 71.56400000000001 - type: map_at_10 value: 85.482 - type: map_at_100 value: 86.114 - type: map_at_1000 value: 86.13 - type: map_at_3 value: 82.607 - type: map_at_5 value: 84.405 - type: mrr_at_1 value: 82.42 - type: mrr_at_10 value: 88.304 - type: mrr_at_100 value: 88.399 - type: mrr_at_1000 value: 88.399 - type: mrr_at_3 value: 87.37 - type: mrr_at_5 value: 88.024 - type: ndcg_at_1 value: 82.45 - type: ndcg_at_10 value: 89.06500000000001 - type: ndcg_at_100 value: 90.232 - type: ndcg_at_1000 value: 90.305 - type: ndcg_at_3 value: 86.375 - type: ndcg_at_5 value: 
87.85300000000001 - type: precision_at_1 value: 82.45 - type: precision_at_10 value: 13.486999999999998 - type: precision_at_100 value: 1.534 - type: precision_at_1000 value: 0.157 - type: precision_at_3 value: 37.813 - type: precision_at_5 value: 24.773999999999997 - type: recall_at_1 value: 71.56400000000001 - type: recall_at_10 value: 95.812 - type: recall_at_100 value: 99.7 - type: recall_at_1000 value: 99.979 - type: recall_at_3 value: 87.966 - type: recall_at_5 value: 92.268 - task: type: Clustering dataset: name: MTEB RedditClustering type: mteb/reddit-clustering config: default split: test revision: 24640382cdbf8abc73003fb0fa6d111a705499eb metrics: - type: v_measure value: 57.241876648614145 - task: type: Clustering dataset: name: MTEB RedditClusteringP2P type: mteb/reddit-clustering-p2p config: default split: test revision: 282350215ef01743dc01b456c7f5241fa8937f16 metrics: - type: v_measure value: 64.66212576446223 - task: type: Retrieval dataset: name: MTEB SCIDOCS type: scidocs config: default split: test revision: None metrics: - type: map_at_1 value: 5.308 - type: map_at_10 value: 13.803 - type: map_at_100 value: 16.176 - type: map_at_1000 value: 16.561 - type: map_at_3 value: 9.761000000000001 - type: map_at_5 value: 11.802 - type: mrr_at_1 value: 26.200000000000003 - type: mrr_at_10 value: 37.621 - type: mrr_at_100 value: 38.767 - type: mrr_at_1000 value: 38.815 - type: mrr_at_3 value: 34.117 - type: mrr_at_5 value: 36.107 - type: ndcg_at_1 value: 26.200000000000003 - type: ndcg_at_10 value: 22.64 - type: ndcg_at_100 value: 31.567 - type: ndcg_at_1000 value: 37.623 - type: ndcg_at_3 value: 21.435000000000002 - type: ndcg_at_5 value: 18.87 - type: precision_at_1 value: 26.200000000000003 - type: precision_at_10 value: 11.74 - type: precision_at_100 value: 2.465 - type: precision_at_1000 value: 0.391 - type: precision_at_3 value: 20.033 - type: precision_at_5 value: 16.64 - type: recall_at_1 value: 5.308 - type: recall_at_10 value: 23.794999999999998 - type: recall_at_100 value: 50.015 - type: recall_at_1000 value: 79.283 - type: recall_at_3 value: 12.178 - type: recall_at_5 value: 16.882 - task: type: STS dataset: name: MTEB SICK-R type: mteb/sickr-sts config: default split: test revision: a6ea5a8cab320b040a23452cc28066d9beae2cee metrics: - type: cos_sim_pearson value: 84.93231134675553 - type: cos_sim_spearman value: 81.68319292603205 - type: euclidean_pearson value: 81.8396814380367 - type: euclidean_spearman value: 81.24641903349945 - type: manhattan_pearson value: 81.84698799204274 - type: manhattan_spearman value: 81.24269997904105 - task: type: STS dataset: name: MTEB STS12 type: mteb/sts12-sts config: default split: test revision: a0d554a64d88156834ff5ae9920b964011b16384 metrics: - type: cos_sim_pearson value: 86.73241671587446 - type: cos_sim_spearman value: 79.05091082971826 - type: euclidean_pearson value: 83.91146869578044 - type: euclidean_spearman value: 79.87978465370936 - type: manhattan_pearson value: 83.90888338917678 - type: manhattan_spearman value: 79.87482848584241 - task: type: STS dataset: name: MTEB STS13 type: mteb/sts13-sts config: default split: test revision: 7e90230a92c190f1bf69ae9002b8cea547a64cca metrics: - type: cos_sim_pearson value: 85.14970731146177 - type: cos_sim_spearman value: 86.37363490084627 - type: euclidean_pearson value: 83.02154218530433 - type: euclidean_spearman value: 83.80258761957367 - type: manhattan_pearson value: 83.01664495119347 - type: manhattan_spearman value: 83.77567458007952 - task: type: STS dataset: name: MTEB 
STS14 type: mteb/sts14-sts config: default split: test revision: 6031580fec1f6af667f0bd2da0a551cf4f0b2375 metrics: - type: cos_sim_pearson value: 83.40474139886784 - type: cos_sim_spearman value: 82.77768789165984 - type: euclidean_pearson value: 80.7065877443695 - type: euclidean_spearman value: 81.375940662505 - type: manhattan_pearson value: 80.6507552270278 - type: manhattan_spearman value: 81.32782179098741 - task: type: STS dataset: name: MTEB STS15 type: mteb/sts15-sts config: default split: test revision: ae752c7c21bf194d8b67fd573edf7ae58183cbe3 metrics: - type: cos_sim_pearson value: 87.08585968722274 - type: cos_sim_spearman value: 88.03110031451399 - type: euclidean_pearson value: 85.74012019602384 - type: euclidean_spearman value: 86.13592849438209 - type: manhattan_pearson value: 85.74404842369206 - type: manhattan_spearman value: 86.14492318960154 - task: type: STS dataset: name: MTEB STS16 type: mteb/sts16-sts config: default split: test revision: 4d8694f8f0e0100860b497b999b3dbed754a0513 metrics: - type: cos_sim_pearson value: 84.95069052788875 - type: cos_sim_spearman value: 86.4867991595147 - type: euclidean_pearson value: 84.31013325754635 - type: euclidean_spearman value: 85.01529258006482 - type: manhattan_pearson value: 84.26995570085374 - type: manhattan_spearman value: 84.96982104986162 - task: type: STS dataset: name: MTEB STS17 (en-en) type: mteb/sts17-crosslingual-sts config: en-en split: test revision: af5e6fb845001ecf41f4c1e033ce921939a2a68d metrics: - type: cos_sim_pearson value: 87.54617647971897 - type: cos_sim_spearman value: 87.49834181751034 - type: euclidean_pearson value: 86.01015322577122 - type: euclidean_spearman value: 84.63362652063199 - type: manhattan_pearson value: 86.13807574475706 - type: manhattan_spearman value: 84.7772370721132 - task: type: STS dataset: name: MTEB STS22 (en) type: mteb/sts22-crosslingual-sts config: en split: test revision: 6d1ba47164174a496b7fa5d3569dae26a6813b80 metrics: - type: cos_sim_pearson value: 67.20047755786615 - type: cos_sim_spearman value: 67.05324077987636 - type: euclidean_pearson value: 66.91930642976601 - type: euclidean_spearman value: 65.21491856099105 - type: manhattan_pearson value: 66.78756851976624 - type: manhattan_spearman value: 65.12356257740728 - task: type: STS dataset: name: MTEB STSBenchmark type: mteb/stsbenchmark-sts config: default split: test revision: b0fddb56ed78048fa8b90373c8a3cfc37b684831 metrics: - type: cos_sim_pearson value: 86.19852871539686 - type: cos_sim_spearman value: 87.5161895296395 - type: euclidean_pearson value: 84.59848645207485 - type: euclidean_spearman value: 85.26427328757919 - type: manhattan_pearson value: 84.59747366996524 - type: manhattan_spearman value: 85.24045855146915 - task: type: Reranking dataset: name: MTEB SciDocsRR type: mteb/scidocs-reranking config: default split: test revision: d3c5e1fc0b855ab6097bf1cda04dd73947d7caab metrics: - type: map value: 87.63320317811032 - type: mrr value: 96.26242947321379 - task: type: Retrieval dataset: name: MTEB SciFact type: scifact config: default split: test revision: None metrics: - type: map_at_1 value: 60.928000000000004 - type: map_at_10 value: 70.112 - type: map_at_100 value: 70.59299999999999 - type: map_at_1000 value: 70.623 - type: map_at_3 value: 66.846 - type: map_at_5 value: 68.447 - type: mrr_at_1 value: 64.0 - type: mrr_at_10 value: 71.212 - type: mrr_at_100 value: 71.616 - type: mrr_at_1000 value: 71.64500000000001 - type: mrr_at_3 value: 68.77799999999999 - type: mrr_at_5 value: 70.094 - type: 
ndcg_at_1 value: 64.0 - type: ndcg_at_10 value: 74.607 - type: ndcg_at_100 value: 76.416 - type: ndcg_at_1000 value: 77.102 - type: ndcg_at_3 value: 69.126 - type: ndcg_at_5 value: 71.41300000000001 - type: precision_at_1 value: 64.0 - type: precision_at_10 value: 9.933 - type: precision_at_100 value: 1.077 - type: precision_at_1000 value: 0.11299999999999999 - type: precision_at_3 value: 26.556 - type: precision_at_5 value: 17.467 - type: recall_at_1 value: 60.928000000000004 - type: recall_at_10 value: 87.322 - type: recall_at_100 value: 94.833 - type: recall_at_1000 value: 100.0 - type: recall_at_3 value: 72.628 - type: recall_at_5 value: 78.428 - task: type: PairClassification dataset: name: MTEB SprintDuplicateQuestions type: mteb/sprintduplicatequestions-pairclassification config: default split: test revision: d66bd1f72af766a5cc4b0ca5e00c162f89e8cc46 metrics: - type: cos_sim_accuracy value: 99.86237623762376 - type: cos_sim_ap value: 96.72586477206649 - type: cos_sim_f1 value: 93.01858362631845 - type: cos_sim_precision value: 93.4409687184662 - type: cos_sim_recall value: 92.60000000000001 - type: dot_accuracy value: 99.78019801980199 - type: dot_ap value: 93.72748205246228 - type: dot_f1 value: 89.04109589041096 - type: dot_precision value: 87.16475095785441 - type: dot_recall value: 91.0 - type: euclidean_accuracy value: 99.85445544554456 - type: euclidean_ap value: 96.6661459876145 - type: euclidean_f1 value: 92.58337481333997 - type: euclidean_precision value: 92.17046580773042 - type: euclidean_recall value: 93.0 - type: manhattan_accuracy value: 99.85445544554456 - type: manhattan_ap value: 96.6883549244056 - type: manhattan_f1 value: 92.57598405580468 - type: manhattan_precision value: 92.25422045680239 - type: manhattan_recall value: 92.9 - type: max_accuracy value: 99.86237623762376 - type: max_ap value: 96.72586477206649 - type: max_f1 value: 93.01858362631845 - task: type: Clustering dataset: name: MTEB StackExchangeClustering type: mteb/stackexchange-clustering config: default split: test revision: 6cbc1f7b2bc0622f2e39d2c77fa502909748c259 metrics: - type: v_measure value: 66.39930057069995 - task: type: Clustering dataset: name: MTEB StackExchangeClusteringP2P type: mteb/stackexchange-clustering-p2p config: default split: test revision: 815ca46b2622cec33ccafc3735d572c266efdb44 metrics: - type: v_measure value: 34.96398659903402 - task: type: Reranking dataset: name: MTEB StackOverflowDupQuestions type: mteb/stackoverflowdupquestions-reranking config: default split: test revision: e185fbe320c72810689fc5848eb6114e1ef5ec69 metrics: - type: map value: 55.946944700355395 - type: mrr value: 56.97151398438164 - task: type: Summarization dataset: name: MTEB SummEval type: mteb/summeval config: default split: test revision: cda12ad7615edc362dbf25a00fdd61d3b1eaf93c metrics: - type: cos_sim_pearson value: 31.541657650692905 - type: cos_sim_spearman value: 31.605804192286303 - type: dot_pearson value: 28.26905996736398 - type: dot_spearman value: 27.864801765851187 - task: type: Retrieval dataset: name: MTEB TRECCOVID type: trec-covid config: default split: test revision: None metrics: - type: map_at_1 value: 0.22599999999999998 - type: map_at_10 value: 1.8870000000000002 - type: map_at_100 value: 9.78 - type: map_at_1000 value: 22.514 - type: map_at_3 value: 0.6669999999999999 - type: map_at_5 value: 1.077 - type: mrr_at_1 value: 82.0 - type: mrr_at_10 value: 89.86699999999999 - type: mrr_at_100 value: 89.86699999999999 - type: mrr_at_1000 value: 89.86699999999999 - type: mrr_at_3 
value: 89.667 - type: mrr_at_5 value: 89.667 - type: ndcg_at_1 value: 79.0 - type: ndcg_at_10 value: 74.818 - type: ndcg_at_100 value: 53.715999999999994 - type: ndcg_at_1000 value: 47.082 - type: ndcg_at_3 value: 82.134 - type: ndcg_at_5 value: 79.81899999999999 - type: precision_at_1 value: 82.0 - type: precision_at_10 value: 78.0 - type: precision_at_100 value: 54.48 - type: precision_at_1000 value: 20.518 - type: precision_at_3 value: 87.333 - type: precision_at_5 value: 85.2 - type: recall_at_1 value: 0.22599999999999998 - type: recall_at_10 value: 2.072 - type: recall_at_100 value: 13.013 - type: recall_at_1000 value: 43.462 - type: recall_at_3 value: 0.695 - type: recall_at_5 value: 1.139 - task: type: Retrieval dataset: name: MTEB Touche2020 type: webis-touche2020 config: default split: test revision: None metrics: - type: map_at_1 value: 2.328 - type: map_at_10 value: 9.795 - type: map_at_100 value: 15.801000000000002 - type: map_at_1000 value: 17.23 - type: map_at_3 value: 4.734 - type: map_at_5 value: 6.644 - type: mrr_at_1 value: 30.612000000000002 - type: mrr_at_10 value: 46.902 - type: mrr_at_100 value: 47.495 - type: mrr_at_1000 value: 47.495 - type: mrr_at_3 value: 41.156 - type: mrr_at_5 value: 44.218 - type: ndcg_at_1 value: 28.571 - type: ndcg_at_10 value: 24.806 - type: ndcg_at_100 value: 36.419000000000004 - type: ndcg_at_1000 value: 47.272999999999996 - type: ndcg_at_3 value: 25.666 - type: ndcg_at_5 value: 25.448999999999998 - type: precision_at_1 value: 30.612000000000002 - type: precision_at_10 value: 23.061 - type: precision_at_100 value: 7.714 - type: precision_at_1000 value: 1.484 - type: precision_at_3 value: 26.531 - type: precision_at_5 value: 26.122 - type: recall_at_1 value: 2.328 - type: recall_at_10 value: 16.524 - type: recall_at_100 value: 47.179 - type: recall_at_1000 value: 81.22200000000001 - type: recall_at_3 value: 5.745 - type: recall_at_5 value: 9.339 - task: type: Classification dataset: name: MTEB ToxicConversationsClassification type: mteb/toxic_conversations_50k config: default split: test revision: d7c0de2777da35d6aae2200a62c6e0e5af397c4c metrics: - type: accuracy value: 70.9142 - type: ap value: 14.335574772555415 - type: f1 value: 54.62839595194111 - task: type: Classification dataset: name: MTEB TweetSentimentExtractionClassification type: mteb/tweet_sentiment_extraction config: default split: test revision: d604517c81ca91fe16a244d1248fc021f9ecee7a metrics: - type: accuracy value: 59.94340690435768 - type: f1 value: 60.286487936731916 - task: type: Clustering dataset: name: MTEB TwentyNewsgroupsClustering type: mteb/twentynewsgroups-clustering config: default split: test revision: 6125ec4e24fa026cec8a478383ee943acfbd5449 metrics: - type: v_measure value: 51.26597708987974 - task: type: PairClassification dataset: name: MTEB TwitterSemEval2015 type: mteb/twittersemeval2015-pairclassification config: default split: test revision: 70970daeab8776df92f5ea462b6173c0b46fd2d1 metrics: - type: cos_sim_accuracy value: 87.48882398521786 - type: cos_sim_ap value: 79.04326607602204 - type: cos_sim_f1 value: 71.64566826860633 - type: cos_sim_precision value: 70.55512918905092 - type: cos_sim_recall value: 72.77044854881267 - type: dot_accuracy value: 84.19264469213805 - type: dot_ap value: 67.96360043562528 - type: dot_f1 value: 64.06418393006827 - type: dot_precision value: 58.64941898706424 - type: dot_recall value: 70.58047493403694 - type: euclidean_accuracy value: 87.45902127913214 - type: euclidean_ap value: 78.9742237648272 - type: euclidean_f1 
value: 71.5553235908142 - type: euclidean_precision value: 70.77955601445535 - type: euclidean_recall value: 72.34828496042216 - type: manhattan_accuracy value: 87.41729749061214 - type: manhattan_ap value: 78.90073137580596 - type: manhattan_f1 value: 71.3942611553533 - type: manhattan_precision value: 68.52705653967483 - type: manhattan_recall value: 74.51187335092348 - type: max_accuracy value: 87.48882398521786 - type: max_ap value: 79.04326607602204 - type: max_f1 value: 71.64566826860633 - task: type: PairClassification dataset: name: MTEB TwitterURLCorpus type: mteb/twitterurlcorpus-pairclassification config: default split: test revision: 8b6510b0b1fa4e4c4f879467980e9be563ec1cdf metrics: - type: cos_sim_accuracy value: 88.68125897465751 - type: cos_sim_ap value: 85.6003454431979 - type: cos_sim_f1 value: 77.6957163958641 - type: cos_sim_precision value: 73.0110366307807 - type: cos_sim_recall value: 83.02279026793964 - type: dot_accuracy value: 87.7672992587418 - type: dot_ap value: 82.4971301112899 - type: dot_f1 value: 75.90528233151184 - type: dot_precision value: 72.0370626469368 - type: dot_recall value: 80.21250384970742 - type: euclidean_accuracy value: 88.4503434625684 - type: euclidean_ap value: 84.91949884748384 - type: euclidean_f1 value: 76.92365018444684 - type: euclidean_precision value: 74.53245721712759 - type: euclidean_recall value: 79.47336002463813 - type: manhattan_accuracy value: 88.47556952691427 - type: manhattan_ap value: 84.8963689101517 - type: manhattan_f1 value: 76.85901249256395 - type: manhattan_precision value: 74.31693989071039 - type: manhattan_recall value: 79.58115183246073 - type: max_accuracy value: 88.68125897465751 - type: max_ap value: 85.6003454431979 - type: max_f1 value: 77.6957163958641 --- <h1 align="center">FlagEmbedding</h1> <h4 align="center"> <p> <a href=#model-list>Model List</a> | <a href=#frequently-asked-questions>FAQ</a> | <a href=#usage>Usage</a> | <a href="#evaluation">Evaluation</a> | <a href="#train">Train</a> | <a href="#contact">Contact</a> | <a href="#citation">Citation</a> | <a href="#license">License</a> <p> </h4> For more details please refer to our Github: [FlagEmbedding](https://github.com/FlagOpen/FlagEmbedding). If you are looking for a model that supports more languages, longer texts, and other retrieval methods, you can try using [bge-m3](https://huggingface.co/BAAI/bge-m3). [English](README.md) | [中文](https://github.com/FlagOpen/FlagEmbedding/blob/master/README_zh.md) FlagEmbedding focuses on retrieval-augmented LLMs, consisting of the following projects currently: - **Long-Context LLM**: [Activation Beacon](https://github.com/FlagOpen/FlagEmbedding/tree/master/Long_LLM/activation_beacon) - **Fine-tuning of LM** : [LM-Cocktail](https://github.com/FlagOpen/FlagEmbedding/tree/master/LM_Cocktail) - **Dense Retrieval**: [BGE-M3](https://github.com/FlagOpen/FlagEmbedding/tree/master/FlagEmbedding/BGE_M3), [LLM Embedder](https://github.com/FlagOpen/FlagEmbedding/tree/master/FlagEmbedding/llm_embedder), [BGE Embedding](https://github.com/FlagOpen/FlagEmbedding/tree/master/FlagEmbedding/baai_general_embedding) - **Reranker Model**: [BGE Reranker](https://github.com/FlagOpen/FlagEmbedding/tree/master/FlagEmbedding/reranker) - **Benchmark**: [C-MTEB](https://github.com/FlagOpen/FlagEmbedding/tree/master/C_MTEB) ## News - 1/30/2024: Release **BGE-M3**, a new member to BGE model series! 
M3 stands for **M**ulti-Linguality (100+ languages), **M**ulti-Granularity (input length up to 8192), and **M**ulti-Functionality (unification of dense, lexical, and multi-vector/ColBERT retrieval). It is the first embedding model that supports all three retrieval methods, achieving new SOTA on multilingual (MIRACL) and cross-lingual (MKQA) benchmarks. [Technical Report](https://github.com/FlagOpen/FlagEmbedding/blob/master/FlagEmbedding/BGE_M3/BGE_M3.pdf) and [Code](https://github.com/FlagOpen/FlagEmbedding/tree/master/FlagEmbedding/BGE_M3). :fire:
- 1/9/2024: Release [Activation-Beacon](https://github.com/FlagOpen/FlagEmbedding/tree/master/Long_LLM/activation_beacon), an effective, efficient, compatible, and low-cost (training) method to extend the context length of LLMs. [Technical Report](https://arxiv.org/abs/2401.03462) :fire:
- 12/24/2023: Release **LLaRA**, a LLaMA-7B-based dense retriever that achieves state-of-the-art performance on MS MARCO and BEIR. Model and code will be open-sourced. Please stay tuned. [Technical Report](https://arxiv.org/abs/2312.15503) :fire:
- 11/23/2023: Release [LM-Cocktail](https://github.com/FlagOpen/FlagEmbedding/tree/master/LM_Cocktail), a method to maintain general capabilities during fine-tuning by merging multiple language models. [Technical Report](https://arxiv.org/abs/2311.13534) :fire:
- 10/12/2023: Release [LLM-Embedder](https://github.com/FlagOpen/FlagEmbedding/tree/master/FlagEmbedding/llm_embedder), a unified embedding model to support diverse retrieval augmentation needs for LLMs. [Technical Report](https://arxiv.org/pdf/2310.07554.pdf)
- 09/15/2023: The [technical report](https://arxiv.org/pdf/2309.07597.pdf) and [massive training data](https://data.baai.ac.cn/details/BAAI-MTP) of BGE have been released.
- 09/12/2023: New models:
  - **New reranker models**: release the cross-encoder models `BAAI/bge-reranker-base` and `BAAI/bge-reranker-large`, which are more powerful than the embedding models. We recommend using or fine-tuning them to re-rank the top-k documents returned by embedding models.
  - **Updated embedding models**: release the `bge-*-v1.5` embedding models to alleviate the issue of the similarity distribution and enhance retrieval ability without instructions.
<details>
<summary>More</summary>
<!-- ### More -->
- 09/07/2023: Update the [fine-tune code](https://github.com/FlagOpen/FlagEmbedding/blob/master/FlagEmbedding/baai_general_embedding/README.md): add a script to mine hard negatives and support adding instructions during fine-tuning.
- 08/09/2023: BGE models are integrated into **Langchain**; you can use them like [this](#using-langchain). The C-MTEB **leaderboard** is [available](https://huggingface.co/spaces/mteb/leaderboard).
- 08/05/2023: Release base-scale and small-scale models with the **best performance among models of the same size 🤗**
- 08/02/2023: Release the `bge-large-*` (short for BAAI General Embedding) models, which **rank 1st on the MTEB and C-MTEB benchmarks!** :tada: :tada:
- 08/01/2023: We release the [Chinese Massive Text Embedding Benchmark](https://github.com/FlagOpen/FlagEmbedding/blob/master/C_MTEB) (**C-MTEB**), consisting of 31 test datasets.
</details>

## Model List

`bge` is short for `BAAI general embedding`.
| Model | Language | | Description | query instruction for retrieval [1] | |:-------------------------------|:--------:| :--------:| :--------:|:--------:| | [BAAI/bge-m3](https://huggingface.co/BAAI/bge-m3) | Multilingual | [Inference](https://github.com/FlagOpen/FlagEmbedding/tree/master/FlagEmbedding/BGE_M3#usage) [Fine-tune](https://github.com/FlagOpen/FlagEmbedding/tree/master/FlagEmbedding/BGE_M3) | Multi-Functionality(dense retrieval, sparse retrieval, multi-vector(colbert)), Multi-Linguality, and Multi-Granularity(8192 tokens) | | | [BAAI/llm-embedder](https://huggingface.co/BAAI/llm-embedder) | English | [Inference](./FlagEmbedding/llm_embedder/README.md) [Fine-tune](./FlagEmbedding/llm_embedder/README.md) | a unified embedding model to support diverse retrieval augmentation needs for LLMs | See [README](./FlagEmbedding/llm_embedder/README.md) | | [BAAI/bge-reranker-large](https://huggingface.co/BAAI/bge-reranker-large) | Chinese and English | [Inference](#usage-for-reranker) [Fine-tune](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/reranker) | a cross-encoder model which is more accurate but less efficient [2] | | | [BAAI/bge-reranker-base](https://huggingface.co/BAAI/bge-reranker-base) | Chinese and English | [Inference](#usage-for-reranker) [Fine-tune](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/reranker) | a cross-encoder model which is more accurate but less efficient [2] | | | [BAAI/bge-large-en-v1.5](https://huggingface.co/BAAI/bge-large-en-v1.5) | English | [Inference](#usage-for-embedding-model) [Fine-tune](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/finetune) | version 1.5 with more reasonable similarity distribution | `Represent this sentence for searching relevant passages: ` | | [BAAI/bge-base-en-v1.5](https://huggingface.co/BAAI/bge-base-en-v1.5) | English | [Inference](#usage-for-embedding-model) [Fine-tune](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/finetune) | version 1.5 with more reasonable similarity distribution | `Represent this sentence for searching relevant passages: ` | | [BAAI/bge-small-en-v1.5](https://huggingface.co/BAAI/bge-small-en-v1.5) | English | [Inference](#usage-for-embedding-model) [Fine-tune](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/finetune) | version 1.5 with more reasonable similarity distribution | `Represent this sentence for searching relevant passages: ` | | [BAAI/bge-large-zh-v1.5](https://huggingface.co/BAAI/bge-large-zh-v1.5) | Chinese | [Inference](#usage-for-embedding-model) [Fine-tune](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/finetune) | version 1.5 with more reasonable similarity distribution | `为这个句子生成表示以用于检索相关文章:` | | [BAAI/bge-base-zh-v1.5](https://huggingface.co/BAAI/bge-base-zh-v1.5) | Chinese | [Inference](#usage-for-embedding-model) [Fine-tune](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/finetune) | version 1.5 with more reasonable similarity distribution | `为这个句子生成表示以用于检索相关文章:` | | [BAAI/bge-small-zh-v1.5](https://huggingface.co/BAAI/bge-small-zh-v1.5) | Chinese | [Inference](#usage-for-embedding-model) [Fine-tune](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/finetune) | version 1.5 with more reasonable similarity distribution | `为这个句子生成表示以用于检索相关文章:` | | [BAAI/bge-large-en](https://huggingface.co/BAAI/bge-large-en) | English | [Inference](#usage-for-embedding-model) [Fine-tune](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/finetune) | :trophy: rank 
**1st** in [MTEB](https://huggingface.co/spaces/mteb/leaderboard) leaderboard | `Represent this sentence for searching relevant passages: ` |
| [BAAI/bge-base-en](https://huggingface.co/BAAI/bge-base-en) | English | [Inference](#usage-for-embedding-model) [Fine-tune](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/finetune) | a base-scale model but with similar ability to `bge-large-en` | `Represent this sentence for searching relevant passages: ` |
| [BAAI/bge-small-en](https://huggingface.co/BAAI/bge-small-en) | English | [Inference](#usage-for-embedding-model) [Fine-tune](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/finetune) | a small-scale model but with competitive performance | `Represent this sentence for searching relevant passages: ` |
| [BAAI/bge-large-zh](https://huggingface.co/BAAI/bge-large-zh) | Chinese | [Inference](#usage-for-embedding-model) [Fine-tune](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/finetune) | :trophy: rank **1st** in [C-MTEB](https://github.com/FlagOpen/FlagEmbedding/tree/master/C_MTEB) benchmark | `为这个句子生成表示以用于检索相关文章:` |
| [BAAI/bge-base-zh](https://huggingface.co/BAAI/bge-base-zh) | Chinese | [Inference](#usage-for-embedding-model) [Fine-tune](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/finetune) | a base-scale model but with similar ability to `bge-large-zh` | `为这个句子生成表示以用于检索相关文章:` |
| [BAAI/bge-small-zh](https://huggingface.co/BAAI/bge-small-zh) | Chinese | [Inference](#usage-for-embedding-model) [Fine-tune](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/finetune) | a small-scale model but with competitive performance | `为这个句子生成表示以用于检索相关文章:` |

[1\]: If you need to search for relevant passages for a query, we suggest adding the instruction to the query; in other cases, no instruction is needed, and you can just use the original query directly. In all cases, **no instruction** needs to be added to passages.

[2\]: Unlike an embedding model, a reranker takes a question and a document as input and directly outputs a similarity score instead of an embedding. To balance accuracy and time cost, a cross-encoder is widely used to re-rank the top-k documents retrieved by simpler models. For example, use a bge embedding model to retrieve the top 100 relevant documents, and then use a bge reranker to re-rank those 100 documents to get the final top-3 results.

All models have been uploaded to the Hugging Face Hub, and you can see them at https://huggingface.co/BAAI. If you cannot open the Hugging Face Hub, you can also download the models at https://model.baai.ac.cn/models.

## Frequently asked questions

<details>
<summary>1. How to fine-tune bge embedding model?</summary>

<!-- ### How to fine-tune bge embedding model? -->
Follow this [example](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/finetune) to prepare data and fine-tune your model.
Some suggestions:
- Mine hard negatives following this [example](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/finetune#hard-negatives), which can improve retrieval performance.
- If you pre-train bge on your data, the pre-trained model cannot be used directly to calculate similarity; it must be fine-tuned with contrastive learning before computing similarity.
- If the accuracy of the fine-tuned model is still not high, it is recommended to use/fine-tune the cross-encoder model (bge-reranker) to re-rank the top-k results. Hard negatives are also needed to fine-tune the reranker.

</details>

<details>
<summary>2.
The similarity score between two dissimilar sentences is higher than 0.5</summary>

<!-- ### The similarity score between two dissimilar sentences is higher than 0.5 -->
**We suggest using bge v1.5, which alleviates the issue of the similarity distribution.**

Since we fine-tune the models with contrastive learning at a temperature of 0.01, the similarity distribution of the current BGE model is roughly in the interval \[0.6, 1\]. So a similarity score greater than 0.5 does not indicate that the two sentences are similar.

For downstream tasks, such as passage retrieval or semantic similarity, **what matters is the relative order of the scores, not the absolute value.**
If you need to filter similar sentences based on a similarity threshold, please select an appropriate threshold based on the similarity distribution on your data (such as 0.8, 0.85, or even 0.9).

</details>

<details>
<summary>3. When does the query instruction need to be used</summary>

<!-- ### When does the query instruction need to be used -->
For `bge-*-v1.5`, we improved its retrieval ability when no instruction is used. Using no instruction causes only a slight degradation in retrieval performance compared with using one, so for convenience you can generate embeddings without an instruction in all cases.

For a retrieval task that uses short queries to find long related documents, it is recommended to add instructions to these short queries.
**The best way to decide whether to add instructions to queries is to choose the setting that achieves better performance on your task.**
In all cases, no instruction needs to be added to the documents/passages.

</details>

## Usage

### Usage for Embedding Model

Here are some examples of using `bge` models with
[FlagEmbedding](#using-flagembedding), [Sentence-Transformers](#using-sentence-transformers), [Langchain](#using-langchain), or [Huggingface Transformers](#using-huggingface-transformers).

#### Using FlagEmbedding
```
pip install -U FlagEmbedding
```
If it doesn't work for you, see [FlagEmbedding](https://github.com/FlagOpen/FlagEmbedding/blob/master/FlagEmbedding/baai_general_embedding/README.md) for more ways to install FlagEmbedding.

```python
from FlagEmbedding import FlagModel
sentences_1 = ["样例数据-1", "样例数据-2"]
sentences_2 = ["样例数据-3", "样例数据-4"]
model = FlagModel('BAAI/bge-large-zh-v1.5',
                  query_instruction_for_retrieval="为这个句子生成表示以用于检索相关文章:",
                  use_fp16=True) # Setting use_fp16 to True speeds up computation with a slight performance degradation
embeddings_1 = model.encode(sentences_1)
embeddings_2 = model.encode(sentences_2)
similarity = embeddings_1 @ embeddings_2.T
print(similarity)

# for an s2p (short query to long passage) retrieval task, we suggest using encode_queries(), which will automatically add the instruction to each query
# the corpus in a retrieval task can still use encode() or encode_corpus(), since passages don't need the instruction
queries = ['query_1', 'query_2']
passages = ["样例文档-1", "样例文档-2"]
q_embeddings = model.encode_queries(queries)
p_embeddings = model.encode(passages)
scores = q_embeddings @ p_embeddings.T
```
For the value of the argument `query_instruction_for_retrieval`, see [Model List](https://github.com/FlagOpen/FlagEmbedding/tree/master#model-list).

By default, FlagModel will use all available GPUs when encoding. Please set `os.environ["CUDA_VISIBLE_DEVICES"]` to select specific GPUs.
You can also set `os.environ["CUDA_VISIBLE_DEVICES"]=""` to make all GPUs unavailable.
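As a concrete illustration, here is a minimal sketch of restricting encoding to a single GPU by setting the environment variable before the model is created (the device index `"0"` is only an example):

```python
import os

# Must be set before FlagEmbedding/torch initializes CUDA; "0" is just an example device index.
os.environ["CUDA_VISIBLE_DEVICES"] = "0"

from FlagEmbedding import FlagModel

model = FlagModel('BAAI/bge-large-zh-v1.5',
                  query_instruction_for_retrieval="为这个句子生成表示以用于检索相关文章:",
                  use_fp16=True)
embeddings = model.encode(["样例数据-1", "样例数据-2"])
print(embeddings.shape)  # (2, embedding_dim)
```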
#### Using Sentence-Transformers

You can also use the `bge` models with [sentence-transformers](https://www.SBERT.net):
```
pip install -U sentence-transformers
```
```python
from sentence_transformers import SentenceTransformer
sentences_1 = ["样例数据-1", "样例数据-2"]
sentences_2 = ["样例数据-3", "样例数据-4"]
model = SentenceTransformer('BAAI/bge-large-zh-v1.5')
embeddings_1 = model.encode(sentences_1, normalize_embeddings=True)
embeddings_2 = model.encode(sentences_2, normalize_embeddings=True)
similarity = embeddings_1 @ embeddings_2.T
print(similarity)
```
For an s2p (short query to long passage) retrieval task, each short query should start with an instruction (see [Model List](https://github.com/FlagOpen/FlagEmbedding/tree/master#model-list) for the instructions). But the instruction is not needed for passages.
```python
from sentence_transformers import SentenceTransformer
queries = ['query_1', 'query_2']
passages = ["样例文档-1", "样例文档-2"]
instruction = "为这个句子生成表示以用于检索相关文章:"

model = SentenceTransformer('BAAI/bge-large-zh-v1.5')
q_embeddings = model.encode([instruction+q for q in queries], normalize_embeddings=True)
p_embeddings = model.encode(passages, normalize_embeddings=True)
scores = q_embeddings @ p_embeddings.T
```
#### Using Langchain

You can use `bge` in LangChain like this:
```python
from langchain.embeddings import HuggingFaceBgeEmbeddings
model_name = "BAAI/bge-large-en-v1.5"
model_kwargs = {'device': 'cuda'}
encode_kwargs = {'normalize_embeddings': True} # set True to compute cosine similarity
model = HuggingFaceBgeEmbeddings(
    model_name=model_name,
    model_kwargs=model_kwargs,
    encode_kwargs=encode_kwargs,
    query_instruction="为这个句子生成表示以用于检索相关文章:"
)
model.query_instruction = "为这个句子生成表示以用于检索相关文章:"
```

#### Using HuggingFace Transformers

With the transformers package, you can use the model like this: first, pass your input through the transformer model, then select the last hidden state of the first token (i.e., [CLS]) as the sentence embedding.

```python
from transformers import AutoTokenizer, AutoModel
import torch
# Sentences we want sentence embeddings for
sentences = ["样例数据-1", "样例数据-2"]

# Load model from HuggingFace Hub
tokenizer = AutoTokenizer.from_pretrained('BAAI/bge-large-zh-v1.5')
model = AutoModel.from_pretrained('BAAI/bge-large-zh-v1.5')
model.eval()

# Tokenize sentences
encoded_input = tokenizer(sentences, padding=True, truncation=True, return_tensors='pt')
# for an s2p (short query to long passage) retrieval task, add an instruction to the query (do not add an instruction to passages)
# encoded_input = tokenizer([instruction + q for q in queries], padding=True, truncation=True, return_tensors='pt')

# Compute token embeddings
with torch.no_grad():
    model_output = model(**encoded_input)
    # Perform pooling. In this case, cls pooling.
    sentence_embeddings = model_output[0][:, 0]
# normalize embeddings
sentence_embeddings = torch.nn.functional.normalize(sentence_embeddings, p=2, dim=1)
print("Sentence embeddings:", sentence_embeddings)
```

#### Usage of the ONNX files

```python
from optimum.onnxruntime import ORTModelForFeatureExtraction  # type: ignore
import torch
from transformers import AutoModel, AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained('BAAI/bge-large-en-v1.5')
model = AutoModel.from_pretrained('BAAI/bge-large-en-v1.5', revision="refs/pr/13")
model_ort = ORTModelForFeatureExtraction.from_pretrained('BAAI/bge-large-en-v1.5', revision="refs/pr/13", file_name="onnx/model.onnx")

# Sentences we want sentence embeddings for
sentences = ["样例数据-1", "样例数据-2"]

# Tokenize sentences
encoded_input = tokenizer(sentences, padding=True, truncation=True, return_tensors='pt')
# for an s2p (short query to long passage) retrieval task, add an instruction to the query (do not add an instruction to passages)
# encoded_input = tokenizer([instruction + q for q in queries], padding=True, truncation=True, return_tensors='pt')

model_output_ort = model_ort(**encoded_input)
# Compute token embeddings
with torch.no_grad():
    model_output = model(**encoded_input)

# model_output and model_output_ort are identical
```

It's also possible to deploy the ONNX files with the [infinity_emb](https://github.com/michaelfeil/infinity) pip package.
```python
import asyncio
from infinity_emb import AsyncEmbeddingEngine, EngineArgs

sentences = ["Embed this sentence via Infinity.", "Paris is in France."]
engine = AsyncEmbeddingEngine.from_args(
    EngineArgs(model_name_or_path="BAAI/bge-large-en-v1.5", device="cpu", engine="optimum")  # or engine="torch"
)

async def main():
    async with engine:
        embeddings, usage = await engine.embed(sentences=sentences)
asyncio.run(main())
```

### Usage for Reranker

Different from the embedding model, a reranker takes a question and a document as input and directly outputs a similarity score instead of an embedding. You can get a relevance score by feeding a query and a passage to the reranker. The reranker is optimized based on cross-entropy loss, so the relevance score is not bounded to a specific range.
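If you prefer scores in a fixed range (for example, to apply a threshold), one common option is to map the raw scores through a sigmoid. This is a minimal sketch with example score values, not something the reranker itself requires; the relative ordering of the scores is preserved:

```python
import torch

# Example raw relevance scores as returned by the reranker (any real values are possible).
raw_scores = torch.tensor([-2.3, 0.1, 5.7])

# Squash the unbounded scores into (0, 1); the relative order is unchanged.
normalized_scores = torch.sigmoid(raw_scores)
print(normalized_scores)
```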
#### Using FlagEmbedding ``` pip install -U FlagEmbedding ``` Get relevance scores (higher scores indicate more relevance): ```python from FlagEmbedding import FlagReranker reranker = FlagReranker('BAAI/bge-reranker-large', use_fp16=True) # Setting use_fp16 to True speeds up computation with a slight performance degradation score = reranker.compute_score(['query', 'passage']) print(score) scores = reranker.compute_score([['what is panda?', 'hi'], ['what is panda?', 'The giant panda (Ailuropoda melanoleuca), sometimes called a panda bear or simply panda, is a bear species endemic to China.']]) print(scores) ``` #### Using Huggingface transformers ```python import torch from transformers import AutoModelForSequenceClassification, AutoTokenizer tokenizer = AutoTokenizer.from_pretrained('BAAI/bge-reranker-large') model = AutoModelForSequenceClassification.from_pretrained('BAAI/bge-reranker-large') model.eval() pairs = [['what is panda?', 'hi'], ['what is panda?', 'The giant panda (Ailuropoda melanoleuca), sometimes called a panda bear or simply panda, is a bear species endemic to China.']] with torch.no_grad(): inputs = tokenizer(pairs, padding=True, truncation=True, return_tensors='pt', max_length=512) scores = model(**inputs, return_dict=True).logits.view(-1, ).float() print(scores) ``` ## Evaluation `baai-general-embedding` models achieve **state-of-the-art performance on both MTEB and C-MTEB leaderboard!** For more details and evaluation tools see our [scripts](https://github.com/FlagOpen/FlagEmbedding/blob/master/C_MTEB/README.md). - **MTEB**: | Model Name | Dimension | Sequence Length | Average (56) | Retrieval (15) |Clustering (11) | Pair Classification (3) | Reranking (4) | STS (10) | Summarization (1) | Classification (12) | |:----:|:---:|:---:|:---:|:---:|:---:|:---:|:---:|:---:|:---:|:---:| | [BAAI/bge-large-en-v1.5](https://huggingface.co/BAAI/bge-large-en-v1.5) | 1024 | 512 | **64.23** | **54.29** | 46.08 | 87.12 | 60.03 | 83.11 | 31.61 | 75.97 | | [BAAI/bge-base-en-v1.5](https://huggingface.co/BAAI/bge-base-en-v1.5) | 768 | 512 | 63.55 | 53.25 | 45.77 | 86.55 | 58.86 | 82.4 | 31.07 | 75.53 | | [BAAI/bge-small-en-v1.5](https://huggingface.co/BAAI/bge-small-en-v1.5) | 384 | 512 | 62.17 |51.68 | 43.82 | 84.92 | 58.36 | 81.59 | 30.12 | 74.14 | | [bge-large-en](https://huggingface.co/BAAI/bge-large-en) | 1024 | 512 | 63.98 | 53.9 | 46.98 | 85.8 | 59.48 | 81.56 | 32.06 | 76.21 | | [bge-base-en](https://huggingface.co/BAAI/bge-base-en) | 768 | 512 | 63.36 | 53.0 | 46.32 | 85.86 | 58.7 | 81.84 | 29.27 | 75.27 | | [gte-large](https://huggingface.co/thenlper/gte-large) | 1024 | 512 | 63.13 | 52.22 | 46.84 | 85.00 | 59.13 | 83.35 | 31.66 | 73.33 | | [gte-base](https://huggingface.co/thenlper/gte-base) | 768 | 512 | 62.39 | 51.14 | 46.2 | 84.57 | 58.61 | 82.3 | 31.17 | 73.01 | | [e5-large-v2](https://huggingface.co/intfloat/e5-large-v2) | 1024| 512 | 62.25 | 50.56 | 44.49 | 86.03 | 56.61 | 82.05 | 30.19 | 75.24 | | [bge-small-en](https://huggingface.co/BAAI/bge-small-en) | 384 | 512 | 62.11 | 51.82 | 44.31 | 83.78 | 57.97 | 80.72 | 30.53 | 74.37 | | [instructor-xl](https://huggingface.co/hkunlp/instructor-xl) | 768 | 512 | 61.79 | 49.26 | 44.74 | 86.62 | 57.29 | 83.06 | 32.32 | 61.79 | | [e5-base-v2](https://huggingface.co/intfloat/e5-base-v2) | 768 | 512 | 61.5 | 50.29 | 43.80 | 85.73 | 55.91 | 81.05 | 30.28 | 73.84 | | [gte-small](https://huggingface.co/thenlper/gte-small) | 384 | 512 | 61.36 | 49.46 | 44.89 | 83.54 | 57.7 | 82.07 | 30.42 | 72.31 | | 
[text-embedding-ada-002](https://platform.openai.com/docs/guides/embeddings) | 1536 | 8192 | 60.99 | 49.25 | 45.9 | 84.89 | 56.32 | 80.97 | 30.8 | 70.93 | | [e5-small-v2](https://huggingface.co/intfloat/e5-base-v2) | 384 | 512 | 59.93 | 49.04 | 39.92 | 84.67 | 54.32 | 80.39 | 31.16 | 72.94 | | [sentence-t5-xxl](https://huggingface.co/sentence-transformers/sentence-t5-xxl) | 768 | 512 | 59.51 | 42.24 | 43.72 | 85.06 | 56.42 | 82.63 | 30.08 | 73.42 | | [all-mpnet-base-v2](https://huggingface.co/sentence-transformers/all-mpnet-base-v2) | 768 | 514 | 57.78 | 43.81 | 43.69 | 83.04 | 59.36 | 80.28 | 27.49 | 65.07 | | [sgpt-bloom-7b1-msmarco](https://huggingface.co/bigscience/sgpt-bloom-7b1-msmarco) | 4096 | 2048 | 57.59 | 48.22 | 38.93 | 81.9 | 55.65 | 77.74 | 33.6 | 66.19 | - **C-MTEB**: We create the benchmark C-MTEB for Chinese text embedding which consists of 31 datasets from 6 tasks. Please refer to [C_MTEB](https://github.com/FlagOpen/FlagEmbedding/blob/master/C_MTEB/README.md) for a detailed introduction. | Model | Embedding dimension | Avg | Retrieval | STS | PairClassification | Classification | Reranking | Clustering | |:-------------------------------|:--------:|:--------:|:--------:|:--------:|:--------:|:--------:|:--------:|:--------:| | [**BAAI/bge-large-zh-v1.5**](https://huggingface.co/BAAI/bge-large-zh-v1.5) | 1024 | **64.53** | 70.46 | 56.25 | 81.6 | 69.13 | 65.84 | 48.99 | | [BAAI/bge-base-zh-v1.5](https://huggingface.co/BAAI/bge-base-zh-v1.5) | 768 | 63.13 | 69.49 | 53.72 | 79.75 | 68.07 | 65.39 | 47.53 | | [BAAI/bge-small-zh-v1.5](https://huggingface.co/BAAI/bge-small-zh-v1.5) | 512 | 57.82 | 61.77 | 49.11 | 70.41 | 63.96 | 60.92 | 44.18 | | [BAAI/bge-large-zh](https://huggingface.co/BAAI/bge-large-zh) | 1024 | 64.20 | 71.53 | 54.98 | 78.94 | 68.32 | 65.11 | 48.39 | | [bge-large-zh-noinstruct](https://huggingface.co/BAAI/bge-large-zh-noinstruct) | 1024 | 63.53 | 70.55 | 53 | 76.77 | 68.58 | 64.91 | 50.01 | | [BAAI/bge-base-zh](https://huggingface.co/BAAI/bge-base-zh) | 768 | 62.96 | 69.53 | 54.12 | 77.5 | 67.07 | 64.91 | 47.63 | | [multilingual-e5-large](https://huggingface.co/intfloat/multilingual-e5-large) | 1024 | 58.79 | 63.66 | 48.44 | 69.89 | 67.34 | 56.00 | 48.23 | | [BAAI/bge-small-zh](https://huggingface.co/BAAI/bge-small-zh) | 512 | 58.27 | 63.07 | 49.45 | 70.35 | 63.64 | 61.48 | 45.09 | | [m3e-base](https://huggingface.co/moka-ai/m3e-base) | 768 | 57.10 | 56.91 | 50.47 | 63.99 | 67.52 | 59.34 | 47.68 | | [m3e-large](https://huggingface.co/moka-ai/m3e-large) | 1024 | 57.05 | 54.75 | 50.42 | 64.3 | 68.2 | 59.66 | 48.88 | | [multilingual-e5-base](https://huggingface.co/intfloat/multilingual-e5-base) | 768 | 55.48 | 61.63 | 46.49 | 67.07 | 65.35 | 54.35 | 40.68 | | [multilingual-e5-small](https://huggingface.co/intfloat/multilingual-e5-small) | 384 | 55.38 | 59.95 | 45.27 | 66.45 | 65.85 | 53.86 | 45.26 | | [text-embedding-ada-002(OpenAI)](https://platform.openai.com/docs/guides/embeddings/what-are-embeddings) | 1536 | 53.02 | 52.0 | 43.35 | 69.56 | 64.31 | 54.28 | 45.68 | | [luotuo](https://huggingface.co/silk-road/luotuo-bert-medium) | 1024 | 49.37 | 44.4 | 42.78 | 66.62 | 61 | 49.25 | 44.39 | | [text2vec-base](https://huggingface.co/shibing624/text2vec-base-chinese) | 768 | 47.63 | 38.79 | 43.41 | 67.41 | 62.19 | 49.45 | 37.66 | | [text2vec-large](https://huggingface.co/GanymedeNil/text2vec-large-chinese) | 1024 | 47.36 | 41.94 | 44.97 | 70.86 | 60.66 | 49.16 | 30.02 | - **Reranking**: See [C_MTEB](https://github.com/FlagOpen/FlagEmbedding/blob/master/C_MTEB/) for 
the evaluation script.

| Model | T2Reranking | T2RerankingZh2En\* | T2RerankingEn2Zh\* | MMarcoReranking | CMedQAv1 | CMedQAv2 | Avg |
|:-------------------------------|:--------:|:--------:|:--------:|:--------:|:--------:|:--------:|:--------:|
| text2vec-base-multilingual | 64.66 | 62.94 | 62.51 | 14.37 | 48.46 | 48.6 | 50.26 |
| multilingual-e5-small | 65.62 | 60.94 | 56.41 | 29.91 | 67.26 | 66.54 | 57.78 |
| multilingual-e5-large | 64.55 | 61.61 | 54.28 | 28.6 | 67.42 | 67.92 | 57.4 |
| multilingual-e5-base | 64.21 | 62.13 | 54.68 | 29.5 | 66.23 | 66.98 | 57.29 |
| m3e-base | 66.03 | 62.74 | 56.07 | 17.51 | 77.05 | 76.76 | 59.36 |
| m3e-large | 66.13 | 62.72 | 56.1 | 16.46 | 77.76 | 78.27 | 59.57 |
| bge-base-zh-v1.5 | 66.49 | 63.25 | 57.02 | 29.74 | 80.47 | 84.88 | 63.64 |
| bge-large-zh-v1.5 | 65.74 | 63.39 | 57.03 | 28.74 | 83.45 | 85.44 | 63.97 |
| [BAAI/bge-reranker-base](https://huggingface.co/BAAI/bge-reranker-base) | 67.28 | 63.95 | 60.45 | 35.46 | 81.26 | 84.1 | 65.42 |
| [BAAI/bge-reranker-large](https://huggingface.co/BAAI/bge-reranker-large) | 67.6 | 64.03 | 61.44 | 37.16 | 82.15 | 84.18 | 66.09 |

\*: T2RerankingZh2En and T2RerankingEn2Zh are cross-language retrieval tasks.

## Train

### BAAI Embedding

We pre-train the models using [RetroMAE](https://github.com/staoxiao/RetroMAE) and train them on large-scale pair data using contrastive learning.
**You can fine-tune the embedding model on your data following our [examples](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/finetune).**
We also provide a [pre-train example](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/pretrain).
Note that the goal of pre-training is to reconstruct the text; the pre-trained model cannot be used for similarity calculation directly and needs to be fine-tuned.
For more training details of bge, see [baai_general_embedding](https://github.com/FlagOpen/FlagEmbedding/blob/master/FlagEmbedding/baai_general_embedding/README.md).

### BGE Reranker

A cross-encoder performs full attention over the input pair, which is more accurate than an embedding model (i.e., a bi-encoder) but also more time-consuming. Therefore, it can be used to re-rank the top-k documents returned by the embedding model.
We train the cross-encoder on multilingual pair data. The data format is the same as for the embedding model, so you can fine-tune it easily following our [example](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/reranker).
For more details, please refer to [./FlagEmbedding/reranker/README.md](https://github.com/FlagOpen/FlagEmbedding/tree/master/FlagEmbedding/reranker).

## Contact

If you have any questions or suggestions related to this project, feel free to open an issue or pull request.
You can also email Shitao Xiao ([email protected]) and Zheng Liu ([email protected]).

## Citation

If you find this repository useful, please consider giving it a star :star: and a citation:

```
@misc{bge_embedding,
  title={C-Pack: Packaged Resources To Advance General Chinese Embedding},
  author={Shitao Xiao and Zheng Liu and Peitian Zhang and Niklas Muennighoff},
  year={2023},
  eprint={2309.07597},
  archivePrefix={arXiv},
  primaryClass={cs.CL}
}
```

## License

FlagEmbedding is licensed under the [MIT License](https://github.com/FlagOpen/FlagEmbedding/blob/master/LICENSE). The released models can be used for commercial purposes free of charge.
[ "BEAR", "BIOSSES", "SCIFACT" ]
intfloat/e5-large-v2
intfloat
sentence-similarity
[ "sentence-transformers", "pytorch", "onnx", "safetensors", "openvino", "bert", "mteb", "Sentence Transformers", "sentence-similarity", "en", "arxiv:2212.03533", "arxiv:2104.08663", "arxiv:2210.07316", "license:mit", "model-index", "autotrain_compatible", "text-embeddings-inference", "endpoints_compatible", "region:us" ]
"2023-05-19T07:23:33Z"
2025-02-17T03:26:51+00:00
1,896,878
241
--- language: - en license: mit tags: - mteb - Sentence Transformers - sentence-similarity - sentence-transformers model-index: - name: e5-large-v2 results: - task: type: Classification dataset: name: MTEB AmazonCounterfactualClassification (en) type: mteb/amazon_counterfactual config: en split: test revision: e8379541af4e31359cca9fbcf4b00f2671dba205 metrics: - type: accuracy value: 79.22388059701493 - type: ap value: 43.20816505595132 - type: f1 value: 73.27811303522058 - task: type: Classification dataset: name: MTEB AmazonPolarityClassification type: mteb/amazon_polarity config: default split: test revision: e2d317d38cd51312af73b3d32a06d1a08b442046 metrics: - type: accuracy value: 93.748325 - type: ap value: 90.72534979701297 - type: f1 value: 93.73895874282185 - task: type: Classification dataset: name: MTEB AmazonReviewsClassification (en) type: mteb/amazon_reviews_multi config: en split: test revision: 1399c76144fd37290681b995c656ef9b2e06e26d metrics: - type: accuracy value: 48.612 - type: f1 value: 47.61157345898393 - task: type: Retrieval dataset: name: MTEB ArguAna type: arguana config: default split: test revision: None metrics: - type: map_at_1 value: 23.541999999999998 - type: map_at_10 value: 38.208 - type: map_at_100 value: 39.417 - type: map_at_1000 value: 39.428999999999995 - type: map_at_3 value: 33.95 - type: map_at_5 value: 36.329 - type: mrr_at_1 value: 23.755000000000003 - type: mrr_at_10 value: 38.288 - type: mrr_at_100 value: 39.511 - type: mrr_at_1000 value: 39.523 - type: mrr_at_3 value: 34.009 - type: mrr_at_5 value: 36.434 - type: ndcg_at_1 value: 23.541999999999998 - type: ndcg_at_10 value: 46.417 - type: ndcg_at_100 value: 51.812000000000005 - type: ndcg_at_1000 value: 52.137 - type: ndcg_at_3 value: 37.528 - type: ndcg_at_5 value: 41.81 - type: precision_at_1 value: 23.541999999999998 - type: precision_at_10 value: 7.269 - type: precision_at_100 value: 0.9690000000000001 - type: precision_at_1000 value: 0.099 - type: precision_at_3 value: 15.979 - type: precision_at_5 value: 11.664 - type: recall_at_1 value: 23.541999999999998 - type: recall_at_10 value: 72.688 - type: recall_at_100 value: 96.871 - type: recall_at_1000 value: 99.431 - type: recall_at_3 value: 47.937000000000005 - type: recall_at_5 value: 58.321 - task: type: Clustering dataset: name: MTEB ArxivClusteringP2P type: mteb/arxiv-clustering-p2p config: default split: test revision: a122ad7f3f0291bf49cc6f4d32aa80929df69d5d metrics: - type: v_measure value: 45.546499570522094 - task: type: Clustering dataset: name: MTEB ArxivClusteringS2S type: mteb/arxiv-clustering-s2s config: default split: test revision: f910caf1a6075f7329cdf8c1a6135696f37dbd53 metrics: - type: v_measure value: 41.01607489943561 - task: type: Reranking dataset: name: MTEB AskUbuntuDupQuestions type: mteb/askubuntudupquestions-reranking config: default split: test revision: 2000358ca161889fa9c082cb41daa8dcfb161a54 metrics: - type: map value: 59.616107510107774 - type: mrr value: 72.75106626214661 - task: type: STS dataset: name: MTEB BIOSSES type: mteb/biosses-sts config: default split: test revision: d3fb88f8f02e40887cd149695127462bbcf29b4a metrics: - type: cos_sim_pearson value: 84.33018094733868 - type: cos_sim_spearman value: 83.60190492611737 - type: euclidean_pearson value: 82.1492450218961 - type: euclidean_spearman value: 82.70308926526991 - type: manhattan_pearson value: 81.93959600076842 - type: manhattan_spearman value: 82.73260801016369 - task: type: Classification dataset: name: MTEB Banking77Classification type: 
mteb/banking77 config: default split: test revision: 0fd18e25b25c072e09e0d92ab615fda904d66300 metrics: - type: accuracy value: 84.54545454545455 - type: f1 value: 84.49582530928923 - task: type: Clustering dataset: name: MTEB BiorxivClusteringP2P type: mteb/biorxiv-clustering-p2p config: default split: test revision: 65b79d1d13f80053f67aca9498d9402c2d9f1f40 metrics: - type: v_measure value: 37.362725540120096 - task: type: Clustering dataset: name: MTEB BiorxivClusteringS2S type: mteb/biorxiv-clustering-s2s config: default split: test revision: 258694dd0231531bc1fd9de6ceb52a0853c6d908 metrics: - type: v_measure value: 34.849509608178145 - task: type: Retrieval dataset: name: MTEB CQADupstackAndroidRetrieval type: BeIR/cqadupstack config: default split: test revision: None metrics: - type: map_at_1 value: 31.502999999999997 - type: map_at_10 value: 43.323 - type: map_at_100 value: 44.708999999999996 - type: map_at_1000 value: 44.838 - type: map_at_3 value: 38.987 - type: map_at_5 value: 41.516999999999996 - type: mrr_at_1 value: 38.769999999999996 - type: mrr_at_10 value: 49.13 - type: mrr_at_100 value: 49.697 - type: mrr_at_1000 value: 49.741 - type: mrr_at_3 value: 45.804 - type: mrr_at_5 value: 47.842 - type: ndcg_at_1 value: 38.769999999999996 - type: ndcg_at_10 value: 50.266999999999996 - type: ndcg_at_100 value: 54.967 - type: ndcg_at_1000 value: 56.976000000000006 - type: ndcg_at_3 value: 43.823 - type: ndcg_at_5 value: 47.12 - type: precision_at_1 value: 38.769999999999996 - type: precision_at_10 value: 10.057 - type: precision_at_100 value: 1.554 - type: precision_at_1000 value: 0.202 - type: precision_at_3 value: 21.125 - type: precision_at_5 value: 15.851 - type: recall_at_1 value: 31.502999999999997 - type: recall_at_10 value: 63.715999999999994 - type: recall_at_100 value: 83.61800000000001 - type: recall_at_1000 value: 96.63199999999999 - type: recall_at_3 value: 45.403 - type: recall_at_5 value: 54.481 - type: map_at_1 value: 27.833000000000002 - type: map_at_10 value: 37.330999999999996 - type: map_at_100 value: 38.580999999999996 - type: map_at_1000 value: 38.708 - type: map_at_3 value: 34.713 - type: map_at_5 value: 36.104 - type: mrr_at_1 value: 35.223 - type: mrr_at_10 value: 43.419000000000004 - type: mrr_at_100 value: 44.198 - type: mrr_at_1000 value: 44.249 - type: mrr_at_3 value: 41.614000000000004 - type: mrr_at_5 value: 42.553000000000004 - type: ndcg_at_1 value: 35.223 - type: ndcg_at_10 value: 42.687999999999995 - type: ndcg_at_100 value: 47.447 - type: ndcg_at_1000 value: 49.701 - type: ndcg_at_3 value: 39.162 - type: ndcg_at_5 value: 40.557 - type: precision_at_1 value: 35.223 - type: precision_at_10 value: 7.962 - type: precision_at_100 value: 1.304 - type: precision_at_1000 value: 0.18 - type: precision_at_3 value: 19.023 - type: precision_at_5 value: 13.184999999999999 - type: recall_at_1 value: 27.833000000000002 - type: recall_at_10 value: 51.881 - type: recall_at_100 value: 72.04 - type: recall_at_1000 value: 86.644 - type: recall_at_3 value: 40.778 - type: recall_at_5 value: 45.176 - type: map_at_1 value: 38.175 - type: map_at_10 value: 51.174 - type: map_at_100 value: 52.26499999999999 - type: map_at_1000 value: 52.315999999999995 - type: map_at_3 value: 47.897 - type: map_at_5 value: 49.703 - type: mrr_at_1 value: 43.448 - type: mrr_at_10 value: 54.505 - type: mrr_at_100 value: 55.216 - type: mrr_at_1000 value: 55.242000000000004 - type: mrr_at_3 value: 51.98500000000001 - type: mrr_at_5 value: 53.434000000000005 - type: ndcg_at_1 value: 43.448 - type: 
ndcg_at_10 value: 57.282 - type: ndcg_at_100 value: 61.537 - type: ndcg_at_1000 value: 62.546 - type: ndcg_at_3 value: 51.73799999999999 - type: ndcg_at_5 value: 54.324 - type: precision_at_1 value: 43.448 - type: precision_at_10 value: 9.292 - type: precision_at_100 value: 1.233 - type: precision_at_1000 value: 0.136 - type: precision_at_3 value: 23.218 - type: precision_at_5 value: 15.887 - type: recall_at_1 value: 38.175 - type: recall_at_10 value: 72.00999999999999 - type: recall_at_100 value: 90.155 - type: recall_at_1000 value: 97.257 - type: recall_at_3 value: 57.133 - type: recall_at_5 value: 63.424 - type: map_at_1 value: 22.405 - type: map_at_10 value: 30.043 - type: map_at_100 value: 31.191000000000003 - type: map_at_1000 value: 31.275 - type: map_at_3 value: 27.034000000000002 - type: map_at_5 value: 28.688000000000002 - type: mrr_at_1 value: 24.068 - type: mrr_at_10 value: 31.993 - type: mrr_at_100 value: 32.992 - type: mrr_at_1000 value: 33.050000000000004 - type: mrr_at_3 value: 28.964000000000002 - type: mrr_at_5 value: 30.653000000000002 - type: ndcg_at_1 value: 24.068 - type: ndcg_at_10 value: 35.198 - type: ndcg_at_100 value: 40.709 - type: ndcg_at_1000 value: 42.855 - type: ndcg_at_3 value: 29.139 - type: ndcg_at_5 value: 32.045 - type: precision_at_1 value: 24.068 - type: precision_at_10 value: 5.65 - type: precision_at_100 value: 0.885 - type: precision_at_1000 value: 0.11199999999999999 - type: precision_at_3 value: 12.279 - type: precision_at_5 value: 8.994 - type: recall_at_1 value: 22.405 - type: recall_at_10 value: 49.391 - type: recall_at_100 value: 74.53699999999999 - type: recall_at_1000 value: 90.605 - type: recall_at_3 value: 33.126 - type: recall_at_5 value: 40.073 - type: map_at_1 value: 13.309999999999999 - type: map_at_10 value: 20.688000000000002 - type: map_at_100 value: 22.022 - type: map_at_1000 value: 22.152 - type: map_at_3 value: 17.954 - type: map_at_5 value: 19.439 - type: mrr_at_1 value: 16.294 - type: mrr_at_10 value: 24.479 - type: mrr_at_100 value: 25.515 - type: mrr_at_1000 value: 25.593 - type: mrr_at_3 value: 21.642 - type: mrr_at_5 value: 23.189999999999998 - type: ndcg_at_1 value: 16.294 - type: ndcg_at_10 value: 25.833000000000002 - type: ndcg_at_100 value: 32.074999999999996 - type: ndcg_at_1000 value: 35.083 - type: ndcg_at_3 value: 20.493 - type: ndcg_at_5 value: 22.949 - type: precision_at_1 value: 16.294 - type: precision_at_10 value: 5.112 - type: precision_at_100 value: 0.96 - type: precision_at_1000 value: 0.134 - type: precision_at_3 value: 9.908999999999999 - type: precision_at_5 value: 7.587000000000001 - type: recall_at_1 value: 13.309999999999999 - type: recall_at_10 value: 37.851 - type: recall_at_100 value: 64.835 - type: recall_at_1000 value: 86.334 - type: recall_at_3 value: 23.493 - type: recall_at_5 value: 29.528 - type: map_at_1 value: 25.857999999999997 - type: map_at_10 value: 35.503 - type: map_at_100 value: 36.957 - type: map_at_1000 value: 37.065 - type: map_at_3 value: 32.275999999999996 - type: map_at_5 value: 34.119 - type: mrr_at_1 value: 31.954 - type: mrr_at_10 value: 40.851 - type: mrr_at_100 value: 41.863 - type: mrr_at_1000 value: 41.900999999999996 - type: mrr_at_3 value: 38.129999999999995 - type: mrr_at_5 value: 39.737 - type: ndcg_at_1 value: 31.954 - type: ndcg_at_10 value: 41.343999999999994 - type: ndcg_at_100 value: 47.397 - type: ndcg_at_1000 value: 49.501 - type: ndcg_at_3 value: 36.047000000000004 - type: ndcg_at_5 value: 38.639 - type: precision_at_1 value: 31.954 - type: precision_at_10 
value: 7.68 - type: precision_at_100 value: 1.247 - type: precision_at_1000 value: 0.16199999999999998 - type: precision_at_3 value: 17.132 - type: precision_at_5 value: 12.589 - type: recall_at_1 value: 25.857999999999997 - type: recall_at_10 value: 53.43599999999999 - type: recall_at_100 value: 78.82400000000001 - type: recall_at_1000 value: 92.78999999999999 - type: recall_at_3 value: 38.655 - type: recall_at_5 value: 45.216 - type: map_at_1 value: 24.709 - type: map_at_10 value: 34.318 - type: map_at_100 value: 35.657 - type: map_at_1000 value: 35.783 - type: map_at_3 value: 31.326999999999998 - type: map_at_5 value: 33.021 - type: mrr_at_1 value: 30.137000000000004 - type: mrr_at_10 value: 39.093 - type: mrr_at_100 value: 39.992 - type: mrr_at_1000 value: 40.056999999999995 - type: mrr_at_3 value: 36.606 - type: mrr_at_5 value: 37.861 - type: ndcg_at_1 value: 30.137000000000004 - type: ndcg_at_10 value: 39.974 - type: ndcg_at_100 value: 45.647999999999996 - type: ndcg_at_1000 value: 48.259 - type: ndcg_at_3 value: 35.028 - type: ndcg_at_5 value: 37.175999999999995 - type: precision_at_1 value: 30.137000000000004 - type: precision_at_10 value: 7.363 - type: precision_at_100 value: 1.184 - type: precision_at_1000 value: 0.161 - type: precision_at_3 value: 16.857 - type: precision_at_5 value: 11.963 - type: recall_at_1 value: 24.709 - type: recall_at_10 value: 52.087 - type: recall_at_100 value: 76.125 - type: recall_at_1000 value: 93.82300000000001 - type: recall_at_3 value: 38.149 - type: recall_at_5 value: 43.984 - type: map_at_1 value: 23.40791666666667 - type: map_at_10 value: 32.458083333333335 - type: map_at_100 value: 33.691916666666664 - type: map_at_1000 value: 33.81191666666666 - type: map_at_3 value: 29.51625 - type: map_at_5 value: 31.168083333333335 - type: mrr_at_1 value: 27.96591666666666 - type: mrr_at_10 value: 36.528583333333344 - type: mrr_at_100 value: 37.404 - type: mrr_at_1000 value: 37.464333333333336 - type: mrr_at_3 value: 33.92883333333333 - type: mrr_at_5 value: 35.41933333333333 - type: ndcg_at_1 value: 27.96591666666666 - type: ndcg_at_10 value: 37.89141666666666 - type: ndcg_at_100 value: 43.23066666666666 - type: ndcg_at_1000 value: 45.63258333333333 - type: ndcg_at_3 value: 32.811249999999994 - type: ndcg_at_5 value: 35.22566666666667 - type: precision_at_1 value: 27.96591666666666 - type: precision_at_10 value: 6.834083333333332 - type: precision_at_100 value: 1.12225 - type: precision_at_1000 value: 0.15241666666666667 - type: precision_at_3 value: 15.264333333333335 - type: precision_at_5 value: 11.039416666666666 - type: recall_at_1 value: 23.40791666666667 - type: recall_at_10 value: 49.927083333333336 - type: recall_at_100 value: 73.44641666666668 - type: recall_at_1000 value: 90.19950000000001 - type: recall_at_3 value: 35.88341666666667 - type: recall_at_5 value: 42.061249999999994 - type: map_at_1 value: 19.592000000000002 - type: map_at_10 value: 26.895999999999997 - type: map_at_100 value: 27.921000000000003 - type: map_at_1000 value: 28.02 - type: map_at_3 value: 24.883 - type: map_at_5 value: 25.812 - type: mrr_at_1 value: 22.698999999999998 - type: mrr_at_10 value: 29.520999999999997 - type: mrr_at_100 value: 30.458000000000002 - type: mrr_at_1000 value: 30.526999999999997 - type: mrr_at_3 value: 27.633000000000003 - type: mrr_at_5 value: 28.483999999999998 - type: ndcg_at_1 value: 22.698999999999998 - type: ndcg_at_10 value: 31.061 - type: ndcg_at_100 value: 36.398 - type: ndcg_at_1000 value: 38.89 - type: ndcg_at_3 value: 27.149 - type: 
ndcg_at_5 value: 28.627000000000002 - type: precision_at_1 value: 22.698999999999998 - type: precision_at_10 value: 5.106999999999999 - type: precision_at_100 value: 0.857 - type: precision_at_1000 value: 0.11499999999999999 - type: precision_at_3 value: 11.963 - type: precision_at_5 value: 8.221 - type: recall_at_1 value: 19.592000000000002 - type: recall_at_10 value: 41.329 - type: recall_at_100 value: 66.094 - type: recall_at_1000 value: 84.511 - type: recall_at_3 value: 30.61 - type: recall_at_5 value: 34.213 - type: map_at_1 value: 14.71 - type: map_at_10 value: 20.965 - type: map_at_100 value: 21.994 - type: map_at_1000 value: 22.133 - type: map_at_3 value: 18.741 - type: map_at_5 value: 19.951 - type: mrr_at_1 value: 18.307000000000002 - type: mrr_at_10 value: 24.66 - type: mrr_at_100 value: 25.540000000000003 - type: mrr_at_1000 value: 25.629 - type: mrr_at_3 value: 22.511 - type: mrr_at_5 value: 23.72 - type: ndcg_at_1 value: 18.307000000000002 - type: ndcg_at_10 value: 25.153 - type: ndcg_at_100 value: 30.229 - type: ndcg_at_1000 value: 33.623 - type: ndcg_at_3 value: 21.203 - type: ndcg_at_5 value: 23.006999999999998 - type: precision_at_1 value: 18.307000000000002 - type: precision_at_10 value: 4.725 - type: precision_at_100 value: 0.8659999999999999 - type: precision_at_1000 value: 0.133 - type: precision_at_3 value: 10.14 - type: precision_at_5 value: 7.481 - type: recall_at_1 value: 14.71 - type: recall_at_10 value: 34.087 - type: recall_at_100 value: 57.147999999999996 - type: recall_at_1000 value: 81.777 - type: recall_at_3 value: 22.996 - type: recall_at_5 value: 27.73 - type: map_at_1 value: 23.472 - type: map_at_10 value: 32.699 - type: map_at_100 value: 33.867000000000004 - type: map_at_1000 value: 33.967000000000006 - type: map_at_3 value: 29.718 - type: map_at_5 value: 31.345 - type: mrr_at_1 value: 28.265 - type: mrr_at_10 value: 36.945 - type: mrr_at_100 value: 37.794 - type: mrr_at_1000 value: 37.857 - type: mrr_at_3 value: 34.266000000000005 - type: mrr_at_5 value: 35.768 - type: ndcg_at_1 value: 28.265 - type: ndcg_at_10 value: 38.35 - type: ndcg_at_100 value: 43.739 - type: ndcg_at_1000 value: 46.087 - type: ndcg_at_3 value: 33.004 - type: ndcg_at_5 value: 35.411 - type: precision_at_1 value: 28.265 - type: precision_at_10 value: 6.715999999999999 - type: precision_at_100 value: 1.059 - type: precision_at_1000 value: 0.13799999999999998 - type: precision_at_3 value: 15.299 - type: precision_at_5 value: 10.951 - type: recall_at_1 value: 23.472 - type: recall_at_10 value: 51.413 - type: recall_at_100 value: 75.17 - type: recall_at_1000 value: 91.577 - type: recall_at_3 value: 36.651 - type: recall_at_5 value: 42.814 - type: map_at_1 value: 23.666 - type: map_at_10 value: 32.963 - type: map_at_100 value: 34.544999999999995 - type: map_at_1000 value: 34.792 - type: map_at_3 value: 29.74 - type: map_at_5 value: 31.5 - type: mrr_at_1 value: 29.051 - type: mrr_at_10 value: 38.013000000000005 - type: mrr_at_100 value: 38.997 - type: mrr_at_1000 value: 39.055 - type: mrr_at_3 value: 34.947 - type: mrr_at_5 value: 36.815 - type: ndcg_at_1 value: 29.051 - type: ndcg_at_10 value: 39.361000000000004 - type: ndcg_at_100 value: 45.186 - type: ndcg_at_1000 value: 47.867 - type: ndcg_at_3 value: 33.797 - type: ndcg_at_5 value: 36.456 - type: precision_at_1 value: 29.051 - type: precision_at_10 value: 7.668 - type: precision_at_100 value: 1.532 - type: precision_at_1000 value: 0.247 - type: precision_at_3 value: 15.876000000000001 - type: precision_at_5 value: 11.779 - type: 
recall_at_1 value: 23.666 - type: recall_at_10 value: 51.858000000000004 - type: recall_at_100 value: 77.805 - type: recall_at_1000 value: 94.504 - type: recall_at_3 value: 36.207 - type: recall_at_5 value: 43.094 - type: map_at_1 value: 15.662 - type: map_at_10 value: 23.594 - type: map_at_100 value: 24.593999999999998 - type: map_at_1000 value: 24.694 - type: map_at_3 value: 20.925 - type: map_at_5 value: 22.817999999999998 - type: mrr_at_1 value: 17.375 - type: mrr_at_10 value: 25.734 - type: mrr_at_100 value: 26.586 - type: mrr_at_1000 value: 26.671 - type: mrr_at_3 value: 23.044 - type: mrr_at_5 value: 24.975 - type: ndcg_at_1 value: 17.375 - type: ndcg_at_10 value: 28.186 - type: ndcg_at_100 value: 33.436 - type: ndcg_at_1000 value: 36.203 - type: ndcg_at_3 value: 23.152 - type: ndcg_at_5 value: 26.397 - type: precision_at_1 value: 17.375 - type: precision_at_10 value: 4.677 - type: precision_at_100 value: 0.786 - type: precision_at_1000 value: 0.109 - type: precision_at_3 value: 10.351 - type: precision_at_5 value: 7.985 - type: recall_at_1 value: 15.662 - type: recall_at_10 value: 40.066 - type: recall_at_100 value: 65.006 - type: recall_at_1000 value: 85.94000000000001 - type: recall_at_3 value: 27.400000000000002 - type: recall_at_5 value: 35.002 - task: type: Retrieval dataset: name: MTEB ClimateFEVER type: climate-fever config: default split: test revision: None metrics: - type: map_at_1 value: 8.853 - type: map_at_10 value: 15.568000000000001 - type: map_at_100 value: 17.383000000000003 - type: map_at_1000 value: 17.584 - type: map_at_3 value: 12.561 - type: map_at_5 value: 14.056 - type: mrr_at_1 value: 18.958 - type: mrr_at_10 value: 28.288000000000004 - type: mrr_at_100 value: 29.432000000000002 - type: mrr_at_1000 value: 29.498 - type: mrr_at_3 value: 25.049 - type: mrr_at_5 value: 26.857 - type: ndcg_at_1 value: 18.958 - type: ndcg_at_10 value: 22.21 - type: ndcg_at_100 value: 29.596 - type: ndcg_at_1000 value: 33.583 - type: ndcg_at_3 value: 16.994999999999997 - type: ndcg_at_5 value: 18.95 - type: precision_at_1 value: 18.958 - type: precision_at_10 value: 7.192 - type: precision_at_100 value: 1.5 - type: precision_at_1000 value: 0.22399999999999998 - type: precision_at_3 value: 12.573 - type: precision_at_5 value: 10.202 - type: recall_at_1 value: 8.853 - type: recall_at_10 value: 28.087 - type: recall_at_100 value: 53.701 - type: recall_at_1000 value: 76.29899999999999 - type: recall_at_3 value: 15.913 - type: recall_at_5 value: 20.658 - task: type: Retrieval dataset: name: MTEB DBPedia type: dbpedia-entity config: default split: test revision: None metrics: - type: map_at_1 value: 9.077 - type: map_at_10 value: 20.788999999999998 - type: map_at_100 value: 30.429000000000002 - type: map_at_1000 value: 32.143 - type: map_at_3 value: 14.692 - type: map_at_5 value: 17.139 - type: mrr_at_1 value: 70.75 - type: mrr_at_10 value: 78.036 - type: mrr_at_100 value: 78.401 - type: mrr_at_1000 value: 78.404 - type: mrr_at_3 value: 76.75 - type: mrr_at_5 value: 77.47500000000001 - type: ndcg_at_1 value: 58.12500000000001 - type: ndcg_at_10 value: 44.015 - type: ndcg_at_100 value: 49.247 - type: ndcg_at_1000 value: 56.211999999999996 - type: ndcg_at_3 value: 49.151 - type: ndcg_at_5 value: 46.195 - type: precision_at_1 value: 70.75 - type: precision_at_10 value: 35.5 - type: precision_at_100 value: 11.355 - type: precision_at_1000 value: 2.1950000000000003 - type: precision_at_3 value: 53.083000000000006 - type: precision_at_5 value: 44.800000000000004 - type: recall_at_1 value: 
9.077 - type: recall_at_10 value: 26.259 - type: recall_at_100 value: 56.547000000000004 - type: recall_at_1000 value: 78.551 - type: recall_at_3 value: 16.162000000000003 - type: recall_at_5 value: 19.753999999999998 - task: type: Classification dataset: name: MTEB EmotionClassification type: mteb/emotion config: default split: test revision: 4f58c6b202a23cf9a4da393831edf4f9183cad37 metrics: - type: accuracy value: 49.44500000000001 - type: f1 value: 44.67067691783401 - task: type: Retrieval dataset: name: MTEB FEVER type: fever config: default split: test revision: None metrics: - type: map_at_1 value: 68.182 - type: map_at_10 value: 78.223 - type: map_at_100 value: 78.498 - type: map_at_1000 value: 78.512 - type: map_at_3 value: 76.71 - type: map_at_5 value: 77.725 - type: mrr_at_1 value: 73.177 - type: mrr_at_10 value: 82.513 - type: mrr_at_100 value: 82.633 - type: mrr_at_1000 value: 82.635 - type: mrr_at_3 value: 81.376 - type: mrr_at_5 value: 82.182 - type: ndcg_at_1 value: 73.177 - type: ndcg_at_10 value: 82.829 - type: ndcg_at_100 value: 83.84 - type: ndcg_at_1000 value: 84.07900000000001 - type: ndcg_at_3 value: 80.303 - type: ndcg_at_5 value: 81.846 - type: precision_at_1 value: 73.177 - type: precision_at_10 value: 10.241999999999999 - type: precision_at_100 value: 1.099 - type: precision_at_1000 value: 0.11399999999999999 - type: precision_at_3 value: 31.247999999999998 - type: precision_at_5 value: 19.697 - type: recall_at_1 value: 68.182 - type: recall_at_10 value: 92.657 - type: recall_at_100 value: 96.709 - type: recall_at_1000 value: 98.184 - type: recall_at_3 value: 85.9 - type: recall_at_5 value: 89.755 - task: type: Retrieval dataset: name: MTEB FiQA2018 type: fiqa config: default split: test revision: None metrics: - type: map_at_1 value: 21.108 - type: map_at_10 value: 33.342 - type: map_at_100 value: 35.281 - type: map_at_1000 value: 35.478 - type: map_at_3 value: 29.067 - type: map_at_5 value: 31.563000000000002 - type: mrr_at_1 value: 41.667 - type: mrr_at_10 value: 49.913000000000004 - type: mrr_at_100 value: 50.724000000000004 - type: mrr_at_1000 value: 50.766 - type: mrr_at_3 value: 47.504999999999995 - type: mrr_at_5 value: 49.033 - type: ndcg_at_1 value: 41.667 - type: ndcg_at_10 value: 41.144 - type: ndcg_at_100 value: 48.326 - type: ndcg_at_1000 value: 51.486 - type: ndcg_at_3 value: 37.486999999999995 - type: ndcg_at_5 value: 38.78 - type: precision_at_1 value: 41.667 - type: precision_at_10 value: 11.358 - type: precision_at_100 value: 1.873 - type: precision_at_1000 value: 0.244 - type: precision_at_3 value: 25 - type: precision_at_5 value: 18.519 - type: recall_at_1 value: 21.108 - type: recall_at_10 value: 47.249 - type: recall_at_100 value: 74.52 - type: recall_at_1000 value: 93.31 - type: recall_at_3 value: 33.271 - type: recall_at_5 value: 39.723000000000006 - task: type: Retrieval dataset: name: MTEB HotpotQA type: hotpotqa config: default split: test revision: None metrics: - type: map_at_1 value: 40.317 - type: map_at_10 value: 64.861 - type: map_at_100 value: 65.697 - type: map_at_1000 value: 65.755 - type: map_at_3 value: 61.258 - type: map_at_5 value: 63.590999999999994 - type: mrr_at_1 value: 80.635 - type: mrr_at_10 value: 86.528 - type: mrr_at_100 value: 86.66199999999999 - type: mrr_at_1000 value: 86.666 - type: mrr_at_3 value: 85.744 - type: mrr_at_5 value: 86.24300000000001 - type: ndcg_at_1 value: 80.635 - type: ndcg_at_10 value: 73.13199999999999 - type: ndcg_at_100 value: 75.927 - type: ndcg_at_1000 value: 76.976 - type: ndcg_at_3 
value: 68.241 - type: ndcg_at_5 value: 71.071 - type: precision_at_1 value: 80.635 - type: precision_at_10 value: 15.326 - type: precision_at_100 value: 1.7500000000000002 - type: precision_at_1000 value: 0.189 - type: precision_at_3 value: 43.961 - type: precision_at_5 value: 28.599999999999998 - type: recall_at_1 value: 40.317 - type: recall_at_10 value: 76.631 - type: recall_at_100 value: 87.495 - type: recall_at_1000 value: 94.362 - type: recall_at_3 value: 65.94200000000001 - type: recall_at_5 value: 71.499 - task: type: Classification dataset: name: MTEB ImdbClassification type: mteb/imdb config: default split: test revision: 3d86128a09e091d6018b6d26cad27f2739fc2db7 metrics: - type: accuracy value: 91.686 - type: ap value: 87.5577120393173 - type: f1 value: 91.6629447355139 - task: type: Retrieval dataset: name: MTEB MSMARCO type: msmarco config: default split: dev revision: None metrics: - type: map_at_1 value: 23.702 - type: map_at_10 value: 36.414 - type: map_at_100 value: 37.561 - type: map_at_1000 value: 37.605 - type: map_at_3 value: 32.456 - type: map_at_5 value: 34.827000000000005 - type: mrr_at_1 value: 24.355 - type: mrr_at_10 value: 37.01 - type: mrr_at_100 value: 38.085 - type: mrr_at_1000 value: 38.123000000000005 - type: mrr_at_3 value: 33.117999999999995 - type: mrr_at_5 value: 35.452 - type: ndcg_at_1 value: 24.384 - type: ndcg_at_10 value: 43.456 - type: ndcg_at_100 value: 48.892 - type: ndcg_at_1000 value: 49.964 - type: ndcg_at_3 value: 35.475 - type: ndcg_at_5 value: 39.711 - type: precision_at_1 value: 24.384 - type: precision_at_10 value: 6.7940000000000005 - type: precision_at_100 value: 0.951 - type: precision_at_1000 value: 0.104 - type: precision_at_3 value: 15.052999999999999 - type: precision_at_5 value: 11.189 - type: recall_at_1 value: 23.702 - type: recall_at_10 value: 65.057 - type: recall_at_100 value: 90.021 - type: recall_at_1000 value: 98.142 - type: recall_at_3 value: 43.551 - type: recall_at_5 value: 53.738 - task: type: Classification dataset: name: MTEB MTOPDomainClassification (en) type: mteb/mtop_domain config: en split: test revision: d80d48c1eb48d3562165c59d59d0034df9fff0bf metrics: - type: accuracy value: 94.62380300957591 - type: f1 value: 94.49871222100734 - task: type: Classification dataset: name: MTEB MTOPIntentClassification (en) type: mteb/mtop_intent config: en split: test revision: ae001d0e6b1228650b7bd1c2c65fb50ad11a8aba metrics: - type: accuracy value: 77.14090287277702 - type: f1 value: 60.32101258220515 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (en) type: mteb/amazon_massive_intent config: en split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 73.84330867518494 - type: f1 value: 71.92248688515255 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (en) type: mteb/amazon_massive_scenario config: en split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 78.10692669804976 - type: f1 value: 77.9904839122866 - task: type: Clustering dataset: name: MTEB MedrxivClusteringP2P type: mteb/medrxiv-clustering-p2p config: default split: test revision: e7a26af6f3ae46b30dde8737f02c07b1505bcc73 metrics: - type: v_measure value: 31.822988923078444 - task: type: Clustering dataset: name: MTEB MedrxivClusteringS2S type: mteb/medrxiv-clustering-s2s config: default split: test revision: 35191c8c0dca72d8ff3efcd72aa802307d469663 metrics: - type: v_measure value: 30.38394880253403 - task: type: Reranking 
dataset: name: MTEB MindSmallReranking type: mteb/mind_small config: default split: test revision: 3bdac13927fdc888b903db93b2ffdbd90b295a69 metrics: - type: map value: 31.82504612539082 - type: mrr value: 32.84462298174977 - task: type: Retrieval dataset: name: MTEB NFCorpus type: nfcorpus config: default split: test revision: None metrics: - type: map_at_1 value: 6.029 - type: map_at_10 value: 14.088999999999999 - type: map_at_100 value: 17.601 - type: map_at_1000 value: 19.144 - type: map_at_3 value: 10.156 - type: map_at_5 value: 11.892 - type: mrr_at_1 value: 46.44 - type: mrr_at_10 value: 56.596999999999994 - type: mrr_at_100 value: 57.11000000000001 - type: mrr_at_1000 value: 57.14 - type: mrr_at_3 value: 54.334 - type: mrr_at_5 value: 55.774 - type: ndcg_at_1 value: 44.891999999999996 - type: ndcg_at_10 value: 37.134 - type: ndcg_at_100 value: 33.652 - type: ndcg_at_1000 value: 42.548 - type: ndcg_at_3 value: 41.851 - type: ndcg_at_5 value: 39.842 - type: precision_at_1 value: 46.44 - type: precision_at_10 value: 27.647 - type: precision_at_100 value: 8.309999999999999 - type: precision_at_1000 value: 2.146 - type: precision_at_3 value: 39.422000000000004 - type: precision_at_5 value: 34.675 - type: recall_at_1 value: 6.029 - type: recall_at_10 value: 18.907 - type: recall_at_100 value: 33.76 - type: recall_at_1000 value: 65.14999999999999 - type: recall_at_3 value: 11.584999999999999 - type: recall_at_5 value: 14.626 - task: type: Retrieval dataset: name: MTEB NQ type: nq config: default split: test revision: None metrics: - type: map_at_1 value: 39.373000000000005 - type: map_at_10 value: 55.836 - type: map_at_100 value: 56.611999999999995 - type: map_at_1000 value: 56.63 - type: map_at_3 value: 51.747 - type: map_at_5 value: 54.337999999999994 - type: mrr_at_1 value: 44.147999999999996 - type: mrr_at_10 value: 58.42699999999999 - type: mrr_at_100 value: 58.902 - type: mrr_at_1000 value: 58.914 - type: mrr_at_3 value: 55.156000000000006 - type: mrr_at_5 value: 57.291000000000004 - type: ndcg_at_1 value: 44.119 - type: ndcg_at_10 value: 63.444 - type: ndcg_at_100 value: 66.40599999999999 - type: ndcg_at_1000 value: 66.822 - type: ndcg_at_3 value: 55.962 - type: ndcg_at_5 value: 60.228 - type: precision_at_1 value: 44.119 - type: precision_at_10 value: 10.006 - type: precision_at_100 value: 1.17 - type: precision_at_1000 value: 0.121 - type: precision_at_3 value: 25.135 - type: precision_at_5 value: 17.59 - type: recall_at_1 value: 39.373000000000005 - type: recall_at_10 value: 83.78999999999999 - type: recall_at_100 value: 96.246 - type: recall_at_1000 value: 99.324 - type: recall_at_3 value: 64.71900000000001 - type: recall_at_5 value: 74.508 - task: type: Retrieval dataset: name: MTEB QuoraRetrieval type: quora config: default split: test revision: None metrics: - type: map_at_1 value: 69.199 - type: map_at_10 value: 82.892 - type: map_at_100 value: 83.578 - type: map_at_1000 value: 83.598 - type: map_at_3 value: 79.948 - type: map_at_5 value: 81.779 - type: mrr_at_1 value: 79.67 - type: mrr_at_10 value: 86.115 - type: mrr_at_100 value: 86.249 - type: mrr_at_1000 value: 86.251 - type: mrr_at_3 value: 85.08200000000001 - type: mrr_at_5 value: 85.783 - type: ndcg_at_1 value: 79.67 - type: ndcg_at_10 value: 86.839 - type: ndcg_at_100 value: 88.252 - type: ndcg_at_1000 value: 88.401 - type: ndcg_at_3 value: 83.86200000000001 - type: ndcg_at_5 value: 85.473 - type: precision_at_1 value: 79.67 - type: precision_at_10 value: 13.19 - type: precision_at_100 value: 1.521 - type: 
precision_at_1000 value: 0.157 - type: precision_at_3 value: 36.677 - type: precision_at_5 value: 24.118000000000002 - type: recall_at_1 value: 69.199 - type: recall_at_10 value: 94.321 - type: recall_at_100 value: 99.20400000000001 - type: recall_at_1000 value: 99.947 - type: recall_at_3 value: 85.787 - type: recall_at_5 value: 90.365 - task: type: Clustering dataset: name: MTEB RedditClustering type: mteb/reddit-clustering config: default split: test revision: 24640382cdbf8abc73003fb0fa6d111a705499eb metrics: - type: v_measure value: 55.82810046856353 - task: type: Clustering dataset: name: MTEB RedditClusteringP2P type: mteb/reddit-clustering-p2p config: default split: test revision: 282350215ef01743dc01b456c7f5241fa8937f16 metrics: - type: v_measure value: 63.38132611783628 - task: type: Retrieval dataset: name: MTEB SCIDOCS type: scidocs config: default split: test revision: None metrics: - type: map_at_1 value: 5.127000000000001 - type: map_at_10 value: 12.235 - type: map_at_100 value: 14.417 - type: map_at_1000 value: 14.75 - type: map_at_3 value: 8.906 - type: map_at_5 value: 10.591000000000001 - type: mrr_at_1 value: 25.2 - type: mrr_at_10 value: 35.879 - type: mrr_at_100 value: 36.935 - type: mrr_at_1000 value: 36.997 - type: mrr_at_3 value: 32.783 - type: mrr_at_5 value: 34.367999999999995 - type: ndcg_at_1 value: 25.2 - type: ndcg_at_10 value: 20.509 - type: ndcg_at_100 value: 28.67 - type: ndcg_at_1000 value: 34.42 - type: ndcg_at_3 value: 19.948 - type: ndcg_at_5 value: 17.166 - type: precision_at_1 value: 25.2 - type: precision_at_10 value: 10.440000000000001 - type: precision_at_100 value: 2.214 - type: precision_at_1000 value: 0.359 - type: precision_at_3 value: 18.533 - type: precision_at_5 value: 14.860000000000001 - type: recall_at_1 value: 5.127000000000001 - type: recall_at_10 value: 21.147 - type: recall_at_100 value: 44.946999999999996 - type: recall_at_1000 value: 72.89 - type: recall_at_3 value: 11.277 - type: recall_at_5 value: 15.042 - task: type: STS dataset: name: MTEB SICK-R type: mteb/sickr-sts config: default split: test revision: a6ea5a8cab320b040a23452cc28066d9beae2cee metrics: - type: cos_sim_pearson value: 83.0373011786213 - type: cos_sim_spearman value: 79.27889560856613 - type: euclidean_pearson value: 80.31186315495655 - type: euclidean_spearman value: 79.41630415280811 - type: manhattan_pearson value: 80.31755140442013 - type: manhattan_spearman value: 79.43069870027611 - task: type: STS dataset: name: MTEB STS12 type: mteb/sts12-sts config: default split: test revision: a0d554a64d88156834ff5ae9920b964011b16384 metrics: - type: cos_sim_pearson value: 84.8659751342045 - type: cos_sim_spearman value: 76.95377612997667 - type: euclidean_pearson value: 81.24552945497848 - type: euclidean_spearman value: 77.18236963555253 - type: manhattan_pearson value: 81.26477607759037 - type: manhattan_spearman value: 77.13821753062756 - task: type: STS dataset: name: MTEB STS13 type: mteb/sts13-sts config: default split: test revision: 7e90230a92c190f1bf69ae9002b8cea547a64cca metrics: - type: cos_sim_pearson value: 83.34597139044875 - type: cos_sim_spearman value: 84.124169425592 - type: euclidean_pearson value: 83.68590721511401 - type: euclidean_spearman value: 84.18846190846398 - type: manhattan_pearson value: 83.57630235061498 - type: manhattan_spearman value: 84.10244043726902 - task: type: STS dataset: name: MTEB STS14 type: mteb/sts14-sts config: default split: test revision: 6031580fec1f6af667f0bd2da0a551cf4f0b2375 metrics: - type: cos_sim_pearson value: 
82.67641885599572 - type: cos_sim_spearman value: 80.46450725650428 - type: euclidean_pearson value: 81.61645042715865 - type: euclidean_spearman value: 80.61418394236874 - type: manhattan_pearson value: 81.55712034928871 - type: manhattan_spearman value: 80.57905670523951 - task: type: STS dataset: name: MTEB STS15 type: mteb/sts15-sts config: default split: test revision: ae752c7c21bf194d8b67fd573edf7ae58183cbe3 metrics: - type: cos_sim_pearson value: 88.86650310886782 - type: cos_sim_spearman value: 89.76081629222328 - type: euclidean_pearson value: 89.1530747029954 - type: euclidean_spearman value: 89.80990657280248 - type: manhattan_pearson value: 89.10640563278132 - type: manhattan_spearman value: 89.76282108434047 - task: type: STS dataset: name: MTEB STS16 type: mteb/sts16-sts config: default split: test revision: 4d8694f8f0e0100860b497b999b3dbed754a0513 metrics: - type: cos_sim_pearson value: 83.93864027911118 - type: cos_sim_spearman value: 85.47096193999023 - type: euclidean_pearson value: 85.03141840870533 - type: euclidean_spearman value: 85.43124029598181 - type: manhattan_pearson value: 84.99002664393512 - type: manhattan_spearman value: 85.39169195120834 - task: type: STS dataset: name: MTEB STS17 (en-en) type: mteb/sts17-crosslingual-sts config: en-en split: test revision: af5e6fb845001ecf41f4c1e033ce921939a2a68d metrics: - type: cos_sim_pearson value: 88.7045343749832 - type: cos_sim_spearman value: 89.03262221146677 - type: euclidean_pearson value: 89.56078218264365 - type: euclidean_spearman value: 89.17827006466868 - type: manhattan_pearson value: 89.52717595468582 - type: manhattan_spearman value: 89.15878115952923 - task: type: STS dataset: name: MTEB STS22 (en) type: mteb/sts22-crosslingual-sts config: en split: test revision: 6d1ba47164174a496b7fa5d3569dae26a6813b80 metrics: - type: cos_sim_pearson value: 64.20191302875551 - type: cos_sim_spearman value: 64.11446552557646 - type: euclidean_pearson value: 64.6918197393619 - type: euclidean_spearman value: 63.440182631197764 - type: manhattan_pearson value: 64.55692904121835 - type: manhattan_spearman value: 63.424877742756266 - task: type: STS dataset: name: MTEB STSBenchmark type: mteb/stsbenchmark-sts config: default split: test revision: b0fddb56ed78048fa8b90373c8a3cfc37b684831 metrics: - type: cos_sim_pearson value: 86.37793104662344 - type: cos_sim_spearman value: 87.7357802629067 - type: euclidean_pearson value: 87.4286301545109 - type: euclidean_spearman value: 87.78452920777421 - type: manhattan_pearson value: 87.42445169331255 - type: manhattan_spearman value: 87.78537677249598 - task: type: Reranking dataset: name: MTEB SciDocsRR type: mteb/scidocs-reranking config: default split: test revision: d3c5e1fc0b855ab6097bf1cda04dd73947d7caab metrics: - type: map value: 84.31465405081792 - type: mrr value: 95.7173781193389 - task: type: Retrieval dataset: name: MTEB SciFact type: scifact config: default split: test revision: None metrics: - type: map_at_1 value: 57.760999999999996 - type: map_at_10 value: 67.904 - type: map_at_100 value: 68.539 - type: map_at_1000 value: 68.562 - type: map_at_3 value: 65.415 - type: map_at_5 value: 66.788 - type: mrr_at_1 value: 60.333000000000006 - type: mrr_at_10 value: 68.797 - type: mrr_at_100 value: 69.236 - type: mrr_at_1000 value: 69.257 - type: mrr_at_3 value: 66.667 - type: mrr_at_5 value: 67.967 - type: ndcg_at_1 value: 60.333000000000006 - type: ndcg_at_10 value: 72.24199999999999 - type: ndcg_at_100 value: 74.86 - type: ndcg_at_1000 value: 75.354 - type: ndcg_at_3 
value: 67.93400000000001 - type: ndcg_at_5 value: 70.02199999999999 - type: precision_at_1 value: 60.333000000000006 - type: precision_at_10 value: 9.533 - type: precision_at_100 value: 1.09 - type: precision_at_1000 value: 0.11299999999999999 - type: precision_at_3 value: 26.778000000000002 - type: precision_at_5 value: 17.467 - type: recall_at_1 value: 57.760999999999996 - type: recall_at_10 value: 84.383 - type: recall_at_100 value: 96.267 - type: recall_at_1000 value: 100 - type: recall_at_3 value: 72.628 - type: recall_at_5 value: 78.094 - task: type: PairClassification dataset: name: MTEB SprintDuplicateQuestions type: mteb/sprintduplicatequestions-pairclassification config: default split: test revision: d66bd1f72af766a5cc4b0ca5e00c162f89e8cc46 metrics: - type: cos_sim_accuracy value: 99.8029702970297 - type: cos_sim_ap value: 94.9210324173411 - type: cos_sim_f1 value: 89.8521162672106 - type: cos_sim_precision value: 91.67533818938605 - type: cos_sim_recall value: 88.1 - type: dot_accuracy value: 99.69504950495049 - type: dot_ap value: 90.4919719146181 - type: dot_f1 value: 84.72289156626506 - type: dot_precision value: 81.76744186046511 - type: dot_recall value: 87.9 - type: euclidean_accuracy value: 99.79702970297029 - type: euclidean_ap value: 94.87827463795753 - type: euclidean_f1 value: 89.55680081507896 - type: euclidean_precision value: 91.27725856697819 - type: euclidean_recall value: 87.9 - type: manhattan_accuracy value: 99.7990099009901 - type: manhattan_ap value: 94.87587025149682 - type: manhattan_f1 value: 89.76298537569339 - type: manhattan_precision value: 90.53916581892166 - type: manhattan_recall value: 89 - type: max_accuracy value: 99.8029702970297 - type: max_ap value: 94.9210324173411 - type: max_f1 value: 89.8521162672106 - task: type: Clustering dataset: name: MTEB StackExchangeClustering type: mteb/stackexchange-clustering config: default split: test revision: 6cbc1f7b2bc0622f2e39d2c77fa502909748c259 metrics: - type: v_measure value: 65.92385753948724 - task: type: Clustering dataset: name: MTEB StackExchangeClusteringP2P type: mteb/stackexchange-clustering-p2p config: default split: test revision: 815ca46b2622cec33ccafc3735d572c266efdb44 metrics: - type: v_measure value: 33.671756975431144 - task: type: Reranking dataset: name: MTEB StackOverflowDupQuestions type: mteb/stackoverflowdupquestions-reranking config: default split: test revision: e185fbe320c72810689fc5848eb6114e1ef5ec69 metrics: - type: map value: 50.677928036739004 - type: mrr value: 51.56413133435193 - task: type: Summarization dataset: name: MTEB SummEval type: mteb/summeval config: default split: test revision: cda12ad7615edc362dbf25a00fdd61d3b1eaf93c metrics: - type: cos_sim_pearson value: 30.523589340819683 - type: cos_sim_spearman value: 30.187407518823235 - type: dot_pearson value: 29.039713969699015 - type: dot_spearman value: 29.114740651155508 - task: type: Retrieval dataset: name: MTEB TRECCOVID type: trec-covid config: default split: test revision: None metrics: - type: map_at_1 value: 0.211 - type: map_at_10 value: 1.6199999999999999 - type: map_at_100 value: 8.658000000000001 - type: map_at_1000 value: 21.538 - type: map_at_3 value: 0.575 - type: map_at_5 value: 0.919 - type: mrr_at_1 value: 78 - type: mrr_at_10 value: 86.18599999999999 - type: mrr_at_100 value: 86.18599999999999 - type: mrr_at_1000 value: 86.18599999999999 - type: mrr_at_3 value: 85 - type: mrr_at_5 value: 85.9 - type: ndcg_at_1 value: 74 - type: ndcg_at_10 value: 66.542 - type: ndcg_at_100 value: 
50.163999999999994 - type: ndcg_at_1000 value: 45.696999999999996 - type: ndcg_at_3 value: 71.531 - type: ndcg_at_5 value: 70.45 - type: precision_at_1 value: 78 - type: precision_at_10 value: 69.39999999999999 - type: precision_at_100 value: 51.06 - type: precision_at_1000 value: 20.022000000000002 - type: precision_at_3 value: 76 - type: precision_at_5 value: 74.8 - type: recall_at_1 value: 0.211 - type: recall_at_10 value: 1.813 - type: recall_at_100 value: 12.098 - type: recall_at_1000 value: 42.618 - type: recall_at_3 value: 0.603 - type: recall_at_5 value: 0.987 - task: type: Retrieval dataset: name: MTEB Touche2020 type: webis-touche2020 config: default split: test revision: None metrics: - type: map_at_1 value: 2.2079999999999997 - type: map_at_10 value: 7.777000000000001 - type: map_at_100 value: 12.825000000000001 - type: map_at_1000 value: 14.196 - type: map_at_3 value: 4.285 - type: map_at_5 value: 6.177 - type: mrr_at_1 value: 30.612000000000002 - type: mrr_at_10 value: 42.635 - type: mrr_at_100 value: 43.955 - type: mrr_at_1000 value: 43.955 - type: mrr_at_3 value: 38.435 - type: mrr_at_5 value: 41.088 - type: ndcg_at_1 value: 28.571 - type: ndcg_at_10 value: 20.666999999999998 - type: ndcg_at_100 value: 31.840000000000003 - type: ndcg_at_1000 value: 43.191 - type: ndcg_at_3 value: 23.45 - type: ndcg_at_5 value: 22.994 - type: precision_at_1 value: 30.612000000000002 - type: precision_at_10 value: 17.959 - type: precision_at_100 value: 6.755 - type: precision_at_1000 value: 1.4200000000000002 - type: precision_at_3 value: 23.810000000000002 - type: precision_at_5 value: 23.673 - type: recall_at_1 value: 2.2079999999999997 - type: recall_at_10 value: 13.144 - type: recall_at_100 value: 42.491 - type: recall_at_1000 value: 77.04299999999999 - type: recall_at_3 value: 5.3469999999999995 - type: recall_at_5 value: 9.139 - task: type: Classification dataset: name: MTEB ToxicConversationsClassification type: mteb/toxic_conversations_50k config: default split: test revision: d7c0de2777da35d6aae2200a62c6e0e5af397c4c metrics: - type: accuracy value: 70.9044 - type: ap value: 14.625783489340755 - type: f1 value: 54.814936562590546 - task: type: Classification dataset: name: MTEB TweetSentimentExtractionClassification type: mteb/tweet_sentiment_extraction config: default split: test revision: d604517c81ca91fe16a244d1248fc021f9ecee7a metrics: - type: accuracy value: 60.94227504244483 - type: f1 value: 61.22516038508854 - task: type: Clustering dataset: name: MTEB TwentyNewsgroupsClustering type: mteb/twentynewsgroups-clustering config: default split: test revision: 6125ec4e24fa026cec8a478383ee943acfbd5449 metrics: - type: v_measure value: 49.602409155145864 - task: type: PairClassification dataset: name: MTEB TwitterSemEval2015 type: mteb/twittersemeval2015-pairclassification config: default split: test revision: 70970daeab8776df92f5ea462b6173c0b46fd2d1 metrics: - type: cos_sim_accuracy value: 86.94641473445789 - type: cos_sim_ap value: 76.91572747061197 - type: cos_sim_f1 value: 70.14348097317529 - type: cos_sim_precision value: 66.53254437869822 - type: cos_sim_recall value: 74.1688654353562 - type: dot_accuracy value: 84.80061989628658 - type: dot_ap value: 70.7952548895177 - type: dot_f1 value: 65.44780728844965 - type: dot_precision value: 61.53310104529617 - type: dot_recall value: 69.89445910290237 - type: euclidean_accuracy value: 86.94641473445789 - type: euclidean_ap value: 76.80774009393652 - type: euclidean_f1 value: 70.30522503879979 - type: euclidean_precision value: 
68.94977168949772 - type: euclidean_recall value: 71.71503957783642 - type: manhattan_accuracy value: 86.8629671574179 - type: manhattan_ap value: 76.76518632600317 - type: manhattan_f1 value: 70.16056518946692 - type: manhattan_precision value: 68.360450563204 - type: manhattan_recall value: 72.0580474934037 - type: max_accuracy value: 86.94641473445789 - type: max_ap value: 76.91572747061197 - type: max_f1 value: 70.30522503879979 - task: type: PairClassification dataset: name: MTEB TwitterURLCorpus type: mteb/twitterurlcorpus-pairclassification config: default split: test revision: 8b6510b0b1fa4e4c4f879467980e9be563ec1cdf metrics: - type: cos_sim_accuracy value: 89.10428066907285 - type: cos_sim_ap value: 86.25114759921435 - type: cos_sim_f1 value: 78.37857884586856 - type: cos_sim_precision value: 75.60818546078993 - type: cos_sim_recall value: 81.35971666153372 - type: dot_accuracy value: 87.41995575736406 - type: dot_ap value: 81.51838010086782 - type: dot_f1 value: 74.77398015435503 - type: dot_precision value: 71.53002390662354 - type: dot_recall value: 78.32614721281182 - type: euclidean_accuracy value: 89.12368533395428 - type: euclidean_ap value: 86.33456799874504 - type: euclidean_f1 value: 78.45496750232127 - type: euclidean_precision value: 75.78388462366364 - type: euclidean_recall value: 81.32121958731136 - type: manhattan_accuracy value: 89.10622113556099 - type: manhattan_ap value: 86.31215061745333 - type: manhattan_f1 value: 78.40684906011539 - type: manhattan_precision value: 75.89536643366722 - type: manhattan_recall value: 81.09023714197721 - type: max_accuracy value: 89.12368533395428 - type: max_ap value: 86.33456799874504 - type: max_f1 value: 78.45496750232127 --- # E5-large-v2 [Text Embeddings by Weakly-Supervised Contrastive Pre-training](https://arxiv.org/pdf/2212.03533.pdf). Liang Wang, Nan Yang, Xiaolong Huang, Binxing Jiao, Linjun Yang, Daxin Jiang, Rangan Majumder, Furu Wei, arXiv 2022 This model has 24 layers and the embedding size is 1024. ## Usage Below is an example to encode queries and passages from the MS-MARCO passage ranking dataset. ```python import torch.nn.functional as F from torch import Tensor from transformers import AutoTokenizer, AutoModel def average_pool(last_hidden_states: Tensor, attention_mask: Tensor) -> Tensor: last_hidden = last_hidden_states.masked_fill(~attention_mask[..., None].bool(), 0.0) return last_hidden.sum(dim=1) / attention_mask.sum(dim=1)[..., None] # Each input text should start with "query: " or "passage: ". # For tasks other than retrieval, you can simply use the "query: " prefix. input_texts = ['query: how much protein should a female eat', 'query: summit define', "passage: As a general guideline, the CDC's average requirement of protein for women ages 19 to 70 is 46 grams per day. But, as you can see from this chart, you'll need to increase that if you're expecting or training for a marathon. Check out the chart below to see how much protein you should be eating each day.", "passage: Definition of summit for English Language Learners. : 1 the highest point of a mountain : the top of a mountain. : 2 the highest level. 
: 3 a meeting or series of meetings between the leaders of two or more governments."]

tokenizer = AutoTokenizer.from_pretrained('intfloat/e5-large-v2')
model = AutoModel.from_pretrained('intfloat/e5-large-v2')

# Tokenize the input texts
batch_dict = tokenizer(input_texts, max_length=512, padding=True, truncation=True, return_tensors='pt')

outputs = model(**batch_dict)
embeddings = average_pool(outputs.last_hidden_state, batch_dict['attention_mask'])

# normalize embeddings
embeddings = F.normalize(embeddings, p=2, dim=1)
scores = (embeddings[:2] @ embeddings[2:].T) * 100
print(scores.tolist())
```

## Training Details

Please refer to our paper at [https://arxiv.org/pdf/2212.03533.pdf](https://arxiv.org/pdf/2212.03533.pdf).

## Benchmark Evaluation

Check out [unilm/e5](https://github.com/microsoft/unilm/tree/master/e5) to reproduce evaluation results on the [BEIR](https://arxiv.org/abs/2104.08663) and [MTEB benchmark](https://arxiv.org/abs/2210.07316).

## Support for Sentence Transformers

Below is an example of usage with sentence_transformers.

```python
from sentence_transformers import SentenceTransformer
model = SentenceTransformer('intfloat/e5-large-v2')
input_texts = [
    'query: how much protein should a female eat',
    'query: summit define',
    "passage: As a general guideline, the CDC's average requirement of protein for women ages 19 to 70 is 46 grams per day. But, as you can see from this chart, you'll need to increase that if you're expecting or training for a marathon. Check out the chart below to see how much protein you should be eating each day.",
    "passage: Definition of summit for English Language Learners. : 1 the highest point of a mountain : the top of a mountain. : 2 the highest level. : 3 a meeting or series of meetings between the leaders of two or more governments."
]
embeddings = model.encode(input_texts, normalize_embeddings=True)
```

Package requirements

`pip install sentence_transformers~=2.2.2`

Contributors: [michaelfeil](https://huggingface.co/michaelfeil)

## FAQ

**1. Do I need to add the prefix "query: " and "passage: " to input texts?**

Yes, this is how the model is trained; otherwise you will see a performance degradation.

Here are some rules of thumb:
- Use "query: " and "passage: " correspondingly for asymmetric tasks such as passage retrieval in open QA and ad-hoc information retrieval.
- Use the "query: " prefix for symmetric tasks such as semantic similarity and paraphrase retrieval (see the sketch after this FAQ).
- Use the "query: " prefix if you want to use embeddings as features, e.g. for linear probing classification or clustering.

**2. Why are my reproduced results slightly different from those reported in the model card?**

Different versions of `transformers` and `pytorch` could cause negligible but non-zero performance differences.

**3. Why do the cosine similarity scores distribute around 0.7 to 1.0?**

This is a known and expected behavior, as we use a low temperature of 0.01 for the InfoNCE contrastive loss. For text embedding tasks like text retrieval or semantic similarity, what matters is the relative order of the scores rather than their absolute values, so this should not be an issue.
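For symmetric tasks, the same "query: " prefix goes on every input. A minimal sketch using the sentence_transformers setup shown above (the two example sentences are purely illustrative):

```python
from sentence_transformers import SentenceTransformer, util

model = SentenceTransformer('intfloat/e5-large-v2')

# Symmetric task (semantic similarity): prefix *both* texts with "query: ".
sentences = [
    'query: A man is eating food.',
    'query: A man is eating a piece of bread.',
]
embeddings = model.encode(sentences, normalize_embeddings=True)

# Embeddings are L2-normalized, so the dot product equals cosine similarity.
print(util.cos_sim(embeddings[0], embeddings[1]))
```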
## Citation

If you find our paper or models helpful, please consider citing them as follows:

```
@article{wang2022text,
  title={Text Embeddings by Weakly-Supervised Contrastive Pre-training},
  author={Wang, Liang and Yang, Nan and Huang, Xiaolong and Jiao, Binxing and Yang, Linjun and Jiang, Daxin and Majumder, Rangan and Wei, Furu},
  journal={arXiv preprint arXiv:2212.03533},
  year={2022}
}
```

## Limitations

This model only works for English texts. Long texts will be truncated to at most 512 tokens.
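To see the 512-token limit in action, you can compare the tokenized length of a long input with and without truncation. A small check, assuming the `transformers` tokenizer loaded in the usage example above (the repeated filler sentence is illustrative):

```python
from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained('intfloat/e5-large-v2')

long_text = "passage: " + "This sentence is repeated to make the input long. " * 200

full = tokenizer(long_text, truncation=False)
clipped = tokenizer(long_text, max_length=512, truncation=True)

# Everything beyond 512 tokens is silently dropped at encoding time.
print(len(full['input_ids']), len(clipped['input_ids']))  # e.g. roughly 2000 vs 512
```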
[ "BIOSSES", "SCIFACT" ]
opensearch-project/opensearch-neural-sparse-encoding-doc-v2-distill
opensearch-project
fill-mask
[ "transformers", "pytorch", "safetensors", "distilbert", "fill-mask", "learned sparse", "opensearch", "retrieval", "passage-retrieval", "document-expansion", "bag-of-words", "en", "arxiv:2411.04403", "license:apache-2.0", "autotrain_compatible", "endpoints_compatible", "region:us" ]
"2024-07-17T07:51:35Z"
2025-02-24T05:01:33+00:00
1,872,096
7
---
language: en
license: apache-2.0
tags:
- learned sparse
- opensearch
- transformers
- retrieval
- passage-retrieval
- document-expansion
- bag-of-words
---

# opensearch-neural-sparse-encoding-doc-v2-distill

## Select the model

The model should be selected considering search relevance, model inference and retrieval efficiency (FLOPS). We benchmark the models' **zero-shot performance** on a subset of the BEIR benchmark: TrecCovid, NFCorpus, NQ, HotpotQA, FiQA, ArguAna, Touche, DBPedia, SCIDOCS, FEVER, Climate FEVER, SciFact, Quora.

Overall, the v2 series of models have better search relevance, efficiency and inference speed than the v1 series. The specific advantages and disadvantages may vary across different datasets.

| Model | Inference-free for Retrieval | Model Parameters | AVG NDCG@10 | AVG FLOPS |
|-------|------------------------------|------------------|-------------|-----------|
| [opensearch-neural-sparse-encoding-v1](https://huggingface.co/opensearch-project/opensearch-neural-sparse-encoding-v1) | | 133M | 0.524 | 11.4 |
| [opensearch-neural-sparse-encoding-v2-distill](https://huggingface.co/opensearch-project/opensearch-neural-sparse-encoding-v2-distill) | | 67M | 0.528 | 8.3 |
| [opensearch-neural-sparse-encoding-doc-v1](https://huggingface.co/opensearch-project/opensearch-neural-sparse-encoding-doc-v1) | ✔️ | 133M | 0.490 | 2.3 |
| [opensearch-neural-sparse-encoding-doc-v2-distill](https://huggingface.co/opensearch-project/opensearch-neural-sparse-encoding-doc-v2-distill) | ✔️ | 67M | 0.504 | 1.8 |
| [opensearch-neural-sparse-encoding-doc-v2-mini](https://huggingface.co/opensearch-project/opensearch-neural-sparse-encoding-doc-v2-mini) | ✔️ | 23M | 0.497 | 1.7 |

## Overview

- **Paper**: [Towards Competitive Search Relevance For Inference-Free Learned Sparse Retrievers](https://arxiv.org/abs/2411.04403)
- **Fine-tuning sample**: [opensearch-sparse-model-tuning-sample](https://github.com/zhichao-aws/opensearch-sparse-model-tuning-sample)

This is a learned sparse retrieval model. It encodes documents into 30522-dimensional **sparse vectors**. For queries, it just uses a tokenizer and a weight look-up table to generate sparse vectors. A non-zero dimension index corresponds to a token in the vocabulary, and its weight reflects the importance of that token. The similarity score is the inner product of the query and document sparse vectors.

The training datasets include MS MARCO, eli5_question_answer, squad_pairs, WikiAnswers, yahoo_answers_title_question, gooaq_pairs, stackexchange_duplicate_questions_body_body, wikihow, S2ORC_title_abstract, stackexchange_duplicate_questions_title-body_title-body, yahoo_answers_question_answer, searchQA_top5_snippets, stackexchange_duplicate_questions_title_title, yahoo_answers_title_answer.

The OpenSearch neural sparse feature supports learned sparse retrieval with a Lucene inverted index. Link: https://opensearch.org/docs/latest/query-dsl/specialized/neural-sparse/. The indexing and search can be performed with the OpenSearch high-level API.

## Usage (HuggingFace)

This model is supposed to run inside an OpenSearch cluster, but you can also use it outside the cluster with the HuggingFace models API.
```python
import json
import itertools
import torch

from transformers import AutoModelForMaskedLM, AutoTokenizer


# get sparse vector from dense vectors with shape batch_size * seq_len * vocab_size
def get_sparse_vector(feature, output):
    values, _ = torch.max(output*feature["attention_mask"].unsqueeze(-1), dim=1)
    values = torch.log(1 + torch.relu(values))
    values[:,special_token_ids] = 0
    return values

# transform the sparse vector to a dict of (token, weight)
def transform_sparse_vector_to_dict(sparse_vector):
    sample_indices,token_indices=torch.nonzero(sparse_vector,as_tuple=True)
    non_zero_values = sparse_vector[(sample_indices,token_indices)].tolist()
    number_of_tokens_for_each_sample = torch.bincount(sample_indices).cpu().tolist()
    tokens = [transform_sparse_vector_to_dict.id_to_token[_id] for _id in token_indices.tolist()]

    output = []
    end_idxs = list(itertools.accumulate([0]+number_of_tokens_for_each_sample))
    for i in range(len(end_idxs)-1):
        token_strings = tokens[end_idxs[i]:end_idxs[i+1]]
        weights = non_zero_values[end_idxs[i]:end_idxs[i+1]]
        output.append(dict(zip(token_strings, weights)))
    return output

# download the idf file from model hub. idf is used to give weights for query tokens
def get_tokenizer_idf(tokenizer):
    from huggingface_hub import hf_hub_download
    local_cached_path = hf_hub_download(repo_id="opensearch-project/opensearch-neural-sparse-encoding-doc-v2-distill", filename="idf.json")
    with open(local_cached_path) as f:
        idf = json.load(f)
    idf_vector = [0]*tokenizer.vocab_size
    for token,weight in idf.items():
        _id = tokenizer._convert_token_to_id_with_added_voc(token)
        idf_vector[_id]=weight
    return torch.tensor(idf_vector)

# load the model
model = AutoModelForMaskedLM.from_pretrained("opensearch-project/opensearch-neural-sparse-encoding-doc-v2-distill")
tokenizer = AutoTokenizer.from_pretrained("opensearch-project/opensearch-neural-sparse-encoding-doc-v2-distill")
idf = get_tokenizer_idf(tokenizer)

# set the special tokens and id_to_token transform for post-process
special_token_ids = [tokenizer.vocab[token] for token in tokenizer.special_tokens_map.values()]
get_sparse_vector.special_token_ids = special_token_ids
id_to_token = ["" for i in range(tokenizer.vocab_size)]
for token, _id in tokenizer.vocab.items():
    id_to_token[_id] = token
transform_sparse_vector_to_dict.id_to_token = id_to_token


query = "What's the weather in ny now?"
document = "Currently New York is rainy."
# encode the query
feature_query = tokenizer([query], padding=True, truncation=True, return_tensors='pt', return_token_type_ids=False)
input_ids = feature_query["input_ids"]
batch_size = input_ids.shape[0]
query_vector = torch.zeros(batch_size, tokenizer.vocab_size)
query_vector[torch.arange(batch_size).unsqueeze(-1), input_ids] = 1
query_sparse_vector = query_vector*idf

# encode the document
feature_document = tokenizer([document], padding=True, truncation=True, return_tensors='pt', return_token_type_ids=False)
output = model(**feature_document)[0]
document_sparse_vector = get_sparse_vector(feature_document, output)

# get similarity score
sim_score = torch.matmul(query_sparse_vector[0],document_sparse_vector[0])
print(sim_score)   # tensor(17.5307, grad_fn=<DotBackward0>)

query_token_weight = transform_sparse_vector_to_dict(query_sparse_vector)[0]
document_query_token_weight = transform_sparse_vector_to_dict(document_sparse_vector)[0]
for token in sorted(query_token_weight, key=lambda x:query_token_weight[x], reverse=True):
    if token in document_query_token_weight:
        print("score in query: %.4f, score in document: %.4f, token: %s"%(query_token_weight[token],document_query_token_weight[token],token))

# result:
# score in query: 5.7729, score in document: 1.4109, token: ny
# score in query: 4.5684, score in document: 1.4673, token: weather
# score in query: 3.5895, score in document: 0.7473, token: now
```

The above code sample shows an example of neural sparse search. Although the original query and document share no overlapping tokens, the model still produces a good match. (A short sketch of ranking several documents at once follows at the end of this card.)

## Detailed Search Relevance

<div style="overflow-x: auto;">

| Model | Average | Trec Covid | NFCorpus | NQ | HotpotQA | FiQA | ArguAna | Touche | DBPedia | SCIDOCS | FEVER | Climate FEVER | SciFact | Quora |
|-------|---------|------------|----------|----|----------|------|---------|--------|---------|---------|-------|---------------|---------|-------|
| [opensearch-neural-sparse-encoding-v1](https://huggingface.co/opensearch-project/opensearch-neural-sparse-encoding-v1) | 0.524 | 0.771 | 0.360 | 0.553 | 0.697 | 0.376 | 0.508 | 0.278 | 0.447 | 0.164 | 0.821 | 0.263 | 0.723 | 0.856 |
| [opensearch-neural-sparse-encoding-v2-distill](https://huggingface.co/opensearch-project/opensearch-neural-sparse-encoding-v2-distill) | 0.528 | 0.775 | 0.347 | 0.561 | 0.685 | 0.374 | 0.551 | 0.278 | 0.435 | 0.173 | 0.849 | 0.249 | 0.722 | 0.863 |
| [opensearch-neural-sparse-encoding-doc-v1](https://huggingface.co/opensearch-project/opensearch-neural-sparse-encoding-doc-v1) | 0.490 | 0.707 | 0.352 | 0.521 | 0.677 | 0.344 | 0.461 | 0.294 | 0.412 | 0.154 | 0.743 | 0.202 | 0.716 | 0.788 |
| [opensearch-neural-sparse-encoding-doc-v2-distill](https://huggingface.co/opensearch-project/opensearch-neural-sparse-encoding-doc-v2-distill) | 0.504 | 0.690 | 0.343 | 0.528 | 0.675 | 0.357 | 0.496 | 0.287 | 0.418 | 0.166 | 0.818 | 0.224 | 0.715 | 0.841 |
| [opensearch-neural-sparse-encoding-doc-v2-mini](https://huggingface.co/opensearch-project/opensearch-neural-sparse-encoding-doc-v2-mini) | 0.497 | 0.709 | 0.336 | 0.510 | 0.666 | 0.338 | 0.480 | 0.285 | 0.407 | 0.164 | 0.812 | 0.216 | 0.699 | 0.837 |

</div>

## License

This project is licensed under the [Apache v2.0 License](https://github.com/opensearch-project/neural-search/blob/main/LICENSE).

## Copyright

Copyright OpenSearch Contributors. See [NOTICE](https://github.com/opensearch-project/neural-search/blob/main/NOTICE) for details.
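As referenced above, here is a minimal sketch of ranking a small batch of documents against one query. It reuses the `model`, `tokenizer`, `idf`, helper functions and `query_sparse_vector` defined in the usage section; the document texts are illustrative only.

```python
import torch

docs = [
    "Currently New York is rainy.",
    "The weather in Paris is sunny today.",
    "Neural sparse retrieval uses an inverted index.",
]

# document side: masked-LM expansion pooled into one sparse vector per document
feature_docs = tokenizer(docs, padding=True, truncation=True, return_tensors='pt', return_token_type_ids=False)
doc_sparse_vectors = get_sparse_vector(feature_docs, model(**feature_docs)[0])  # shape: (num_docs, vocab_size)

# query side stays inference-free (tokenizer + idf look-up), exactly as in the snippet above;
# each score is the inner product between the query and one document sparse vector
scores = doc_sparse_vectors @ query_sparse_vector[0]

for rank, idx in enumerate(torch.argsort(scores, descending=True).tolist()):
    print(f"rank {rank + 1}: score={scores[idx].item():.4f}  doc={docs[idx]}")
```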
[ "SCIFACT" ]
jinaai/jina-embeddings-v3
jinaai
feature-extraction
[ "transformers", "pytorch", "onnx", "safetensors", "feature-extraction", "sentence-similarity", "mteb", "sentence-transformers", "custom_code", "multilingual", "af", "am", "ar", "as", "az", "be", "bg", "bn", "br", "bs", "ca", "cs", "cy", "da", "de", "el", "en", "eo", "es", "et", "eu", "fa", "fi", "fr", "fy", "ga", "gd", "gl", "gu", "ha", "he", "hi", "hr", "hu", "hy", "id", "is", "it", "ja", "jv", "ka", "kk", "km", "kn", "ko", "ku", "ky", "la", "lo", "lt", "lv", "mg", "mk", "ml", "mn", "mr", "ms", "my", "ne", "nl", "no", "om", "or", "pa", "pl", "ps", "pt", "ro", "ru", "sa", "sd", "si", "sk", "sl", "so", "sq", "sr", "su", "sv", "sw", "ta", "te", "th", "tl", "tr", "ug", "uk", "ur", "uz", "vi", "xh", "yi", "zh", "arxiv:2409.10173", "license:cc-by-nc-4.0", "model-index", "region:eu" ]
"2024-09-05T11:56:46Z"
2025-02-24T07:06:37+00:00
1,734,211
828
--- language: - multilingual - af - am - ar - as - az - be - bg - bn - br - bs - ca - cs - cy - da - de - el - en - eo - es - et - eu - fa - fi - fr - fy - ga - gd - gl - gu - ha - he - hi - hr - hu - hy - id - is - it - ja - jv - ka - kk - km - kn - ko - ku - ky - la - lo - lt - lv - mg - mk - ml - mn - mr - ms - my - ne - nl - false - om - or - pa - pl - ps - pt - ro - ru - sa - sd - si - sk - sl - so - sq - sr - su - sv - sw - ta - te - th - tl - tr - ug - uk - ur - uz - vi - xh - yi - zh library_name: transformers license: cc-by-nc-4.0 tags: - feature-extraction - sentence-similarity - mteb - sentence-transformers inference: false model-index: - name: jina-embeddings-v3 results: - task: type: STS dataset: name: MTEB AFQMC (default) type: C-MTEB/AFQMC config: default split: validation revision: b44c3b011063adb25877c13823db83bb193913c4 metrics: - type: cosine_pearson value: 41.74237700998808 - type: cosine_spearman value: 43.4726782647566 - type: euclidean_pearson value: 42.244585459479964 - type: euclidean_spearman value: 43.525070045169606 - type: main_score value: 43.4726782647566 - type: manhattan_pearson value: 42.04616728224863 - type: manhattan_spearman value: 43.308828270754645 - type: pearson value: 41.74237700998808 - type: spearman value: 43.4726782647566 - task: type: Retrieval dataset: name: MTEB ArguAna-PL (default) type: clarin-knext/arguana-pl config: default split: test revision: 63fc86750af76253e8c760fc9e534bbf24d260a2 metrics: - type: main_score value: 50.117999999999995 - type: map_at_1 value: 24.253 - type: map_at_10 value: 40.725 - type: map_at_100 value: 41.699999999999996 - type: map_at_1000 value: 41.707 - type: map_at_20 value: 41.467999999999996 - type: map_at_3 value: 35.467 - type: map_at_5 value: 38.291 - type: mrr_at_1 value: 24.751066856330013 - type: mrr_at_10 value: 40.91063808169072 - type: mrr_at_100 value: 41.885497923928675 - type: mrr_at_1000 value: 41.89301098419842 - type: mrr_at_20 value: 41.653552355442514 - type: mrr_at_3 value: 35.656709340919775 - type: mrr_at_5 value: 38.466097676623946 - type: nauc_map_at_1000_diff1 value: 7.503000359807567 - type: nauc_map_at_1000_max value: -11.030405164830546 - type: nauc_map_at_1000_std value: -8.902792782585117 - type: nauc_map_at_100_diff1 value: 7.509899249593199 - type: nauc_map_at_100_max value: -11.023581259404406 - type: nauc_map_at_100_std value: -8.892241185067272 - type: nauc_map_at_10_diff1 value: 7.24369711881512 - type: nauc_map_at_10_max value: -10.810000200433278 - type: nauc_map_at_10_std value: -8.987230542165776 - type: nauc_map_at_1_diff1 value: 11.37175831832417 - type: nauc_map_at_1_max value: -13.315221903223055 - type: nauc_map_at_1_std value: -9.398199605510275 - type: nauc_map_at_20_diff1 value: 7.477364530860648 - type: nauc_map_at_20_max value: -10.901251218105566 - type: nauc_map_at_20_std value: -8.868148116405925 - type: nauc_map_at_3_diff1 value: 6.555548802174882 - type: nauc_map_at_3_max value: -12.247274800542934 - type: nauc_map_at_3_std value: -9.879475250984811 - type: nauc_map_at_5_diff1 value: 7.426588563355882 - type: nauc_map_at_5_max value: -11.347695686001805 - type: nauc_map_at_5_std value: -9.34441892203972 - type: nauc_mrr_at_1000_diff1 value: 5.99737552143614 - type: nauc_mrr_at_1000_max value: -11.327205136505727 - type: nauc_mrr_at_1000_std value: -8.791079115519503 - type: nauc_mrr_at_100_diff1 value: 6.004622525255784 - type: nauc_mrr_at_100_max value: -11.320336759899723 - type: nauc_mrr_at_100_std value: -8.780602249831777 - type: nauc_mrr_at_10_diff1 
value: 5.783623516930227 - type: nauc_mrr_at_10_max value: -11.095971693467078 - type: nauc_mrr_at_10_std value: -8.877242032013582 - type: nauc_mrr_at_1_diff1 value: 9.694937537703797 - type: nauc_mrr_at_1_max value: -12.531905083727912 - type: nauc_mrr_at_1_std value: -8.903992940100146 - type: nauc_mrr_at_20_diff1 value: 5.984841206233873 - type: nauc_mrr_at_20_max value: -11.195236951048969 - type: nauc_mrr_at_20_std value: -8.757266039186018 - type: nauc_mrr_at_3_diff1 value: 5.114333824261379 - type: nauc_mrr_at_3_max value: -12.64809799843464 - type: nauc_mrr_at_3_std value: -9.791146138025184 - type: nauc_mrr_at_5_diff1 value: 5.88941606224512 - type: nauc_mrr_at_5_max value: -11.763903418071918 - type: nauc_mrr_at_5_std value: -9.279175712709446 - type: nauc_ndcg_at_1000_diff1 value: 7.076950652226086 - type: nauc_ndcg_at_1000_max value: -10.386482092087371 - type: nauc_ndcg_at_1000_std value: -8.309190917074046 - type: nauc_ndcg_at_100_diff1 value: 7.2329220284865245 - type: nauc_ndcg_at_100_max value: -10.208048403220337 - type: nauc_ndcg_at_100_std value: -7.997975874274613 - type: nauc_ndcg_at_10_diff1 value: 6.065391100006953 - type: nauc_ndcg_at_10_max value: -9.046164377601153 - type: nauc_ndcg_at_10_std value: -8.34724889697153 - type: nauc_ndcg_at_1_diff1 value: 11.37175831832417 - type: nauc_ndcg_at_1_max value: -13.315221903223055 - type: nauc_ndcg_at_1_std value: -9.398199605510275 - type: nauc_ndcg_at_20_diff1 value: 6.949389989202601 - type: nauc_ndcg_at_20_max value: -9.35740451760307 - type: nauc_ndcg_at_20_std value: -7.761295171828212 - type: nauc_ndcg_at_3_diff1 value: 5.051471796151364 - type: nauc_ndcg_at_3_max value: -12.158763333711653 - type: nauc_ndcg_at_3_std value: -10.078902544421926 - type: nauc_ndcg_at_5_diff1 value: 6.527454512611454 - type: nauc_ndcg_at_5_max value: -10.525118233848586 - type: nauc_ndcg_at_5_std value: -9.120055125584031 - type: nauc_precision_at_1000_diff1 value: -10.6495668199151 - type: nauc_precision_at_1000_max value: 12.070656425217841 - type: nauc_precision_at_1000_std value: 55.844551709649004 - type: nauc_precision_at_100_diff1 value: 19.206967129266285 - type: nauc_precision_at_100_max value: 16.296851020813456 - type: nauc_precision_at_100_std value: 45.60378984257811 - type: nauc_precision_at_10_diff1 value: 0.6490335354304879 - type: nauc_precision_at_10_max value: 0.5757198255366447 - type: nauc_precision_at_10_std value: -4.875847131691451 - type: nauc_precision_at_1_diff1 value: 11.37175831832417 - type: nauc_precision_at_1_max value: -13.315221903223055 - type: nauc_precision_at_1_std value: -9.398199605510275 - type: nauc_precision_at_20_diff1 value: 4.899369866929203 - type: nauc_precision_at_20_max value: 5.988537297189552 - type: nauc_precision_at_20_std value: 4.830900387582837 - type: nauc_precision_at_3_diff1 value: 0.8791156910997744 - type: nauc_precision_at_3_max value: -11.983373635905993 - type: nauc_precision_at_3_std value: -10.646185111581257 - type: nauc_precision_at_5_diff1 value: 3.9314486166548432 - type: nauc_precision_at_5_max value: -7.798591396895839 - type: nauc_precision_at_5_std value: -8.293043407234125 - type: nauc_recall_at_1000_diff1 value: -10.649566819918673 - type: nauc_recall_at_1000_max value: 12.070656425214647 - type: nauc_recall_at_1000_std value: 55.84455170965023 - type: nauc_recall_at_100_diff1 value: 19.206967129265127 - type: nauc_recall_at_100_max value: 16.296851020813722 - type: nauc_recall_at_100_std value: 45.60378984257728 - type: nauc_recall_at_10_diff1 value: 
0.6490335354304176 - type: nauc_recall_at_10_max value: 0.5757198255366095 - type: nauc_recall_at_10_std value: -4.875847131691468 - type: nauc_recall_at_1_diff1 value: 11.37175831832417 - type: nauc_recall_at_1_max value: -13.315221903223055 - type: nauc_recall_at_1_std value: -9.398199605510275 - type: nauc_recall_at_20_diff1 value: 4.899369866929402 - type: nauc_recall_at_20_max value: 5.98853729718968 - type: nauc_recall_at_20_std value: 4.830900387582967 - type: nauc_recall_at_3_diff1 value: 0.8791156910997652 - type: nauc_recall_at_3_max value: -11.983373635905997 - type: nauc_recall_at_3_std value: -10.64618511158124 - type: nauc_recall_at_5_diff1 value: 3.9314486166548472 - type: nauc_recall_at_5_max value: -7.7985913968958585 - type: nauc_recall_at_5_std value: -8.293043407234132 - type: ndcg_at_1 value: 24.253 - type: ndcg_at_10 value: 50.117999999999995 - type: ndcg_at_100 value: 54.291999999999994 - type: ndcg_at_1000 value: 54.44799999999999 - type: ndcg_at_20 value: 52.771 - type: ndcg_at_3 value: 39.296 - type: ndcg_at_5 value: 44.373000000000005 - type: precision_at_1 value: 24.253 - type: precision_at_10 value: 8.016 - type: precision_at_100 value: 0.984 - type: precision_at_1000 value: 0.1 - type: precision_at_20 value: 4.527 - type: precision_at_3 value: 16.808999999999997 - type: precision_at_5 value: 12.546 - type: recall_at_1 value: 24.253 - type: recall_at_10 value: 80.156 - type: recall_at_100 value: 98.43499999999999 - type: recall_at_1000 value: 99.57300000000001 - type: recall_at_20 value: 90.54100000000001 - type: recall_at_3 value: 50.427 - type: recall_at_5 value: 62.731 - task: type: Retrieval dataset: name: MTEB DBPedia-PL (default) type: clarin-knext/dbpedia-pl config: default split: test revision: 76afe41d9af165cc40999fcaa92312b8b012064a metrics: - type: main_score value: 34.827000000000005 - type: map_at_1 value: 7.049999999999999 - type: map_at_10 value: 14.982999999999999 - type: map_at_100 value: 20.816000000000003 - type: map_at_1000 value: 22.33 - type: map_at_20 value: 17.272000000000002 - type: map_at_3 value: 10.661 - type: map_at_5 value: 12.498 - type: mrr_at_1 value: 57.25 - type: mrr_at_10 value: 65.81934523809524 - type: mrr_at_100 value: 66.2564203928212 - type: mrr_at_1000 value: 66.27993662923856 - type: mrr_at_20 value: 66.0732139130649 - type: mrr_at_3 value: 64.08333333333333 - type: mrr_at_5 value: 65.27083333333333 - type: nauc_map_at_1000_diff1 value: 16.41780871174038 - type: nauc_map_at_1000_max value: 30.193946325654654 - type: nauc_map_at_1000_std value: 31.46095497039037 - type: nauc_map_at_100_diff1 value: 18.57903165498531 - type: nauc_map_at_100_max value: 29.541476938623262 - type: nauc_map_at_100_std value: 28.228604103301052 - type: nauc_map_at_10_diff1 value: 24.109434489748946 - type: nauc_map_at_10_max value: 21.475954208048968 - type: nauc_map_at_10_std value: 9.964464537806988 - type: nauc_map_at_1_diff1 value: 38.67437644802124 - type: nauc_map_at_1_max value: 14.52136658726491 - type: nauc_map_at_1_std value: -2.8981666782088755 - type: nauc_map_at_20_diff1 value: 21.42547228801935 - type: nauc_map_at_20_max value: 25.04510402960458 - type: nauc_map_at_20_std value: 16.533079346431155 - type: nauc_map_at_3_diff1 value: 26.63648858245477 - type: nauc_map_at_3_max value: 13.632235789780415 - type: nauc_map_at_3_std value: -0.40129174577700716 - type: nauc_map_at_5_diff1 value: 24.513861031197933 - type: nauc_map_at_5_max value: 16.599888813946688 - type: nauc_map_at_5_std value: 3.4448514739556346 - type: 
nauc_mrr_at_1000_diff1 value: 36.57353464537154 - type: nauc_mrr_at_1000_max value: 55.34763483979515 - type: nauc_mrr_at_1000_std value: 40.3722796438533 - type: nauc_mrr_at_100_diff1 value: 36.555989566513134 - type: nauc_mrr_at_100_max value: 55.347805216808396 - type: nauc_mrr_at_100_std value: 40.38465945075711 - type: nauc_mrr_at_10_diff1 value: 36.771572999261984 - type: nauc_mrr_at_10_max value: 55.41239897909165 - type: nauc_mrr_at_10_std value: 40.52058934624793 - type: nauc_mrr_at_1_diff1 value: 38.2472828531032 - type: nauc_mrr_at_1_max value: 51.528473828685705 - type: nauc_mrr_at_1_std value: 33.03676467942882 - type: nauc_mrr_at_20_diff1 value: 36.642602571889036 - type: nauc_mrr_at_20_max value: 55.3763342076553 - type: nauc_mrr_at_20_std value: 40.41520090500838 - type: nauc_mrr_at_3_diff1 value: 36.79451847426628 - type: nauc_mrr_at_3_max value: 54.59778581826193 - type: nauc_mrr_at_3_std value: 39.48392075873095 - type: nauc_mrr_at_5_diff1 value: 36.92150807529304 - type: nauc_mrr_at_5_max value: 55.03553978718272 - type: nauc_mrr_at_5_std value: 40.20147745489917 - type: nauc_ndcg_at_1000_diff1 value: 21.843092744321268 - type: nauc_ndcg_at_1000_max value: 44.93275990394279 - type: nauc_ndcg_at_1000_std value: 47.09186225236347 - type: nauc_ndcg_at_100_diff1 value: 25.180282568979095 - type: nauc_ndcg_at_100_max value: 41.737709709508394 - type: nauc_ndcg_at_100_std value: 38.80950644139446 - type: nauc_ndcg_at_10_diff1 value: 24.108368037214046 - type: nauc_ndcg_at_10_max value: 41.29298370689967 - type: nauc_ndcg_at_10_std value: 35.06450769738732 - type: nauc_ndcg_at_1_diff1 value: 35.51010679525079 - type: nauc_ndcg_at_1_max value: 42.40790024212412 - type: nauc_ndcg_at_1_std value: 26.696412036243157 - type: nauc_ndcg_at_20_diff1 value: 23.909989673256195 - type: nauc_ndcg_at_20_max value: 39.78444647091927 - type: nauc_ndcg_at_20_std value: 33.39544470364529 - type: nauc_ndcg_at_3_diff1 value: 22.50484297956035 - type: nauc_ndcg_at_3_max value: 39.14551926034168 - type: nauc_ndcg_at_3_std value: 30.330135925392014 - type: nauc_ndcg_at_5_diff1 value: 21.7798872028265 - type: nauc_ndcg_at_5_max value: 40.23856975248015 - type: nauc_ndcg_at_5_std value: 32.438381067440396 - type: nauc_precision_at_1000_diff1 value: -21.62692442272279 - type: nauc_precision_at_1000_max value: 0.9689046974430882 - type: nauc_precision_at_1000_std value: 18.54001058230465 - type: nauc_precision_at_100_diff1 value: -10.132258779856192 - type: nauc_precision_at_100_max value: 23.74516110444681 - type: nauc_precision_at_100_std value: 47.03416663319965 - type: nauc_precision_at_10_diff1 value: 1.543656509571949 - type: nauc_precision_at_10_max value: 36.98864812757555 - type: nauc_precision_at_10_std value: 46.56427199077426 - type: nauc_precision_at_1_diff1 value: 38.2472828531032 - type: nauc_precision_at_1_max value: 51.528473828685705 - type: nauc_precision_at_1_std value: 33.03676467942882 - type: nauc_precision_at_20_diff1 value: -4.612864872734335 - type: nauc_precision_at_20_max value: 34.03565449182125 - type: nauc_precision_at_20_std value: 48.880727648349534 - type: nauc_precision_at_3_diff1 value: 6.360850444467829 - type: nauc_precision_at_3_max value: 36.25816942368427 - type: nauc_precision_at_3_std value: 34.48882647419187 - type: nauc_precision_at_5_diff1 value: 2.6445596936740037 - type: nauc_precision_at_5_max value: 37.174463388899056 - type: nauc_precision_at_5_std value: 40.25254370626113 - type: nauc_recall_at_1000_diff1 value: 13.041227176748077 - type: 
nauc_recall_at_1000_max value: 39.722336427072094 - type: nauc_recall_at_1000_std value: 52.04032890059214 - type: nauc_recall_at_100_diff1 value: 18.286096899139153 - type: nauc_recall_at_100_max value: 34.072389201930314 - type: nauc_recall_at_100_std value: 37.73637623416653 - type: nauc_recall_at_10_diff1 value: 22.35560419280504 - type: nauc_recall_at_10_max value: 19.727247199595197 - type: nauc_recall_at_10_std value: 8.58498575109203 - type: nauc_recall_at_1_diff1 value: 38.67437644802124 - type: nauc_recall_at_1_max value: 14.52136658726491 - type: nauc_recall_at_1_std value: -2.8981666782088755 - type: nauc_recall_at_20_diff1 value: 19.026320886902916 - type: nauc_recall_at_20_max value: 22.753562309469867 - type: nauc_recall_at_20_std value: 14.89994263882445 - type: nauc_recall_at_3_diff1 value: 23.428129702129684 - type: nauc_recall_at_3_max value: 10.549153954790542 - type: nauc_recall_at_3_std value: -1.7590608997055206 - type: nauc_recall_at_5_diff1 value: 21.27448645803921 - type: nauc_recall_at_5_max value: 13.620279707461677 - type: nauc_recall_at_5_std value: 2.0577962208292675 - type: ndcg_at_1 value: 46.75 - type: ndcg_at_10 value: 34.827000000000005 - type: ndcg_at_100 value: 38.157999999999994 - type: ndcg_at_1000 value: 44.816 - type: ndcg_at_20 value: 34.152 - type: ndcg_at_3 value: 39.009 - type: ndcg_at_5 value: 36.826 - type: precision_at_1 value: 57.25 - type: precision_at_10 value: 27.575 - type: precision_at_100 value: 8.84 - type: precision_at_1000 value: 1.949 - type: precision_at_20 value: 20.724999999999998 - type: precision_at_3 value: 41.167 - type: precision_at_5 value: 35.199999999999996 - type: recall_at_1 value: 7.049999999999999 - type: recall_at_10 value: 19.817999999999998 - type: recall_at_100 value: 42.559999999999995 - type: recall_at_1000 value: 63.744 - type: recall_at_20 value: 25.968000000000004 - type: recall_at_3 value: 11.959 - type: recall_at_5 value: 14.939 - task: type: Retrieval dataset: name: MTEB FiQA-PL (default) type: clarin-knext/fiqa-pl config: default split: test revision: 2e535829717f8bf9dc829b7f911cc5bbd4e6608e metrics: - type: main_score value: 38.828 - type: map_at_1 value: 19.126 - type: map_at_10 value: 31.002000000000002 - type: map_at_100 value: 32.736 - type: map_at_1000 value: 32.933 - type: map_at_20 value: 31.894 - type: map_at_3 value: 26.583000000000002 - type: map_at_5 value: 28.904000000000003 - type: mrr_at_1 value: 37.808641975308646 - type: mrr_at_10 value: 46.36745541838134 - type: mrr_at_100 value: 47.14140915794908 - type: mrr_at_1000 value: 47.190701435388846 - type: mrr_at_20 value: 46.81387776440309 - type: mrr_at_3 value: 43.750000000000014 - type: mrr_at_5 value: 45.23919753086418 - type: nauc_map_at_1000_diff1 value: 38.5532285881503 - type: nauc_map_at_1000_max value: 34.44383884813453 - type: nauc_map_at_1000_std value: -1.3963497949476722 - type: nauc_map_at_100_diff1 value: 38.49292464176943 - type: nauc_map_at_100_max value: 34.33752755618645 - type: nauc_map_at_100_std value: -1.4794032905848582 - type: nauc_map_at_10_diff1 value: 38.26061536370962 - type: nauc_map_at_10_max value: 33.16977912721411 - type: nauc_map_at_10_std value: -2.3853370604730393 - type: nauc_map_at_1_diff1 value: 46.288767289528344 - type: nauc_map_at_1_max value: 25.67706785013364 - type: nauc_map_at_1_std value: -6.989769609924645 - type: nauc_map_at_20_diff1 value: 38.507270129330685 - type: nauc_map_at_20_max value: 33.70963328055982 - type: nauc_map_at_20_std value: -1.9835510011554272 - type: nauc_map_at_3_diff1 
value: 39.81061518646884 - type: nauc_map_at_3_max value: 30.101186374147748 - type: nauc_map_at_3_std value: -4.027120247237715 - type: nauc_map_at_5_diff1 value: 38.55602589746512 - type: nauc_map_at_5_max value: 31.515174267015983 - type: nauc_map_at_5_std value: -3.4064239358570303 - type: nauc_mrr_at_1000_diff1 value: 45.030514454725726 - type: nauc_mrr_at_1000_max value: 43.878919881666164 - type: nauc_mrr_at_1000_std value: 2.517594250297626 - type: nauc_mrr_at_100_diff1 value: 45.00868212878687 - type: nauc_mrr_at_100_max value: 43.87437011120001 - type: nauc_mrr_at_100_std value: 2.5257874265014966 - type: nauc_mrr_at_10_diff1 value: 44.855044606754056 - type: nauc_mrr_at_10_max value: 43.946617058785186 - type: nauc_mrr_at_10_std value: 2.5173751662794044 - type: nauc_mrr_at_1_diff1 value: 49.441510997817346 - type: nauc_mrr_at_1_max value: 43.08547383044357 - type: nauc_mrr_at_1_std value: -1.8747770703324347 - type: nauc_mrr_at_20_diff1 value: 45.019880416584215 - type: nauc_mrr_at_20_max value: 43.85691473662242 - type: nauc_mrr_at_20_std value: 2.4625487605091303 - type: nauc_mrr_at_3_diff1 value: 45.322041658604036 - type: nauc_mrr_at_3_max value: 43.95079293074395 - type: nauc_mrr_at_3_std value: 2.4644274393435737 - type: nauc_mrr_at_5_diff1 value: 44.99461837803437 - type: nauc_mrr_at_5_max value: 43.97934275090601 - type: nauc_mrr_at_5_std value: 2.5353091695125096 - type: nauc_ndcg_at_1000_diff1 value: 39.38449023275524 - type: nauc_ndcg_at_1000_max value: 39.48382767312788 - type: nauc_ndcg_at_1000_std value: 3.414789408343409 - type: nauc_ndcg_at_100_diff1 value: 38.29675861135578 - type: nauc_ndcg_at_100_max value: 38.2674786507297 - type: nauc_ndcg_at_100_std value: 2.7094055381218207 - type: nauc_ndcg_at_10_diff1 value: 38.09514955708717 - type: nauc_ndcg_at_10_max value: 36.664923238906525 - type: nauc_ndcg_at_10_std value: 0.6901410544967921 - type: nauc_ndcg_at_1_diff1 value: 49.441510997817346 - type: nauc_ndcg_at_1_max value: 43.08547383044357 - type: nauc_ndcg_at_1_std value: -1.8747770703324347 - type: nauc_ndcg_at_20_diff1 value: 38.44967736231759 - type: nauc_ndcg_at_20_max value: 36.871179313622584 - type: nauc_ndcg_at_20_std value: 1.157560360065234 - type: nauc_ndcg_at_3_diff1 value: 39.02419271805571 - type: nauc_ndcg_at_3_max value: 37.447669442586324 - type: nauc_ndcg_at_3_std value: 0.41502589779297794 - type: nauc_ndcg_at_5_diff1 value: 38.10233452742001 - type: nauc_ndcg_at_5_max value: 35.816381905465676 - type: nauc_ndcg_at_5_std value: -0.3704499913387088 - type: nauc_precision_at_1000_diff1 value: 2.451267097838658 - type: nauc_precision_at_1000_max value: 29.116394969085306 - type: nauc_precision_at_1000_std value: 14.85900786538363 - type: nauc_precision_at_100_diff1 value: 8.10919082251277 - type: nauc_precision_at_100_max value: 36.28388256191417 - type: nauc_precision_at_100_std value: 14.830039904317657 - type: nauc_precision_at_10_diff1 value: 15.02446609920477 - type: nauc_precision_at_10_max value: 41.008463775454054 - type: nauc_precision_at_10_std value: 10.431403152334486 - type: nauc_precision_at_1_diff1 value: 49.441510997817346 - type: nauc_precision_at_1_max value: 43.08547383044357 - type: nauc_precision_at_1_std value: -1.8747770703324347 - type: nauc_precision_at_20_diff1 value: 14.222022201169926 - type: nauc_precision_at_20_max value: 40.10189643835305 - type: nauc_precision_at_20_std value: 12.204443815975527 - type: nauc_precision_at_3_diff1 value: 25.41905395341234 - type: nauc_precision_at_3_max value: 
41.56133905339819 - type: nauc_precision_at_3_std value: 5.575516915590082 - type: nauc_precision_at_5_diff1 value: 20.20081221089351 - type: nauc_precision_at_5_max value: 40.95218555916681 - type: nauc_precision_at_5_std value: 7.2040745500708745 - type: nauc_recall_at_1000_diff1 value: 28.021198234033395 - type: nauc_recall_at_1000_max value: 36.165148684597504 - type: nauc_recall_at_1000_std value: 28.28852356008973 - type: nauc_recall_at_100_diff1 value: 21.882447802741897 - type: nauc_recall_at_100_max value: 26.979684607567222 - type: nauc_recall_at_100_std value: 9.783658817010082 - type: nauc_recall_at_10_diff1 value: 28.493097951178818 - type: nauc_recall_at_10_max value: 29.40937476550134 - type: nauc_recall_at_10_std value: 2.7593763576979353 - type: nauc_recall_at_1_diff1 value: 46.288767289528344 - type: nauc_recall_at_1_max value: 25.67706785013364 - type: nauc_recall_at_1_std value: -6.989769609924645 - type: nauc_recall_at_20_diff1 value: 27.638381299425234 - type: nauc_recall_at_20_max value: 27.942035836106328 - type: nauc_recall_at_20_std value: 3.489835161380808 - type: nauc_recall_at_3_diff1 value: 33.90054781392646 - type: nauc_recall_at_3_max value: 27.778812533030322 - type: nauc_recall_at_3_std value: -0.03054068020022706 - type: nauc_recall_at_5_diff1 value: 30.279060732221346 - type: nauc_recall_at_5_max value: 27.49854749597931 - type: nauc_recall_at_5_std value: 0.5434664581939099 - type: ndcg_at_1 value: 37.809 - type: ndcg_at_10 value: 38.828 - type: ndcg_at_100 value: 45.218 - type: ndcg_at_1000 value: 48.510999999999996 - type: ndcg_at_20 value: 41.11 - type: ndcg_at_3 value: 34.466 - type: ndcg_at_5 value: 35.843 - type: precision_at_1 value: 37.809 - type: precision_at_10 value: 11.157 - type: precision_at_100 value: 1.762 - type: precision_at_1000 value: 0.233 - type: precision_at_20 value: 6.497 - type: precision_at_3 value: 23.044999999999998 - type: precision_at_5 value: 17.284 - type: recall_at_1 value: 19.126 - type: recall_at_10 value: 46.062 - type: recall_at_100 value: 70.22800000000001 - type: recall_at_1000 value: 89.803 - type: recall_at_20 value: 53.217999999999996 - type: recall_at_3 value: 30.847 - type: recall_at_5 value: 37.11 - task: type: Retrieval dataset: name: MTEB HotpotQA-PL (default) type: clarin-knext/hotpotqa-pl config: default split: test revision: a0bd479ac97b4ccb5bd6ce320c415d0bb4beb907 metrics: - type: main_score value: 60.27 - type: map_at_1 value: 35.199000000000005 - type: map_at_10 value: 51.369 - type: map_at_100 value: 52.212 - type: map_at_1000 value: 52.28 - type: map_at_20 value: 51.864 - type: map_at_3 value: 48.446 - type: map_at_5 value: 50.302 - type: mrr_at_1 value: 70.39837947332883 - type: mrr_at_10 value: 76.8346141067273 - type: mrr_at_100 value: 77.10724392048137 - type: mrr_at_1000 value: 77.12037412892865 - type: mrr_at_20 value: 77.01061532947222 - type: mrr_at_3 value: 75.5908170155299 - type: mrr_at_5 value: 76.39095205941899 - type: nauc_map_at_1000_diff1 value: 24.701387884989117 - type: nauc_map_at_1000_max value: 23.25553235642178 - type: nauc_map_at_1000_std value: 7.1803506915661774 - type: nauc_map_at_100_diff1 value: 24.674498622483103 - type: nauc_map_at_100_max value: 23.234948525052175 - type: nauc_map_at_100_std value: 7.168677997105447 - type: nauc_map_at_10_diff1 value: 24.676025039755626 - type: nauc_map_at_10_max value: 23.171971872726964 - type: nauc_map_at_10_std value: 6.485610909852058 - type: nauc_map_at_1_diff1 value: 68.90178464319715 - type: nauc_map_at_1_max value: 
46.05537868917558 - type: nauc_map_at_1_std value: 1.7658552480698708 - type: nauc_map_at_20_diff1 value: 24.69297151842494 - type: nauc_map_at_20_max value: 23.213064691673637 - type: nauc_map_at_20_std value: 6.9357946556849 - type: nauc_map_at_3_diff1 value: 26.279128947950507 - type: nauc_map_at_3_max value: 23.929537354117922 - type: nauc_map_at_3_std value: 4.625061565714759 - type: nauc_map_at_5_diff1 value: 25.04448959482816 - type: nauc_map_at_5_max value: 23.432012857899338 - type: nauc_map_at_5_std value: 5.845744681998008 - type: nauc_mrr_at_1000_diff1 value: 66.7503918108276 - type: nauc_mrr_at_1000_max value: 48.42897342336844 - type: nauc_mrr_at_1000_std value: 5.3097517971144415 - type: nauc_mrr_at_100_diff1 value: 66.74645215862695 - type: nauc_mrr_at_100_max value: 48.4368663009989 - type: nauc_mrr_at_100_std value: 5.322297898555188 - type: nauc_mrr_at_10_diff1 value: 66.69310166180729 - type: nauc_mrr_at_10_max value: 48.475437698330225 - type: nauc_mrr_at_10_std value: 5.258183461631702 - type: nauc_mrr_at_1_diff1 value: 68.90178464319715 - type: nauc_mrr_at_1_max value: 46.05537868917558 - type: nauc_mrr_at_1_std value: 1.7658552480698708 - type: nauc_mrr_at_20_diff1 value: 66.72000262431975 - type: nauc_mrr_at_20_max value: 48.45593642981319 - type: nauc_mrr_at_20_std value: 5.353665929072101 - type: nauc_mrr_at_3_diff1 value: 66.84936676396276 - type: nauc_mrr_at_3_max value: 48.466611276778295 - type: nauc_mrr_at_3_std value: 4.485810398557475 - type: nauc_mrr_at_5_diff1 value: 66.62362565394174 - type: nauc_mrr_at_5_max value: 48.456431835482014 - type: nauc_mrr_at_5_std value: 5.08482458391903 - type: nauc_ndcg_at_1000_diff1 value: 29.984825173719443 - type: nauc_ndcg_at_1000_max value: 27.289179238639893 - type: nauc_ndcg_at_1000_std value: 10.661480455527526 - type: nauc_ndcg_at_100_diff1 value: 29.322074257047877 - type: nauc_ndcg_at_100_max value: 26.850650276220605 - type: nauc_ndcg_at_100_std value: 10.599247982501902 - type: nauc_ndcg_at_10_diff1 value: 29.659909113886094 - type: nauc_ndcg_at_10_max value: 26.836139599331005 - type: nauc_ndcg_at_10_std value: 8.12844399452719 - type: nauc_ndcg_at_1_diff1 value: 68.90178464319715 - type: nauc_ndcg_at_1_max value: 46.05537868917558 - type: nauc_ndcg_at_1_std value: 1.7658552480698708 - type: nauc_ndcg_at_20_diff1 value: 29.510802214854294 - type: nauc_ndcg_at_20_max value: 26.775562637730722 - type: nauc_ndcg_at_20_std value: 9.341342661702363 - type: nauc_ndcg_at_3_diff1 value: 32.741885846292966 - type: nauc_ndcg_at_3_max value: 28.44225108761343 - type: nauc_ndcg_at_3_std value: 5.204440768465042 - type: nauc_ndcg_at_5_diff1 value: 30.57856348635919 - type: nauc_ndcg_at_5_max value: 27.475007474301698 - type: nauc_ndcg_at_5_std value: 6.961546044312487 - type: nauc_precision_at_1000_diff1 value: 0.002113156309413332 - type: nauc_precision_at_1000_max value: 11.198242419541286 - type: nauc_precision_at_1000_std value: 28.69676419166541 - type: nauc_precision_at_100_diff1 value: 3.6049575557782627 - type: nauc_precision_at_100_max value: 12.499173524574791 - type: nauc_precision_at_100_std value: 23.3755281004721 - type: nauc_precision_at_10_diff1 value: 10.922574784853193 - type: nauc_precision_at_10_max value: 16.23221529562036 - type: nauc_precision_at_10_std value: 12.45014808813857 - type: nauc_precision_at_1_diff1 value: 68.90178464319715 - type: nauc_precision_at_1_max value: 46.05537868917558 - type: nauc_precision_at_1_std value: 1.7658552480698708 - type: nauc_precision_at_20_diff1 value: 
8.840710781302827 - type: nauc_precision_at_20_max value: 14.804644554205524 - type: nauc_precision_at_20_std value: 16.245009770815237 - type: nauc_precision_at_3_diff1 value: 19.447291487137573 - type: nauc_precision_at_3_max value: 21.47123471597057 - type: nauc_precision_at_3_std value: 6.441862800128802 - type: nauc_precision_at_5_diff1 value: 14.078545719721108 - type: nauc_precision_at_5_max value: 18.468288046016387 - type: nauc_precision_at_5_std value: 9.58650641691393 - type: nauc_recall_at_1000_diff1 value: 0.0021131563095336584 - type: nauc_recall_at_1000_max value: 11.198242419541558 - type: nauc_recall_at_1000_std value: 28.6967641916655 - type: nauc_recall_at_100_diff1 value: 3.6049575557781393 - type: nauc_recall_at_100_max value: 12.499173524574765 - type: nauc_recall_at_100_std value: 23.375528100472074 - type: nauc_recall_at_10_diff1 value: 10.922574784853168 - type: nauc_recall_at_10_max value: 16.2322152956203 - type: nauc_recall_at_10_std value: 12.450148088138535 - type: nauc_recall_at_1_diff1 value: 68.90178464319715 - type: nauc_recall_at_1_max value: 46.05537868917558 - type: nauc_recall_at_1_std value: 1.7658552480698708 - type: nauc_recall_at_20_diff1 value: 8.840710781302905 - type: nauc_recall_at_20_max value: 14.804644554205515 - type: nauc_recall_at_20_std value: 16.245009770815273 - type: nauc_recall_at_3_diff1 value: 19.447291487137498 - type: nauc_recall_at_3_max value: 21.47123471597054 - type: nauc_recall_at_3_std value: 6.441862800128763 - type: nauc_recall_at_5_diff1 value: 14.07854571972115 - type: nauc_recall_at_5_max value: 18.468288046016337 - type: nauc_recall_at_5_std value: 9.586506416913904 - type: ndcg_at_1 value: 70.39800000000001 - type: ndcg_at_10 value: 60.27 - type: ndcg_at_100 value: 63.400999999999996 - type: ndcg_at_1000 value: 64.847 - type: ndcg_at_20 value: 61.571 - type: ndcg_at_3 value: 55.875 - type: ndcg_at_5 value: 58.36599999999999 - type: precision_at_1 value: 70.39800000000001 - type: precision_at_10 value: 12.46 - type: precision_at_100 value: 1.493 - type: precision_at_1000 value: 0.169 - type: precision_at_20 value: 6.65 - type: precision_at_3 value: 35.062 - type: precision_at_5 value: 23.009 - type: recall_at_1 value: 35.199000000000005 - type: recall_at_10 value: 62.302 - type: recall_at_100 value: 74.666 - type: recall_at_1000 value: 84.355 - type: recall_at_20 value: 66.496 - type: recall_at_3 value: 52.593 - type: recall_at_5 value: 57.522 - task: type: Retrieval dataset: name: MTEB MSMARCO-PL (default) type: clarin-knext/msmarco-pl config: default split: test revision: 8634c07806d5cce3a6138e260e59b81760a0a640 metrics: - type: main_score value: 64.886 - type: map_at_1 value: 1.644 - type: map_at_10 value: 12.24 - type: map_at_100 value: 28.248 - type: map_at_1000 value: 33.506 - type: map_at_20 value: 17.497 - type: map_at_3 value: 4.9399999999999995 - type: map_at_5 value: 8.272 - type: mrr_at_1 value: 83.72093023255815 - type: mrr_at_10 value: 91.08527131782945 - type: mrr_at_100 value: 91.08527131782945 - type: mrr_at_1000 value: 91.08527131782945 - type: mrr_at_20 value: 91.08527131782945 - type: mrr_at_3 value: 91.08527131782945 - type: mrr_at_5 value: 91.08527131782945 - type: nauc_map_at_1000_diff1 value: -36.428271627303424 - type: nauc_map_at_1000_max value: 44.87615127218638 - type: nauc_map_at_1000_std value: 67.92696808824724 - type: nauc_map_at_100_diff1 value: -28.11674206786188 - type: nauc_map_at_100_max value: 36.422779766334955 - type: nauc_map_at_100_std value: 49.99876313755116 - type: 
nauc_map_at_10_diff1 value: -5.838593619806058 - type: nauc_map_at_10_max value: 11.026519190509742 - type: nauc_map_at_10_std value: 2.5268752263522045 - type: nauc_map_at_1_diff1 value: 17.897907271073016 - type: nauc_map_at_1_max value: 12.229062762540844 - type: nauc_map_at_1_std value: -4.088830895573149 - type: nauc_map_at_20_diff1 value: -13.871097716255626 - type: nauc_map_at_20_max value: 19.291271635609533 - type: nauc_map_at_20_std value: 16.745335606507826 - type: nauc_map_at_3_diff1 value: 4.425238457033843 - type: nauc_map_at_3_max value: 4.611864744680824 - type: nauc_map_at_3_std value: -8.986916608582863 - type: nauc_map_at_5_diff1 value: -6.254849256920095 - type: nauc_map_at_5_max value: 2.729437079919823 - type: nauc_map_at_5_std value: -7.235906279913092 - type: nauc_mrr_at_1000_diff1 value: 52.18669104947672 - type: nauc_mrr_at_1000_max value: 68.26259125411818 - type: nauc_mrr_at_1000_std value: 56.345086428353575 - type: nauc_mrr_at_100_diff1 value: 52.18669104947672 - type: nauc_mrr_at_100_max value: 68.26259125411818 - type: nauc_mrr_at_100_std value: 56.345086428353575 - type: nauc_mrr_at_10_diff1 value: 52.18669104947672 - type: nauc_mrr_at_10_max value: 68.26259125411818 - type: nauc_mrr_at_10_std value: 56.345086428353575 - type: nauc_mrr_at_1_diff1 value: 56.55126663944154 - type: nauc_mrr_at_1_max value: 66.37014285522565 - type: nauc_mrr_at_1_std value: 53.2508271389779 - type: nauc_mrr_at_20_diff1 value: 52.18669104947672 - type: nauc_mrr_at_20_max value: 68.26259125411818 - type: nauc_mrr_at_20_std value: 56.345086428353575 - type: nauc_mrr_at_3_diff1 value: 52.18669104947672 - type: nauc_mrr_at_3_max value: 68.26259125411818 - type: nauc_mrr_at_3_std value: 56.345086428353575 - type: nauc_mrr_at_5_diff1 value: 52.18669104947672 - type: nauc_mrr_at_5_max value: 68.26259125411818 - type: nauc_mrr_at_5_std value: 56.345086428353575 - type: nauc_ndcg_at_1000_diff1 value: -19.06422926483731 - type: nauc_ndcg_at_1000_max value: 56.30853514590265 - type: nauc_ndcg_at_1000_std value: 70.30810947505557 - type: nauc_ndcg_at_100_diff1 value: -25.72587586459692 - type: nauc_ndcg_at_100_max value: 51.433781241604194 - type: nauc_ndcg_at_100_std value: 68.37678512652792 - type: nauc_ndcg_at_10_diff1 value: -23.21198108212602 - type: nauc_ndcg_at_10_max value: 43.5450720846516 - type: nauc_ndcg_at_10_std value: 48.78307907005605 - type: nauc_ndcg_at_1_diff1 value: 44.00179301267447 - type: nauc_ndcg_at_1_max value: 48.202370455680395 - type: nauc_ndcg_at_1_std value: 25.69655992704088 - type: nauc_ndcg_at_20_diff1 value: -33.88168753446507 - type: nauc_ndcg_at_20_max value: 45.16199742613164 - type: nauc_ndcg_at_20_std value: 61.87098383164902 - type: nauc_ndcg_at_3_diff1 value: 11.19174449544048 - type: nauc_ndcg_at_3_max value: 44.34069860560555 - type: nauc_ndcg_at_3_std value: 27.451258369798115 - type: nauc_ndcg_at_5_diff1 value: -7.186520929432436 - type: nauc_ndcg_at_5_max value: 43.41869981139378 - type: nauc_ndcg_at_5_std value: 34.89898115995178 - type: nauc_precision_at_1000_diff1 value: -34.43998154563451 - type: nauc_precision_at_1000_max value: 29.172655907480372 - type: nauc_precision_at_1000_std value: 65.15824469614837 - type: nauc_precision_at_100_diff1 value: -37.82409643259692 - type: nauc_precision_at_100_max value: 38.24986991317909 - type: nauc_precision_at_100_std value: 72.74768183105327 - type: nauc_precision_at_10_diff1 value: -32.21556182780535 - type: nauc_precision_at_10_max value: 34.27170432382651 - type: nauc_precision_at_10_std value: 
58.358255004394664 - type: nauc_precision_at_1_diff1 value: 56.55126663944154 - type: nauc_precision_at_1_max value: 66.37014285522565 - type: nauc_precision_at_1_std value: 53.2508271389779 - type: nauc_precision_at_20_diff1 value: -40.18751579026395 - type: nauc_precision_at_20_max value: 33.960783153758896 - type: nauc_precision_at_20_std value: 65.42918390184195 - type: nauc_precision_at_3_diff1 value: -7.073870209006578 - type: nauc_precision_at_3_max value: 50.81535269862325 - type: nauc_precision_at_3_std value: 59.248681565955685 - type: nauc_precision_at_5_diff1 value: -31.136580596983876 - type: nauc_precision_at_5_max value: 45.88147792380426 - type: nauc_precision_at_5_std value: 67.46814230928243 - type: nauc_recall_at_1000_diff1 value: -23.15699999594577 - type: nauc_recall_at_1000_max value: 39.77277799761876 - type: nauc_recall_at_1000_std value: 60.326168012901114 - type: nauc_recall_at_100_diff1 value: -21.636664823598498 - type: nauc_recall_at_100_max value: 31.104969346131583 - type: nauc_recall_at_100_std value: 38.811686891592096 - type: nauc_recall_at_10_diff1 value: -10.542765625053569 - type: nauc_recall_at_10_max value: 2.043876058107446 - type: nauc_recall_at_10_std value: -5.578449908984766 - type: nauc_recall_at_1_diff1 value: 17.897907271073016 - type: nauc_recall_at_1_max value: 12.229062762540844 - type: nauc_recall_at_1_std value: -4.088830895573149 - type: nauc_recall_at_20_diff1 value: -15.132909355710103 - type: nauc_recall_at_20_max value: 12.659765287241065 - type: nauc_recall_at_20_std value: 8.277887800815819 - type: nauc_recall_at_3_diff1 value: -3.1975017812715016 - type: nauc_recall_at_3_max value: -3.5539857085038538 - type: nauc_recall_at_3_std value: -14.712102851318118 - type: nauc_recall_at_5_diff1 value: -14.040507717380743 - type: nauc_recall_at_5_max value: -6.126912150131701 - type: nauc_recall_at_5_std value: -13.821624015640355 - type: ndcg_at_1 value: 71.318 - type: ndcg_at_10 value: 64.886 - type: ndcg_at_100 value: 53.187 - type: ndcg_at_1000 value: 59.897999999999996 - type: ndcg_at_20 value: 58.96 - type: ndcg_at_3 value: 69.736 - type: ndcg_at_5 value: 70.14099999999999 - type: precision_at_1 value: 83.721 - type: precision_at_10 value: 71.163 - type: precision_at_100 value: 29.465000000000003 - type: precision_at_1000 value: 5.665 - type: precision_at_20 value: 57.791000000000004 - type: precision_at_3 value: 82.171 - type: precision_at_5 value: 81.86 - type: recall_at_1 value: 1.644 - type: recall_at_10 value: 14.238000000000001 - type: recall_at_100 value: 39.831 - type: recall_at_1000 value: 64.057 - type: recall_at_20 value: 21.021 - type: recall_at_3 value: 5.53 - type: recall_at_5 value: 9.623 - task: type: Retrieval dataset: name: MTEB NFCorpus-PL (default) type: clarin-knext/nfcorpus-pl config: default split: test revision: 9a6f9567fda928260afed2de480d79c98bf0bec0 metrics: - type: main_score value: 31.391000000000002 - type: map_at_1 value: 4.163 - type: map_at_10 value: 10.744 - type: map_at_100 value: 14.038999999999998 - type: map_at_1000 value: 15.434999999999999 - type: map_at_20 value: 12.16 - type: map_at_3 value: 7.614999999999999 - type: map_at_5 value: 9.027000000000001 - type: mrr_at_1 value: 39.0092879256966 - type: mrr_at_10 value: 48.69809327239668 - type: mrr_at_100 value: 49.20788148442068 - type: mrr_at_1000 value: 49.25509336494706 - type: mrr_at_20 value: 48.99606551850896 - type: mrr_at_3 value: 46.284829721362236 - type: mrr_at_5 value: 47.77089783281735 - type: nauc_map_at_1000_diff1 value: 
22.75421477116417 - type: nauc_map_at_1000_max value: 49.242283787799046 - type: nauc_map_at_1000_std value: 29.056888272331832 - type: nauc_map_at_100_diff1 value: 23.585977398585594 - type: nauc_map_at_100_max value: 48.25845199409498 - type: nauc_map_at_100_std value: 24.944264511223693 - type: nauc_map_at_10_diff1 value: 27.386613094780255 - type: nauc_map_at_10_max value: 41.52415346691586 - type: nauc_map_at_10_std value: 12.93872448563755 - type: nauc_map_at_1_diff1 value: 46.78688143865053 - type: nauc_map_at_1_max value: 37.20408843995871 - type: nauc_map_at_1_std value: 4.383444959401098 - type: nauc_map_at_20_diff1 value: 25.590969047740288 - type: nauc_map_at_20_max value: 44.57109307999418 - type: nauc_map_at_20_std value: 16.45855141821407 - type: nauc_map_at_3_diff1 value: 36.30017108362863 - type: nauc_map_at_3_max value: 34.66149613991648 - type: nauc_map_at_3_std value: 5.67985905078467 - type: nauc_map_at_5_diff1 value: 31.157644795417223 - type: nauc_map_at_5_max value: 37.274738661636825 - type: nauc_map_at_5_std value: 8.70088872394168 - type: nauc_mrr_at_1000_diff1 value: 25.638564218157384 - type: nauc_mrr_at_1000_max value: 57.77788270285353 - type: nauc_mrr_at_1000_std value: 43.507586592911274 - type: nauc_mrr_at_100_diff1 value: 25.662002580561584 - type: nauc_mrr_at_100_max value: 57.80578394278584 - type: nauc_mrr_at_100_std value: 43.543905743986635 - type: nauc_mrr_at_10_diff1 value: 25.426034796339835 - type: nauc_mrr_at_10_max value: 57.68443186258669 - type: nauc_mrr_at_10_std value: 43.438009108331215 - type: nauc_mrr_at_1_diff1 value: 26.073028156311075 - type: nauc_mrr_at_1_max value: 52.11817916720053 - type: nauc_mrr_at_1_std value: 37.41073893153695 - type: nauc_mrr_at_20_diff1 value: 25.548645553336147 - type: nauc_mrr_at_20_max value: 57.78552760401915 - type: nauc_mrr_at_20_std value: 43.521687428822325 - type: nauc_mrr_at_3_diff1 value: 25.72662577397805 - type: nauc_mrr_at_3_max value: 56.891263536265605 - type: nauc_mrr_at_3_std value: 41.384872305390104 - type: nauc_mrr_at_5_diff1 value: 25.552211551655386 - type: nauc_mrr_at_5_max value: 57.976813828353926 - type: nauc_mrr_at_5_std value: 43.504564461855544 - type: nauc_ndcg_at_1000_diff1 value: 23.456158044182757 - type: nauc_ndcg_at_1000_max value: 60.05411773552709 - type: nauc_ndcg_at_1000_std value: 47.857510017262584 - type: nauc_ndcg_at_100_diff1 value: 19.711635700390772 - type: nauc_ndcg_at_100_max value: 56.178746740470665 - type: nauc_ndcg_at_100_std value: 42.36829180286942 - type: nauc_ndcg_at_10_diff1 value: 18.364428967788413 - type: nauc_ndcg_at_10_max value: 54.38372506578223 - type: nauc_ndcg_at_10_std value: 41.75765411340369 - type: nauc_ndcg_at_1_diff1 value: 26.571093272640773 - type: nauc_ndcg_at_1_max value: 51.061788341958284 - type: nauc_ndcg_at_1_std value: 36.514987974075986 - type: nauc_ndcg_at_20_diff1 value: 18.345487193027697 - type: nauc_ndcg_at_20_max value: 54.62621882656994 - type: nauc_ndcg_at_20_std value: 41.42835554714241 - type: nauc_ndcg_at_3_diff1 value: 23.260105658139025 - type: nauc_ndcg_at_3_max value: 52.07747385334546 - type: nauc_ndcg_at_3_std value: 36.91985577837284 - type: nauc_ndcg_at_5_diff1 value: 20.40428109665566 - type: nauc_ndcg_at_5_max value: 53.52015347884604 - type: nauc_ndcg_at_5_std value: 39.46008849580017 - type: nauc_precision_at_1000_diff1 value: -7.3487344916380035 - type: nauc_precision_at_1000_max value: 16.58045221394852 - type: nauc_precision_at_1000_std value: 38.94030932397075 - type: nauc_precision_at_100_diff1 
value: -5.257743986683922 - type: nauc_precision_at_100_max value: 34.43071687475306 - type: nauc_precision_at_100_std value: 53.499519170670474 - type: nauc_precision_at_10_diff1 value: 2.385136433119139 - type: nauc_precision_at_10_max value: 47.210743878631064 - type: nauc_precision_at_10_std value: 47.22767704186548 - type: nauc_precision_at_1_diff1 value: 26.073028156311075 - type: nauc_precision_at_1_max value: 52.11817916720053 - type: nauc_precision_at_1_std value: 37.41073893153695 - type: nauc_precision_at_20_diff1 value: -0.3531531127238474 - type: nauc_precision_at_20_max value: 44.78044604856974 - type: nauc_precision_at_20_std value: 49.532804150743615 - type: nauc_precision_at_3_diff1 value: 15.350050569991447 - type: nauc_precision_at_3_max value: 51.01572315596549 - type: nauc_precision_at_3_std value: 38.801125728413155 - type: nauc_precision_at_5_diff1 value: 9.109003666144694 - type: nauc_precision_at_5_max value: 50.935269774898494 - type: nauc_precision_at_5_std value: 43.323548180559676 - type: nauc_recall_at_1000_diff1 value: 16.64743647648886 - type: nauc_recall_at_1000_max value: 38.46012283772285 - type: nauc_recall_at_1000_std value: 36.02016164796441 - type: nauc_recall_at_100_diff1 value: 14.005834785186744 - type: nauc_recall_at_100_max value: 37.70026105513647 - type: nauc_recall_at_100_std value: 27.085222642129697 - type: nauc_recall_at_10_diff1 value: 21.204106627422632 - type: nauc_recall_at_10_max value: 36.737624881893424 - type: nauc_recall_at_10_std value: 13.755054514272702 - type: nauc_recall_at_1_diff1 value: 46.78688143865053 - type: nauc_recall_at_1_max value: 37.20408843995871 - type: nauc_recall_at_1_std value: 4.383444959401098 - type: nauc_recall_at_20_diff1 value: 19.740977611421933 - type: nauc_recall_at_20_max value: 39.21908969539783 - type: nauc_recall_at_20_std value: 16.560269670318494 - type: nauc_recall_at_3_diff1 value: 32.189359545367815 - type: nauc_recall_at_3_max value: 31.693634445562758 - type: nauc_recall_at_3_std value: 6.246326281543587 - type: nauc_recall_at_5_diff1 value: 25.51586860499901 - type: nauc_recall_at_5_max value: 33.15934725342885 - type: nauc_recall_at_5_std value: 9.677778511696705 - type: ndcg_at_1 value: 37.307 - type: ndcg_at_10 value: 31.391000000000002 - type: ndcg_at_100 value: 28.877999999999997 - type: ndcg_at_1000 value: 37.16 - type: ndcg_at_20 value: 29.314 - type: ndcg_at_3 value: 35.405 - type: ndcg_at_5 value: 33.922999999999995 - type: precision_at_1 value: 39.009 - type: precision_at_10 value: 24.52 - type: precision_at_100 value: 7.703 - type: precision_at_1000 value: 2.04 - type: precision_at_20 value: 18.08 - type: precision_at_3 value: 34.469 - type: precision_at_5 value: 30.712 - type: recall_at_1 value: 4.163 - type: recall_at_10 value: 15.015999999999998 - type: recall_at_100 value: 30.606 - type: recall_at_1000 value: 59.606 - type: recall_at_20 value: 19.09 - type: recall_at_3 value: 9.139 - type: recall_at_5 value: 11.477 - task: type: Retrieval dataset: name: MTEB NQ-PL (default) type: clarin-knext/nq-pl config: default split: test revision: f171245712cf85dd4700b06bef18001578d0ca8d metrics: - type: main_score value: 54.017 - type: map_at_1 value: 34.193 - type: map_at_10 value: 47.497 - type: map_at_100 value: 48.441 - type: map_at_1000 value: 48.481 - type: map_at_20 value: 48.093 - type: map_at_3 value: 44.017 - type: map_at_5 value: 46.111000000000004 - type: mrr_at_1 value: 37.949015063731174 - type: mrr_at_10 value: 49.915772315105954 - type: mrr_at_100 value: 
50.62841255829997 - type: mrr_at_1000 value: 50.656773027666745 - type: mrr_at_20 value: 50.37785276657083 - type: mrr_at_3 value: 46.98725376593267 - type: mrr_at_5 value: 48.763035921205066 - type: nauc_map_at_1000_diff1 value: 39.5632191792873 - type: nauc_map_at_1000_max value: 37.4728247053629 - type: nauc_map_at_1000_std value: 5.742498414663762 - type: nauc_map_at_100_diff1 value: 39.555570352061906 - type: nauc_map_at_100_max value: 37.497880976847334 - type: nauc_map_at_100_std value: 5.7798021019465375 - type: nauc_map_at_10_diff1 value: 39.5423723444454 - type: nauc_map_at_10_max value: 37.41661971723365 - type: nauc_map_at_10_std value: 5.2378002164144695 - type: nauc_map_at_1_diff1 value: 41.52697034146981 - type: nauc_map_at_1_max value: 28.558995576942863 - type: nauc_map_at_1_std value: 0.13094542859192052 - type: nauc_map_at_20_diff1 value: 39.55484628943701 - type: nauc_map_at_20_max value: 37.5247794933719 - type: nauc_map_at_20_std value: 5.702881342279231 - type: nauc_map_at_3_diff1 value: 39.949323925425325 - type: nauc_map_at_3_max value: 35.770298168901924 - type: nauc_map_at_3_std value: 2.9127112432479874 - type: nauc_map_at_5_diff1 value: 39.768310617004545 - type: nauc_map_at_5_max value: 37.1549191664796 - type: nauc_map_at_5_std value: 4.4681285748269515 - type: nauc_mrr_at_1000_diff1 value: 39.14001746706457 - type: nauc_mrr_at_1000_max value: 37.477376518267775 - type: nauc_mrr_at_1000_std value: 6.8088891531621565 - type: nauc_mrr_at_100_diff1 value: 39.13054707413684 - type: nauc_mrr_at_100_max value: 37.498126443766274 - type: nauc_mrr_at_100_std value: 6.839411380129971 - type: nauc_mrr_at_10_diff1 value: 39.09764730048156 - type: nauc_mrr_at_10_max value: 37.58593798217306 - type: nauc_mrr_at_10_std value: 6.713795164982413 - type: nauc_mrr_at_1_diff1 value: 41.581599918664075 - type: nauc_mrr_at_1_max value: 31.500589231378722 - type: nauc_mrr_at_1_std value: 2.059116370339438 - type: nauc_mrr_at_20_diff1 value: 39.09011023988447 - type: nauc_mrr_at_20_max value: 37.55856008791344 - type: nauc_mrr_at_20_std value: 6.847165397615844 - type: nauc_mrr_at_3_diff1 value: 39.382542043738 - type: nauc_mrr_at_3_max value: 36.49265363659468 - type: nauc_mrr_at_3_std value: 4.759157976438336 - type: nauc_mrr_at_5_diff1 value: 39.304826333759976 - type: nauc_mrr_at_5_max value: 37.46326016736024 - type: nauc_mrr_at_5_std value: 6.122608305766621 - type: nauc_ndcg_at_1000_diff1 value: 38.568500038453266 - type: nauc_ndcg_at_1000_max value: 39.799710882413166 - type: nauc_ndcg_at_1000_std value: 9.357010223096639 - type: nauc_ndcg_at_100_diff1 value: 38.38026091343228 - type: nauc_ndcg_at_100_max value: 40.48398173542486 - type: nauc_ndcg_at_100_std value: 10.373054013302214 - type: nauc_ndcg_at_10_diff1 value: 38.27340980909964 - type: nauc_ndcg_at_10_max value: 40.35241649744093 - type: nauc_ndcg_at_10_std value: 8.579139930345168 - type: nauc_ndcg_at_1_diff1 value: 41.581599918664075 - type: nauc_ndcg_at_1_max value: 31.500589231378722 - type: nauc_ndcg_at_1_std value: 2.059116370339438 - type: nauc_ndcg_at_20_diff1 value: 38.26453028884807 - type: nauc_ndcg_at_20_max value: 40.70517858426641 - type: nauc_ndcg_at_20_std value: 9.987693876137905 - type: nauc_ndcg_at_3_diff1 value: 39.2078971733273 - type: nauc_ndcg_at_3_max value: 37.48672195565316 - type: nauc_ndcg_at_3_std value: 4.051464994659221 - type: nauc_ndcg_at_5_diff1 value: 38.883693595665285 - type: nauc_ndcg_at_5_max value: 39.763115634437135 - type: nauc_ndcg_at_5_std value: 6.738980451582073 - 
type: nauc_precision_at_1000_diff1 value: -7.223215910619012 - type: nauc_precision_at_1000_max value: 13.075844604892161 - type: nauc_precision_at_1000_std value: 19.864336920890107 - type: nauc_precision_at_100_diff1 value: 1.3305994810812418 - type: nauc_precision_at_100_max value: 25.9219108557104 - type: nauc_precision_at_100_std value: 27.5076605928207 - type: nauc_precision_at_10_diff1 value: 18.441551484970326 - type: nauc_precision_at_10_max value: 39.85995330437054 - type: nauc_precision_at_10_std value: 20.561269077428914 - type: nauc_precision_at_1_diff1 value: 41.581599918664075 - type: nauc_precision_at_1_max value: 31.500589231378722 - type: nauc_precision_at_1_std value: 2.059116370339438 - type: nauc_precision_at_20_diff1 value: 12.579593891480531 - type: nauc_precision_at_20_max value: 36.620221830588775 - type: nauc_precision_at_20_std value: 26.40364876775059 - type: nauc_precision_at_3_diff1 value: 30.158859294487073 - type: nauc_precision_at_3_max value: 41.168215766389174 - type: nauc_precision_at_3_std value: 9.44345004450809 - type: nauc_precision_at_5_diff1 value: 25.438624678672785 - type: nauc_precision_at_5_max value: 42.72802023518524 - type: nauc_precision_at_5_std value: 15.357657388511099 - type: nauc_recall_at_1000_diff1 value: 24.987564782718003 - type: nauc_recall_at_1000_max value: 70.508416373353 - type: nauc_recall_at_1000_std value: 69.75092280398808 - type: nauc_recall_at_100_diff1 value: 29.504202856421397 - type: nauc_recall_at_100_max value: 63.41356585545318 - type: nauc_recall_at_100_std value: 50.09250954437847 - type: nauc_recall_at_10_diff1 value: 32.355776022971774 - type: nauc_recall_at_10_max value: 49.47121901667283 - type: nauc_recall_at_10_std value: 19.418439406631244 - type: nauc_recall_at_1_diff1 value: 41.52697034146981 - type: nauc_recall_at_1_max value: 28.558995576942863 - type: nauc_recall_at_1_std value: 0.13094542859192052 - type: nauc_recall_at_20_diff1 value: 31.57334731023589 - type: nauc_recall_at_20_max value: 54.06567225197383 - type: nauc_recall_at_20_std value: 29.222029720570468 - type: nauc_recall_at_3_diff1 value: 36.45033533275773 - type: nauc_recall_at_3_max value: 40.39529713780803 - type: nauc_recall_at_3_std value: 5.21893897772794 - type: nauc_recall_at_5_diff1 value: 35.18471678478859 - type: nauc_recall_at_5_max value: 46.20100816867823 - type: nauc_recall_at_5_std value: 11.94481894633221 - type: ndcg_at_1 value: 37.949 - type: ndcg_at_10 value: 54.017 - type: ndcg_at_100 value: 58.126 - type: ndcg_at_1000 value: 59.073 - type: ndcg_at_20 value: 55.928 - type: ndcg_at_3 value: 47.494 - type: ndcg_at_5 value: 50.975 - type: precision_at_1 value: 37.949 - type: precision_at_10 value: 8.450000000000001 - type: precision_at_100 value: 1.083 - type: precision_at_1000 value: 0.117 - type: precision_at_20 value: 4.689 - type: precision_at_3 value: 21.051000000000002 - type: precision_at_5 value: 14.664 - type: recall_at_1 value: 34.193 - type: recall_at_10 value: 71.357 - type: recall_at_100 value: 89.434 - type: recall_at_1000 value: 96.536 - type: recall_at_20 value: 78.363 - type: recall_at_3 value: 54.551 - type: recall_at_5 value: 62.543000000000006 - task: type: Retrieval dataset: name: MTEB Quora-PL (default) type: clarin-knext/quora-pl config: default split: test revision: 0be27e93455051e531182b85e85e425aba12e9d4 metrics: - type: main_score value: 84.114 - type: map_at_1 value: 65.848 - type: map_at_10 value: 79.85900000000001 - type: map_at_100 value: 80.582 - type: map_at_1000 value: 80.60300000000001 - 
type: map_at_20 value: 80.321 - type: map_at_3 value: 76.741 - type: map_at_5 value: 78.72200000000001 - type: mrr_at_1 value: 75.97 - type: mrr_at_10 value: 83.04630158730119 - type: mrr_at_100 value: 83.22785731032968 - type: mrr_at_1000 value: 83.23123717623899 - type: mrr_at_20 value: 83.17412021320565 - type: mrr_at_3 value: 81.83333333333287 - type: mrr_at_5 value: 82.61933333333275 - type: nauc_map_at_1000_diff1 value: 73.26316553371083 - type: nauc_map_at_1000_max value: 27.92567859085245 - type: nauc_map_at_1000_std value: -47.477909533360446 - type: nauc_map_at_100_diff1 value: 73.2690602807223 - type: nauc_map_at_100_max value: 27.915868327849996 - type: nauc_map_at_100_std value: -47.525777766107595 - type: nauc_map_at_10_diff1 value: 73.45464428464894 - type: nauc_map_at_10_max value: 27.451611487246296 - type: nauc_map_at_10_std value: -49.35818715843809 - type: nauc_map_at_1_diff1 value: 77.29690208952982 - type: nauc_map_at_1_max value: 19.839875762282293 - type: nauc_map_at_1_std value: -45.355684654708284 - type: nauc_map_at_20_diff1 value: 73.35102731979796 - type: nauc_map_at_20_max value: 27.741506490134583 - type: nauc_map_at_20_std value: -48.22006207310331 - type: nauc_map_at_3_diff1 value: 73.94878241064137 - type: nauc_map_at_3_max value: 24.761321386766728 - type: nauc_map_at_3_std value: -51.20638883618126 - type: nauc_map_at_5_diff1 value: 73.66143558047698 - type: nauc_map_at_5_max value: 26.53483405013543 - type: nauc_map_at_5_std value: -50.697541279640056 - type: nauc_mrr_at_1000_diff1 value: 73.84632320009759 - type: nauc_mrr_at_1000_max value: 30.50182733610048 - type: nauc_mrr_at_1000_std value: -44.3021647995251 - type: nauc_mrr_at_100_diff1 value: 73.84480792662302 - type: nauc_mrr_at_100_max value: 30.50749424571614 - type: nauc_mrr_at_100_std value: -44.29615086388113 - type: nauc_mrr_at_10_diff1 value: 73.79442772949346 - type: nauc_mrr_at_10_max value: 30.55724252219984 - type: nauc_mrr_at_10_std value: -44.50997069462057 - type: nauc_mrr_at_1_diff1 value: 75.23369827945945 - type: nauc_mrr_at_1_max value: 29.20073967447664 - type: nauc_mrr_at_1_std value: -43.1920147658285 - type: nauc_mrr_at_20_diff1 value: 73.82731678072307 - type: nauc_mrr_at_20_max value: 30.566328605497667 - type: nauc_mrr_at_20_std value: -44.24683607643705 - type: nauc_mrr_at_3_diff1 value: 73.61997576749954 - type: nauc_mrr_at_3_max value: 30.150393853381917 - type: nauc_mrr_at_3_std value: -44.96847297506626 - type: nauc_mrr_at_5_diff1 value: 73.69084310616132 - type: nauc_mrr_at_5_max value: 30.578033703441125 - type: nauc_mrr_at_5_std value: -44.74920746066566 - type: nauc_ndcg_at_1000_diff1 value: 72.89349862557452 - type: nauc_ndcg_at_1000_max value: 29.824725190462086 - type: nauc_ndcg_at_1000_std value: -44.96284395063211 - type: nauc_ndcg_at_100_diff1 value: 72.85212753715273 - type: nauc_ndcg_at_100_max value: 29.933114207845605 - type: nauc_ndcg_at_100_std value: -44.944225570663754 - type: nauc_ndcg_at_10_diff1 value: 72.80576740454528 - type: nauc_ndcg_at_10_max value: 29.16829118320828 - type: nauc_ndcg_at_10_std value: -48.149473740079614 - type: nauc_ndcg_at_1_diff1 value: 75.00032534968587 - type: nauc_ndcg_at_1_max value: 29.61849062038547 - type: nauc_ndcg_at_1_std value: -42.560207043864054 - type: nauc_ndcg_at_20_diff1 value: 72.88440406302502 - type: nauc_ndcg_at_20_max value: 29.65496676092656 - type: nauc_ndcg_at_20_std value: -46.21238462167732 - type: nauc_ndcg_at_3_diff1 value: 72.37916962766987 - type: nauc_ndcg_at_3_max value: 27.125094834547586 
- type: nauc_ndcg_at_3_std value: -48.62942991399391 - type: nauc_ndcg_at_5_diff1 value: 72.57017330527658 - type: nauc_ndcg_at_5_max value: 28.470485561757254 - type: nauc_ndcg_at_5_std value: -49.07593345591059 - type: nauc_precision_at_1000_diff1 value: -41.67915575853946 - type: nauc_precision_at_1000_max value: 1.2012264478568844 - type: nauc_precision_at_1000_std value: 44.723834559400466 - type: nauc_precision_at_100_diff1 value: -40.45196679236971 - type: nauc_precision_at_100_max value: 2.3525450401714894 - type: nauc_precision_at_100_std value: 43.7092529413952 - type: nauc_precision_at_10_diff1 value: -30.256026923068767 - type: nauc_precision_at_10_max value: 8.313422052132559 - type: nauc_precision_at_10_std value: 25.929372356449694 - type: nauc_precision_at_1_diff1 value: 75.00032534968587 - type: nauc_precision_at_1_max value: 29.61849062038547 - type: nauc_precision_at_1_std value: -42.560207043864054 - type: nauc_precision_at_20_diff1 value: -35.61971069986584 - type: nauc_precision_at_20_max value: 5.4664303079116765 - type: nauc_precision_at_20_std value: 34.992352471692826 - type: nauc_precision_at_3_diff1 value: -5.691231842471157 - type: nauc_precision_at_3_max value: 14.797949087742444 - type: nauc_precision_at_3_std value: -0.1930317395644928 - type: nauc_precision_at_5_diff1 value: -20.03913781462645 - type: nauc_precision_at_5_max value: 11.956771408712749 - type: nauc_precision_at_5_std value: 13.179251389859731 - type: nauc_recall_at_1000_diff1 value: 64.03509042729674 - type: nauc_recall_at_1000_max value: 40.91691485428493 - type: nauc_recall_at_1000_std value: 16.12968625875372 - type: nauc_recall_at_100_diff1 value: 63.83116179628575 - type: nauc_recall_at_100_max value: 43.72908117676382 - type: nauc_recall_at_100_std value: -20.50966716852155 - type: nauc_recall_at_10_diff1 value: 66.42071960186394 - type: nauc_recall_at_10_max value: 28.983207818687205 - type: nauc_recall_at_10_std value: -56.61417798753744 - type: nauc_recall_at_1_diff1 value: 77.29690208952982 - type: nauc_recall_at_1_max value: 19.839875762282293 - type: nauc_recall_at_1_std value: -45.355684654708284 - type: nauc_recall_at_20_diff1 value: 66.32360705219874 - type: nauc_recall_at_20_max value: 33.30698111822631 - type: nauc_recall_at_20_std value: -43.89233781737452 - type: nauc_recall_at_3_diff1 value: 69.67029394927077 - type: nauc_recall_at_3_max value: 22.67803039327696 - type: nauc_recall_at_3_std value: -56.43327209861502 - type: nauc_recall_at_5_diff1 value: 68.05622143936131 - type: nauc_recall_at_5_max value: 26.67795559040675 - type: nauc_recall_at_5_std value: -58.158231198510954 - type: ndcg_at_1 value: 76.08 - type: ndcg_at_10 value: 84.114 - type: ndcg_at_100 value: 85.784 - type: ndcg_at_1000 value: 85.992 - type: ndcg_at_20 value: 84.976 - type: ndcg_at_3 value: 80.74799999999999 - type: ndcg_at_5 value: 82.626 - type: precision_at_1 value: 76.08 - type: precision_at_10 value: 12.926000000000002 - type: precision_at_100 value: 1.509 - type: precision_at_1000 value: 0.156 - type: precision_at_20 value: 6.912999999999999 - type: precision_at_3 value: 35.5 - type: precision_at_5 value: 23.541999999999998 - type: recall_at_1 value: 65.848 - type: recall_at_10 value: 92.611 - type: recall_at_100 value: 98.69 - type: recall_at_1000 value: 99.83999999999999 - type: recall_at_20 value: 95.47200000000001 - type: recall_at_3 value: 83.122 - type: recall_at_5 value: 88.23 - task: type: Retrieval dataset: name: MTEB SCIDOCS-PL (default) type: clarin-knext/scidocs-pl config: 
default split: test revision: 45452b03f05560207ef19149545f168e596c9337 metrics: - type: main_score value: 15.379999999999999 - type: map_at_1 value: 3.6029999999999998 - type: map_at_10 value: 8.843 - type: map_at_100 value: 10.433 - type: map_at_1000 value: 10.689 - type: map_at_20 value: 9.597 - type: map_at_3 value: 6.363 - type: map_at_5 value: 7.603 - type: mrr_at_1 value: 17.7 - type: mrr_at_10 value: 26.58900793650793 - type: mrr_at_100 value: 27.699652322890987 - type: mrr_at_1000 value: 27.78065313118353 - type: mrr_at_20 value: 27.215020950411816 - type: mrr_at_3 value: 23.36666666666668 - type: mrr_at_5 value: 25.211666666666666 - type: nauc_map_at_1000_diff1 value: 21.92235143827129 - type: nauc_map_at_1000_max value: 37.50300940750989 - type: nauc_map_at_1000_std value: 20.872586122198552 - type: nauc_map_at_100_diff1 value: 21.917408170465833 - type: nauc_map_at_100_max value: 37.4654466815513 - type: nauc_map_at_100_std value: 20.621643878648534 - type: nauc_map_at_10_diff1 value: 22.914388723621183 - type: nauc_map_at_10_max value: 36.468131213468794 - type: nauc_map_at_10_std value: 16.760980140791492 - type: nauc_map_at_1_diff1 value: 29.00799502838457 - type: nauc_map_at_1_max value: 26.64926291797503 - type: nauc_map_at_1_std value: 8.167291261637361 - type: nauc_map_at_20_diff1 value: 22.46580947804047 - type: nauc_map_at_20_max value: 36.656294842562275 - type: nauc_map_at_20_std value: 18.099232417722078 - type: nauc_map_at_3_diff1 value: 23.436009032045934 - type: nauc_map_at_3_max value: 31.325807212280914 - type: nauc_map_at_3_std value: 9.780905232048852 - type: nauc_map_at_5_diff1 value: 22.891704394665528 - type: nauc_map_at_5_max value: 35.40584466642894 - type: nauc_map_at_5_std value: 13.476986099394656 - type: nauc_mrr_at_1000_diff1 value: 25.052937655397866 - type: nauc_mrr_at_1000_max value: 29.64431912670108 - type: nauc_mrr_at_1000_std value: 14.549744963988044 - type: nauc_mrr_at_100_diff1 value: 25.070871266969224 - type: nauc_mrr_at_100_max value: 29.68743604652336 - type: nauc_mrr_at_100_std value: 14.582010154574432 - type: nauc_mrr_at_10_diff1 value: 24.88881466938897 - type: nauc_mrr_at_10_max value: 29.488430770768144 - type: nauc_mrr_at_10_std value: 14.269241073852266 - type: nauc_mrr_at_1_diff1 value: 29.220540327267503 - type: nauc_mrr_at_1_max value: 26.81908580507911 - type: nauc_mrr_at_1_std value: 8.00840295809718 - type: nauc_mrr_at_20_diff1 value: 25.067912695721944 - type: nauc_mrr_at_20_max value: 29.759227563849628 - type: nauc_mrr_at_20_std value: 14.685076859257357 - type: nauc_mrr_at_3_diff1 value: 24.645848739182696 - type: nauc_mrr_at_3_max value: 27.73368549660351 - type: nauc_mrr_at_3_std value: 11.475742805586943 - type: nauc_mrr_at_5_diff1 value: 24.895295760909946 - type: nauc_mrr_at_5_max value: 29.130755033240423 - type: nauc_mrr_at_5_std value: 12.955802929145404 - type: nauc_ndcg_at_1000_diff1 value: 20.68434434777729 - type: nauc_ndcg_at_1000_max value: 37.67055146424174 - type: nauc_ndcg_at_1000_std value: 29.57493715069776 - type: nauc_ndcg_at_100_diff1 value: 20.396834816492383 - type: nauc_ndcg_at_100_max value: 37.460575228670514 - type: nauc_ndcg_at_100_std value: 27.826534756761944 - type: nauc_ndcg_at_10_diff1 value: 22.640844106236027 - type: nauc_ndcg_at_10_max value: 35.21291764462327 - type: nauc_ndcg_at_10_std value: 19.53289455984506 - type: nauc_ndcg_at_1_diff1 value: 29.220540327267503 - type: nauc_ndcg_at_1_max value: 26.81908580507911 - type: nauc_ndcg_at_1_std value: 8.00840295809718 - type: 
nauc_ndcg_at_20_diff1 value: 22.117126657768623 - type: nauc_ndcg_at_20_max value: 35.79395781940806 - type: nauc_ndcg_at_20_std value: 22.242748346260786 - type: nauc_ndcg_at_3_diff1 value: 23.00596063212187 - type: nauc_ndcg_at_3_max value: 30.149013627580523 - type: nauc_ndcg_at_3_std value: 11.07904064662722 - type: nauc_ndcg_at_5_diff1 value: 22.81875419630523 - type: nauc_ndcg_at_5_max value: 34.24267468356626 - type: nauc_ndcg_at_5_std value: 15.307780280752088 - type: nauc_precision_at_1000_diff1 value: 9.606677689029972 - type: nauc_precision_at_1000_max value: 32.74855550489271 - type: nauc_precision_at_1000_std value: 42.65372585937895 - type: nauc_precision_at_100_diff1 value: 11.528981313529545 - type: nauc_precision_at_100_max value: 35.642529490132404 - type: nauc_precision_at_100_std value: 38.146151426052306 - type: nauc_precision_at_10_diff1 value: 18.783957183811836 - type: nauc_precision_at_10_max value: 36.1982008334257 - type: nauc_precision_at_10_std value: 25.09349473195891 - type: nauc_precision_at_1_diff1 value: 29.220540327267503 - type: nauc_precision_at_1_max value: 26.81908580507911 - type: nauc_precision_at_1_std value: 8.00840295809718 - type: nauc_precision_at_20_diff1 value: 17.458766320828214 - type: nauc_precision_at_20_max value: 36.000404903025235 - type: nauc_precision_at_20_std value: 29.1608044138323 - type: nauc_precision_at_3_diff1 value: 20.213669462067166 - type: nauc_precision_at_3_max value: 31.120650847205912 - type: nauc_precision_at_3_std value: 12.390972418818118 - type: nauc_precision_at_5_diff1 value: 20.114245715785678 - type: nauc_precision_at_5_max value: 37.30360111495823 - type: nauc_precision_at_5_std value: 19.053109037822853 - type: nauc_recall_at_1000_diff1 value: 9.85800049032612 - type: nauc_recall_at_1000_max value: 32.48319160802687 - type: nauc_recall_at_1000_std value: 43.79941601741161 - type: nauc_recall_at_100_diff1 value: 11.375255270968337 - type: nauc_recall_at_100_max value: 35.1868784124497 - type: nauc_recall_at_100_std value: 38.422680583482666 - type: nauc_recall_at_10_diff1 value: 18.445783123521938 - type: nauc_recall_at_10_max value: 35.633267936276766 - type: nauc_recall_at_10_std value: 24.94469506254716 - type: nauc_recall_at_1_diff1 value: 29.00799502838457 - type: nauc_recall_at_1_max value: 26.64926291797503 - type: nauc_recall_at_1_std value: 8.167291261637361 - type: nauc_recall_at_20_diff1 value: 17.314906604151936 - type: nauc_recall_at_20_max value: 35.66067699203996 - type: nauc_recall_at_20_std value: 29.400137012506082 - type: nauc_recall_at_3_diff1 value: 19.873710875648698 - type: nauc_recall_at_3_max value: 30.92404718742849 - type: nauc_recall_at_3_std value: 12.400871018075199 - type: nauc_recall_at_5_diff1 value: 19.869948324233192 - type: nauc_recall_at_5_max value: 37.06832511687574 - type: nauc_recall_at_5_std value: 19.0798814966156 - type: ndcg_at_1 value: 17.7 - type: ndcg_at_10 value: 15.379999999999999 - type: ndcg_at_100 value: 22.09 - type: ndcg_at_1000 value: 27.151999999999997 - type: ndcg_at_20 value: 17.576 - type: ndcg_at_3 value: 14.219999999999999 - type: ndcg_at_5 value: 12.579 - type: precision_at_1 value: 17.7 - type: precision_at_10 value: 8.08 - type: precision_at_100 value: 1.7840000000000003 - type: precision_at_1000 value: 0.3 - type: precision_at_20 value: 5.305 - type: precision_at_3 value: 13.167000000000002 - type: precision_at_5 value: 11.06 - type: recall_at_1 value: 3.6029999999999998 - type: recall_at_10 value: 16.413 - type: recall_at_100 value: 36.263 - 
type: recall_at_1000 value: 61.016999999999996 - type: recall_at_20 value: 21.587999999999997 - type: recall_at_3 value: 8.013 - type: recall_at_5 value: 11.198 - task: type: Retrieval dataset: name: MTEB SciFact-PL (default) type: clarin-knext/scifact-pl config: default split: test revision: 47932a35f045ef8ed01ba82bf9ff67f6e109207e metrics: - type: main_score value: 64.764 - type: map_at_1 value: 49.778 - type: map_at_10 value: 59.88 - type: map_at_100 value: 60.707 - type: map_at_1000 value: 60.729 - type: map_at_20 value: 60.419999999999995 - type: map_at_3 value: 57.45400000000001 - type: map_at_5 value: 58.729 - type: mrr_at_1 value: 52.33333333333333 - type: mrr_at_10 value: 61.29193121693122 - type: mrr_at_100 value: 61.95817765126313 - type: mrr_at_1000 value: 61.97583284368782 - type: mrr_at_20 value: 61.72469949641003 - type: mrr_at_3 value: 59.44444444444444 - type: mrr_at_5 value: 60.494444444444454 - type: nauc_map_at_1000_diff1 value: 62.21235294015774 - type: nauc_map_at_1000_max value: 48.83996609100249 - type: nauc_map_at_1000_std value: 5.23892781043174 - type: nauc_map_at_100_diff1 value: 62.20170226789429 - type: nauc_map_at_100_max value: 48.8391766453537 - type: nauc_map_at_100_std value: 5.2664077457917715 - type: nauc_map_at_10_diff1 value: 61.961975488329024 - type: nauc_map_at_10_max value: 48.397109987625186 - type: nauc_map_at_10_std value: 4.314859710827481 - type: nauc_map_at_1_diff1 value: 65.0865197011516 - type: nauc_map_at_1_max value: 41.38862781954889 - type: nauc_map_at_1_std value: -0.9182122632530586 - type: nauc_map_at_20_diff1 value: 61.99173935851292 - type: nauc_map_at_20_max value: 48.79961814179307 - type: nauc_map_at_20_std value: 5.262181845825118 - type: nauc_map_at_3_diff1 value: 62.37910539880477 - type: nauc_map_at_3_max value: 47.13627890977091 - type: nauc_map_at_3_std value: 2.327897198087264 - type: nauc_map_at_5_diff1 value: 61.60080757149592 - type: nauc_map_at_5_max value: 47.60052458345962 - type: nauc_map_at_5_std value: 3.1770196981231047 - type: nauc_mrr_at_1000_diff1 value: 62.86810952814966 - type: nauc_mrr_at_1000_max value: 52.13248094447774 - type: nauc_mrr_at_1000_std value: 10.100485746570733 - type: nauc_mrr_at_100_diff1 value: 62.85364829491874 - type: nauc_mrr_at_100_max value: 52.134528010631854 - type: nauc_mrr_at_100_std value: 10.120945685447369 - type: nauc_mrr_at_10_diff1 value: 62.65679301829915 - type: nauc_mrr_at_10_max value: 52.09270719182349 - type: nauc_mrr_at_10_std value: 9.913834434725441 - type: nauc_mrr_at_1_diff1 value: 66.84108271415636 - type: nauc_mrr_at_1_max value: 46.67646429855176 - type: nauc_mrr_at_1_std value: 5.5505252956352304 - type: nauc_mrr_at_20_diff1 value: 62.72473227039611 - type: nauc_mrr_at_20_max value: 52.13479097802757 - type: nauc_mrr_at_20_std value: 10.188278833464084 - type: nauc_mrr_at_3_diff1 value: 63.797429185518496 - type: nauc_mrr_at_3_max value: 52.16486999573481 - type: nauc_mrr_at_3_std value: 9.094360767062762 - type: nauc_mrr_at_5_diff1 value: 62.592917975475494 - type: nauc_mrr_at_5_max value: 52.330741486107414 - type: nauc_mrr_at_5_std value: 9.742175534421389 - type: nauc_ndcg_at_1000_diff1 value: 61.38859337672476 - type: nauc_ndcg_at_1000_max value: 51.48380058339184 - type: nauc_ndcg_at_1000_std value: 9.670547660897673 - type: nauc_ndcg_at_100_diff1 value: 61.02438489641434 - type: nauc_ndcg_at_100_max value: 51.781246646780865 - type: nauc_ndcg_at_100_std value: 10.592961553245187 - type: nauc_ndcg_at_10_diff1 value: 60.03678353308358 - type: 
nauc_ndcg_at_10_max value: 50.70725688848762 - type: nauc_ndcg_at_10_std value: 7.9472446491016315 - type: nauc_ndcg_at_1_diff1 value: 66.84108271415636 - type: nauc_ndcg_at_1_max value: 46.67646429855176 - type: nauc_ndcg_at_1_std value: 5.5505252956352304 - type: nauc_ndcg_at_20_diff1 value: 59.828482718480224 - type: nauc_ndcg_at_20_max value: 51.45831789601284 - type: nauc_ndcg_at_20_std value: 10.722673683272049 - type: nauc_ndcg_at_3_diff1 value: 61.68982937524109 - type: nauc_ndcg_at_3_max value: 49.745326748604775 - type: nauc_ndcg_at_3_std value: 4.948298621202247 - type: nauc_ndcg_at_5_diff1 value: 59.67396171973207 - type: nauc_ndcg_at_5_max value: 49.87855139298281 - type: nauc_ndcg_at_5_std value: 6.08990428055584 - type: nauc_precision_at_1000_diff1 value: -1.594227972036865 - type: nauc_precision_at_1000_max value: 32.48431723086185 - type: nauc_precision_at_1000_std value: 53.84748466965268 - type: nauc_precision_at_100_diff1 value: 8.06411455192293 - type: nauc_precision_at_100_max value: 39.91003601878948 - type: nauc_precision_at_100_std value: 55.52979711075091 - type: nauc_precision_at_10_diff1 value: 26.610514456014066 - type: nauc_precision_at_10_max value: 47.09062494321172 - type: nauc_precision_at_10_std value: 33.91984226498748 - type: nauc_precision_at_1_diff1 value: 66.84108271415636 - type: nauc_precision_at_1_max value: 46.67646429855176 - type: nauc_precision_at_1_std value: 5.5505252956352304 - type: nauc_precision_at_20_diff1 value: 16.947688843085583 - type: nauc_precision_at_20_max value: 45.40488186572008 - type: nauc_precision_at_20_std value: 48.354421924500905 - type: nauc_precision_at_3_diff1 value: 49.11263981720622 - type: nauc_precision_at_3_max value: 52.7084625111683 - type: nauc_precision_at_3_std value: 16.734612173556453 - type: nauc_precision_at_5_diff1 value: 39.06503705015792 - type: nauc_precision_at_5_max value: 52.21710506893391 - type: nauc_precision_at_5_std value: 23.350948149460233 - type: nauc_recall_at_1000_diff1 value: 43.1559290382817 - type: nauc_recall_at_1000_max value: 83.66013071895456 - type: nauc_recall_at_1000_std value: 86.27450980392177 - type: nauc_recall_at_100_diff1 value: 46.016860850620375 - type: nauc_recall_at_100_max value: 69.3944888744547 - type: nauc_recall_at_100_std value: 55.286945696152735 - type: nauc_recall_at_10_diff1 value: 49.65877895350921 - type: nauc_recall_at_10_max value: 53.02636695700889 - type: nauc_recall_at_10_std value: 13.967608945823828 - type: nauc_recall_at_1_diff1 value: 65.0865197011516 - type: nauc_recall_at_1_max value: 41.38862781954889 - type: nauc_recall_at_1_std value: -0.9182122632530586 - type: nauc_recall_at_20_diff1 value: 43.355308229973524 - type: nauc_recall_at_20_max value: 57.04187909533764 - type: nauc_recall_at_20_std value: 33.578720846660524 - type: nauc_recall_at_3_diff1 value: 56.922996057428165 - type: nauc_recall_at_3_max value: 50.74417041895424 - type: nauc_recall_at_3_std value: 5.623890124328387 - type: nauc_recall_at_5_diff1 value: 50.55620076865238 - type: nauc_recall_at_5_max value: 51.3316854622085 - type: nauc_recall_at_5_std value: 8.995457887269255 - type: ndcg_at_1 value: 52.333 - type: ndcg_at_10 value: 64.764 - type: ndcg_at_100 value: 68.167 - type: ndcg_at_1000 value: 68.816 - type: ndcg_at_20 value: 66.457 - type: ndcg_at_3 value: 60.346 - type: ndcg_at_5 value: 62.365 - type: precision_at_1 value: 52.333 - type: precision_at_10 value: 8.799999999999999 - type: precision_at_100 value: 1.057 - type: precision_at_1000 value: 0.11100000000000002 
- type: precision_at_20 value: 4.8 - type: precision_at_3 value: 23.889 - type: precision_at_5 value: 15.6 - type: recall_at_1 value: 49.778 - type: recall_at_10 value: 78.206 - type: recall_at_100 value: 93.10000000000001 - type: recall_at_1000 value: 98.333 - type: recall_at_20 value: 84.467 - type: recall_at_3 value: 66.367 - type: recall_at_5 value: 71.35000000000001 - task: type: Retrieval dataset: name: MTEB TRECCOVID-PL (default) type: clarin-knext/trec-covid-pl config: default split: test revision: 81bcb408f33366c2a20ac54adafad1ae7e877fdd metrics: - type: main_score value: 72.18900000000001 - type: map_at_1 value: 0.214 - type: map_at_10 value: 1.755 - type: map_at_100 value: 9.944 - type: map_at_1000 value: 24.205 - type: map_at_20 value: 3.1510000000000002 - type: map_at_3 value: 0.6 - type: map_at_5 value: 0.9560000000000001 - type: mrr_at_1 value: 82.0 - type: mrr_at_10 value: 89.06666666666666 - type: mrr_at_100 value: 89.06666666666666 - type: mrr_at_1000 value: 89.06666666666666 - type: mrr_at_20 value: 89.06666666666666 - type: mrr_at_3 value: 87.66666666666666 - type: mrr_at_5 value: 89.06666666666666 - type: nauc_map_at_1000_diff1 value: -9.342037623635543 - type: nauc_map_at_1000_max value: 45.71499810252398 - type: nauc_map_at_1000_std value: 76.86482845196852 - type: nauc_map_at_100_diff1 value: -6.932395299866198 - type: nauc_map_at_100_max value: 36.097801891181604 - type: nauc_map_at_100_std value: 65.6085215411685 - type: nauc_map_at_10_diff1 value: -6.3654843824342775 - type: nauc_map_at_10_max value: 9.564437521432714 - type: nauc_map_at_10_std value: 21.8377319336476 - type: nauc_map_at_1_diff1 value: 8.269590874255034 - type: nauc_map_at_1_max value: 3.482498491294516 - type: nauc_map_at_1_std value: 8.985226819412189 - type: nauc_map_at_20_diff1 value: -4.971435767877232 - type: nauc_map_at_20_max value: 22.88801858567121 - type: nauc_map_at_20_std value: 32.38492618534027 - type: nauc_map_at_3_diff1 value: 1.1615973694623123 - type: nauc_map_at_3_max value: 1.935417800315643 - type: nauc_map_at_3_std value: 10.289328305818698 - type: nauc_map_at_5_diff1 value: -2.4675967231444105 - type: nauc_map_at_5_max value: 2.4611483736622373 - type: nauc_map_at_5_std value: 15.082324305750811 - type: nauc_mrr_at_1000_diff1 value: 13.098526703499063 - type: nauc_mrr_at_1000_max value: 56.37362177417431 - type: nauc_mrr_at_1000_std value: 73.2456769749587 - type: nauc_mrr_at_100_diff1 value: 13.098526703499063 - type: nauc_mrr_at_100_max value: 56.37362177417431 - type: nauc_mrr_at_100_std value: 73.2456769749587 - type: nauc_mrr_at_10_diff1 value: 13.098526703499063 - type: nauc_mrr_at_10_max value: 56.37362177417431 - type: nauc_mrr_at_10_std value: 73.2456769749587 - type: nauc_mrr_at_1_diff1 value: 12.099350148694809 - type: nauc_mrr_at_1_max value: 53.75041304108387 - type: nauc_mrr_at_1_std value: 68.84018063663402 - type: nauc_mrr_at_20_diff1 value: 13.098526703499063 - type: nauc_mrr_at_20_max value: 56.37362177417431 - type: nauc_mrr_at_20_std value: 73.2456769749587 - type: nauc_mrr_at_3_diff1 value: 12.173557857011161 - type: nauc_mrr_at_3_max value: 57.540780562363395 - type: nauc_mrr_at_3_std value: 75.42098189580211 - type: nauc_mrr_at_5_diff1 value: 13.098526703499063 - type: nauc_mrr_at_5_max value: 56.37362177417431 - type: nauc_mrr_at_5_std value: 73.2456769749587 - type: nauc_ndcg_at_1000_diff1 value: -8.951471847310401 - type: nauc_ndcg_at_1000_max value: 43.86942237288822 - type: nauc_ndcg_at_1000_std value: 74.61077735148591 - type: 
nauc_ndcg_at_100_diff1 value: -17.754559361083817 - type: nauc_ndcg_at_100_max value: 53.97187119773482 - type: nauc_ndcg_at_100_std value: 80.7944136146514 - type: nauc_ndcg_at_10_diff1 value: -26.637734697836414 - type: nauc_ndcg_at_10_max value: 47.70102699133149 - type: nauc_ndcg_at_10_std value: 70.26909560828646 - type: nauc_ndcg_at_1_diff1 value: -1.2250530785563207 - type: nauc_ndcg_at_1_max value: 46.60509554140131 - type: nauc_ndcg_at_1_std value: 62.63906581740976 - type: nauc_ndcg_at_20_diff1 value: -22.44286466550908 - type: nauc_ndcg_at_20_max value: 55.40492058090103 - type: nauc_ndcg_at_20_std value: 72.11813912145738 - type: nauc_ndcg_at_3_diff1 value: -14.8152721896563 - type: nauc_ndcg_at_3_max value: 38.952259383027595 - type: nauc_ndcg_at_3_std value: 59.819750166537766 - type: nauc_ndcg_at_5_diff1 value: -19.150105688904375 - type: nauc_ndcg_at_5_max value: 42.311180547775315 - type: nauc_ndcg_at_5_std value: 66.6632229321094 - type: nauc_precision_at_1000_diff1 value: -11.555591477978941 - type: nauc_precision_at_1000_max value: 43.7311644834851 - type: nauc_precision_at_1000_std value: 52.10644767999648 - type: nauc_precision_at_100_diff1 value: -16.94803099801117 - type: nauc_precision_at_100_max value: 54.08281631067633 - type: nauc_precision_at_100_std value: 82.77237347891331 - type: nauc_precision_at_10_diff1 value: -27.351332814863355 - type: nauc_precision_at_10_max value: 48.08237549065846 - type: nauc_precision_at_10_std value: 69.37250843534329 - type: nauc_precision_at_1_diff1 value: 12.099350148694809 - type: nauc_precision_at_1_max value: 53.75041304108387 - type: nauc_precision_at_1_std value: 68.84018063663402 - type: nauc_precision_at_20_diff1 value: -18.2422222283388 - type: nauc_precision_at_20_max value: 59.517328129343696 - type: nauc_precision_at_20_std value: 72.05149307342747 - type: nauc_precision_at_3_diff1 value: -10.226547543075897 - type: nauc_precision_at_3_max value: 43.14684818832875 - type: nauc_precision_at_3_std value: 57.31936467418288 - type: nauc_precision_at_5_diff1 value: -14.28521589468673 - type: nauc_precision_at_5_max value: 41.633426753962596 - type: nauc_precision_at_5_std value: 64.94400576804541 - type: nauc_recall_at_1000_diff1 value: -0.9648831207497152 - type: nauc_recall_at_1000_max value: 31.70832946085005 - type: nauc_recall_at_1000_std value: 63.21471613968869 - type: nauc_recall_at_100_diff1 value: -1.360254380933586 - type: nauc_recall_at_100_max value: 25.960597782099605 - type: nauc_recall_at_100_std value: 51.52757589609674 - type: nauc_recall_at_10_diff1 value: -0.3899439424189566 - type: nauc_recall_at_10_max value: 5.094341897886072 - type: nauc_recall_at_10_std value: 11.266045616925698 - type: nauc_recall_at_1_diff1 value: 8.269590874255034 - type: nauc_recall_at_1_max value: 3.482498491294516 - type: nauc_recall_at_1_std value: 8.985226819412189 - type: nauc_recall_at_20_diff1 value: 6.4797098359254175 - type: nauc_recall_at_20_max value: 15.663700985336124 - type: nauc_recall_at_20_std value: 17.154099587904913 - type: nauc_recall_at_3_diff1 value: 3.7245972450393507 - type: nauc_recall_at_3_max value: 0.4063857187240345 - type: nauc_recall_at_3_std value: 6.641948062821941 - type: nauc_recall_at_5_diff1 value: 4.013879477591466 - type: nauc_recall_at_5_max value: -1.4266586618013566 - type: nauc_recall_at_5_std value: 7.311601874411205 - type: ndcg_at_1 value: 75.0 - type: ndcg_at_10 value: 72.18900000000001 - type: ndcg_at_100 value: 54.022999999999996 - type: ndcg_at_1000 value: 49.492000000000004 
- type: ndcg_at_20 value: 68.51 - type: ndcg_at_3 value: 73.184 - type: ndcg_at_5 value: 72.811 - type: precision_at_1 value: 82.0 - type: precision_at_10 value: 77.4 - type: precision_at_100 value: 55.24 - type: precision_at_1000 value: 21.822 - type: precision_at_20 value: 73.0 - type: precision_at_3 value: 79.333 - type: precision_at_5 value: 79.2 - type: recall_at_1 value: 0.214 - type: recall_at_10 value: 1.9980000000000002 - type: recall_at_100 value: 13.328999999999999 - type: recall_at_1000 value: 47.204 - type: recall_at_20 value: 3.7310000000000003 - type: recall_at_3 value: 0.628 - type: recall_at_5 value: 1.049 - task: type: MultilabelClassification dataset: name: MTEB CEDRClassification (default) type: ai-forever/cedr-classification config: default split: test revision: c0ba03d058e3e1b2f3fd20518875a4563dd12db4 metrics: - type: accuracy value: 47.30605738575983 - type: f1 value: 41.26091043925065 - type: lrap value: 72.89452709883206 - type: main_score value: 47.30605738575983 - task: type: Reranking dataset: name: MTEB MIRACLReranking (ru) type: miracl/mmteb-miracl-reranking config: ru split: dev revision: 6d1962c527217f8927fca80f890f14f36b2802af metrics: - type: MAP@1(MIRACL) value: 20.721999999999998 - type: MAP@10(MIRACL) value: 33.900999999999996 - type: MAP@100(MIRACL) value: 36.813 - type: MAP@1000(MIRACL) value: 36.813 - type: MAP@20(MIRACL) value: 35.684 - type: MAP@3(MIRACL) value: 28.141 - type: MAP@5(MIRACL) value: 31.075000000000003 - type: NDCG@1(MIRACL) value: 32.799 - type: NDCG@10(MIRACL) value: 42.065000000000005 - type: NDCG@100(MIRACL) value: 49.730999999999995 - type: NDCG@1000(MIRACL) value: 49.730999999999995 - type: NDCG@20(MIRACL) value: 46.0 - type: NDCG@3(MIRACL) value: 34.481 - type: NDCG@5(MIRACL) value: 37.452999999999996 - type: P@1(MIRACL) value: 32.799 - type: P@10(MIRACL) value: 11.668000000000001 - type: P@100(MIRACL) value: 1.9529999999999998 - type: P@1000(MIRACL) value: 0.19499999999999998 - type: P@20(MIRACL) value: 7.51 - type: P@3(MIRACL) value: 20.823 - type: P@5(MIRACL) value: 16.728 - type: Recall@1(MIRACL) value: 20.721999999999998 - type: Recall@10(MIRACL) value: 54.762 - type: Recall@100(MIRACL) value: 79.952 - type: Recall@1000(MIRACL) value: 79.952 - type: Recall@20(MIRACL) value: 66.26100000000001 - type: Recall@3(MIRACL) value: 34.410000000000004 - type: Recall@5(MIRACL) value: 42.659000000000006 - type: main_score value: 42.065000000000005 - type: nAUC_MAP@1000_diff1(MIRACL) value: 14.33534992502818 - type: nAUC_MAP@1000_max(MIRACL) value: 12.367998764646115 - type: nAUC_MAP@1000_std(MIRACL) value: 4.569686002935006 - type: nAUC_MAP@100_diff1(MIRACL) value: 14.33534992502818 - type: nAUC_MAP@100_max(MIRACL) value: 12.367998764646115 - type: nAUC_MAP@100_std(MIRACL) value: 4.569686002935006 - type: nAUC_MAP@10_diff1(MIRACL) value: 16.920323975680027 - type: nAUC_MAP@10_max(MIRACL) value: 9.327171297204082 - type: nAUC_MAP@10_std(MIRACL) value: 3.2039133783079015 - type: nAUC_MAP@1_diff1(MIRACL) value: 28.698973487482206 - type: nAUC_MAP@1_max(MIRACL) value: 2.9217687660885034 - type: nAUC_MAP@1_std(MIRACL) value: -1.1247408800976524 - type: nAUC_MAP@20_diff1(MIRACL) value: 15.359083081640476 - type: nAUC_MAP@20_max(MIRACL) value: 11.310494233946345 - type: nAUC_MAP@20_std(MIRACL) value: 4.4171898386022885 - type: nAUC_MAP@3_diff1(MIRACL) value: 22.27430591851617 - type: nAUC_MAP@3_max(MIRACL) value: 6.407438291284658 - type: nAUC_MAP@3_std(MIRACL) value: 0.9799184530397409 - type: nAUC_MAP@5_diff1(MIRACL) value: 
19.20571689941054 - type: nAUC_MAP@5_max(MIRACL) value: 7.987468654026893 - type: nAUC_MAP@5_std(MIRACL) value: 1.8324246565938962 - type: nAUC_NDCG@1000_diff1(MIRACL) value: 3.7537669018914768 - type: nAUC_NDCG@1000_max(MIRACL) value: 20.7944707840533 - type: nAUC_NDCG@1000_std(MIRACL) value: 8.444837055303063 - type: nAUC_NDCG@100_diff1(MIRACL) value: 3.7537669018914768 - type: nAUC_NDCG@100_max(MIRACL) value: 20.7944707840533 - type: nAUC_NDCG@100_std(MIRACL) value: 8.444837055303063 - type: nAUC_NDCG@10_diff1(MIRACL) value: 10.829575656103888 - type: nAUC_NDCG@10_max(MIRACL) value: 13.0445496498929 - type: nAUC_NDCG@10_std(MIRACL) value: 6.050412212625362 - type: nAUC_NDCG@1_diff1(MIRACL) value: 19.1388712233292 - type: nAUC_NDCG@1_max(MIRACL) value: 10.871900994781642 - type: nAUC_NDCG@1_std(MIRACL) value: 3.218568248751811 - type: nAUC_NDCG@20_diff1(MIRACL) value: 7.093172181746442 - type: nAUC_NDCG@20_max(MIRACL) value: 16.955238078958836 - type: nAUC_NDCG@20_std(MIRACL) value: 8.325656379573035 - type: nAUC_NDCG@3_diff1(MIRACL) value: 17.134437303330802 - type: nAUC_NDCG@3_max(MIRACL) value: 10.235328822955793 - type: nAUC_NDCG@3_std(MIRACL) value: 3.2341358691084814 - type: nAUC_NDCG@5_diff1(MIRACL) value: 14.733664618337636 - type: nAUC_NDCG@5_max(MIRACL) value: 11.181897412035282 - type: nAUC_NDCG@5_std(MIRACL) value: 3.642277088791985 - type: nAUC_P@1000_diff1(MIRACL) value: -26.330038284867573 - type: nAUC_P@1000_max(MIRACL) value: 28.450694137240458 - type: nAUC_P@1000_std(MIRACL) value: 9.892993775474912 - type: nAUC_P@100_diff1(MIRACL) value: -26.330038284867552 - type: nAUC_P@100_max(MIRACL) value: 28.45069413724051 - type: nAUC_P@100_std(MIRACL) value: 9.892993775474928 - type: nAUC_P@10_diff1(MIRACL) value: -17.436937353231112 - type: nAUC_P@10_max(MIRACL) value: 24.327018012947857 - type: nAUC_P@10_std(MIRACL) value: 11.78803527706634 - type: nAUC_P@1_diff1(MIRACL) value: 19.1388712233292 - type: nAUC_P@1_max(MIRACL) value: 10.871900994781642 - type: nAUC_P@1_std(MIRACL) value: 3.218568248751811 - type: nAUC_P@20_diff1(MIRACL) value: -22.947528755272426 - type: nAUC_P@20_max(MIRACL) value: 27.773093471902538 - type: nAUC_P@20_std(MIRACL) value: 14.898619107087221 - type: nAUC_P@3_diff1(MIRACL) value: 1.4100426412400944 - type: nAUC_P@3_max(MIRACL) value: 17.397472872058845 - type: nAUC_P@3_std(MIRACL) value: 8.240008229861875 - type: nAUC_P@5_diff1(MIRACL) value: -7.971349332207021 - type: nAUC_P@5_max(MIRACL) value: 22.198441167940963 - type: nAUC_P@5_std(MIRACL) value: 9.00265164460082 - type: nAUC_Recall@1000_diff1(MIRACL) value: -38.69835271863148 - type: nAUC_Recall@1000_max(MIRACL) value: 50.9545152809108 - type: nAUC_Recall@1000_std(MIRACL) value: 20.44270887092116 - type: nAUC_Recall@100_diff1(MIRACL) value: -38.69835271863148 - type: nAUC_Recall@100_max(MIRACL) value: 50.9545152809108 - type: nAUC_Recall@100_std(MIRACL) value: 20.44270887092116 - type: nAUC_Recall@10_diff1(MIRACL) value: -0.08109036309433801 - type: nAUC_Recall@10_max(MIRACL) value: 12.696619907773568 - type: nAUC_Recall@10_std(MIRACL) value: 8.791982704261589 - type: nAUC_Recall@1_diff1(MIRACL) value: 28.698973487482206 - type: nAUC_Recall@1_max(MIRACL) value: 2.9217687660885034 - type: nAUC_Recall@1_std(MIRACL) value: -1.1247408800976524 - type: nAUC_Recall@20_diff1(MIRACL) value: -13.312171017942623 - type: nAUC_Recall@20_max(MIRACL) value: 24.19847346821666 - type: nAUC_Recall@20_std(MIRACL) value: 15.8157702609797 - type: nAUC_Recall@3_diff1(MIRACL) value: 16.909128321353343 - type: 
nAUC_Recall@3_max(MIRACL) value: 6.552122731902991 - type: nAUC_Recall@3_std(MIRACL) value: 1.9963898223457228 - type: nAUC_Recall@5_diff1(MIRACL) value: 9.990292655247721 - type: nAUC_Recall@5_max(MIRACL) value: 9.361722273507574 - type: nAUC_Recall@5_std(MIRACL) value: 3.270918827854495 - task: type: MultilabelClassification dataset: name: MTEB SensitiveTopicsClassification (default) type: ai-forever/sensitive-topics-classification config: default split: test revision: 416b34a802308eac30e4192afc0ff99bb8dcc7f2 metrics: - type: accuracy value: 30.634765625 - type: f1 value: 32.647559808678665 - type: lrap value: 45.94319661458259 - type: main_score value: 30.634765625 - task: type: STS dataset: name: MTEB ATEC (default) type: C-MTEB/ATEC config: default split: test revision: 0f319b1142f28d00e055a6770f3f726ae9b7d865 metrics: - type: cosine_pearson value: 47.541497334563296 - type: cosine_spearman value: 49.06268944206629 - type: euclidean_pearson value: 51.838926748581635 - type: euclidean_spearman value: 48.930697157135356 - type: main_score value: 49.06268944206629 - type: manhattan_pearson value: 51.835306769406365 - type: manhattan_spearman value: 48.86135493444834 - type: pearson value: 47.541497334563296 - type: spearman value: 49.06268944206629 - task: type: Classification dataset: name: MTEB AllegroReviews (default) type: PL-MTEB/allegro-reviews config: default split: test revision: b89853e6de927b0e3bfa8ecc0e56fe4e02ceafc6 metrics: - type: accuracy value: 49.51292246520874 - type: f1 value: 44.14350234332397 - type: f1_weighted value: 51.65508998354552 - type: main_score value: 49.51292246520874 - task: type: Clustering dataset: name: MTEB AlloProfClusteringP2P (default) type: lyon-nlp/alloprof config: default split: test revision: 392ba3f5bcc8c51f578786c1fc3dae648662cb9b metrics: - type: main_score value: 63.883383458621665 - type: v_measure value: 63.883383458621665 - type: v_measure_std value: 2.693666879958465 - type: main_score value: 46.85924588755251 - type: v_measure value: 46.85924588755251 - type: v_measure_std value: 2.1918258880872377 - task: type: Clustering dataset: name: MTEB 8TagsClustering type: PL-MTEB/8tags-clustering config: default split: test revision: None metrics: - type: v_measure value: 43.65721212452554 - task: type: Reranking dataset: name: MTEB AlloprofReranking (default) type: lyon-nlp/mteb-fr-reranking-alloprof-s2p config: default split: test revision: e40c8a63ce02da43200eccb5b0846fcaa888f562 metrics: - type: map value: 66.39013753839347 - type: mrr value: 67.68045617786551 - type: main_score value: 66.39013753839347 - task: type: Retrieval dataset: name: MTEB AlloprofRetrieval (default) type: lyon-nlp/alloprof config: default split: test revision: fcf295ea64c750f41fadbaa37b9b861558e1bfbd metrics: - type: main_score value: 54.284 - type: map_at_1 value: 37.047000000000004 - type: map_at_10 value: 48.53 - type: map_at_100 value: 49.357 - type: map_at_1000 value: 49.39 - type: map_at_20 value: 49.064 - type: map_at_3 value: 45.675 - type: map_at_5 value: 47.441 - type: mrr_at_1 value: 37.04663212435233 - type: mrr_at_10 value: 48.5300326232969 - type: mrr_at_100 value: 49.35708199037581 - type: mrr_at_1000 value: 49.39005824603193 - type: mrr_at_20 value: 49.06417416464799 - type: mrr_at_3 value: 45.67501439263105 - type: mrr_at_5 value: 47.44099021301103 - type: nauc_map_at_1000_diff1 value: 43.32474221868009 - type: nauc_map_at_1000_max value: 39.407334029058575 - type: nauc_map_at_1000_std value: -2.3728154448932606 - type: nauc_map_at_100_diff1 
value: 43.32336300929909 - type: nauc_map_at_100_max value: 39.432174777554835 - type: nauc_map_at_100_std value: -2.356396922384349 - type: nauc_map_at_10_diff1 value: 43.1606520154482 - type: nauc_map_at_10_max value: 39.33734650558226 - type: nauc_map_at_10_std value: -2.5156222475075256 - type: nauc_map_at_1_diff1 value: 46.2178975214499 - type: nauc_map_at_1_max value: 36.26173199049361 - type: nauc_map_at_1_std value: -3.0897555582816443 - type: nauc_map_at_20_diff1 value: 43.272980702916456 - type: nauc_map_at_20_max value: 39.4896977052276 - type: nauc_map_at_20_std value: -2.3305501742917043 - type: nauc_map_at_3_diff1 value: 43.49525042967079 - type: nauc_map_at_3_max value: 38.66352501824728 - type: nauc_map_at_3_std value: -3.202794391620473 - type: nauc_map_at_5_diff1 value: 43.2266692546611 - type: nauc_map_at_5_max value: 38.77368661115743 - type: nauc_map_at_5_std value: -3.0897532130127954 - type: nauc_mrr_at_1000_diff1 value: 43.32474221868009 - type: nauc_mrr_at_1000_max value: 39.407334029058575 - type: nauc_mrr_at_1000_std value: -2.3728154448932606 - type: nauc_mrr_at_100_diff1 value: 43.32336300929909 - type: nauc_mrr_at_100_max value: 39.432174777554835 - type: nauc_mrr_at_100_std value: -2.356396922384349 - type: nauc_mrr_at_10_diff1 value: 43.1606520154482 - type: nauc_mrr_at_10_max value: 39.33734650558226 - type: nauc_mrr_at_10_std value: -2.5156222475075256 - type: nauc_mrr_at_1_diff1 value: 46.2178975214499 - type: nauc_mrr_at_1_max value: 36.26173199049361 - type: nauc_mrr_at_1_std value: -3.0897555582816443 - type: nauc_mrr_at_20_diff1 value: 43.272980702916456 - type: nauc_mrr_at_20_max value: 39.4896977052276 - type: nauc_mrr_at_20_std value: -2.3305501742917043 - type: nauc_mrr_at_3_diff1 value: 43.49525042967079 - type: nauc_mrr_at_3_max value: 38.66352501824728 - type: nauc_mrr_at_3_std value: -3.202794391620473 - type: nauc_mrr_at_5_diff1 value: 43.2266692546611 - type: nauc_mrr_at_5_max value: 38.77368661115743 - type: nauc_mrr_at_5_std value: -3.0897532130127954 - type: nauc_ndcg_at_1000_diff1 value: 43.01903168202974 - type: nauc_ndcg_at_1000_max value: 40.75496622942232 - type: nauc_ndcg_at_1000_std value: -1.3150412981845496 - type: nauc_ndcg_at_100_diff1 value: 42.98016493758145 - type: nauc_ndcg_at_100_max value: 41.55869635162325 - type: nauc_ndcg_at_100_std value: -0.5355252976886055 - type: nauc_ndcg_at_10_diff1 value: 42.218755211347506 - type: nauc_ndcg_at_10_max value: 41.305042275175765 - type: nauc_ndcg_at_10_std value: -1.4034484444573714 - type: nauc_ndcg_at_1_diff1 value: 46.2178975214499 - type: nauc_ndcg_at_1_max value: 36.26173199049361 - type: nauc_ndcg_at_1_std value: -3.0897555582816443 - type: nauc_ndcg_at_20_diff1 value: 42.66574440095576 - type: nauc_ndcg_at_20_max value: 42.014620115124515 - type: nauc_ndcg_at_20_std value: -0.5176162553751498 - type: nauc_ndcg_at_3_diff1 value: 42.837450505106055 - type: nauc_ndcg_at_3_max value: 39.525369733082414 - type: nauc_ndcg_at_3_std value: -3.1605948245795155 - type: nauc_ndcg_at_5_diff1 value: 42.37951815451173 - type: nauc_ndcg_at_5_max value: 39.78840132935179 - type: nauc_ndcg_at_5_std value: -2.936898430768135 - type: nauc_precision_at_1000_diff1 value: 49.69224988612385 - type: nauc_precision_at_1000_max value: 79.57897547128005 - type: nauc_precision_at_1000_std value: 45.040371354764645 - type: nauc_precision_at_100_diff1 value: 42.70597486048422 - type: nauc_precision_at_100_max value: 65.74628759606188 - type: nauc_precision_at_100_std value: 25.49157745244855 - type: 
nauc_precision_at_10_diff1 value: 38.565609931689345 - type: nauc_precision_at_10_max value: 50.0239696180852 - type: nauc_precision_at_10_std value: 3.976354829503967 - type: nauc_precision_at_1_diff1 value: 46.2178975214499 - type: nauc_precision_at_1_max value: 36.26173199049361 - type: nauc_precision_at_1_std value: -3.0897555582816443 - type: nauc_precision_at_20_diff1 value: 40.4134718566864 - type: nauc_precision_at_20_max value: 57.121778108665374 - type: nauc_precision_at_20_std value: 11.46021975428544 - type: nauc_precision_at_3_diff1 value: 40.90538379461529 - type: nauc_precision_at_3_max value: 42.18393248057992 - type: nauc_precision_at_3_std value: -3.005249943837297 - type: nauc_precision_at_5_diff1 value: 39.60162965860782 - type: nauc_precision_at_5_max value: 43.28317158174058 - type: nauc_precision_at_5_std value: -2.3469094487738054 - type: nauc_recall_at_1000_diff1 value: 49.69224988612252 - type: nauc_recall_at_1000_max value: 79.57897547127862 - type: nauc_recall_at_1000_std value: 45.04037135476256 - type: nauc_recall_at_100_diff1 value: 42.70597486048432 - type: nauc_recall_at_100_max value: 65.74628759606213 - type: nauc_recall_at_100_std value: 25.491577452448727 - type: nauc_recall_at_10_diff1 value: 38.56560993168935 - type: nauc_recall_at_10_max value: 50.02396961808522 - type: nauc_recall_at_10_std value: 3.9763548295040314 - type: nauc_recall_at_1_diff1 value: 46.2178975214499 - type: nauc_recall_at_1_max value: 36.26173199049361 - type: nauc_recall_at_1_std value: -3.0897555582816443 - type: nauc_recall_at_20_diff1 value: 40.41347185668637 - type: nauc_recall_at_20_max value: 57.12177810866533 - type: nauc_recall_at_20_std value: 11.460219754285431 - type: nauc_recall_at_3_diff1 value: 40.90538379461527 - type: nauc_recall_at_3_max value: 42.18393248057989 - type: nauc_recall_at_3_std value: -3.005249943837297 - type: nauc_recall_at_5_diff1 value: 39.601629658607784 - type: nauc_recall_at_5_max value: 43.28317158174053 - type: nauc_recall_at_5_std value: -2.3469094487738054 - type: ndcg_at_1 value: 37.047000000000004 - type: ndcg_at_10 value: 54.284 - type: ndcg_at_100 value: 58.34 - type: ndcg_at_1000 value: 59.303 - type: ndcg_at_20 value: 56.235 - type: ndcg_at_3 value: 48.503 - type: ndcg_at_5 value: 51.686 - type: precision_at_1 value: 37.047000000000004 - type: precision_at_10 value: 7.237 - type: precision_at_100 value: 0.914 - type: precision_at_1000 value: 0.099 - type: precision_at_20 value: 4.005 - type: precision_at_3 value: 18.898 - type: precision_at_5 value: 12.884 - type: recall_at_1 value: 37.047000000000004 - type: recall_at_10 value: 72.366 - type: recall_at_100 value: 91.408 - type: recall_at_1000 value: 99.136 - type: recall_at_20 value: 80.095 - type: recall_at_3 value: 56.693000000000005 - type: recall_at_5 value: 64.42099999999999 - task: type: Classification dataset: name: MTEB AmazonCounterfactualClassification (en) type: mteb/amazon_counterfactual config: en split: test revision: e8379541af4e31359cca9fbcf4b00f2671dba205 metrics: - type: accuracy value: 89.49253731343283 - type: ap value: 61.88098616359918 - type: ap_weighted value: 61.88098616359918 - type: f1 value: 84.76516623679144 - type: f1_weighted value: 89.92745276292968 - type: main_score value: 89.49253731343283 - task: type: Classification dataset: name: MTEB AmazonCounterfactualClassification (de) type: mteb/amazon_counterfactual config: de split: test revision: e8379541af4e31359cca9fbcf4b00f2671dba205 metrics: - type: accuracy value: 89.61456102783727 - type: ap 
value: 93.11816566733742 - type: ap_weighted value: 93.11816566733742 - type: f1 value: 88.27635757733722 - type: f1_weighted value: 89.82581568285453 - type: main_score value: 89.61456102783727 - task: type: Classification dataset: name: MTEB AmazonPolarityClassification (default) type: mteb/amazon_polarity config: default split: test revision: e2d317d38cd51312af73b3d32a06d1a08b442046 metrics: - type: accuracy value: 95.3825 - type: ap value: 93.393033869502 - type: ap_weighted value: 93.393033869502 - type: f1 value: 95.38109007966307 - type: f1_weighted value: 95.38109007966305 - type: main_score value: 95.3825 - task: type: Classification dataset: name: MTEB AmazonReviewsClassification (en) type: mteb/amazon_reviews_multi config: en split: test revision: 1399c76144fd37290681b995c656ef9b2e06e26d metrics: - type: accuracy value: 49.768 - type: f1 value: 48.95084821944411 - type: f1_weighted value: 48.9508482194441 - type: main_score value: 49.768 - task: type: Classification dataset: name: MTEB AmazonReviewsClassification (de) type: mteb/amazon_reviews_multi config: de split: test revision: 1399c76144fd37290681b995c656ef9b2e06e26d metrics: - type: accuracy value: 48.071999999999996 - type: f1 value: 47.24171107487612 - type: f1_weighted value: 47.24171107487612 - type: main_score value: 48.071999999999996 - task: type: Classification dataset: name: MTEB AmazonReviewsClassification (es) type: mteb/amazon_reviews_multi config: es split: test revision: 1399c76144fd37290681b995c656ef9b2e06e26d metrics: - type: accuracy value: 48.102000000000004 - type: f1 value: 47.27193805278696 - type: f1_weighted value: 47.27193805278696 - type: main_score value: 48.102000000000004 - task: type: Classification dataset: name: MTEB AmazonReviewsClassification (fr) type: mteb/amazon_reviews_multi config: fr split: test revision: 1399c76144fd37290681b995c656ef9b2e06e26d metrics: - type: accuracy value: 47.30800000000001 - type: f1 value: 46.41683358017851 - type: f1_weighted value: 46.41683358017851 - type: main_score value: 47.30800000000001 - task: type: Classification dataset: name: MTEB AmazonReviewsClassification (zh) type: mteb/amazon_reviews_multi config: zh split: test revision: 1399c76144fd37290681b995c656ef9b2e06e26d metrics: - type: accuracy value: 44.944 - type: f1 value: 44.223824487744395 - type: f1_weighted value: 44.22382448774439 - type: main_score value: 44.944 - task: type: Retrieval dataset: name: MTEB ArguAna (default) type: mteb/arguana config: default split: test revision: c22ab2a51041ffd869aaddef7af8d8215647e41a metrics: - type: map_at_1 value: 29.232000000000003 - type: map_at_10 value: 45.117000000000004 - type: map_at_100 value: 45.977000000000004 - type: map_at_1000 value: 45.98 - type: map_at_20 value: 45.815 - type: map_at_3 value: 39.912 - type: map_at_5 value: 42.693 - type: mrr_at_1 value: 29.659000000000002 - type: mrr_at_10 value: 45.253 - type: mrr_at_100 value: 46.125 - type: mrr_at_1000 value: 46.129 - type: mrr_at_20 value: 45.964 - type: mrr_at_3 value: 40.043 - type: mrr_at_5 value: 42.870000000000005 - type: ndcg_at_1 value: 29.232000000000003 - type: ndcg_at_10 value: 54.327999999999996 - type: ndcg_at_100 value: 57.86 - type: ndcg_at_1000 value: 57.935 - type: ndcg_at_20 value: 56.794 - type: ndcg_at_3 value: 43.516 - type: ndcg_at_5 value: 48.512 - type: precision_at_1 value: 29.232000000000003 - type: precision_at_10 value: 8.393 - type: precision_at_100 value: 0.991 - type: precision_at_1000 value: 0.1 - type: precision_at_20 value: 4.676 - type: precision_at_3 
value: 17.994 - type: precision_at_5 value: 13.215 - type: recall_at_1 value: 29.232000000000003 - type: recall_at_10 value: 83.926 - type: recall_at_100 value: 99.075 - type: recall_at_1000 value: 99.644 - type: recall_at_20 value: 93.528 - type: recall_at_3 value: 53.983000000000004 - type: recall_at_5 value: 66.074 - type: main_score value: 54.327999999999996 - task: type: Clustering dataset: name: MTEB ArxivClusteringP2P (default) type: mteb/arxiv-clustering-p2p config: default split: test revision: a122ad7f3f0291bf49cc6f4d32aa80929df69d5d metrics: - type: main_score value: 46.6636824632419 - type: v_measure value: 46.6636824632419 - type: v_measure_std value: 13.817129140714963 - task: type: Clustering dataset: name: MTEB ArxivClusteringS2S (default) type: mteb/arxiv-clustering-s2s config: default split: test revision: f910caf1a6075f7329cdf8c1a6135696f37dbd53 metrics: - type: main_score value: 39.271141892800024 - type: v_measure value: 39.271141892800024 - type: v_measure_std value: 14.276782483454827 - task: type: Reranking dataset: name: MTEB AskUbuntuDupQuestions (default) type: mteb/askubuntudupquestions-reranking config: default split: test revision: 2000358ca161889fa9c082cb41daa8dcfb161a54 metrics: - type: map value: 65.04363277324629 - type: mrr value: 78.2372598162072 - type: main_score value: 65.04363277324629 - task: type: Reranking dataset: name: MTEB MindSmallReranking (default) type: mteb/mind_small config: default split: test revision: 3bdac13927fdc888b903db93b2ffdbd90b295a69 metrics: - type: map value: 30.83 - type: main_score value: 30.83 - task: type: STS dataset: name: MTEB BIOSSES (default) type: mteb/biosses-sts config: default split: test revision: d3fb88f8f02e40887cd149695127462bbcf29b4a metrics: - type: cosine_pearson value: 88.80382082011027 - type: cosine_spearman value: 88.68876782169106 - type: euclidean_pearson value: 87.00802890147176 - type: euclidean_spearman value: 87.43211268192712 - type: main_score value: 88.68876782169106 - type: manhattan_pearson value: 87.14062537179474 - type: manhattan_spearman value: 87.59115245033443 - type: pearson value: 88.80382082011027 - type: spearman value: 88.68876782169106 - task: type: STS dataset: name: MTEB BQ (default) type: C-MTEB/BQ config: default split: test revision: e3dda5e115e487b39ec7e618c0c6a29137052a55 metrics: - type: cosine_pearson value: 61.588006604878196 - type: cosine_spearman value: 63.20615427154465 - type: euclidean_pearson value: 61.818547092516496 - type: euclidean_spearman value: 63.21558009151778 - type: main_score value: 63.20615427154465 - type: manhattan_pearson value: 61.665588158487616 - type: manhattan_spearman value: 63.051544488238584 - type: pearson value: 61.588006604878196 - type: spearman value: 63.20615427154465 - task: type: Retrieval dataset: name: MTEB BSARDRetrieval (default) type: maastrichtlawtech/bsard config: default split: test revision: 5effa1b9b5fa3b0f9e12523e6e43e5f86a6e6d59 metrics: - type: main_score value: 64.414 - type: map_at_1 value: 14.865 - type: map_at_10 value: 21.605 - type: map_at_100 value: 22.762 - type: map_at_1000 value: 22.854 - type: map_at_20 value: 22.259999999999998 - type: map_at_3 value: 20.119999999999997 - type: map_at_5 value: 20.931 - type: mrr_at_1 value: 14.864864864864865 - type: mrr_at_10 value: 21.605176605176606 - type: mrr_at_100 value: 22.7622306460065 - type: mrr_at_1000 value: 22.85383406410312 - type: mrr_at_20 value: 22.259528463088845 - type: mrr_at_3 value: 20.12012012012012 - type: mrr_at_5 value: 20.930930930930934 - type: 
nauc_map_at_1000_diff1 value: 17.486265968689338 - type: nauc_map_at_1000_max value: 22.736799291688836 - type: nauc_map_at_1000_std value: 9.831687441977147 - type: nauc_map_at_100_diff1 value: 17.50754492049086 - type: nauc_map_at_100_max value: 22.77693662806787 - type: nauc_map_at_100_std value: 9.853899509675395 - type: nauc_map_at_10_diff1 value: 17.42133968580952 - type: nauc_map_at_10_max value: 22.45861793882279 - type: nauc_map_at_10_std value: 8.964888472915938 - type: nauc_map_at_1_diff1 value: 19.433947086968093 - type: nauc_map_at_1_max value: 24.75657047550517 - type: nauc_map_at_1_std value: 15.122329157218505 - type: nauc_map_at_20_diff1 value: 17.429856756008785 - type: nauc_map_at_20_max value: 22.438850987431017 - type: nauc_map_at_20_std value: 9.172746012213558 - type: nauc_map_at_3_diff1 value: 18.218182689678475 - type: nauc_map_at_3_max value: 23.57169444088667 - type: nauc_map_at_3_std value: 10.464473559366356 - type: nauc_map_at_5_diff1 value: 18.6075342519133 - type: nauc_map_at_5_max value: 23.308845973576673 - type: nauc_map_at_5_std value: 9.364009996445652 - type: nauc_mrr_at_1000_diff1 value: 17.486265968689338 - type: nauc_mrr_at_1000_max value: 22.736799291688836 - type: nauc_mrr_at_1000_std value: 9.831687441977147 - type: nauc_mrr_at_100_diff1 value: 17.50754492049086 - type: nauc_mrr_at_100_max value: 22.77693662806787 - type: nauc_mrr_at_100_std value: 9.853899509675395 - type: nauc_mrr_at_10_diff1 value: 17.42133968580952 - type: nauc_mrr_at_10_max value: 22.45861793882279 - type: nauc_mrr_at_10_std value: 8.964888472915938 - type: nauc_mrr_at_1_diff1 value: 19.433947086968093 - type: nauc_mrr_at_1_max value: 24.75657047550517 - type: nauc_mrr_at_1_std value: 15.122329157218505 - type: nauc_mrr_at_20_diff1 value: 17.429856756008785 - type: nauc_mrr_at_20_max value: 22.438850987431017 - type: nauc_mrr_at_20_std value: 9.172746012213558 - type: nauc_mrr_at_3_diff1 value: 18.218182689678475 - type: nauc_mrr_at_3_max value: 23.57169444088667 - type: nauc_mrr_at_3_std value: 10.464473559366356 - type: nauc_mrr_at_5_diff1 value: 18.6075342519133 - type: nauc_mrr_at_5_max value: 23.308845973576673 - type: nauc_mrr_at_5_std value: 9.364009996445652 - type: nauc_ndcg_at_1000_diff1 value: 16.327871824135745 - type: nauc_ndcg_at_1000_max value: 23.308241052911495 - type: nauc_ndcg_at_1000_std value: 11.50905911184097 - type: nauc_ndcg_at_100_diff1 value: 16.676226744692773 - type: nauc_ndcg_at_100_max value: 24.323253721240974 - type: nauc_ndcg_at_100_std value: 11.952612443651557 - type: nauc_ndcg_at_10_diff1 value: 16.030325121764594 - type: nauc_ndcg_at_10_max value: 21.306799242079542 - type: nauc_ndcg_at_10_std value: 6.63359364302513 - type: nauc_ndcg_at_1_diff1 value: 19.433947086968093 - type: nauc_ndcg_at_1_max value: 24.75657047550517 - type: nauc_ndcg_at_1_std value: 15.122329157218505 - type: nauc_ndcg_at_20_diff1 value: 16.013173605999857 - type: nauc_ndcg_at_20_max value: 21.607217260736576 - type: nauc_ndcg_at_20_std value: 7.319482417138996 - type: nauc_ndcg_at_3_diff1 value: 17.97958548328493 - type: nauc_ndcg_at_3_max value: 23.58346522810145 - type: nauc_ndcg_at_3_std value: 9.392582854708314 - type: nauc_ndcg_at_5_diff1 value: 18.734733324685287 - type: nauc_ndcg_at_5_max value: 23.273244317623742 - type: nauc_ndcg_at_5_std value: 7.638611545253834 - type: nauc_precision_at_1000_diff1 value: 7.919843339380295 - type: nauc_precision_at_1000_max value: 31.575386234270486 - type: nauc_precision_at_1000_std value: 39.332224386769404 - type: 
nauc_precision_at_100_diff1 value: 15.018050960000052 - type: nauc_precision_at_100_max value: 34.98209513759861 - type: nauc_precision_at_100_std value: 26.970034484359022 - type: nauc_precision_at_10_diff1 value: 12.102191084210922 - type: nauc_precision_at_10_max value: 18.112541150340675 - type: nauc_precision_at_10_std value: 0.7358784689406018 - type: nauc_precision_at_1_diff1 value: 19.433947086968093 - type: nauc_precision_at_1_max value: 24.75657047550517 - type: nauc_precision_at_1_std value: 15.122329157218505 - type: nauc_precision_at_20_diff1 value: 12.018814361204328 - type: nauc_precision_at_20_max value: 19.75123746049928 - type: nauc_precision_at_20_std value: 3.012204650582264 - type: nauc_precision_at_3_diff1 value: 17.41375604940955 - type: nauc_precision_at_3_max value: 23.699834627021037 - type: nauc_precision_at_3_std value: 6.793486779050103 - type: nauc_precision_at_5_diff1 value: 19.194631963780257 - type: nauc_precision_at_5_max value: 23.31708702442155 - type: nauc_precision_at_5_std value: 3.4591358279667332 - type: nauc_recall_at_1000_diff1 value: 7.919843339380378 - type: nauc_recall_at_1000_max value: 31.57538623427063 - type: nauc_recall_at_1000_std value: 39.332224386769546 - type: nauc_recall_at_100_diff1 value: 15.018050960000085 - type: nauc_recall_at_100_max value: 34.9820951375986 - type: nauc_recall_at_100_std value: 26.97003448435901 - type: nauc_recall_at_10_diff1 value: 12.102191084210837 - type: nauc_recall_at_10_max value: 18.112541150340594 - type: nauc_recall_at_10_std value: 0.7358784689405188 - type: nauc_recall_at_1_diff1 value: 19.433947086968093 - type: nauc_recall_at_1_max value: 24.75657047550517 - type: nauc_recall_at_1_std value: 15.122329157218505 - type: nauc_recall_at_20_diff1 value: 12.01881436120429 - type: nauc_recall_at_20_max value: 19.751237460499222 - type: nauc_recall_at_20_std value: 3.0122046505822135 - type: nauc_recall_at_3_diff1 value: 17.413756049409503 - type: nauc_recall_at_3_max value: 23.699834627020998 - type: nauc_recall_at_3_std value: 6.793486779050083 - type: nauc_recall_at_5_diff1 value: 19.194631963780203 - type: nauc_recall_at_5_max value: 23.3170870244215 - type: nauc_recall_at_5_std value: 3.459135827966664 - type: ndcg_at_1 value: 14.865 - type: ndcg_at_10 value: 24.764 - type: ndcg_at_100 value: 30.861 - type: ndcg_at_1000 value: 33.628 - type: ndcg_at_20 value: 27.078000000000003 - type: ndcg_at_3 value: 21.675 - type: ndcg_at_5 value: 23.148 - type: precision_at_1 value: 14.865 - type: precision_at_10 value: 3.4680000000000004 - type: precision_at_100 value: 0.644 - type: precision_at_1000 value: 0.087 - type: precision_at_20 value: 2.185 - type: precision_at_3 value: 8.709 - type: precision_at_5 value: 5.946 - type: recall_at_1 value: 14.865 - type: recall_at_10 value: 34.685 - type: recall_at_100 value: 64.414 - type: recall_at_1000 value: 86.937 - type: recall_at_20 value: 43.694 - type: recall_at_3 value: 26.125999999999998 - type: recall_at_5 value: 29.73 - task: type: Classification dataset: name: MTEB Banking77Classification (default) type: mteb/banking77 config: default split: test revision: 0fd18e25b25c072e09e0d92ab615fda904d66300 metrics: - type: accuracy value: 84.08116883116882 - type: f1 value: 84.05587055990273 - type: f1_weighted value: 84.05587055990274 - type: main_score value: 84.08116883116882 - task: type: Clustering dataset: name: MTEB BiorxivClusteringP2P (default) type: mteb/biorxiv-clustering-p2p config: default split: test revision: 65b79d1d13f80053f67aca9498d9402c2d9f1f40 
metrics: - type: main_score value: 38.1941007822277 - type: v_measure value: 38.1941007822277 - type: v_measure_std value: 0.7502113547288178 - task: type: Clustering dataset: name: MTEB BiorxivClusteringS2S (default) type: mteb/biorxiv-clustering-s2s config: default split: test revision: 258694dd0231531bc1fd9de6ceb52a0853c6d908 metrics: - type: main_score value: 34.42075599178318 - type: v_measure value: 34.42075599178318 - type: v_measure_std value: 0.600256720497283 - task: type: Clustering dataset: name: MTEB BlurbsClusteringP2P (default) type: slvnwhrl/blurbs-clustering-p2p config: default split: test revision: a2dd5b02a77de3466a3eaa98ae586b5610314496 metrics: - type: main_score value: 41.634627363047265 - type: v_measure value: 41.634627363047265 - type: v_measure_std value: 9.726923191225307 - task: type: Clustering dataset: name: MTEB BlurbsClusteringS2S (default) type: slvnwhrl/blurbs-clustering-s2s config: default split: test revision: 22793b6a6465bf00120ad525e38c51210858132c metrics: - type: main_score value: 20.996468295584197 - type: v_measure value: 20.996468295584197 - type: v_measure_std value: 9.225766688272197 - task: type: Classification dataset: name: MTEB CBD (default) type: PL-MTEB/cbd config: default split: test revision: 36ddb419bcffe6a5374c3891957912892916f28d metrics: - type: accuracy value: 69.99 - type: ap value: 22.57826353116948 - type: ap_weighted value: 22.57826353116948 - type: f1 value: 59.04574955548393 - type: f1_weighted value: 74.36235022309789 - type: main_score value: 69.99 - task: type: PairClassification dataset: name: MTEB CDSC-E (default) type: PL-MTEB/cdsce-pairclassification config: default split: test revision: 0a3d4aa409b22f80eb22cbf59b492637637b536d metrics: - type: cosine_accuracy value: 88.7 - type: cosine_accuracy_threshold value: 97.37848043441772 - type: cosine_ap value: 73.0405088928302 - type: cosine_f1 value: 63.52201257861635 - type: cosine_f1_threshold value: 96.98888063430786 - type: cosine_precision value: 78.90625 - type: cosine_recall value: 53.1578947368421 - type: dot_accuracy value: 84.89999999999999 - type: dot_accuracy_threshold value: 43603.09753417969 - type: dot_ap value: 56.98157569085279 - type: dot_f1 value: 57.606490872210955 - type: dot_f1_threshold value: 40406.23779296875 - type: dot_precision value: 46.864686468646866 - type: dot_recall value: 74.73684210526315 - type: euclidean_accuracy value: 88.5 - type: euclidean_accuracy_threshold value: 498.0483055114746 - type: euclidean_ap value: 72.97328234816734 - type: euclidean_f1 value: 63.722397476340696 - type: euclidean_f1_threshold value: 508.6186408996582 - type: euclidean_precision value: 79.52755905511812 - type: euclidean_recall value: 53.1578947368421 - type: main_score value: 73.0405088928302 - type: manhattan_accuracy value: 88.6 - type: manhattan_accuracy_threshold value: 12233.079528808594 - type: manhattan_ap value: 72.92148503992615 - type: manhattan_f1 value: 63.69426751592356 - type: manhattan_f1_threshold value: 12392.754364013672 - type: manhattan_precision value: 80.64516129032258 - type: manhattan_recall value: 52.63157894736842 - type: max_accuracy value: 88.7 - type: max_ap value: 73.0405088928302 - type: max_f1 value: 63.722397476340696 - type: max_precision value: 80.64516129032258 - type: max_recall value: 74.73684210526315 - type: similarity_accuracy value: 88.7 - type: similarity_accuracy_threshold value: 97.37848043441772 - type: similarity_ap value: 73.0405088928302 - type: similarity_f1 value: 63.52201257861635 - type: 
similarity_f1_threshold value: 96.98888063430786 - type: similarity_precision value: 78.90625 - type: similarity_recall value: 53.1578947368421 - task: type: STS dataset: name: MTEB CDSC-R (default) type: PL-MTEB/cdscr-sts config: default split: test revision: 1cd6abbb00df7d14be3dbd76a7dcc64b3a79a7cd metrics: - type: cosine_pearson value: 92.97492495289738 - type: cosine_spearman value: 92.63248098608472 - type: euclidean_pearson value: 92.04712487782031 - type: euclidean_spearman value: 92.19679486755008 - type: main_score value: 92.63248098608472 - type: manhattan_pearson value: 92.0101187740438 - type: manhattan_spearman value: 92.20926859332754 - type: pearson value: 92.97492495289738 - type: spearman value: 92.63248098608472 - task: type: Clustering dataset: name: MTEB CLSClusteringP2P (default) type: C-MTEB/CLSClusteringP2P config: default split: test revision: 4b6227591c6c1a73bc76b1055f3b7f3588e72476 metrics: - type: main_score value: 39.96377851800628 - type: v_measure value: 39.96377851800628 - type: v_measure_std value: 0.9793033243093288 - task: type: Clustering dataset: name: MTEB CLSClusteringS2S (default) type: C-MTEB/CLSClusteringS2S config: default split: test revision: e458b3f5414b62b7f9f83499ac1f5497ae2e869f metrics: - type: main_score value: 38.788850224595784 - type: v_measure value: 38.788850224595784 - type: v_measure_std value: 1.0712604145916924 - task: type: Reranking dataset: name: MTEB CMedQAv1 type: C-MTEB/CMedQAv1-reranking config: default split: test revision: 8d7f1e942507dac42dc58017c1a001c3717da7df metrics: - type: map value: 77.95952507806115 - type: mrr value: 80.8643253968254 - type: main_score value: 77.95952507806115 - task: type: Reranking dataset: name: MTEB CMedQAv2 type: C-MTEB/CMedQAv2-reranking config: default split: test revision: 23d186750531a14a0357ca22cd92d712fd512ea0 metrics: - type: map value: 78.21522500165045 - type: mrr value: 81.28194444444443 - type: main_score value: 78.21522500165045 - task: type: Retrieval dataset: name: MTEB CQADupstackAndroidRetrieval (default) type: mteb/cqadupstack-android config: default split: test revision: f46a197baaae43b4f621051089b82a364682dfeb metrics: - type: map_at_1 value: 33.377 - type: map_at_10 value: 46.371 - type: map_at_100 value: 47.829 - type: map_at_1000 value: 47.94 - type: map_at_20 value: 47.205000000000005 - type: map_at_3 value: 42.782 - type: map_at_5 value: 44.86 - type: mrr_at_1 value: 41.345 - type: mrr_at_10 value: 52.187 - type: mrr_at_100 value: 52.893 - type: mrr_at_1000 value: 52.929 - type: mrr_at_20 value: 52.637 - type: mrr_at_3 value: 49.714000000000006 - type: mrr_at_5 value: 51.373000000000005 - type: ndcg_at_1 value: 41.345 - type: ndcg_at_10 value: 52.946000000000005 - type: ndcg_at_100 value: 57.92699999999999 - type: ndcg_at_1000 value: 59.609 - type: ndcg_at_20 value: 54.900999999999996 - type: ndcg_at_3 value: 48.357 - type: ndcg_at_5 value: 50.739000000000004 - type: precision_at_1 value: 41.345 - type: precision_at_10 value: 10.186 - type: precision_at_100 value: 1.554 - type: precision_at_1000 value: 0.2 - type: precision_at_20 value: 5.959 - type: precision_at_3 value: 23.796 - type: precision_at_5 value: 17.024 - type: recall_at_1 value: 33.377 - type: recall_at_10 value: 65.067 - type: recall_at_100 value: 86.04899999999999 - type: recall_at_1000 value: 96.54899999999999 - type: recall_at_20 value: 72.071 - type: recall_at_3 value: 51.349999999999994 - type: recall_at_5 value: 58.41 - type: main_score value: 52.946000000000005 - task: type: Retrieval dataset: 
name: MTEB CQADupstackEnglishRetrieval (default) type: mteb/cqadupstack-english config: default split: test revision: ad9991cb51e31e31e430383c75ffb2885547b5f0 metrics: - type: map_at_1 value: 31.097 - type: map_at_10 value: 42.183 - type: map_at_100 value: 43.580999999999996 - type: map_at_1000 value: 43.718 - type: map_at_20 value: 42.921 - type: map_at_3 value: 38.963 - type: map_at_5 value: 40.815 - type: mrr_at_1 value: 39.745000000000005 - type: mrr_at_10 value: 48.736000000000004 - type: mrr_at_100 value: 49.405 - type: mrr_at_1000 value: 49.452 - type: mrr_at_20 value: 49.118 - type: mrr_at_3 value: 46.497 - type: mrr_at_5 value: 47.827999999999996 - type: ndcg_at_1 value: 39.745000000000005 - type: ndcg_at_10 value: 48.248000000000005 - type: ndcg_at_100 value: 52.956 - type: ndcg_at_1000 value: 54.99699999999999 - type: ndcg_at_20 value: 50.01 - type: ndcg_at_3 value: 43.946000000000005 - type: ndcg_at_5 value: 46.038000000000004 - type: precision_at_1 value: 39.745000000000005 - type: precision_at_10 value: 9.229 - type: precision_at_100 value: 1.5070000000000001 - type: precision_at_1000 value: 0.199 - type: precision_at_20 value: 5.489999999999999 - type: precision_at_3 value: 21.38 - type: precision_at_5 value: 15.274 - type: recall_at_1 value: 31.097 - type: recall_at_10 value: 58.617 - type: recall_at_100 value: 78.55199999999999 - type: recall_at_1000 value: 91.13900000000001 - type: recall_at_20 value: 64.92 - type: recall_at_3 value: 45.672000000000004 - type: recall_at_5 value: 51.669 - type: main_score value: 48.248000000000005 - task: type: Retrieval dataset: name: MTEB CQADupstackGamingRetrieval (default) type: mteb/cqadupstack-gaming config: default split: test revision: 4885aa143210c98657558c04aaf3dc47cfb54340 metrics: - type: map_at_1 value: 39.745000000000005 - type: map_at_10 value: 52.063 - type: map_at_100 value: 53.077 - type: map_at_1000 value: 53.13 - type: map_at_20 value: 52.66 - type: map_at_3 value: 48.662 - type: map_at_5 value: 50.507000000000005 - type: mrr_at_1 value: 45.391999999999996 - type: mrr_at_10 value: 55.528 - type: mrr_at_100 value: 56.16100000000001 - type: mrr_at_1000 value: 56.192 - type: mrr_at_20 value: 55.923 - type: mrr_at_3 value: 52.93600000000001 - type: mrr_at_5 value: 54.435 - type: ndcg_at_1 value: 45.391999999999996 - type: ndcg_at_10 value: 58.019 - type: ndcg_at_100 value: 61.936 - type: ndcg_at_1000 value: 63.015 - type: ndcg_at_20 value: 59.691 - type: ndcg_at_3 value: 52.294 - type: ndcg_at_5 value: 55.017 - type: precision_at_1 value: 45.391999999999996 - type: precision_at_10 value: 9.386 - type: precision_at_100 value: 1.232 - type: precision_at_1000 value: 0.136 - type: precision_at_20 value: 5.223 - type: precision_at_3 value: 23.177 - type: precision_at_5 value: 15.9 - type: recall_at_1 value: 39.745000000000005 - type: recall_at_10 value: 72.08099999999999 - type: recall_at_100 value: 88.85300000000001 - type: recall_at_1000 value: 96.569 - type: recall_at_20 value: 78.203 - type: recall_at_3 value: 56.957 - type: recall_at_5 value: 63.63100000000001 - type: main_score value: 58.019 - task: type: Retrieval dataset: name: MTEB CQADupstackGisRetrieval (default) type: mteb/cqadupstack-gis config: default split: test revision: 5003b3064772da1887988e05400cf3806fe491f2 metrics: - type: map_at_1 value: 26.651999999999997 - type: map_at_10 value: 35.799 - type: map_at_100 value: 36.846000000000004 - type: map_at_1000 value: 36.931000000000004 - type: map_at_20 value: 36.341 - type: map_at_3 value: 32.999 - type: map_at_5 
value: 34.597 - type: mrr_at_1 value: 28.814 - type: mrr_at_10 value: 37.869 - type: mrr_at_100 value: 38.728 - type: mrr_at_1000 value: 38.795 - type: mrr_at_20 value: 38.317 - type: mrr_at_3 value: 35.235 - type: mrr_at_5 value: 36.738 - type: ndcg_at_1 value: 28.814 - type: ndcg_at_10 value: 41.028 - type: ndcg_at_100 value: 46.162 - type: ndcg_at_1000 value: 48.15 - type: ndcg_at_20 value: 42.824 - type: ndcg_at_3 value: 35.621 - type: ndcg_at_5 value: 38.277 - type: precision_at_1 value: 28.814 - type: precision_at_10 value: 6.361999999999999 - type: precision_at_100 value: 0.9450000000000001 - type: precision_at_1000 value: 0.11399999999999999 - type: precision_at_20 value: 3.6159999999999997 - type: precision_at_3 value: 15.140999999999998 - type: precision_at_5 value: 10.712000000000002 - type: recall_at_1 value: 26.651999999999997 - type: recall_at_10 value: 55.038 - type: recall_at_100 value: 78.806 - type: recall_at_1000 value: 93.485 - type: recall_at_20 value: 61.742 - type: recall_at_3 value: 40.682 - type: recall_at_5 value: 46.855000000000004 - type: main_score value: 41.028 - task: type: Retrieval dataset: name: MTEB CQADupstackMathematicaRetrieval (default) type: mteb/cqadupstack-mathematica config: default split: test revision: 90fceea13679c63fe563ded68f3b6f06e50061de metrics: - type: map_at_1 value: 17.627000000000002 - type: map_at_10 value: 26.436999999999998 - type: map_at_100 value: 27.85 - type: map_at_1000 value: 27.955999999999996 - type: map_at_20 value: 27.233 - type: map_at_3 value: 23.777 - type: map_at_5 value: 25.122 - type: mrr_at_1 value: 22.387999999999998 - type: mrr_at_10 value: 31.589 - type: mrr_at_100 value: 32.641999999999996 - type: mrr_at_1000 value: 32.696999999999996 - type: mrr_at_20 value: 32.201 - type: mrr_at_3 value: 28.98 - type: mrr_at_5 value: 30.342000000000002 - type: ndcg_at_1 value: 22.387999999999998 - type: ndcg_at_10 value: 32.129999999999995 - type: ndcg_at_100 value: 38.562999999999995 - type: ndcg_at_1000 value: 40.903 - type: ndcg_at_20 value: 34.652 - type: ndcg_at_3 value: 27.26 - type: ndcg_at_5 value: 29.235 - type: precision_at_1 value: 22.387999999999998 - type: precision_at_10 value: 5.970000000000001 - type: precision_at_100 value: 1.068 - type: precision_at_1000 value: 0.13899999999999998 - type: precision_at_20 value: 3.6999999999999997 - type: precision_at_3 value: 13.267000000000001 - type: precision_at_5 value: 9.403 - type: recall_at_1 value: 17.627000000000002 - type: recall_at_10 value: 44.71 - type: recall_at_100 value: 72.426 - type: recall_at_1000 value: 88.64699999999999 - type: recall_at_20 value: 53.65 - type: recall_at_3 value: 30.989 - type: recall_at_5 value: 36.237 - type: main_score value: 32.129999999999995 - task: type: Retrieval dataset: name: MTEB CQADupstackPhysicsRetrieval (default) type: mteb/cqadupstack-physics config: default split: test revision: 79531abbd1fb92d06c6d6315a0cbbbf5bb247ea4 metrics: - type: map_at_1 value: 30.891000000000002 - type: map_at_10 value: 41.519 - type: map_at_100 value: 42.896 - type: map_at_1000 value: 42.992999999999995 - type: map_at_20 value: 42.287 - type: map_at_3 value: 37.822 - type: map_at_5 value: 39.976 - type: mrr_at_1 value: 37.921 - type: mrr_at_10 value: 47.260999999999996 - type: mrr_at_100 value: 48.044 - type: mrr_at_1000 value: 48.08 - type: mrr_at_20 value: 47.699999999999996 - type: mrr_at_3 value: 44.513999999999996 - type: mrr_at_5 value: 46.064 - type: ndcg_at_1 value: 37.921 - type: ndcg_at_10 value: 47.806 - type: ndcg_at_100 value: 53.274 
- type: ndcg_at_1000 value: 55.021 - type: ndcg_at_20 value: 49.973 - type: ndcg_at_3 value: 42.046 - type: ndcg_at_5 value: 44.835 - type: precision_at_1 value: 37.921 - type: precision_at_10 value: 8.767999999999999 - type: precision_at_100 value: 1.353 - type: precision_at_1000 value: 0.168 - type: precision_at_20 value: 5.135 - type: precision_at_3 value: 20.051 - type: precision_at_5 value: 14.398 - type: recall_at_1 value: 30.891000000000002 - type: recall_at_10 value: 60.897999999999996 - type: recall_at_100 value: 83.541 - type: recall_at_1000 value: 94.825 - type: recall_at_20 value: 68.356 - type: recall_at_3 value: 44.65 - type: recall_at_5 value: 51.919000000000004 - type: main_score value: 47.806 - task: type: Retrieval dataset: name: MTEB CQADupstackProgrammersRetrieval (default) type: mteb/cqadupstack-programmers config: default split: test revision: 6184bc1440d2dbc7612be22b50686b8826d22b32 metrics: - type: map_at_1 value: 27.654 - type: map_at_10 value: 38.025999999999996 - type: map_at_100 value: 39.425 - type: map_at_1000 value: 39.528 - type: map_at_20 value: 38.838 - type: map_at_3 value: 34.745 - type: map_at_5 value: 36.537 - type: mrr_at_1 value: 34.018 - type: mrr_at_10 value: 43.314 - type: mrr_at_100 value: 44.283 - type: mrr_at_1000 value: 44.327 - type: mrr_at_20 value: 43.929 - type: mrr_at_3 value: 40.868 - type: mrr_at_5 value: 42.317 - type: ndcg_at_1 value: 34.018 - type: ndcg_at_10 value: 43.887 - type: ndcg_at_100 value: 49.791000000000004 - type: ndcg_at_1000 value: 51.834 - type: ndcg_at_20 value: 46.376 - type: ndcg_at_3 value: 38.769999999999996 - type: ndcg_at_5 value: 41.144 - type: precision_at_1 value: 34.018 - type: precision_at_10 value: 8.001999999999999 - type: precision_at_100 value: 1.2630000000000001 - type: precision_at_1000 value: 0.16 - type: precision_at_20 value: 4.737 - type: precision_at_3 value: 18.417 - type: precision_at_5 value: 13.150999999999998 - type: recall_at_1 value: 27.654 - type: recall_at_10 value: 56.111 - type: recall_at_100 value: 81.136 - type: recall_at_1000 value: 94.788 - type: recall_at_20 value: 65.068 - type: recall_at_3 value: 41.713 - type: recall_at_5 value: 48.106 - type: main_score value: 43.887 - task: type: Retrieval dataset: name: MTEB CQADupstackRetrieval (default) type: CQADupstackRetrieval_is_a_combined_dataset config: default split: test revision: CQADupstackRetrieval_is_a_combined_dataset metrics: - type: main_score value: 42.58858333333333 - type: ndcg_at_10 value: 42.58858333333333 - task: type: Retrieval dataset: name: MTEB CQADupstackStatsRetrieval (default) type: mteb/cqadupstack-stats config: default split: test revision: 65ac3a16b8e91f9cee4c9828cc7c335575432a2a metrics: - type: map_at_1 value: 24.501 - type: map_at_10 value: 32.814 - type: map_at_100 value: 33.754 - type: map_at_1000 value: 33.859 - type: map_at_20 value: 33.324 - type: map_at_3 value: 30.758000000000003 - type: map_at_5 value: 31.936999999999998 - type: mrr_at_1 value: 27.761000000000003 - type: mrr_at_10 value: 35.662 - type: mrr_at_100 value: 36.443999999999996 - type: mrr_at_1000 value: 36.516999999999996 - type: mrr_at_20 value: 36.085 - type: mrr_at_3 value: 33.742 - type: mrr_at_5 value: 34.931 - type: ndcg_at_1 value: 27.761000000000003 - type: ndcg_at_10 value: 37.208000000000006 - type: ndcg_at_100 value: 41.839 - type: ndcg_at_1000 value: 44.421 - type: ndcg_at_20 value: 38.917 - type: ndcg_at_3 value: 33.544000000000004 - type: ndcg_at_5 value: 35.374 - type: precision_at_1 value: 27.761000000000003 - type: 
precision_at_10 value: 5.92 - type: precision_at_100 value: 0.899 - type: precision_at_1000 value: 0.12 - type: precision_at_20 value: 3.4130000000000003 - type: precision_at_3 value: 15.031 - type: precision_at_5 value: 10.306999999999999 - type: recall_at_1 value: 24.501 - type: recall_at_10 value: 47.579 - type: recall_at_100 value: 69.045 - type: recall_at_1000 value: 88.032 - type: recall_at_20 value: 54.125 - type: recall_at_3 value: 37.202 - type: recall_at_5 value: 41.927 - type: main_score value: 37.208000000000006 - task: type: Retrieval dataset: name: MTEB CQADupstackTexRetrieval (default) type: mteb/cqadupstack-tex config: default split: test revision: 46989137a86843e03a6195de44b09deda022eec7 metrics: - type: map_at_1 value: 18.29 - type: map_at_10 value: 26.183 - type: map_at_100 value: 27.351999999999997 - type: map_at_1000 value: 27.483999999999998 - type: map_at_20 value: 26.798 - type: map_at_3 value: 23.629 - type: map_at_5 value: 24.937 - type: mrr_at_1 value: 22.299 - type: mrr_at_10 value: 30.189 - type: mrr_at_100 value: 31.098 - type: mrr_at_1000 value: 31.177 - type: mrr_at_20 value: 30.697000000000003 - type: mrr_at_3 value: 27.862 - type: mrr_at_5 value: 29.066 - type: ndcg_at_1 value: 22.299 - type: ndcg_at_10 value: 31.202 - type: ndcg_at_100 value: 36.617 - type: ndcg_at_1000 value: 39.544000000000004 - type: ndcg_at_20 value: 33.177 - type: ndcg_at_3 value: 26.639000000000003 - type: ndcg_at_5 value: 28.526 - type: precision_at_1 value: 22.299 - type: precision_at_10 value: 5.8020000000000005 - type: precision_at_100 value: 1.0070000000000001 - type: precision_at_1000 value: 0.14400000000000002 - type: precision_at_20 value: 3.505 - type: precision_at_3 value: 12.698 - type: precision_at_5 value: 9.174 - type: recall_at_1 value: 18.29 - type: recall_at_10 value: 42.254999999999995 - type: recall_at_100 value: 66.60000000000001 - type: recall_at_1000 value: 87.31400000000001 - type: recall_at_20 value: 49.572 - type: recall_at_3 value: 29.342000000000002 - type: recall_at_5 value: 34.221000000000004 - type: main_score value: 31.202 - task: type: Retrieval dataset: name: MTEB CQADupstackUnixRetrieval (default) type: mteb/cqadupstack-unix config: default split: test revision: 6c6430d3a6d36f8d2a829195bc5dc94d7e063e53 metrics: - type: map_at_1 value: 27.722 - type: map_at_10 value: 37.698 - type: map_at_100 value: 38.899 - type: map_at_1000 value: 38.998 - type: map_at_20 value: 38.381 - type: map_at_3 value: 34.244 - type: map_at_5 value: 36.295 - type: mrr_at_1 value: 32.183 - type: mrr_at_10 value: 41.429 - type: mrr_at_100 value: 42.308 - type: mrr_at_1000 value: 42.358000000000004 - type: mrr_at_20 value: 41.957 - type: mrr_at_3 value: 38.401999999999994 - type: mrr_at_5 value: 40.294999999999995 - type: ndcg_at_1 value: 32.183 - type: ndcg_at_10 value: 43.519000000000005 - type: ndcg_at_100 value: 48.786 - type: ndcg_at_1000 value: 50.861999999999995 - type: ndcg_at_20 value: 45.654 - type: ndcg_at_3 value: 37.521 - type: ndcg_at_5 value: 40.615 - type: precision_at_1 value: 32.183 - type: precision_at_10 value: 7.603 - type: precision_at_100 value: 1.135 - type: precision_at_1000 value: 0.14200000000000002 - type: precision_at_20 value: 4.408 - type: precision_at_3 value: 17.071 - type: precision_at_5 value: 12.668 - type: recall_at_1 value: 27.722 - type: recall_at_10 value: 57.230000000000004 - type: recall_at_100 value: 79.97999999999999 - type: recall_at_1000 value: 94.217 - type: recall_at_20 value: 64.864 - type: recall_at_3 value: 41.215 - type: 
recall_at_5 value: 48.774 - type: main_score value: 43.519000000000005 - task: type: Retrieval dataset: name: MTEB CQADupstackWebmastersRetrieval (default) type: mteb/cqadupstack-webmasters config: default split: test revision: 160c094312a0e1facb97e55eeddb698c0abe3571 metrics: - type: map_at_1 value: 25.852999999999998 - type: map_at_10 value: 35.394999999999996 - type: map_at_100 value: 37.291999999999994 - type: map_at_1000 value: 37.495 - type: map_at_20 value: 36.372 - type: map_at_3 value: 32.336 - type: map_at_5 value: 34.159 - type: mrr_at_1 value: 31.818 - type: mrr_at_10 value: 40.677 - type: mrr_at_100 value: 41.728 - type: mrr_at_1000 value: 41.778 - type: mrr_at_20 value: 41.301 - type: mrr_at_3 value: 38.208 - type: mrr_at_5 value: 39.592 - type: ndcg_at_1 value: 31.818 - type: ndcg_at_10 value: 41.559000000000005 - type: ndcg_at_100 value: 48.012 - type: ndcg_at_1000 value: 50.234 - type: ndcg_at_20 value: 44.15 - type: ndcg_at_3 value: 36.918 - type: ndcg_at_5 value: 39.227000000000004 - type: precision_at_1 value: 31.818 - type: precision_at_10 value: 8.043 - type: precision_at_100 value: 1.625 - type: precision_at_1000 value: 0.245 - type: precision_at_20 value: 5.2170000000000005 - type: precision_at_3 value: 17.655 - type: precision_at_5 value: 12.845999999999998 - type: recall_at_1 value: 25.852999999999998 - type: recall_at_10 value: 53.093 - type: recall_at_100 value: 81.05799999999999 - type: recall_at_1000 value: 94.657 - type: recall_at_20 value: 62.748000000000005 - type: recall_at_3 value: 39.300000000000004 - type: recall_at_5 value: 45.754 - type: main_score value: 41.559000000000005 - task: type: Retrieval dataset: name: MTEB CQADupstackWordpressRetrieval (default) type: mteb/cqadupstack-wordpress config: default split: test revision: 4ffe81d471b1924886b33c7567bfb200e9eec5c4 metrics: - type: map_at_1 value: 19.23 - type: map_at_10 value: 28.128999999999998 - type: map_at_100 value: 29.195 - type: map_at_1000 value: 29.310000000000002 - type: map_at_20 value: 28.713 - type: map_at_3 value: 25.191000000000003 - type: map_at_5 value: 26.69 - type: mrr_at_1 value: 21.257 - type: mrr_at_10 value: 30.253999999999998 - type: mrr_at_100 value: 31.195 - type: mrr_at_1000 value: 31.270999999999997 - type: mrr_at_20 value: 30.747999999999998 - type: mrr_at_3 value: 27.633999999999997 - type: mrr_at_5 value: 28.937 - type: ndcg_at_1 value: 21.257 - type: ndcg_at_10 value: 33.511 - type: ndcg_at_100 value: 38.733000000000004 - type: ndcg_at_1000 value: 41.489 - type: ndcg_at_20 value: 35.476 - type: ndcg_at_3 value: 27.845 - type: ndcg_at_5 value: 30.264999999999997 - type: precision_at_1 value: 21.257 - type: precision_at_10 value: 5.619 - type: precision_at_100 value: 0.893 - type: precision_at_1000 value: 0.124 - type: precision_at_20 value: 3.29 - type: precision_at_3 value: 12.508 - type: precision_at_5 value: 8.946 - type: recall_at_1 value: 19.23 - type: recall_at_10 value: 48.185 - type: recall_at_100 value: 71.932 - type: recall_at_1000 value: 92.587 - type: recall_at_20 value: 55.533 - type: recall_at_3 value: 32.865 - type: recall_at_5 value: 38.577 - type: main_score value: 33.511 - task: type: Retrieval dataset: name: MTEB ClimateFEVER (default) type: mteb/climate-fever config: default split: test revision: 47f2ac6acb640fc46020b02a5b59fdda04d39380 metrics: - type: map_at_1 value: 19.594 - type: map_at_10 value: 32.519 - type: map_at_100 value: 34.1 - type: map_at_1000 value: 34.263 - type: map_at_20 value: 33.353 - type: map_at_3 value: 27.898 - type: map_at_5 
value: 30.524 - type: mrr_at_1 value: 46.515 - type: mrr_at_10 value: 56.958 - type: mrr_at_100 value: 57.54899999999999 - type: mrr_at_1000 value: 57.574999999999996 - type: mrr_at_20 value: 57.315000000000005 - type: mrr_at_3 value: 54.852999999999994 - type: mrr_at_5 value: 56.153 - type: ndcg_at_1 value: 46.515 - type: ndcg_at_10 value: 42.363 - type: ndcg_at_100 value: 48.233 - type: ndcg_at_1000 value: 50.993 - type: ndcg_at_20 value: 44.533 - type: ndcg_at_3 value: 37.297000000000004 - type: ndcg_at_5 value: 38.911 - type: precision_at_1 value: 46.515 - type: precision_at_10 value: 12.520999999999999 - type: precision_at_100 value: 1.8980000000000001 - type: precision_at_1000 value: 0.242 - type: precision_at_20 value: 7.212000000000001 - type: precision_at_3 value: 27.752 - type: precision_at_5 value: 20.391000000000002 - type: recall_at_1 value: 19.594 - type: recall_at_10 value: 46.539 - type: recall_at_100 value: 66.782 - type: recall_at_1000 value: 82.049 - type: recall_at_20 value: 52.611 - type: recall_at_3 value: 32.528 - type: recall_at_5 value: 38.933 - type: main_score value: 42.363 - task: type: Retrieval dataset: name: MTEB CmedqaRetrieval (default) type: C-MTEB/CmedqaRetrieval config: default split: dev revision: cd540c506dae1cf9e9a59c3e06f42030d54e7301 metrics: - type: main_score value: 35.927 - type: map_at_1 value: 20.144000000000002 - type: map_at_10 value: 29.94 - type: map_at_100 value: 31.630000000000003 - type: map_at_1000 value: 31.778000000000002 - type: map_at_20 value: 30.798 - type: map_at_3 value: 26.534999999999997 - type: map_at_5 value: 28.33 - type: mrr_at_1 value: 31.23280820205051 - type: mrr_at_10 value: 38.66781179421835 - type: mrr_at_100 value: 39.656936166081785 - type: mrr_at_1000 value: 39.724602893117414 - type: mrr_at_20 value: 39.21272461558451 - type: mrr_at_3 value: 36.30907726931729 - type: mrr_at_5 value: 37.59814953738436 - type: nauc_map_at_1000_diff1 value: 44.5755334437146 - type: nauc_map_at_1000_max value: 40.726916781400746 - type: nauc_map_at_1000_std value: -19.591835061497367 - type: nauc_map_at_100_diff1 value: 44.54542899921038 - type: nauc_map_at_100_max value: 40.68305902532837 - type: nauc_map_at_100_std value: -19.658902089283487 - type: nauc_map_at_10_diff1 value: 44.56110529630953 - type: nauc_map_at_10_max value: 39.89826167846008 - type: nauc_map_at_10_std value: -20.62910633667902 - type: nauc_map_at_1_diff1 value: 50.82120107004449 - type: nauc_map_at_1_max value: 33.208851367861584 - type: nauc_map_at_1_std value: -20.29409730258174 - type: nauc_map_at_20_diff1 value: 44.51171242433788 - type: nauc_map_at_20_max value: 40.30431132782945 - type: nauc_map_at_20_std value: -20.290524142792417 - type: nauc_map_at_3_diff1 value: 45.80394138665133 - type: nauc_map_at_3_max value: 37.766191281426956 - type: nauc_map_at_3_std value: -21.223601997333876 - type: nauc_map_at_5_diff1 value: 45.00457218474283 - type: nauc_map_at_5_max value: 38.901044576388365 - type: nauc_map_at_5_std value: -20.893069613941634 - type: nauc_mrr_at_1000_diff1 value: 50.09855359231429 - type: nauc_mrr_at_1000_max value: 46.481000170008826 - type: nauc_mrr_at_1000_std value: -16.053461377096102 - type: nauc_mrr_at_100_diff1 value: 50.08205026347746 - type: nauc_mrr_at_100_max value: 46.47262126963331 - type: nauc_mrr_at_100_std value: -16.049112778748693 - type: nauc_mrr_at_10_diff1 value: 50.02363239081706 - type: nauc_mrr_at_10_max value: 46.39287859062042 - type: nauc_mrr_at_10_std value: -16.280866744769657 - type: nauc_mrr_at_1_diff1 
value: 55.692503735317445 - type: nauc_mrr_at_1_max value: 47.334834529801014 - type: nauc_mrr_at_1_std value: -16.985483585693512 - type: nauc_mrr_at_20_diff1 value: 50.07725225722074 - type: nauc_mrr_at_20_max value: 46.47279295070193 - type: nauc_mrr_at_20_std value: -16.15168364678318 - type: nauc_mrr_at_3_diff1 value: 51.18685337274134 - type: nauc_mrr_at_3_max value: 46.7286365021621 - type: nauc_mrr_at_3_std value: -16.708451287313718 - type: nauc_mrr_at_5_diff1 value: 50.46777237893576 - type: nauc_mrr_at_5_max value: 46.5352076502249 - type: nauc_mrr_at_5_std value: -16.557413659905034 - type: nauc_ndcg_at_1000_diff1 value: 43.974299434438066 - type: nauc_ndcg_at_1000_max value: 43.44628675071857 - type: nauc_ndcg_at_1000_std value: -15.3495102005021 - type: nauc_ndcg_at_100_diff1 value: 43.336365081508504 - type: nauc_ndcg_at_100_max value: 43.11345604460776 - type: nauc_ndcg_at_100_std value: -15.571128070860615 - type: nauc_ndcg_at_10_diff1 value: 43.41266214720136 - type: nauc_ndcg_at_10_max value: 41.519676787851914 - type: nauc_ndcg_at_10_std value: -19.217175017223568 - type: nauc_ndcg_at_1_diff1 value: 55.692503735317445 - type: nauc_ndcg_at_1_max value: 47.334834529801014 - type: nauc_ndcg_at_1_std value: -16.985483585693512 - type: nauc_ndcg_at_20_diff1 value: 43.351653862834496 - type: nauc_ndcg_at_20_max value: 42.11608469750499 - type: nauc_ndcg_at_20_std value: -18.485363540641664 - type: nauc_ndcg_at_3_diff1 value: 45.64193888236677 - type: nauc_ndcg_at_3_max value: 42.497135099009995 - type: nauc_ndcg_at_3_std value: -18.764012041130094 - type: nauc_ndcg_at_5_diff1 value: 44.523392133895186 - type: nauc_ndcg_at_5_max value: 41.564242030096345 - type: nauc_ndcg_at_5_std value: -19.31080790984941 - type: nauc_precision_at_1000_diff1 value: 6.383464615714393 - type: nauc_precision_at_1000_max value: 27.439930931284657 - type: nauc_precision_at_1000_std value: 19.070716188143034 - type: nauc_precision_at_100_diff1 value: 12.599136754501284 - type: nauc_precision_at_100_max value: 35.886310962337795 - type: nauc_precision_at_100_std value: 14.06587592659196 - type: nauc_precision_at_10_diff1 value: 25.388891173150206 - type: nauc_precision_at_10_max value: 46.10269270777384 - type: nauc_precision_at_10_std value: -5.993803607158499 - type: nauc_precision_at_1_diff1 value: 55.692503735317445 - type: nauc_precision_at_1_max value: 47.334834529801014 - type: nauc_precision_at_1_std value: -16.985483585693512 - type: nauc_precision_at_20_diff1 value: 20.984013463099707 - type: nauc_precision_at_20_max value: 42.9471854616888 - type: nauc_precision_at_20_std value: -0.8045549929346024 - type: nauc_precision_at_3_diff1 value: 36.191850547148356 - type: nauc_precision_at_3_max value: 48.09923832376049 - type: nauc_precision_at_3_std value: -13.159407051271321 - type: nauc_precision_at_5_diff1 value: 31.04967966700407 - type: nauc_precision_at_5_max value: 47.62867673349624 - type: nauc_precision_at_5_std value: -10.345790325137353 - type: nauc_recall_at_1000_diff1 value: 11.03436839065707 - type: nauc_recall_at_1000_max value: 42.32265076651575 - type: nauc_recall_at_1000_std value: 30.478521053399206 - type: nauc_recall_at_100_diff1 value: 24.788349084510806 - type: nauc_recall_at_100_max value: 36.72097184821956 - type: nauc_recall_at_100_std value: -0.2241144179522076 - type: nauc_recall_at_10_diff1 value: 31.613053567704885 - type: nauc_recall_at_10_max value: 34.4597322828833 - type: nauc_recall_at_10_std value: -18.00022912690819 - type: nauc_recall_at_1_diff1 value: 
50.82120107004449 - type: nauc_recall_at_1_max value: 33.208851367861584 - type: nauc_recall_at_1_std value: -20.29409730258174 - type: nauc_recall_at_20_diff1 value: 30.277002670708384 - type: nauc_recall_at_20_max value: 35.212475675060375 - type: nauc_recall_at_20_std value: -15.822788854733687 - type: nauc_recall_at_3_diff1 value: 38.87844958322257 - type: nauc_recall_at_3_max value: 34.66914910044104 - type: nauc_recall_at_3_std value: -20.234707300209127 - type: nauc_recall_at_5_diff1 value: 35.551139991687776 - type: nauc_recall_at_5_max value: 34.61009958820695 - type: nauc_recall_at_5_std value: -19.519180149293444 - type: ndcg_at_1 value: 31.233 - type: ndcg_at_10 value: 35.927 - type: ndcg_at_100 value: 43.037 - type: ndcg_at_1000 value: 45.900999999999996 - type: ndcg_at_20 value: 38.39 - type: ndcg_at_3 value: 31.366 - type: ndcg_at_5 value: 33.108 - type: precision_at_1 value: 31.233 - type: precision_at_10 value: 8.15 - type: precision_at_100 value: 1.402 - type: precision_at_1000 value: 0.17700000000000002 - type: precision_at_20 value: 4.91 - type: precision_at_3 value: 17.871000000000002 - type: precision_at_5 value: 12.948 - type: recall_at_1 value: 20.144000000000002 - type: recall_at_10 value: 44.985 - type: recall_at_100 value: 74.866 - type: recall_at_1000 value: 94.477 - type: recall_at_20 value: 53.37 - type: recall_at_3 value: 31.141000000000002 - type: recall_at_5 value: 36.721 - task: type: PairClassification dataset: name: MTEB Cmnli (default) type: C-MTEB/CMNLI config: default split: validation revision: None metrics: - type: cos_sim_accuracy value: 71.25676488274203 - type: cos_sim_accuracy_threshold value: 78.11152935028076 - type: cos_sim_ap value: 79.10444825556077 - type: cos_sim_f1 value: 74.10750923266312 - type: cos_sim_f1_threshold value: 75.2312421798706 - type: cos_sim_precision value: 66.02083714129044 - type: cos_sim_recall value: 84.45171849427169 - type: dot_accuracy value: 68.11785929043896 - type: dot_accuracy_threshold value: 34783.23974609375 - type: dot_ap value: 75.80201827987712 - type: dot_f1 value: 72.31670990679349 - type: dot_f1_threshold value: 31978.036499023438 - type: dot_precision value: 61.386623164763456 - type: dot_recall value: 87.98223053542202 - type: euclidean_accuracy value: 71.41310883944678 - type: euclidean_accuracy_threshold value: 1374.9353408813477 - type: euclidean_ap value: 79.23359768836457 - type: euclidean_f1 value: 74.38512297540491 - type: euclidean_f1_threshold value: 1512.6035690307617 - type: euclidean_precision value: 64.97816593886463 - type: euclidean_recall value: 86.97685293429974 - type: manhattan_accuracy value: 71.32892363199038 - type: manhattan_accuracy_threshold value: 33340.49072265625 - type: manhattan_ap value: 79.11973684118587 - type: manhattan_f1 value: 74.29401993355481 - type: manhattan_f1_threshold value: 36012.52746582031 - type: manhattan_precision value: 66.81605975723622 - type: manhattan_recall value: 83.65676876315175 - type: max_accuracy value: 71.41310883944678 - type: max_ap value: 79.23359768836457 - type: max_f1 value: 74.38512297540491 - task: type: Retrieval dataset: name: MTEB CovidRetrieval (default) type: C-MTEB/CovidRetrieval config: default split: dev revision: 1271c7809071a13532e05f25fb53511ffce77117 metrics: - type: main_score value: 78.917 - type: map_at_1 value: 67.281 - type: map_at_10 value: 75.262 - type: map_at_100 value: 75.60900000000001 - type: map_at_1000 value: 75.618 - type: map_at_20 value: 75.50200000000001 - type: map_at_3 value: 73.455 - type: 
map_at_5 value: 74.657 - type: mrr_at_1 value: 67.43940990516333 - type: mrr_at_10 value: 75.27367989696756 - type: mrr_at_100 value: 75.62029353306437 - type: mrr_at_1000 value: 75.62934741874726 - type: mrr_at_20 value: 75.51356607409173 - type: mrr_at_3 value: 73.5159817351598 - type: mrr_at_5 value: 74.73832103969093 - type: nauc_map_at_1000_diff1 value: 77.26666391867634 - type: nauc_map_at_1000_max value: 49.928541012203496 - type: nauc_map_at_1000_std value: -40.494469470474456 - type: nauc_map_at_100_diff1 value: 77.26087423162396 - type: nauc_map_at_100_max value: 49.944275615664424 - type: nauc_map_at_100_std value: -40.48299992715398 - type: nauc_map_at_10_diff1 value: 76.97400113500906 - type: nauc_map_at_10_max value: 49.84177029115674 - type: nauc_map_at_10_std value: -40.829250876511445 - type: nauc_map_at_1_diff1 value: 81.44050620630395 - type: nauc_map_at_1_max value: 48.97711944070578 - type: nauc_map_at_1_std value: -38.963689457570254 - type: nauc_map_at_20_diff1 value: 77.21791353089375 - type: nauc_map_at_20_max value: 49.958206759079424 - type: nauc_map_at_20_std value: -40.53067571658996 - type: nauc_map_at_3_diff1 value: 77.3555925208868 - type: nauc_map_at_3_max value: 49.32158146451256 - type: nauc_map_at_3_std value: -41.93552426981978 - type: nauc_map_at_5_diff1 value: 77.07099950431504 - type: nauc_map_at_5_max value: 49.54190504495002 - type: nauc_map_at_5_std value: -41.814968130918096 - type: nauc_mrr_at_1000_diff1 value: 77.31388774540477 - type: nauc_mrr_at_1000_max value: 49.96779699175759 - type: nauc_mrr_at_1000_std value: -40.43739645160277 - type: nauc_mrr_at_100_diff1 value: 77.30817786449413 - type: nauc_mrr_at_100_max value: 49.982514428937655 - type: nauc_mrr_at_100_std value: -40.42876582797744 - type: nauc_mrr_at_10_diff1 value: 77.02048060465756 - type: nauc_mrr_at_10_max value: 49.87937207270602 - type: nauc_mrr_at_10_std value: -40.77596560333177 - type: nauc_mrr_at_1_diff1 value: 81.27219599516599 - type: nauc_mrr_at_1_max value: 49.3083394026327 - type: nauc_mrr_at_1_std value: -38.31023037552026 - type: nauc_mrr_at_20_diff1 value: 77.26497089316055 - type: nauc_mrr_at_20_max value: 49.996257597621415 - type: nauc_mrr_at_20_std value: -40.476723608868014 - type: nauc_mrr_at_3_diff1 value: 77.38971294099257 - type: nauc_mrr_at_3_max value: 49.38110328987404 - type: nauc_mrr_at_3_std value: -41.7118646715979 - type: nauc_mrr_at_5_diff1 value: 77.08286142519952 - type: nauc_mrr_at_5_max value: 49.655249374588685 - type: nauc_mrr_at_5_std value: -41.48173039989406 - type: nauc_ndcg_at_1000_diff1 value: 76.47399204021758 - type: nauc_ndcg_at_1000_max value: 50.55770139961048 - type: nauc_ndcg_at_1000_std value: -39.55650430279072 - type: nauc_ndcg_at_100_diff1 value: 76.29355616618253 - type: nauc_ndcg_at_100_max value: 51.003608112592936 - type: nauc_ndcg_at_100_std value: -39.24769744605206 - type: nauc_ndcg_at_10_diff1 value: 74.88697528447634 - type: nauc_ndcg_at_10_max value: 50.398416372815234 - type: nauc_ndcg_at_10_std value: -40.76526585772833 - type: nauc_ndcg_at_1_diff1 value: 81.27219599516599 - type: nauc_ndcg_at_1_max value: 49.3083394026327 - type: nauc_ndcg_at_1_std value: -38.31023037552026 - type: nauc_ndcg_at_20_diff1 value: 75.85463512091866 - type: nauc_ndcg_at_20_max value: 50.97338683654334 - type: nauc_ndcg_at_20_std value: -39.353128774903404 - type: nauc_ndcg_at_3_diff1 value: 75.94015726123543 - type: nauc_ndcg_at_3_max value: 49.22194251063148 - type: nauc_ndcg_at_3_std value: -43.040457030630435 - type: 
nauc_ndcg_at_5_diff1 value: 75.19166189770303 - type: nauc_ndcg_at_5_max value: 49.65696229797189 - type: nauc_ndcg_at_5_std value: -42.81534909184424 - type: nauc_precision_at_1000_diff1 value: -14.830901395815788 - type: nauc_precision_at_1000_max value: 19.686297136854623 - type: nauc_precision_at_1000_std value: 61.19310360166978 - type: nauc_precision_at_100_diff1 value: 20.55469986751769 - type: nauc_precision_at_100_max value: 50.78431835075583 - type: nauc_precision_at_100_std value: 31.54986568374813 - type: nauc_precision_at_10_diff1 value: 45.991938532558656 - type: nauc_precision_at_10_max value: 46.386318595630385 - type: nauc_precision_at_10_std value: -23.463011435224608 - type: nauc_precision_at_1_diff1 value: 81.27219599516599 - type: nauc_precision_at_1_max value: 49.3083394026327 - type: nauc_precision_at_1_std value: -38.31023037552026 - type: nauc_precision_at_20_diff1 value: 41.53180472410822 - type: nauc_precision_at_20_max value: 49.89800247204318 - type: nauc_precision_at_20_std value: -2.4192847331537095 - type: nauc_precision_at_3_diff1 value: 67.37504651209993 - type: nauc_precision_at_3_max value: 47.893537208629496 - type: nauc_precision_at_3_std value: -43.2362212382819 - type: nauc_precision_at_5_diff1 value: 60.03438883791718 - type: nauc_precision_at_5_max value: 48.29770502354206 - type: nauc_precision_at_5_std value: -40.39588448271546 - type: nauc_recall_at_1000_diff1 value: 71.04741174480844 - type: nauc_recall_at_1000_max value: 93.19056506596002 - type: nauc_recall_at_1000_std value: 62.96994797650912 - type: nauc_recall_at_100_diff1 value: 65.00418176852641 - type: nauc_recall_at_100_max value: 85.27352708427193 - type: nauc_recall_at_100_std value: 2.8812005546518886 - type: nauc_recall_at_10_diff1 value: 61.263254794998865 - type: nauc_recall_at_10_max value: 54.17618329507141 - type: nauc_recall_at_10_std value: -39.80603966142593 - type: nauc_recall_at_1_diff1 value: 81.44050620630395 - type: nauc_recall_at_1_max value: 48.97711944070578 - type: nauc_recall_at_1_std value: -38.963689457570254 - type: nauc_recall_at_20_diff1 value: 64.42106091745396 - type: nauc_recall_at_20_max value: 63.10796640821887 - type: nauc_recall_at_20_std value: -22.60117424572222 - type: nauc_recall_at_3_diff1 value: 70.66311436592945 - type: nauc_recall_at_3_max value: 48.69498944323469 - type: nauc_recall_at_3_std value: -47.37847524874532 - type: nauc_recall_at_5_diff1 value: 66.12701111728848 - type: nauc_recall_at_5_max value: 49.91763957934711 - type: nauc_recall_at_5_std value: -48.173252920584126 - type: ndcg_at_1 value: 67.43900000000001 - type: ndcg_at_10 value: 78.917 - type: ndcg_at_100 value: 80.53399999999999 - type: ndcg_at_1000 value: 80.768 - type: ndcg_at_20 value: 79.813 - type: ndcg_at_3 value: 75.37 - type: ndcg_at_5 value: 77.551 - type: precision_at_1 value: 67.43900000000001 - type: precision_at_10 value: 9.115 - type: precision_at_100 value: 0.985 - type: precision_at_1000 value: 0.1 - type: precision_at_20 value: 4.737 - type: precision_at_3 value: 27.081 - type: precision_at_5 value: 17.345 - type: recall_at_1 value: 67.281 - type: recall_at_10 value: 90.2 - type: recall_at_100 value: 97.576 - type: recall_at_1000 value: 99.368 - type: recall_at_20 value: 93.783 - type: recall_at_3 value: 80.822 - type: recall_at_5 value: 86.091 - task: type: Retrieval dataset: name: MTEB DBPedia (default) type: mteb/dbpedia config: default split: test revision: c0f706b76e590d620bd6618b3ca8efdd34e2d659 metrics: - type: map_at_1 value: 9.041 - type: map_at_10 
value: 18.662 - type: map_at_100 value: 26.054 - type: map_at_1000 value: 27.769 - type: map_at_20 value: 21.499 - type: map_at_3 value: 13.628000000000002 - type: map_at_5 value: 15.617 - type: mrr_at_1 value: 67.25 - type: mrr_at_10 value: 74.673 - type: mrr_at_100 value: 75.022 - type: mrr_at_1000 value: 75.031 - type: mrr_at_20 value: 74.895 - type: mrr_at_3 value: 73.042 - type: mrr_at_5 value: 74.179 - type: ndcg_at_1 value: 55.75 - type: ndcg_at_10 value: 41.004000000000005 - type: ndcg_at_100 value: 44.912 - type: ndcg_at_1000 value: 51.946000000000005 - type: ndcg_at_20 value: 40.195 - type: ndcg_at_3 value: 45.803 - type: ndcg_at_5 value: 42.976 - type: precision_at_1 value: 67.25 - type: precision_at_10 value: 31.874999999999996 - type: precision_at_100 value: 10.37 - type: precision_at_1000 value: 2.1430000000000002 - type: precision_at_20 value: 24.275 - type: precision_at_3 value: 48.417 - type: precision_at_5 value: 40.2 - type: recall_at_1 value: 9.041 - type: recall_at_10 value: 23.592 - type: recall_at_100 value: 49.476 - type: recall_at_1000 value: 71.677 - type: recall_at_20 value: 30.153000000000002 - type: recall_at_3 value: 14.777000000000001 - type: recall_at_5 value: 17.829 - type: main_score value: 41.004000000000005 - task: type: Retrieval dataset: name: MTEB DuRetrieval (default) type: C-MTEB/DuRetrieval config: default split: dev revision: a1a333e290fe30b10f3f56498e3a0d911a693ced metrics: - type: main_score value: 83.134 - type: map_at_1 value: 23.907999999999998 - type: map_at_10 value: 74.566 - type: map_at_100 value: 77.706 - type: map_at_1000 value: 77.762 - type: map_at_20 value: 76.943 - type: map_at_3 value: 50.971999999999994 - type: map_at_5 value: 64.429 - type: mrr_at_1 value: 84.8 - type: mrr_at_10 value: 89.73218253968246 - type: mrr_at_100 value: 89.82853630655774 - type: mrr_at_1000 value: 89.83170411703153 - type: mrr_at_20 value: 89.79582030091501 - type: mrr_at_3 value: 89.32499999999992 - type: mrr_at_5 value: 89.58749999999992 - type: nauc_map_at_1000_diff1 value: -2.2736020650163717 - type: nauc_map_at_1000_max value: 45.3937519555142 - type: nauc_map_at_1000_std value: 10.824778228268581 - type: nauc_map_at_100_diff1 value: -2.2662939752750066 - type: nauc_map_at_100_max value: 45.423960626031366 - type: nauc_map_at_100_std value: 10.804239351738717 - type: nauc_map_at_10_diff1 value: 0.9395752585654343 - type: nauc_map_at_10_max value: 42.53814836940551 - type: nauc_map_at_10_std value: 0.7199313235265218 - type: nauc_map_at_1_diff1 value: 45.19415865267676 - type: nauc_map_at_1_max value: -1.7261947382471912 - type: nauc_map_at_1_std value: -32.16144291613605 - type: nauc_map_at_20_diff1 value: -1.884514152147472 - type: nauc_map_at_20_max value: 44.830401115927174 - type: nauc_map_at_20_std value: 8.118530414377219 - type: nauc_map_at_3_diff1 value: 25.678881127059967 - type: nauc_map_at_3_max value: 12.191400431839758 - type: nauc_map_at_3_std value: -27.201740587642327 - type: nauc_map_at_5_diff1 value: 13.227128780829572 - type: nauc_map_at_5_max value: 26.978282739708977 - type: nauc_map_at_5_std value: -17.555610348070584 - type: nauc_mrr_at_1000_diff1 value: 21.073512437502178 - type: nauc_mrr_at_1000_max value: 64.9680257861005 - type: nauc_mrr_at_1000_std value: 19.626288754404293 - type: nauc_mrr_at_100_diff1 value: 21.074637426957732 - type: nauc_mrr_at_100_max value: 64.97612675661915 - type: nauc_mrr_at_100_std value: 19.649504127800878 - type: nauc_mrr_at_10_diff1 value: 21.12003267626651 - type: nauc_mrr_at_10_max value: 
65.24362289059766 - type: nauc_mrr_at_10_std value: 19.92351276180984 - type: nauc_mrr_at_1_diff1 value: 22.711430629147635 - type: nauc_mrr_at_1_max value: 58.4059429497403 - type: nauc_mrr_at_1_std value: 11.967886722567973 - type: nauc_mrr_at_20_diff1 value: 20.98220830510272 - type: nauc_mrr_at_20_max value: 65.05737535197835 - type: nauc_mrr_at_20_std value: 19.66672900782771 - type: nauc_mrr_at_3_diff1 value: 20.924796220048528 - type: nauc_mrr_at_3_max value: 65.71388669932584 - type: nauc_mrr_at_3_std value: 20.05912197134477 - type: nauc_mrr_at_5_diff1 value: 20.61978649468208 - type: nauc_mrr_at_5_max value: 65.50709154526211 - type: nauc_mrr_at_5_std value: 20.241434276181838 - type: nauc_ndcg_at_1000_diff1 value: 0.25363171946133656 - type: nauc_ndcg_at_1000_max value: 54.12840465309885 - type: nauc_ndcg_at_1000_std value: 20.749184325412546 - type: nauc_ndcg_at_100_diff1 value: 0.15649430250272792 - type: nauc_ndcg_at_100_max value: 54.47995322413234 - type: nauc_ndcg_at_100_std value: 21.266786634233267 - type: nauc_ndcg_at_10_diff1 value: 0.14579250840386346 - type: nauc_ndcg_at_10_max value: 49.8643037948353 - type: nauc_ndcg_at_10_std value: 12.960701643914216 - type: nauc_ndcg_at_1_diff1 value: 22.711430629147635 - type: nauc_ndcg_at_1_max value: 58.4059429497403 - type: nauc_ndcg_at_1_std value: 11.967886722567973 - type: nauc_ndcg_at_20_diff1 value: -0.6701559981776763 - type: nauc_ndcg_at_20_max value: 52.95443437012488 - type: nauc_ndcg_at_20_std value: 16.708883972005758 - type: nauc_ndcg_at_3_diff1 value: -0.19084922341962388 - type: nauc_ndcg_at_3_max value: 46.2110230886874 - type: nauc_ndcg_at_3_std value: 13.363250229683038 - type: nauc_ndcg_at_5_diff1 value: 0.9840019268192548 - type: nauc_ndcg_at_5_max value: 43.56594891798146 - type: nauc_ndcg_at_5_std value: 8.577017104088146 - type: nauc_precision_at_1000_diff1 value: -30.779179091501145 - type: nauc_precision_at_1000_max value: 16.056094258615673 - type: nauc_precision_at_1000_std value: 49.96303902363283 - type: nauc_precision_at_100_diff1 value: -31.583236638899585 - type: nauc_precision_at_100_max value: 19.16571713603373 - type: nauc_precision_at_100_std value: 51.870647903980036 - type: nauc_precision_at_10_diff1 value: -35.62134572732597 - type: nauc_precision_at_10_max value: 31.6935186494612 - type: nauc_precision_at_10_std value: 46.68659723766723 - type: nauc_precision_at_1_diff1 value: 22.711430629147635 - type: nauc_precision_at_1_max value: 58.4059429497403 - type: nauc_precision_at_1_std value: 11.967886722567973 - type: nauc_precision_at_20_diff1 value: -33.875460046920495 - type: nauc_precision_at_20_max value: 24.188420133566442 - type: nauc_precision_at_20_std value: 50.02387762958483 - type: nauc_precision_at_3_diff1 value: -28.875998450906827 - type: nauc_precision_at_3_max value: 44.77058831167941 - type: nauc_precision_at_3_std value: 31.77993710437207 - type: nauc_precision_at_5_diff1 value: -34.92525440306491 - type: nauc_precision_at_5_max value: 39.855219917077086 - type: nauc_precision_at_5_std value: 37.95432046169299 - type: nauc_recall_at_1000_diff1 value: -14.293309371874733 - type: nauc_recall_at_1000_max value: 59.06948692482579 - type: nauc_recall_at_1000_std value: 62.586254868312686 - type: nauc_recall_at_100_diff1 value: -4.344100947212704 - type: nauc_recall_at_100_max value: 58.42120421043602 - type: nauc_recall_at_100_std value: 46.48562009316997 - type: nauc_recall_at_10_diff1 value: 0.04948662912161709 - type: nauc_recall_at_10_max value: 42.42809687119093 - type: 
nauc_recall_at_10_std value: 0.6892504250411409 - type: nauc_recall_at_1_diff1 value: 45.19415865267676 - type: nauc_recall_at_1_max value: -1.7261947382471912 - type: nauc_recall_at_1_std value: -32.16144291613605 - type: nauc_recall_at_20_diff1 value: -7.634587864605111 - type: nauc_recall_at_20_max value: 49.21327187174134 - type: nauc_recall_at_20_std value: 16.408481068336346 - type: nauc_recall_at_3_diff1 value: 24.72546591038644 - type: nauc_recall_at_3_max value: 6.620763400972902 - type: nauc_recall_at_3_std value: -29.994703323331684 - type: nauc_recall_at_5_diff1 value: 12.65527364845842 - type: nauc_recall_at_5_max value: 20.400121385794694 - type: nauc_recall_at_5_std value: -22.34284568447213 - type: ndcg_at_1 value: 84.8 - type: ndcg_at_10 value: 83.134 - type: ndcg_at_100 value: 86.628 - type: ndcg_at_1000 value: 87.151 - type: ndcg_at_20 value: 85.092 - type: ndcg_at_3 value: 81.228 - type: ndcg_at_5 value: 80.2 - type: precision_at_1 value: 84.8 - type: precision_at_10 value: 40.394999999999996 - type: precision_at_100 value: 4.745 - type: precision_at_1000 value: 0.488 - type: precision_at_20 value: 22.245 - type: precision_at_3 value: 73.25 - type: precision_at_5 value: 61.86000000000001 - type: recall_at_1 value: 23.907999999999998 - type: recall_at_10 value: 85.346 - type: recall_at_100 value: 96.515 - type: recall_at_1000 value: 99.156 - type: recall_at_20 value: 91.377 - type: recall_at_3 value: 54.135 - type: recall_at_5 value: 70.488 - task: type: Retrieval dataset: name: MTEB EcomRetrieval (default) type: C-MTEB/EcomRetrieval config: default split: dev revision: 687de13dc7294d6fd9be10c6945f9e8fec8166b9 metrics: - type: main_score value: 60.887 - type: map_at_1 value: 46.6 - type: map_at_10 value: 56.035000000000004 - type: map_at_100 value: 56.741 - type: map_at_1000 value: 56.764 - type: map_at_20 value: 56.513999999999996 - type: map_at_3 value: 53.733 - type: map_at_5 value: 54.913000000000004 - type: mrr_at_1 value: 46.6 - type: mrr_at_10 value: 56.034523809523776 - type: mrr_at_100 value: 56.74056360434383 - type: mrr_at_1000 value: 56.76373487222486 - type: mrr_at_20 value: 56.51374873879128 - type: mrr_at_3 value: 53.73333333333328 - type: mrr_at_5 value: 54.91333333333327 - type: nauc_map_at_1000_diff1 value: 65.13546939953387 - type: nauc_map_at_1000_max value: 43.358890946774494 - type: nauc_map_at_1000_std value: -9.973282105235036 - type: nauc_map_at_100_diff1 value: 65.12449309472493 - type: nauc_map_at_100_max value: 43.377100882923145 - type: nauc_map_at_100_std value: -9.971781228240555 - type: nauc_map_at_10_diff1 value: 64.83020018537475 - type: nauc_map_at_10_max value: 43.25969482323034 - type: nauc_map_at_10_std value: -10.120272176001547 - type: nauc_map_at_1_diff1 value: 69.58727592100516 - type: nauc_map_at_1_max value: 38.236494689522026 - type: nauc_map_at_1_std value: -14.833390831689597 - type: nauc_map_at_20_diff1 value: 65.01159809914586 - type: nauc_map_at_20_max value: 43.33440319829618 - type: nauc_map_at_20_std value: -10.039958228659726 - type: nauc_map_at_3_diff1 value: 65.2396323885909 - type: nauc_map_at_3_max value: 42.26904017378952 - type: nauc_map_at_3_std value: -11.793017036934044 - type: nauc_map_at_5_diff1 value: 64.96397227898036 - type: nauc_map_at_5_max value: 43.231333789145424 - type: nauc_map_at_5_std value: -10.349933732151372 - type: nauc_mrr_at_1000_diff1 value: 65.13546939953387 - type: nauc_mrr_at_1000_max value: 43.358890946774494 - type: nauc_mrr_at_1000_std value: -9.973282105235036 - type: 
nauc_mrr_at_100_diff1 value: 65.12449309472493 - type: nauc_mrr_at_100_max value: 43.377100882923145 - type: nauc_mrr_at_100_std value: -9.971781228240555 - type: nauc_mrr_at_10_diff1 value: 64.83020018537475 - type: nauc_mrr_at_10_max value: 43.25969482323034 - type: nauc_mrr_at_10_std value: -10.120272176001547 - type: nauc_mrr_at_1_diff1 value: 69.58727592100516 - type: nauc_mrr_at_1_max value: 38.236494689522026 - type: nauc_mrr_at_1_std value: -14.833390831689597 - type: nauc_mrr_at_20_diff1 value: 65.01159809914586 - type: nauc_mrr_at_20_max value: 43.33440319829618 - type: nauc_mrr_at_20_std value: -10.039958228659726 - type: nauc_mrr_at_3_diff1 value: 65.2396323885909 - type: nauc_mrr_at_3_max value: 42.26904017378952 - type: nauc_mrr_at_3_std value: -11.793017036934044 - type: nauc_mrr_at_5_diff1 value: 64.96397227898036 - type: nauc_mrr_at_5_max value: 43.231333789145424 - type: nauc_mrr_at_5_std value: -10.349933732151372 - type: nauc_ndcg_at_1000_diff1 value: 64.26802655199876 - type: nauc_ndcg_at_1000_max value: 45.854310744745185 - type: nauc_ndcg_at_1000_std value: -6.184417305204082 - type: nauc_ndcg_at_100_diff1 value: 63.99268329609827 - type: nauc_ndcg_at_100_max value: 46.31270128748375 - type: nauc_ndcg_at_100_std value: -6.1393433180558965 - type: nauc_ndcg_at_10_diff1 value: 62.6735104141137 - type: nauc_ndcg_at_10_max value: 45.54954799462398 - type: nauc_ndcg_at_10_std value: -7.348851199024871 - type: nauc_ndcg_at_1_diff1 value: 69.58727592100516 - type: nauc_ndcg_at_1_max value: 38.236494689522026 - type: nauc_ndcg_at_1_std value: -14.833390831689597 - type: nauc_ndcg_at_20_diff1 value: 63.25899651677274 - type: nauc_ndcg_at_20_max value: 45.952196968886014 - type: nauc_ndcg_at_20_std value: -6.807607465125713 - type: nauc_ndcg_at_3_diff1 value: 63.65618337476822 - type: nauc_ndcg_at_3_max value: 43.507890965228945 - type: nauc_ndcg_at_3_std value: -10.73845622217601 - type: nauc_ndcg_at_5_diff1 value: 63.079162432921855 - type: nauc_ndcg_at_5_max value: 45.38303443868148 - type: nauc_ndcg_at_5_std value: -8.063657824835534 - type: nauc_precision_at_1000_diff1 value: 63.01459977930557 - type: nauc_precision_at_1000_max value: 92.4253034547151 - type: nauc_precision_at_1000_std value: 84.4845513963158 - type: nauc_precision_at_100_diff1 value: 57.17217119405878 - type: nauc_precision_at_100_max value: 80.70049725316484 - type: nauc_precision_at_100_std value: 41.78392287147403 - type: nauc_precision_at_10_diff1 value: 53.115665404390725 - type: nauc_precision_at_10_max value: 55.73825657341263 - type: nauc_precision_at_10_std value: 5.406226305013257 - type: nauc_precision_at_1_diff1 value: 69.58727592100516 - type: nauc_precision_at_1_max value: 38.236494689522026 - type: nauc_precision_at_1_std value: -14.833390831689597 - type: nauc_precision_at_20_diff1 value: 53.77730697622828 - type: nauc_precision_at_20_max value: 61.88170819253054 - type: nauc_precision_at_20_std value: 13.678730470003856 - type: nauc_precision_at_3_diff1 value: 58.580196992291455 - type: nauc_precision_at_3_max value: 47.404834585376626 - type: nauc_precision_at_3_std value: -7.374978769024051 - type: nauc_precision_at_5_diff1 value: 56.44564652606437 - type: nauc_precision_at_5_max value: 53.08973975162324 - type: nauc_precision_at_5_std value: 0.22762700141423803 - type: nauc_recall_at_1000_diff1 value: 63.01459977930565 - type: nauc_recall_at_1000_max value: 92.42530345471532 - type: nauc_recall_at_1000_std value: 84.48455139631602 - type: nauc_recall_at_100_diff1 value: 
57.17217119405904 - type: nauc_recall_at_100_max value: 80.70049725316468 - type: nauc_recall_at_100_std value: 41.783922871474275 - type: nauc_recall_at_10_diff1 value: 53.11566540439087 - type: nauc_recall_at_10_max value: 55.738256573412656 - type: nauc_recall_at_10_std value: 5.406226305013377 - type: nauc_recall_at_1_diff1 value: 69.58727592100516 - type: nauc_recall_at_1_max value: 38.236494689522026 - type: nauc_recall_at_1_std value: -14.833390831689597 - type: nauc_recall_at_20_diff1 value: 53.77730697622846 - type: nauc_recall_at_20_max value: 61.881708192530525 - type: nauc_recall_at_20_std value: 13.678730470003947 - type: nauc_recall_at_3_diff1 value: 58.5801969922914 - type: nauc_recall_at_3_max value: 47.40483458537654 - type: nauc_recall_at_3_std value: -7.37497876902413 - type: nauc_recall_at_5_diff1 value: 56.445646526064394 - type: nauc_recall_at_5_max value: 53.08973975162332 - type: nauc_recall_at_5_std value: 0.22762700141428024 - type: ndcg_at_1 value: 46.6 - type: ndcg_at_10 value: 60.887 - type: ndcg_at_100 value: 64.18199999999999 - type: ndcg_at_1000 value: 64.726 - type: ndcg_at_20 value: 62.614999999999995 - type: ndcg_at_3 value: 56.038 - type: ndcg_at_5 value: 58.150999999999996 - type: precision_at_1 value: 46.6 - type: precision_at_10 value: 7.630000000000001 - type: precision_at_100 value: 0.914 - type: precision_at_1000 value: 0.096 - type: precision_at_20 value: 4.154999999999999 - type: precision_at_3 value: 20.9 - type: precision_at_5 value: 13.56 - type: recall_at_1 value: 46.6 - type: recall_at_10 value: 76.3 - type: recall_at_100 value: 91.4 - type: recall_at_1000 value: 95.6 - type: recall_at_20 value: 83.1 - type: recall_at_3 value: 62.7 - type: recall_at_5 value: 67.80000000000001 - task: type: Classification dataset: name: MTEB EmotionClassification (default) type: mteb/emotion config: default split: test revision: 4f58c6b202a23cf9a4da393831edf4f9183cad37 metrics: - type: accuracy value: 73.29999999999998 - type: f1 value: 67.71473706580302 - type: f1_weighted value: 74.83537255312045 - type: main_score value: 73.29999999999998 - task: type: Retrieval dataset: name: MTEB FEVER (default) type: mteb/fever config: default split: test revision: bea83ef9e8fb933d90a2f1d5515737465d613e12 metrics: - type: map_at_1 value: 78.371 - type: map_at_10 value: 85.762 - type: map_at_100 value: 85.954 - type: map_at_1000 value: 85.966 - type: map_at_20 value: 85.887 - type: map_at_3 value: 84.854 - type: map_at_5 value: 85.408 - type: mrr_at_1 value: 84.443 - type: mrr_at_10 value: 90.432 - type: mrr_at_100 value: 90.483 - type: mrr_at_1000 value: 90.484 - type: mrr_at_20 value: 90.473 - type: mrr_at_3 value: 89.89399999999999 - type: mrr_at_5 value: 90.244 - type: ndcg_at_1 value: 84.443 - type: ndcg_at_10 value: 89.05499999999999 - type: ndcg_at_100 value: 89.68 - type: ndcg_at_1000 value: 89.87899999999999 - type: ndcg_at_20 value: 89.381 - type: ndcg_at_3 value: 87.73100000000001 - type: ndcg_at_5 value: 88.425 - type: precision_at_1 value: 84.443 - type: precision_at_10 value: 10.520999999999999 - type: precision_at_100 value: 1.103 - type: precision_at_1000 value: 0.11399999999999999 - type: precision_at_20 value: 5.362 - type: precision_at_3 value: 33.198 - type: precision_at_5 value: 20.441000000000003 - type: recall_at_1 value: 78.371 - type: recall_at_10 value: 94.594 - type: recall_at_100 value: 96.97099999999999 - type: recall_at_1000 value: 98.18 - type: recall_at_20 value: 95.707 - type: recall_at_3 value: 90.853 - type: recall_at_5 value: 
92.74799999999999 - type: main_score value: 89.05499999999999 - task: type: Retrieval dataset: name: MTEB FiQA2018 (default) type: mteb/fiqa config: default split: test revision: 27a168819829fe9bcd655c2df245fb19452e8e06 metrics: - type: map_at_1 value: 23.810000000000002 - type: map_at_10 value: 39.051 - type: map_at_100 value: 41.231 - type: map_at_1000 value: 41.376000000000005 - type: map_at_20 value: 40.227000000000004 - type: map_at_3 value: 33.915 - type: map_at_5 value: 36.459 - type: mrr_at_1 value: 48.148 - type: mrr_at_10 value: 55.765 - type: mrr_at_100 value: 56.495 - type: mrr_at_1000 value: 56.525999999999996 - type: mrr_at_20 value: 56.213 - type: mrr_at_3 value: 53.086 - type: mrr_at_5 value: 54.513999999999996 - type: ndcg_at_1 value: 48.148 - type: ndcg_at_10 value: 47.349999999999994 - type: ndcg_at_100 value: 54.61899999999999 - type: ndcg_at_1000 value: 56.830000000000005 - type: ndcg_at_20 value: 50.143 - type: ndcg_at_3 value: 43.108000000000004 - type: ndcg_at_5 value: 44.023 - type: precision_at_1 value: 48.148 - type: precision_at_10 value: 13.441 - type: precision_at_100 value: 2.085 - type: precision_at_1000 value: 0.248 - type: precision_at_20 value: 7.870000000000001 - type: precision_at_3 value: 28.909000000000002 - type: precision_at_5 value: 20.957 - type: recall_at_1 value: 23.810000000000002 - type: recall_at_10 value: 54.303000000000004 - type: recall_at_100 value: 81.363 - type: recall_at_1000 value: 94.391 - type: recall_at_20 value: 63.056999999999995 - type: recall_at_3 value: 38.098 - type: recall_at_5 value: 44.414 - type: main_score value: 47.349999999999994 - task: type: Classification dataset: name: MTEB GeoreviewClassification (default) type: ai-forever/georeview-classification config: default split: test revision: 3765c0d1de6b7d264bc459433c45e5a75513839c metrics: - type: accuracy value: 48.0126953125 - type: f1 value: 47.65764016160488 - type: f1_weighted value: 47.65701659482088 - type: main_score value: 48.0126953125 - task: type: Clustering dataset: name: MTEB GeoreviewClusteringP2P (default) type: ai-forever/georeview-clustering-p2p config: default split: test revision: 97a313c8fc85b47f13f33e7e9a95c1ad888c7fec metrics: - type: main_score value: 73.62357853672266 - type: v_measure value: 73.62357853672266 - type: v_measure_std value: 0.5942247545535766 - task: type: Retrieval dataset: name: MTEB GerDaLIR (default) type: jinaai/ger_da_lir config: default split: test revision: 0bb47f1d73827e96964edb84dfe552f62f4fd5eb metrics: - type: main_score value: 16.227 - type: map_at_1 value: 8.082 - type: map_at_10 value: 12.959999999999999 - type: map_at_100 value: 13.923 - type: map_at_1000 value: 14.030999999999999 - type: map_at_20 value: 13.453000000000001 - type: map_at_3 value: 11.018 - type: map_at_5 value: 12.056000000000001 - type: mrr_at_1 value: 8.993332249146203 - type: mrr_at_10 value: 13.994013092850247 - type: mrr_at_100 value: 14.913737673149308 - type: mrr_at_1000 value: 15.00843809934407 - type: mrr_at_20 value: 14.470268462334007 - type: mrr_at_3 value: 12.000596302921846 - type: mrr_at_5 value: 13.070689000921561 - type: nauc_map_at_1000_diff1 value: 28.559639584013286 - type: nauc_map_at_1000_max value: 25.533800126086714 - type: nauc_map_at_1000_std value: 9.826551026628666 - type: nauc_map_at_100_diff1 value: 28.544724499331696 - type: nauc_map_at_100_max value: 25.46734324526386 - type: nauc_map_at_100_std value: 9.739314481785591 - type: nauc_map_at_10_diff1 value: 28.77447517718118 - type: nauc_map_at_10_max value: 
24.7431615237795 - type: nauc_map_at_10_std value: 8.349878188033646 - type: nauc_map_at_1_diff1 value: 37.405452629895514 - type: nauc_map_at_1_max value: 24.444208978394023 - type: nauc_map_at_1_std value: 4.043820373810528 - type: nauc_map_at_20_diff1 value: 28.69764217789062 - type: nauc_map_at_20_max value: 25.111848355996496 - type: nauc_map_at_20_std value: 9.034829905305918 - type: nauc_map_at_3_diff1 value: 30.89053285076882 - type: nauc_map_at_3_max value: 24.862886115911152 - type: nauc_map_at_3_std value: 6.654260832396586 - type: nauc_map_at_5_diff1 value: 29.230629676604263 - type: nauc_map_at_5_max value: 24.374302288018583 - type: nauc_map_at_5_std value: 7.341846952319046 - type: nauc_mrr_at_1000_diff1 value: 28.086147932781426 - type: nauc_mrr_at_1000_max value: 25.98698528264653 - type: nauc_mrr_at_1000_std value: 9.917554348624545 - type: nauc_mrr_at_100_diff1 value: 28.069163279791336 - type: nauc_mrr_at_100_max value: 25.949440010886804 - type: nauc_mrr_at_100_std value: 9.874340979732578 - type: nauc_mrr_at_10_diff1 value: 28.239920869530046 - type: nauc_mrr_at_10_max value: 25.351271409498576 - type: nauc_mrr_at_10_std value: 8.669862759875162 - type: nauc_mrr_at_1_diff1 value: 35.96543040207856 - type: nauc_mrr_at_1_max value: 25.488936487231967 - type: nauc_mrr_at_1_std value: 4.76439131038345 - type: nauc_mrr_at_20_diff1 value: 28.18865871284607 - type: nauc_mrr_at_20_max value: 25.67121763344746 - type: nauc_mrr_at_20_std value: 9.297910707519472 - type: nauc_mrr_at_3_diff1 value: 30.166714199740717 - type: nauc_mrr_at_3_max value: 25.541792491964877 - type: nauc_mrr_at_3_std value: 7.083090296398472 - type: nauc_mrr_at_5_diff1 value: 28.68475284656478 - type: nauc_mrr_at_5_max value: 24.994071363482835 - type: nauc_mrr_at_5_std value: 7.687507254902365 - type: nauc_ndcg_at_1000_diff1 value: 25.292792613586467 - type: nauc_ndcg_at_1000_max value: 29.211905289377178 - type: nauc_ndcg_at_1000_std value: 18.088867467320355 - type: nauc_ndcg_at_100_diff1 value: 25.026905011089152 - type: nauc_ndcg_at_100_max value: 27.98822281254431 - type: nauc_ndcg_at_100_std value: 16.69456904301902 - type: nauc_ndcg_at_10_diff1 value: 25.972279051109503 - type: nauc_ndcg_at_10_max value: 24.86486482734957 - type: nauc_ndcg_at_10_std value: 10.398605822106353 - type: nauc_ndcg_at_1_diff1 value: 36.134710485184826 - type: nauc_ndcg_at_1_max value: 25.384572790326025 - type: nauc_ndcg_at_1_std value: 4.591863033771824 - type: nauc_ndcg_at_20_diff1 value: 25.850033660205536 - type: nauc_ndcg_at_20_max value: 25.944243193140515 - type: nauc_ndcg_at_20_std value: 12.392409721204892 - type: nauc_ndcg_at_3_diff1 value: 29.1966056380018 - type: nauc_ndcg_at_3_max value: 24.978843156259913 - type: nauc_ndcg_at_3_std value: 7.353914459205087 - type: nauc_ndcg_at_5_diff1 value: 26.795315295756282 - type: nauc_ndcg_at_5_max value: 24.1196789150412 - type: nauc_ndcg_at_5_std value: 8.311970988265172 - type: nauc_precision_at_1000_diff1 value: 9.128270550217984 - type: nauc_precision_at_1000_max value: 35.79286915973607 - type: nauc_precision_at_1000_std value: 39.15669472887154 - type: nauc_precision_at_100_diff1 value: 14.770289799034384 - type: nauc_precision_at_100_max value: 34.58262232264337 - type: nauc_precision_at_100_std value: 34.101148102981384 - type: nauc_precision_at_10_diff1 value: 19.899104673118178 - type: nauc_precision_at_10_max value: 26.636940338985625 - type: nauc_precision_at_10_std value: 15.73871357255849 - type: nauc_precision_at_1_diff1 value: 36.134710485184826 - 
type: nauc_precision_at_1_max value: 25.384572790326025 - type: nauc_precision_at_1_std value: 4.591863033771824 - type: nauc_precision_at_20_diff1 value: 19.423457975148942 - type: nauc_precision_at_20_max value: 29.58123490878582 - type: nauc_precision_at_20_std value: 20.847850110821618 - type: nauc_precision_at_3_diff1 value: 24.986416623492918 - type: nauc_precision_at_3_max value: 25.973548400472975 - type: nauc_precision_at_3_std value: 9.486410455972823 - type: nauc_precision_at_5_diff1 value: 21.237741424923332 - type: nauc_precision_at_5_max value: 24.647141028200164 - type: nauc_precision_at_5_std value: 11.102785032334147 - type: nauc_recall_at_1000_diff1 value: 15.999714888817829 - type: nauc_recall_at_1000_max value: 44.34701908906545 - type: nauc_recall_at_1000_std value: 51.13471291594717 - type: nauc_recall_at_100_diff1 value: 17.401714890483706 - type: nauc_recall_at_100_max value: 33.39042631654808 - type: nauc_recall_at_100_std value: 33.944446168451584 - type: nauc_recall_at_10_diff1 value: 20.30036232399894 - type: nauc_recall_at_10_max value: 24.006718284396786 - type: nauc_recall_at_10_std value: 14.049375108518669 - type: nauc_recall_at_1_diff1 value: 37.405452629895514 - type: nauc_recall_at_1_max value: 24.444208978394023 - type: nauc_recall_at_1_std value: 4.043820373810528 - type: nauc_recall_at_20_diff1 value: 20.23582802609045 - type: nauc_recall_at_20_max value: 26.408063410785243 - type: nauc_recall_at_20_std value: 18.617479515468112 - type: nauc_recall_at_3_diff1 value: 25.53221830103098 - type: nauc_recall_at_3_max value: 24.283712329152678 - type: nauc_recall_at_3_std value: 8.428947805841867 - type: nauc_recall_at_5_diff1 value: 21.741499601020823 - type: nauc_recall_at_5_max value: 22.754924586295296 - type: nauc_recall_at_5_std value: 9.966736688169814 - type: ndcg_at_1 value: 8.977 - type: ndcg_at_10 value: 16.227 - type: ndcg_at_100 value: 21.417 - type: ndcg_at_1000 value: 24.451 - type: ndcg_at_20 value: 17.982 - type: ndcg_at_3 value: 12.206999999999999 - type: ndcg_at_5 value: 14.059 - type: precision_at_1 value: 8.977 - type: precision_at_10 value: 2.933 - type: precision_at_100 value: 0.59 - type: precision_at_1000 value: 0.087 - type: precision_at_20 value: 1.8599999999999999 - type: precision_at_3 value: 5.550999999999999 - type: precision_at_5 value: 4.340999999999999 - type: recall_at_1 value: 8.082 - type: recall_at_10 value: 25.52 - type: recall_at_100 value: 50.32 - type: recall_at_1000 value: 74.021 - type: recall_at_20 value: 32.229 - type: recall_at_3 value: 14.66 - type: recall_at_5 value: 19.062 - task: type: Retrieval dataset: name: MTEB GermanDPR (default) type: deepset/germandpr config: default split: test revision: 5129d02422a66be600ac89cd3e8531b4f97d347d metrics: - type: main_score value: 82.422 - type: map_at_1 value: 64.39 - type: map_at_10 value: 77.273 - type: map_at_100 value: 77.375 - type: map_at_1000 value: 77.376 - type: map_at_20 value: 77.351 - type: map_at_3 value: 75.46300000000001 - type: map_at_5 value: 76.878 - type: mrr_at_1 value: 64.19512195121952 - type: mrr_at_10 value: 77.15842044134736 - type: mrr_at_100 value: 77.2604854308704 - type: mrr_at_1000 value: 77.26087882190109 - type: mrr_at_20 value: 77.23572154560611 - type: mrr_at_3 value: 75.34959349593504 - type: mrr_at_5 value: 76.76422764227652 - type: nauc_map_at_1000_diff1 value: 49.73135253389972 - type: nauc_map_at_1000_max value: 8.665570717396145 - type: nauc_map_at_1000_std value: -25.920927572114522 - type: nauc_map_at_100_diff1 value: 
49.729170775336605 - type: nauc_map_at_100_max value: 8.66717979705074 - type: nauc_map_at_100_std value: -25.918338868918596 - type: nauc_map_at_10_diff1 value: 49.708681691445925 - type: nauc_map_at_10_max value: 8.830640635692113 - type: nauc_map_at_10_std value: -25.843238986304858 - type: nauc_map_at_1_diff1 value: 51.750022350988914 - type: nauc_map_at_1_max value: 3.599863010364626 - type: nauc_map_at_1_std value: -27.670122127567314 - type: nauc_map_at_20_diff1 value: 49.72609185887161 - type: nauc_map_at_20_max value: 8.766556053409218 - type: nauc_map_at_20_std value: -25.85975887517904 - type: nauc_map_at_3_diff1 value: 49.328512536255595 - type: nauc_map_at_3_max value: 9.475682028996795 - type: nauc_map_at_3_std value: -26.277349632171017 - type: nauc_map_at_5_diff1 value: 49.42801822186142 - type: nauc_map_at_5_max value: 8.788822474357252 - type: nauc_map_at_5_std value: -25.959260882028573 - type: nauc_mrr_at_1000_diff1 value: 50.13038598302397 - type: nauc_mrr_at_1000_max value: 8.734338637484832 - type: nauc_mrr_at_1000_std value: -26.653343549855908 - type: nauc_mrr_at_100_diff1 value: 50.12820392111392 - type: nauc_mrr_at_100_max value: 8.735940503917966 - type: nauc_mrr_at_100_std value: -26.65074918231251 - type: nauc_mrr_at_10_diff1 value: 50.10567888458267 - type: nauc_mrr_at_10_max value: 8.898451291748575 - type: nauc_mrr_at_10_std value: -26.572046921975655 - type: nauc_mrr_at_1_diff1 value: 52.22769994409465 - type: nauc_mrr_at_1_max value: 3.6490820146062015 - type: nauc_mrr_at_1_std value: -28.535100562320498 - type: nauc_mrr_at_20_diff1 value: 50.12462222100699 - type: nauc_mrr_at_20_max value: 8.83487018268756 - type: nauc_mrr_at_20_std value: -26.591437036958332 - type: nauc_mrr_at_3_diff1 value: 49.6987353700016 - type: nauc_mrr_at_3_max value: 9.531003760756258 - type: nauc_mrr_at_3_std value: -26.949799063124818 - type: nauc_mrr_at_5_diff1 value: 49.823881656376585 - type: nauc_mrr_at_5_max value: 8.850404667985085 - type: nauc_mrr_at_5_std value: -26.680008966088582 - type: nauc_ndcg_at_1000_diff1 value: 49.41721203361181 - type: nauc_ndcg_at_1000_max value: 9.41093067609825 - type: nauc_ndcg_at_1000_std value: -25.499543637737567 - type: nauc_ndcg_at_100_diff1 value: 49.32810419509252 - type: nauc_ndcg_at_100_max value: 9.476216458766897 - type: nauc_ndcg_at_100_std value: -25.393856250990414 - type: nauc_ndcg_at_10_diff1 value: 49.181984436623694 - type: nauc_ndcg_at_10_max value: 10.65234732763274 - type: nauc_ndcg_at_10_std value: -24.737669349012297 - type: nauc_ndcg_at_1_diff1 value: 51.750022350988914 - type: nauc_ndcg_at_1_max value: 3.599863010364626 - type: nauc_ndcg_at_1_std value: -27.670122127567314 - type: nauc_ndcg_at_20_diff1 value: 49.275394594995056 - type: nauc_ndcg_at_20_max value: 10.402059796651923 - type: nauc_ndcg_at_20_std value: -24.82329915806705 - type: nauc_ndcg_at_3_diff1 value: 48.22614352152889 - type: nauc_ndcg_at_3_max value: 11.67464280791404 - type: nauc_ndcg_at_3_std value: -25.867824868234095 - type: nauc_ndcg_at_5_diff1 value: 48.35583502987241 - type: nauc_ndcg_at_5_max value: 10.494278750448451 - type: nauc_ndcg_at_5_std value: -25.11599634172764 - type: nauc_precision_at_1000_diff1 value: .nan - type: nauc_precision_at_1000_max value: .nan - type: nauc_precision_at_1000_std value: .nan - type: nauc_precision_at_100_diff1 value: -56.39478136433852 - type: nauc_precision_at_100_max value: 86.93518577529493 - type: nauc_precision_at_100_std value: 100.0 - type: nauc_precision_at_10_diff1 value: 38.662829729133094 - 
type: nauc_precision_at_10_max value: 56.38018435740605 - type: nauc_precision_at_10_std value: 6.288091897081105 - type: nauc_precision_at_1_diff1 value: 51.750022350988914 - type: nauc_precision_at_1_max value: 3.599863010364626 - type: nauc_precision_at_1_std value: -27.670122127567314 - type: nauc_precision_at_20_diff1 value: 34.739153182429085 - type: nauc_precision_at_20_max value: 84.86908403000989 - type: nauc_precision_at_20_std value: 29.156199421219455 - type: nauc_precision_at_3_diff1 value: 42.09287362529135 - type: nauc_precision_at_3_max value: 23.629152759287074 - type: nauc_precision_at_3_std value: -23.721376911302492 - type: nauc_precision_at_5_diff1 value: 36.03866171924644 - type: nauc_precision_at_5_max value: 29.166173558775327 - type: nauc_precision_at_5_std value: -15.096374563068448 - type: nauc_recall_at_1000_diff1 value: .nan - type: nauc_recall_at_1000_max value: .nan - type: nauc_recall_at_1000_std value: .nan - type: nauc_recall_at_100_diff1 value: -56.39478136433541 - type: nauc_recall_at_100_max value: 86.93518577528111 - type: nauc_recall_at_100_std value: 100.0 - type: nauc_recall_at_10_diff1 value: 38.66282972913384 - type: nauc_recall_at_10_max value: 56.3801843574071 - type: nauc_recall_at_10_std value: 6.288091897082639 - type: nauc_recall_at_1_diff1 value: 51.750022350988914 - type: nauc_recall_at_1_max value: 3.599863010364626 - type: nauc_recall_at_1_std value: -27.670122127567314 - type: nauc_recall_at_20_diff1 value: 34.7391531824321 - type: nauc_recall_at_20_max value: 84.86908403001016 - type: nauc_recall_at_20_std value: 29.156199421220748 - type: nauc_recall_at_3_diff1 value: 42.09287362529107 - type: nauc_recall_at_3_max value: 23.629152759286946 - type: nauc_recall_at_3_std value: -23.72137691130291 - type: nauc_recall_at_5_diff1 value: 36.0386617192469 - type: nauc_recall_at_5_max value: 29.1661735587759 - type: nauc_recall_at_5_std value: -15.09637456306774 - type: ndcg_at_1 value: 64.39 - type: ndcg_at_10 value: 82.422 - type: ndcg_at_100 value: 82.86099999999999 - type: ndcg_at_1000 value: 82.87299999999999 - type: ndcg_at_20 value: 82.67999999999999 - type: ndcg_at_3 value: 78.967 - type: ndcg_at_5 value: 81.50699999999999 - type: precision_at_1 value: 64.39 - type: precision_at_10 value: 9.795 - type: precision_at_100 value: 0.9990000000000001 - type: precision_at_1000 value: 0.1 - type: precision_at_20 value: 4.946 - type: precision_at_3 value: 29.691000000000003 - type: precision_at_5 value: 19.044 - type: recall_at_1 value: 64.39 - type: recall_at_10 value: 97.951 - type: recall_at_100 value: 99.902 - type: recall_at_1000 value: 100.0 - type: recall_at_20 value: 98.92699999999999 - type: recall_at_3 value: 89.07300000000001 - type: recall_at_5 value: 95.22 - task: type: Retrieval dataset: name: MTEB GermanQuAD-Retrieval (default) type: mteb/germanquad-retrieval config: default split: test revision: f5c87ae5a2e7a5106606314eef45255f03151bb3 metrics: - type: main_score value: 94.15532365396247 - type: map_at_1 value: 90.789 - type: map_at_10 value: 94.24 - type: map_at_100 value: 94.283 - type: map_at_1000 value: 94.284 - type: map_at_20 value: 94.272 - type: map_at_3 value: 93.913 - type: map_at_5 value: 94.155 - type: mrr_at_1 value: 90.78947368421053 - type: mrr_at_10 value: 94.23987411056376 - type: mrr_at_100 value: 94.28320936825 - type: mrr_at_1000 value: 94.28350209115848 - type: mrr_at_20 value: 94.271919092559 - type: mrr_at_3 value: 93.91258318209313 - type: mrr_at_5 value: 94.15532365396247 - type: nauc_map_at_1000_diff1 
value: 89.29089310650436 - type: nauc_map_at_1000_max value: 73.83868784032414 - type: nauc_map_at_1000_std value: -11.635778561889989 - type: nauc_map_at_100_diff1 value: 89.29077225707755 - type: nauc_map_at_100_max value: 73.84002740580378 - type: nauc_map_at_100_std value: -11.644096256165092 - type: nauc_map_at_10_diff1 value: 89.29117612292366 - type: nauc_map_at_10_max value: 73.97487984981221 - type: nauc_map_at_10_std value: -11.35191794373827 - type: nauc_map_at_1_diff1 value: 89.35436544117584 - type: nauc_map_at_1_max value: 70.35936815057701 - type: nauc_map_at_1_std value: -13.598996360976903 - type: nauc_map_at_20_diff1 value: 89.2530394052653 - type: nauc_map_at_20_max value: 73.83537529419839 - type: nauc_map_at_20_std value: -11.628272822028478 - type: nauc_map_at_3_diff1 value: 89.375111893546 - type: nauc_map_at_3_max value: 74.78900366026112 - type: nauc_map_at_3_std value: -12.720905253503274 - type: nauc_map_at_5_diff1 value: 89.35358300820893 - type: nauc_map_at_5_max value: 74.31996219723239 - type: nauc_map_at_5_std value: -10.768642638210867 - type: nauc_mrr_at_1000_diff1 value: 89.29089310650436 - type: nauc_mrr_at_1000_max value: 73.83868784032414 - type: nauc_mrr_at_1000_std value: -11.635778561889989 - type: nauc_mrr_at_100_diff1 value: 89.29077225707755 - type: nauc_mrr_at_100_max value: 73.84002740580378 - type: nauc_mrr_at_100_std value: -11.644096256165092 - type: nauc_mrr_at_10_diff1 value: 89.29117612292366 - type: nauc_mrr_at_10_max value: 73.97487984981221 - type: nauc_mrr_at_10_std value: -11.35191794373827 - type: nauc_mrr_at_1_diff1 value: 89.35436544117584 - type: nauc_mrr_at_1_max value: 70.35936815057701 - type: nauc_mrr_at_1_std value: -13.598996360976903 - type: nauc_mrr_at_20_diff1 value: 89.2530394052653 - type: nauc_mrr_at_20_max value: 73.83537529419839 - type: nauc_mrr_at_20_std value: -11.628272822028478 - type: nauc_mrr_at_3_diff1 value: 89.375111893546 - type: nauc_mrr_at_3_max value: 74.78900366026112 - type: nauc_mrr_at_3_std value: -12.720905253503274 - type: nauc_mrr_at_5_diff1 value: 89.35358300820893 - type: nauc_mrr_at_5_max value: 74.31996219723239 - type: nauc_mrr_at_5_std value: -10.768642638210867 - type: nauc_ndcg_at_1000_diff1 value: 89.27620775856863 - type: nauc_ndcg_at_1000_max value: 74.2985757362615 - type: nauc_ndcg_at_1000_std value: -11.236142819703023 - type: nauc_ndcg_at_100_diff1 value: 89.27284787540731 - type: nauc_ndcg_at_100_max value: 74.33539303365968 - type: nauc_ndcg_at_100_std value: -11.469413615851936 - type: nauc_ndcg_at_10_diff1 value: 89.21496710661724 - type: nauc_ndcg_at_10_max value: 75.02035398490516 - type: nauc_ndcg_at_10_std value: -9.903255803665814 - type: nauc_ndcg_at_1_diff1 value: 89.35436544117584 - type: nauc_ndcg_at_1_max value: 70.35936815057701 - type: nauc_ndcg_at_1_std value: -13.598996360976903 - type: nauc_ndcg_at_20_diff1 value: 89.03561289544179 - type: nauc_ndcg_at_20_max value: 74.4006766600049 - type: nauc_ndcg_at_20_std value: -11.129237862587743 - type: nauc_ndcg_at_3_diff1 value: 89.46540193201693 - type: nauc_ndcg_at_3_max value: 76.87093548368378 - type: nauc_ndcg_at_3_std value: -12.484902872086767 - type: nauc_ndcg_at_5_diff1 value: 89.39924941584766 - type: nauc_ndcg_at_5_max value: 75.96975269092722 - type: nauc_ndcg_at_5_std value: -8.180295581144833 - type: nauc_precision_at_1000_diff1 value: 100.0 - type: nauc_precision_at_1000_max value: 100.0 - type: nauc_precision_at_1000_std value: 100.0 - type: nauc_precision_at_100_diff1 value: 86.93074003795302 - type: 
nauc_precision_at_100_max value: 100.0 - type: nauc_precision_at_100_std value: -174.07785375176616 - type: nauc_precision_at_10_diff1 value: 87.43064119412082 - type: nauc_precision_at_10_max value: 90.60785783417448 - type: nauc_precision_at_10_std value: 15.378710059645906 - type: nauc_precision_at_1_diff1 value: 89.35436544117584 - type: nauc_precision_at_1_max value: 70.35936815057701 - type: nauc_precision_at_1_std value: -13.598996360976903 - type: nauc_precision_at_20_diff1 value: 78.78206037685919 - type: nauc_precision_at_20_max value: 82.52264166455923 - type: nauc_precision_at_20_std value: -5.95806599216658 - type: nauc_precision_at_3_diff1 value: 90.12709256456401 - type: nauc_precision_at_3_max value: 90.72678805838154 - type: nauc_precision_at_3_std value: -11.047599315631993 - type: nauc_precision_at_5_diff1 value: 89.9066873566561 - type: nauc_precision_at_5_max value: 93.51571626543664 - type: nauc_precision_at_5_std value: 22.632403279126162 - type: nauc_recall_at_1000_diff1 value: .nan - type: nauc_recall_at_1000_max value: .nan - type: nauc_recall_at_1000_std value: .nan - type: nauc_recall_at_100_diff1 value: 86.93074003793416 - type: nauc_recall_at_100_max value: 100.0 - type: nauc_recall_at_100_std value: -174.07785375175723 - type: nauc_recall_at_10_diff1 value: 87.43064119411991 - type: nauc_recall_at_10_max value: 90.60785783417579 - type: nauc_recall_at_10_std value: 15.378710059643607 - type: nauc_recall_at_1_diff1 value: 89.35436544117584 - type: nauc_recall_at_1_max value: 70.35936815057701 - type: nauc_recall_at_1_std value: -13.598996360976903 - type: nauc_recall_at_20_diff1 value: 78.78206037685645 - type: nauc_recall_at_20_max value: 82.52264166455791 - type: nauc_recall_at_20_std value: -5.958065992168697 - type: nauc_recall_at_3_diff1 value: 90.12709256456463 - type: nauc_recall_at_3_max value: 90.7267880583832 - type: nauc_recall_at_3_std value: -11.047599315631881 - type: nauc_recall_at_5_diff1 value: 89.90668735665676 - type: nauc_recall_at_5_max value: 93.51571626543753 - type: nauc_recall_at_5_std value: 22.632403279126112 - type: ndcg_at_1 value: 90.789 - type: ndcg_at_10 value: 95.46 - type: ndcg_at_100 value: 95.652 - type: ndcg_at_1000 value: 95.659 - type: ndcg_at_20 value: 95.575 - type: ndcg_at_3 value: 94.82000000000001 - type: ndcg_at_5 value: 95.26400000000001 - type: precision_at_1 value: 90.789 - type: precision_at_10 value: 9.908999999999999 - type: precision_at_100 value: 1.0 - type: precision_at_1000 value: 0.1 - type: precision_at_20 value: 4.977 - type: precision_at_3 value: 32.471 - type: precision_at_5 value: 19.701 - type: recall_at_1 value: 90.789 - type: recall_at_10 value: 99.093 - type: recall_at_100 value: 99.955 - type: recall_at_1000 value: 100.0 - type: recall_at_20 value: 99.546 - type: recall_at_3 value: 97.414 - type: recall_at_5 value: 98.503 - task: type: STS dataset: name: MTEB GermanSTSBenchmark (default) type: jinaai/german-STSbenchmark config: default split: test revision: e36907544d44c3a247898ed81540310442329e20 metrics: - type: cosine_pearson value: 86.55319003300265 - type: cosine_spearman value: 87.50267373081324 - type: euclidean_pearson value: 87.41630636501863 - type: euclidean_spearman value: 88.02170803409365 - type: main_score value: 87.50267373081324 - type: manhattan_pearson value: 87.33703179056744 - type: manhattan_spearman value: 87.99192826922514 - type: pearson value: 86.55319003300265 - type: spearman value: 87.50267373081324 - task: type: Clustering dataset: name: MTEB HALClusteringS2S 
(default) type: lyon-nlp/clustering-hal-s2s config: default split: test revision: e06ebbbb123f8144bef1a5d18796f3dec9ae2915 metrics: - type: main_score value: 27.477557517301303 - type: v_measure value: 27.477557517301303 - type: v_measure_std value: 3.3525736581861336 - task: type: Classification dataset: name: MTEB HeadlineClassification (default) type: ai-forever/headline-classification config: default split: test revision: 2fe05ee6b5832cda29f2ef7aaad7b7fe6a3609eb metrics: - type: accuracy value: 75.0830078125 - type: f1 value: 75.08863209267814 - type: f1_weighted value: 75.08895979060917 - type: main_score value: 75.0830078125 - task: type: Retrieval dataset: name: MTEB HotpotQA (default) type: mteb/hotpotqa config: default split: test revision: ab518f4d6fcca38d87c25209f94beba119d02014 metrics: - type: map_at_1 value: 38.143 - type: map_at_10 value: 55.916999999999994 - type: map_at_100 value: 56.706 - type: map_at_1000 value: 56.77100000000001 - type: map_at_20 value: 56.367 - type: map_at_3 value: 53.111 - type: map_at_5 value: 54.839000000000006 - type: mrr_at_1 value: 76.286 - type: mrr_at_10 value: 81.879 - type: mrr_at_100 value: 82.09100000000001 - type: mrr_at_1000 value: 82.101 - type: mrr_at_20 value: 82.01 - type: mrr_at_3 value: 80.972 - type: mrr_at_5 value: 81.537 - type: ndcg_at_1 value: 76.286 - type: ndcg_at_10 value: 64.673 - type: ndcg_at_100 value: 67.527 - type: ndcg_at_1000 value: 68.857 - type: ndcg_at_20 value: 65.822 - type: ndcg_at_3 value: 60.616 - type: ndcg_at_5 value: 62.827999999999996 - type: precision_at_1 value: 76.286 - type: precision_at_10 value: 13.196 - type: precision_at_100 value: 1.544 - type: precision_at_1000 value: 0.172 - type: precision_at_20 value: 6.968000000000001 - type: precision_at_3 value: 37.992 - type: precision_at_5 value: 24.54 - type: recall_at_1 value: 38.143 - type: recall_at_10 value: 65.982 - type: recall_at_100 value: 77.225 - type: recall_at_1000 value: 86.077 - type: recall_at_20 value: 69.68299999999999 - type: recall_at_3 value: 56.989000000000004 - type: recall_at_5 value: 61.35 - type: main_score value: 64.673 - task: type: Classification dataset: name: MTEB IFlyTek (default) type: C-MTEB/IFlyTek-classification config: default split: validation revision: 421605374b29664c5fc098418fe20ada9bd55f8a metrics: - type: accuracy value: 41.67756829549827 - type: f1 value: 33.929325579581636 - type: f1_weighted value: 43.03952025643197 - type: main_score value: 41.67756829549827 - task: type: Classification dataset: name: MTEB ImdbClassification (default) type: mteb/imdb config: default split: test revision: 3d86128a09e091d6018b6d26cad27f2739fc2db7 metrics: - type: accuracy value: 91.90440000000001 - type: ap value: 88.78663714603425 - type: ap_weighted value: 88.78663714603425 - type: f1 value: 91.89564361975891 - type: f1_weighted value: 91.89564361975891 - type: main_score value: 91.90440000000001 - task: type: Classification dataset: name: MTEB InappropriatenessClassification (default) type: ai-forever/inappropriateness-classification config: default split: test revision: 601651fdc45ef243751676e62dd7a19f491c0285 metrics: - type: accuracy value: 61.0498046875 - type: ap value: 57.04240566648215 - type: ap_weighted value: 57.04240566648215 - type: f1 value: 60.867630038606954 - type: f1_weighted value: 60.867630038606954 - type: main_score value: 61.0498046875 - task: type: Classification dataset: name: MTEB JDReview (default) type: C-MTEB/JDReview-classification config: default split: test revision: 
b7c64bd89eb87f8ded463478346f76731f07bf8b metrics: - type: accuracy value: 83.50844277673546 - type: ap value: 48.46732380712268 - type: ap_weighted value: 48.46732380712268 - type: f1 value: 77.43967451387445 - type: f1_weighted value: 84.78462929014114 - type: main_score value: 83.50844277673546 - task: type: Classification dataset: name: MTEB KinopoiskClassification (default) type: ai-forever/kinopoisk-sentiment-classification config: default split: test revision: 5911f26666ac11af46cb9c6849d0dc80a378af24 metrics: - type: accuracy value: 62.393333333333324 - type: f1 value: 61.35940129568015 - type: f1_weighted value: 61.35940129568015 - type: main_score value: 62.393333333333324 - task: type: STS dataset: name: MTEB LCQMC (default) type: C-MTEB/LCQMC config: default split: test revision: 17f9b096f80380fce5ed12a9be8be7784b337daf metrics: - type: cosine_pearson value: 67.74375505907872 - type: cosine_spearman value: 75.94582231399434 - type: euclidean_pearson value: 74.52501692443582 - type: euclidean_spearman value: 75.88428434746646 - type: main_score value: 75.94582231399434 - type: manhattan_pearson value: 74.55015441749529 - type: manhattan_spearman value: 75.83288262176175 - type: pearson value: 67.74375505907872 - type: spearman value: 75.94582231399434 - task: type: Retrieval dataset: name: MTEB LEMBNarrativeQARetrieval (default) type: dwzhu/LongEmbed config: default split: test revision: 6e346642246bfb4928c560ee08640dc84d074e8c metrics: - type: map_at_1 value: 23.093 - type: map_at_10 value: 30.227999999999998 - type: map_at_100 value: 31.423000000000002 - type: map_at_1000 value: 31.533 - type: map_at_20 value: 30.835 - type: map_at_3 value: 27.983999999999998 - type: map_at_5 value: 29.253 - type: mrr_at_1 value: 23.093 - type: mrr_at_10 value: 30.227999999999998 - type: mrr_at_100 value: 31.423000000000002 - type: mrr_at_1000 value: 31.533 - type: mrr_at_20 value: 30.835 - type: mrr_at_3 value: 27.983999999999998 - type: mrr_at_5 value: 29.253 - type: ndcg_at_1 value: 23.093 - type: ndcg_at_10 value: 34.297 - type: ndcg_at_100 value: 41.049 - type: ndcg_at_1000 value: 43.566 - type: ndcg_at_20 value: 36.52 - type: ndcg_at_3 value: 29.629 - type: ndcg_at_5 value: 31.926 - type: precision_at_1 value: 23.093 - type: precision_at_10 value: 4.735 - type: precision_at_100 value: 0.8109999999999999 - type: precision_at_1000 value: 0.1 - type: precision_at_20 value: 2.8080000000000003 - type: precision_at_3 value: 11.468 - type: precision_at_5 value: 8.001 - type: recall_at_1 value: 23.093 - type: recall_at_10 value: 47.354 - type: recall_at_100 value: 81.147 - type: recall_at_1000 value: 100.0 - type: recall_at_20 value: 56.16799999999999 - type: recall_at_3 value: 34.405 - type: recall_at_5 value: 40.004 - type: main_score value: 34.297 - type: map_at_1 value: 24.361 - type: map_at_10 value: 33.641 - type: map_at_100 value: 35.104 - type: map_at_1000 value: 35.127 - type: map_at_20 value: 34.388999999999996 - type: map_at_3 value: 30.255 - type: map_at_5 value: 32.079 - type: mrr_at_1 value: 24.361 - type: mrr_at_10 value: 33.641 - type: mrr_at_100 value: 35.104 - type: mrr_at_1000 value: 35.127 - type: mrr_at_20 value: 34.388999999999996 - type: mrr_at_3 value: 30.255 - type: mrr_at_5 value: 32.079 - type: ndcg_at_1 value: 24.361 - type: ndcg_at_10 value: 39.337 - type: ndcg_at_100 value: 47.384 - type: ndcg_at_1000 value: 47.75 - type: ndcg_at_20 value: 42.077999999999996 - type: ndcg_at_3 value: 32.235 - type: ndcg_at_5 value: 35.524 - type: precision_at_1 value: 24.361 - type: 
precision_at_10 value: 5.783 - type: precision_at_100 value: 0.975 - type: precision_at_1000 value: 0.1 - type: precision_at_20 value: 3.435 - type: precision_at_3 value: 12.661 - type: precision_at_5 value: 9.193999999999999 - type: recall_at_1 value: 24.361 - type: recall_at_10 value: 57.826 - type: recall_at_100 value: 97.51100000000001 - type: recall_at_1000 value: 100.0 - type: recall_at_20 value: 68.697 - type: recall_at_3 value: 37.983 - type: recall_at_5 value: 45.972 - type: main_score value: 39.337 - type: map_at_1 value: 53.667 - type: map_at_10 value: 61.719 - type: map_at_100 value: 62.471 - type: map_at_1000 value: 62.492000000000004 - type: map_at_20 value: 62.153000000000006 - type: map_at_3 value: 59.167 - type: map_at_5 value: 60.95 - type: mrr_at_1 value: 53.667 - type: mrr_at_10 value: 61.719 - type: mrr_at_100 value: 62.471 - type: mrr_at_1000 value: 62.492000000000004 - type: mrr_at_20 value: 62.153000000000006 - type: mrr_at_3 value: 59.167 - type: mrr_at_5 value: 60.95 - type: ndcg_at_1 value: 53.667 - type: ndcg_at_10 value: 66.018 - type: ndcg_at_100 value: 69.726 - type: ndcg_at_1000 value: 70.143 - type: ndcg_at_20 value: 67.61399999999999 - type: ndcg_at_3 value: 60.924 - type: ndcg_at_5 value: 64.10900000000001 - type: precision_at_1 value: 53.667 - type: precision_at_10 value: 7.9670000000000005 - type: precision_at_100 value: 0.97 - type: precision_at_1000 value: 0.1 - type: precision_at_20 value: 4.3 - type: precision_at_3 value: 22.0 - type: precision_at_5 value: 14.732999999999999 - type: recall_at_1 value: 53.667 - type: recall_at_10 value: 79.667 - type: recall_at_100 value: 97.0 - type: recall_at_1000 value: 100.0 - type: recall_at_20 value: 86.0 - type: recall_at_3 value: 66.0 - type: recall_at_5 value: 73.667 - type: main_score value: 66.018 - task: type: Retrieval dataset: name: MTEB LEMBNeedleRetrieval (default) type: dwzhu/LongEmbed config: default split: test_256 revision: 6e346642246bfb4928c560ee08640dc84d074e8c metrics: - type: map_at_1 value: 64.0 - type: map_at_10 value: 77.083 - type: map_at_100 value: 77.265 - type: map_at_1000 value: 77.265 - type: map_at_20 value: 77.265 - type: map_at_3 value: 76.333 - type: map_at_5 value: 76.833 - type: mrr_at_1 value: 64.0 - type: mrr_at_10 value: 77.083 - type: mrr_at_100 value: 77.265 - type: mrr_at_1000 value: 77.265 - type: mrr_at_20 value: 77.265 - type: mrr_at_3 value: 76.333 - type: mrr_at_5 value: 76.833 - type: ndcg_at_1 value: 64.0 - type: ndcg_at_10 value: 82.325 - type: ndcg_at_100 value: 82.883 - type: ndcg_at_1000 value: 82.883 - type: ndcg_at_20 value: 82.883 - type: ndcg_at_3 value: 80.833 - type: ndcg_at_5 value: 81.694 - type: precision_at_1 value: 64.0 - type: precision_at_10 value: 9.8 - type: precision_at_100 value: 1.0 - type: precision_at_1000 value: 0.1 - type: precision_at_20 value: 5.0 - type: precision_at_3 value: 31.333 - type: precision_at_5 value: 19.2 - type: recall_at_1 value: 64.0 - type: recall_at_10 value: 98.0 - type: recall_at_100 value: 100.0 - type: recall_at_1000 value: 100.0 - type: recall_at_20 value: 100.0 - type: recall_at_3 value: 94.0 - type: recall_at_5 value: 96.0 - type: main_score value: 64.0 - type: map_at_1 value: 100.0 - type: map_at_10 value: 100.0 - type: map_at_100 value: 100.0 - type: map_at_1000 value: 100.0 - type: map_at_20 value: 100.0 - type: map_at_3 value: 100.0 - type: map_at_5 value: 100.0 - type: mrr_at_1 value: 100.0 - type: mrr_at_10 value: 100.0 - type: mrr_at_100 value: 100.0 - type: mrr_at_1000 value: 100.0 - type: mrr_at_20 
value: 100.0 - type: mrr_at_3 value: 100.0 - type: mrr_at_5 value: 100.0 - type: ndcg_at_1 value: 100.0 - type: ndcg_at_10 value: 100.0 - type: ndcg_at_100 value: 100.0 - type: ndcg_at_1000 value: 100.0 - type: ndcg_at_20 value: 100.0 - type: ndcg_at_3 value: 100.0 - type: ndcg_at_5 value: 100.0 - type: precision_at_1 value: 100.0 - type: precision_at_10 value: 10.0 - type: precision_at_100 value: 1.0 - type: precision_at_1000 value: 0.1 - type: precision_at_20 value: 5.0 - type: precision_at_3 value: 33.333 - type: precision_at_5 value: 20.0 - type: recall_at_1 value: 100.0 - type: recall_at_10 value: 100.0 - type: recall_at_100 value: 100.0 - type: recall_at_1000 value: 100.0 - type: recall_at_20 value: 100.0 - type: recall_at_3 value: 100.0 - type: recall_at_5 value: 100.0 - type: main_score value: 100.0 - task: type: Retrieval dataset: name: MTEB LEMBSummScreenFDRetrieval (default) type: dwzhu/LongEmbed config: default split: validation revision: 6e346642246bfb4928c560ee08640dc84d074e8c metrics: - type: map_at_1 value: 84.821 - type: map_at_10 value: 90.11200000000001 - type: map_at_100 value: 90.158 - type: map_at_1000 value: 90.158 - type: map_at_20 value: 90.137 - type: map_at_3 value: 89.385 - type: map_at_5 value: 89.876 - type: mrr_at_1 value: 84.821 - type: mrr_at_10 value: 90.11200000000001 - type: mrr_at_100 value: 90.158 - type: mrr_at_1000 value: 90.158 - type: mrr_at_20 value: 90.137 - type: mrr_at_3 value: 89.385 - type: mrr_at_5 value: 89.876 - type: ndcg_at_1 value: 84.821 - type: ndcg_at_10 value: 92.334 - type: ndcg_at_100 value: 92.535 - type: ndcg_at_1000 value: 92.535 - type: ndcg_at_20 value: 92.414 - type: ndcg_at_3 value: 90.887 - type: ndcg_at_5 value: 91.758 - type: precision_at_1 value: 84.821 - type: precision_at_10 value: 9.911 - type: precision_at_100 value: 1.0 - type: precision_at_1000 value: 0.1 - type: precision_at_20 value: 4.97 - type: precision_at_3 value: 31.746000000000002 - type: precision_at_5 value: 19.464000000000002 - type: recall_at_1 value: 84.821 - type: recall_at_10 value: 99.107 - type: recall_at_100 value: 100.0 - type: recall_at_1000 value: 100.0 - type: recall_at_20 value: 99.405 - type: recall_at_3 value: 95.238 - type: recall_at_5 value: 97.321 - type: main_score value: 92.334 - task: type: Retrieval dataset: name: MTEB MLQARetrieval (deu-deu) type: facebook/mlqa config: deu-deu split: test revision: 397ed406c1a7902140303e7faf60fff35b58d285 metrics: - type: main_score value: 67.548 - type: map_at_1 value: 56.559000000000005 - type: map_at_10 value: 63.867 - type: map_at_100 value: 64.429 - type: map_at_1000 value: 64.457 - type: map_at_20 value: 64.215 - type: map_at_3 value: 62.109 - type: map_at_5 value: 63.101 - type: mrr_at_1 value: 56.56990915134057 - type: mrr_at_10 value: 63.86820789324668 - type: mrr_at_100 value: 64.42973602152581 - type: mrr_at_1000 value: 64.45818598090155 - type: mrr_at_20 value: 64.2163052263868 - type: mrr_at_3 value: 62.10946155550634 - type: mrr_at_5 value: 63.10104143585199 - type: nauc_map_at_1000_diff1 value: 73.78440163370111 - type: nauc_map_at_1000_max value: 66.37875518052162 - type: nauc_map_at_1000_std value: -17.063915098135396 - type: nauc_map_at_100_diff1 value: 73.77180802985815 - type: nauc_map_at_100_max value: 66.38365998362033 - type: nauc_map_at_100_std value: -17.053345109661972 - type: nauc_map_at_10_diff1 value: 73.70041876696037 - type: nauc_map_at_10_max value: 66.33213342705997 - type: nauc_map_at_10_std value: -17.40657791273925 - type: nauc_map_at_1_diff1 value: 
76.8784374396948 - type: nauc_map_at_1_max value: 64.07170606935357 - type: nauc_map_at_1_std value: -18.464213686790654 - type: nauc_map_at_20_diff1 value: 73.72371377231813 - type: nauc_map_at_20_max value: 66.42108121059451 - type: nauc_map_at_20_std value: -17.05384923889036 - type: nauc_map_at_3_diff1 value: 74.08287018839246 - type: nauc_map_at_3_max value: 66.42422337760333 - type: nauc_map_at_3_std value: -17.79503404131652 - type: nauc_map_at_5_diff1 value: 73.9294779027339 - type: nauc_map_at_5_max value: 66.51752041065726 - type: nauc_map_at_5_std value: -17.67309805113804 - type: nauc_mrr_at_1000_diff1 value: 73.78389736923545 - type: nauc_mrr_at_1000_max value: 66.37929720858341 - type: nauc_mrr_at_1000_std value: -17.058591711291278 - type: nauc_mrr_at_100_diff1 value: 73.77126451253136 - type: nauc_mrr_at_100_max value: 66.38405917246607 - type: nauc_mrr_at_100_std value: -17.047251035212863 - type: nauc_mrr_at_10_diff1 value: 73.69960470665124 - type: nauc_mrr_at_10_max value: 66.33265194210313 - type: nauc_mrr_at_10_std value: -17.399659076827998 - type: nauc_mrr_at_1_diff1 value: 76.8689850260726 - type: nauc_mrr_at_1_max value: 64.09858188287487 - type: nauc_mrr_at_1_std value: -18.46064784201847 - type: nauc_mrr_at_20_diff1 value: 73.72312682063128 - type: nauc_mrr_at_20_max value: 66.42181932858745 - type: nauc_mrr_at_20_std value: -17.04690257511092 - type: nauc_mrr_at_3_diff1 value: 74.08287018839246 - type: nauc_mrr_at_3_max value: 66.42422337760333 - type: nauc_mrr_at_3_std value: -17.79503404131652 - type: nauc_mrr_at_5_diff1 value: 73.9294779027339 - type: nauc_mrr_at_5_max value: 66.51752041065726 - type: nauc_mrr_at_5_std value: -17.67309805113804 - type: nauc_ndcg_at_1000_diff1 value: 72.97825548342801 - type: nauc_ndcg_at_1000_max value: 66.96275437178257 - type: nauc_ndcg_at_1000_std value: -15.611902299641587 - type: nauc_ndcg_at_100_diff1 value: 72.58724738936613 - type: nauc_ndcg_at_100_max value: 67.16774012704182 - type: nauc_ndcg_at_100_std value: -14.945088654796812 - type: nauc_ndcg_at_10_diff1 value: 72.16253640477947 - type: nauc_ndcg_at_10_max value: 67.01746849484621 - type: nauc_ndcg_at_10_std value: -16.46102507270809 - type: nauc_ndcg_at_1_diff1 value: 76.8689850260726 - type: nauc_ndcg_at_1_max value: 64.09858188287487 - type: nauc_ndcg_at_1_std value: -18.46064784201847 - type: nauc_ndcg_at_20_diff1 value: 72.19995325129975 - type: nauc_ndcg_at_20_max value: 67.39639713797962 - type: nauc_ndcg_at_20_std value: -15.091689370748531 - type: nauc_ndcg_at_3_diff1 value: 73.13123604206514 - type: nauc_ndcg_at_3_max value: 67.23123167871547 - type: nauc_ndcg_at_3_std value: -17.492755234009156 - type: nauc_ndcg_at_5_diff1 value: 72.8154718929895 - type: nauc_ndcg_at_5_max value: 67.44578008373777 - type: nauc_ndcg_at_5_std value: -17.251840358751362 - type: nauc_precision_at_1000_diff1 value: 47.89748325983604 - type: nauc_precision_at_1000_max value: 70.47466197804906 - type: nauc_precision_at_1000_std value: 72.66193512114775 - type: nauc_precision_at_100_diff1 value: 59.493743734005356 - type: nauc_precision_at_100_max value: 74.02140147220713 - type: nauc_precision_at_100_std value: 17.26664098026236 - type: nauc_precision_at_10_diff1 value: 64.94415011040277 - type: nauc_precision_at_10_max value: 69.6963814950747 - type: nauc_precision_at_10_std value: -11.663043657012954 - type: nauc_precision_at_1_diff1 value: 76.8689850260726 - type: nauc_precision_at_1_max value: 64.09858188287487 - type: nauc_precision_at_1_std value: -18.46064784201847 
- type: nauc_precision_at_20_diff1 value: 63.145886909986416 - type: nauc_precision_at_20_max value: 72.95708033630744 - type: nauc_precision_at_20_std value: -1.5039593629280323 - type: nauc_precision_at_3_diff1 value: 69.88902201644449 - type: nauc_precision_at_3_max value: 69.80499971089935 - type: nauc_precision_at_3_std value: -16.444680766676647 - type: nauc_precision_at_5_diff1 value: 68.60869967062919 - type: nauc_precision_at_5_max value: 70.75998207564281 - type: nauc_precision_at_5_std value: -15.62613396998262 - type: nauc_recall_at_1000_diff1 value: 62.6646436338833 - type: nauc_recall_at_1000_max value: 86.17801636476078 - type: nauc_recall_at_1000_std value: 71.84718775540334 - type: nauc_recall_at_100_diff1 value: 61.110492191439505 - type: nauc_recall_at_100_max value: 75.45730686603042 - type: nauc_recall_at_100_std value: 16.202465011589428 - type: nauc_recall_at_10_diff1 value: 65.1522196516815 - type: nauc_recall_at_10_max value: 69.7626435962161 - type: nauc_recall_at_10_std value: -11.801178474770449 - type: nauc_recall_at_1_diff1 value: 76.8784374396948 - type: nauc_recall_at_1_max value: 64.07170606935357 - type: nauc_recall_at_1_std value: -18.464213686790654 - type: nauc_recall_at_20_diff1 value: 63.40332739504143 - type: nauc_recall_at_20_max value: 73.04113661090965 - type: nauc_recall_at_20_std value: -1.6609741140266947 - type: nauc_recall_at_3_diff1 value: 70.03728086098866 - type: nauc_recall_at_3_max value: 69.85953774320521 - type: nauc_recall_at_3_std value: -16.482993123411706 - type: nauc_recall_at_5_diff1 value: 68.77396121765933 - type: nauc_recall_at_5_max value: 70.8231205493519 - type: nauc_recall_at_5_std value: -15.668037770700863 - type: ndcg_at_1 value: 56.57 - type: ndcg_at_10 value: 67.548 - type: ndcg_at_100 value: 70.421 - type: ndcg_at_1000 value: 71.198 - type: ndcg_at_20 value: 68.829 - type: ndcg_at_3 value: 63.88700000000001 - type: ndcg_at_5 value: 65.689 - type: precision_at_1 value: 56.57 - type: precision_at_10 value: 7.922 - type: precision_at_100 value: 0.9299999999999999 - type: precision_at_1000 value: 0.099 - type: precision_at_20 value: 4.216 - type: precision_at_3 value: 23.015 - type: precision_at_5 value: 14.691 - type: recall_at_1 value: 56.559000000000005 - type: recall_at_10 value: 79.182 - type: recall_at_100 value: 92.946 - type: recall_at_1000 value: 99.092 - type: recall_at_20 value: 84.27900000000001 - type: recall_at_3 value: 69.023 - type: recall_at_5 value: 73.432 - task: type: Retrieval dataset: name: MTEB MLQARetrieval (deu-spa) type: facebook/mlqa config: deu-spa split: test revision: 397ed406c1a7902140303e7faf60fff35b58d285 metrics: - type: main_score value: 70.645 - type: map_at_1 value: 58.423 - type: map_at_10 value: 66.613 - type: map_at_100 value: 67.14099999999999 - type: map_at_1000 value: 67.161 - type: map_at_20 value: 66.965 - type: map_at_3 value: 64.714 - type: map_at_5 value: 65.835 - type: mrr_at_1 value: 58.4225352112676 - type: mrr_at_10 value: 66.61321260898735 - type: mrr_at_100 value: 67.13991570812132 - type: mrr_at_1000 value: 67.1598532168174 - type: mrr_at_20 value: 66.96384710024888 - type: mrr_at_3 value: 64.71361502347425 - type: mrr_at_5 value: 65.83474178403769 - type: nauc_map_at_1000_diff1 value: 73.9485117118935 - type: nauc_map_at_1000_max value: 65.74479869396299 - type: nauc_map_at_1000_std value: -20.300269749495563 - type: nauc_map_at_100_diff1 value: 73.93900406302829 - type: nauc_map_at_100_max value: 65.75508449194885 - type: nauc_map_at_100_std value: 
-20.265330791570175 - type: nauc_map_at_10_diff1 value: 73.84863233472605 - type: nauc_map_at_10_max value: 65.89377317378211 - type: nauc_map_at_10_std value: -20.404123131964695 - type: nauc_map_at_1_diff1 value: 76.73627284218519 - type: nauc_map_at_1_max value: 62.94957512510876 - type: nauc_map_at_1_std value: -20.99649749330682 - type: nauc_map_at_20_diff1 value: 73.88712006109598 - type: nauc_map_at_20_max value: 65.82057018162664 - type: nauc_map_at_20_std value: -20.269476512431915 - type: nauc_map_at_3_diff1 value: 74.21419190161502 - type: nauc_map_at_3_max value: 65.64993368062119 - type: nauc_map_at_3_std value: -21.34641749007071 - type: nauc_map_at_5_diff1 value: 74.0119419385777 - type: nauc_map_at_5_max value: 65.69809416369732 - type: nauc_map_at_5_std value: -21.16901556082261 - type: nauc_mrr_at_1000_diff1 value: 73.94915184134923 - type: nauc_mrr_at_1000_max value: 65.74522469633418 - type: nauc_mrr_at_1000_std value: -20.303028367132246 - type: nauc_mrr_at_100_diff1 value: 73.93964394728808 - type: nauc_mrr_at_100_max value: 65.75550992323707 - type: nauc_mrr_at_100_std value: -20.26808820438918 - type: nauc_mrr_at_10_diff1 value: 73.84863233472605 - type: nauc_mrr_at_10_max value: 65.89377317378211 - type: nauc_mrr_at_10_std value: -20.404123131964695 - type: nauc_mrr_at_1_diff1 value: 76.73627284218519 - type: nauc_mrr_at_1_max value: 62.94957512510876 - type: nauc_mrr_at_1_std value: -20.99649749330682 - type: nauc_mrr_at_20_diff1 value: 73.88775721128745 - type: nauc_mrr_at_20_max value: 65.820991355628 - type: nauc_mrr_at_20_std value: -20.272216587019734 - type: nauc_mrr_at_3_diff1 value: 74.21419190161502 - type: nauc_mrr_at_3_max value: 65.64993368062119 - type: nauc_mrr_at_3_std value: -21.34641749007071 - type: nauc_mrr_at_5_diff1 value: 74.0119419385777 - type: nauc_mrr_at_5_max value: 65.69809416369732 - type: nauc_mrr_at_5_std value: -21.16901556082261 - type: nauc_ndcg_at_1000_diff1 value: 73.29396365944277 - type: nauc_ndcg_at_1000_max value: 66.44879592109541 - type: nauc_ndcg_at_1000_std value: -19.285991058788195 - type: nauc_ndcg_at_100_diff1 value: 73.0159172721162 - type: nauc_ndcg_at_100_max value: 66.76216389231388 - type: nauc_ndcg_at_100_std value: -18.27931368094887 - type: nauc_ndcg_at_10_diff1 value: 72.42096650774693 - type: nauc_ndcg_at_10_max value: 67.48592688463306 - type: nauc_ndcg_at_10_std value: -18.91453756077581 - type: nauc_ndcg_at_1_diff1 value: 76.73627284218519 - type: nauc_ndcg_at_1_max value: 62.94957512510876 - type: nauc_ndcg_at_1_std value: -20.99649749330682 - type: nauc_ndcg_at_20_diff1 value: 72.53699362385684 - type: nauc_ndcg_at_20_max value: 67.22763976357872 - type: nauc_ndcg_at_20_std value: -18.299910635008338 - type: nauc_ndcg_at_3_diff1 value: 73.3698453761989 - type: nauc_ndcg_at_3_max value: 66.71056987289383 - type: nauc_ndcg_at_3_std value: -21.405154376652803 - type: nauc_ndcg_at_5_diff1 value: 72.9491030712935 - type: nauc_ndcg_at_5_max value: 66.85786103137077 - type: nauc_ndcg_at_5_std value: -21.04005053344073 - type: nauc_precision_at_1000_diff1 value: 17.02462370967451 - type: nauc_precision_at_1000_max value: 48.03260752496052 - type: nauc_precision_at_1000_std value: 87.56077915079334 - type: nauc_precision_at_100_diff1 value: 58.590352501194985 - type: nauc_precision_at_100_max value: 78.2649015433222 - type: nauc_precision_at_100_std value: 28.05030453158992 - type: nauc_precision_at_10_diff1 value: 64.89497928764766 - type: nauc_precision_at_10_max value: 75.93257124951242 - type: 
nauc_precision_at_10_std value: -9.825306994117462 - type: nauc_precision_at_1_diff1 value: 76.73627284218519 - type: nauc_precision_at_1_max value: 62.94957512510876 - type: nauc_precision_at_1_std value: -20.99649749330682 - type: nauc_precision_at_20_diff1 value: 62.11366204321558 - type: nauc_precision_at_20_max value: 75.9571427846493 - type: nauc_precision_at_20_std value: -0.94585212808191 - type: nauc_precision_at_3_diff1 value: 70.52940972112398 - type: nauc_precision_at_3_max value: 70.3402053170779 - type: nauc_precision_at_3_std value: -21.579778424241304 - type: nauc_precision_at_5_diff1 value: 68.78962580223575 - type: nauc_precision_at_5_max value: 71.41410894398376 - type: nauc_precision_at_5_std value: -20.415603405161956 - type: nauc_recall_at_1000_diff1 value: 55.88625447348128 - type: nauc_recall_at_1000_max value: 100.0 - type: nauc_recall_at_1000_std value: 100.0 - type: nauc_recall_at_100_diff1 value: 61.17942268389525 - type: nauc_recall_at_100_max value: 81.12207841563487 - type: nauc_recall_at_100_std value: 27.141215257528113 - type: nauc_recall_at_10_diff1 value: 64.8949792876478 - type: nauc_recall_at_10_max value: 75.93257124951249 - type: nauc_recall_at_10_std value: -9.825306994117323 - type: nauc_recall_at_1_diff1 value: 76.73627284218519 - type: nauc_recall_at_1_max value: 62.94957512510876 - type: nauc_recall_at_1_std value: -20.99649749330682 - type: nauc_recall_at_20_diff1 value: 63.07808719241162 - type: nauc_recall_at_20_max value: 76.96808746317542 - type: nauc_recall_at_20_std value: -1.5235053258631275 - type: nauc_recall_at_3_diff1 value: 70.52940972112405 - type: nauc_recall_at_3_max value: 70.3402053170779 - type: nauc_recall_at_3_std value: -21.57977842424124 - type: nauc_recall_at_5_diff1 value: 68.78962580223575 - type: nauc_recall_at_5_max value: 71.41410894398392 - type: nauc_recall_at_5_std value: -20.415603405161793 - type: ndcg_at_1 value: 58.423 - type: ndcg_at_10 value: 70.645 - type: ndcg_at_100 value: 73.277 - type: ndcg_at_1000 value: 73.785 - type: ndcg_at_20 value: 71.918 - type: ndcg_at_3 value: 66.679 - type: ndcg_at_5 value: 68.72200000000001 - type: precision_at_1 value: 58.423 - type: precision_at_10 value: 8.338 - type: precision_at_100 value: 0.959 - type: precision_at_1000 value: 0.1 - type: precision_at_20 value: 4.423 - type: precision_at_3 value: 24.113 - type: precision_at_5 value: 15.47 - type: recall_at_1 value: 58.423 - type: recall_at_10 value: 83.38 - type: recall_at_100 value: 95.887 - type: recall_at_1000 value: 99.831 - type: recall_at_20 value: 88.39399999999999 - type: recall_at_3 value: 72.33800000000001 - type: recall_at_5 value: 77.352 - task: type: Retrieval dataset: name: MTEB MLQARetrieval (deu-eng) type: facebook/mlqa config: deu-eng split: test revision: 397ed406c1a7902140303e7faf60fff35b58d285 metrics: - type: main_score value: 67.067 - type: map_at_1 value: 55.861000000000004 - type: map_at_10 value: 63.42100000000001 - type: map_at_100 value: 64.03 - type: map_at_1000 value: 64.05999999999999 - type: map_at_20 value: 63.819 - type: map_at_3 value: 61.773 - type: map_at_5 value: 62.736999999999995 - type: mrr_at_1 value: 55.88300465322402 - type: mrr_at_10 value: 63.43111082973707 - type: mrr_at_100 value: 64.03962373590272 - type: mrr_at_1000 value: 64.0698259866376 - type: mrr_at_20 value: 63.82871766489112 - type: mrr_at_3 value: 61.78447448112865 - type: mrr_at_5 value: 62.74835659945346 - type: nauc_map_at_1000_diff1 value: 74.58505763417352 - type: nauc_map_at_1000_max value: 66.26060764852198 
- type: nauc_map_at_1000_std value: -16.896178230873897 - type: nauc_map_at_100_diff1 value: 74.57057487892857 - type: nauc_map_at_100_max value: 66.26600433283826 - type: nauc_map_at_100_std value: -16.87596113104189 - type: nauc_map_at_10_diff1 value: 74.53453636322749 - type: nauc_map_at_10_max value: 66.27501737773804 - type: nauc_map_at_10_std value: -17.178743257781775 - type: nauc_map_at_1_diff1 value: 77.63067209375254 - type: nauc_map_at_1_max value: 64.17718675702672 - type: nauc_map_at_1_std value: -17.639521106853717 - type: nauc_map_at_20_diff1 value: 74.52007402431164 - type: nauc_map_at_20_max value: 66.28276291359268 - type: nauc_map_at_20_std value: -16.939292897754758 - type: nauc_map_at_3_diff1 value: 74.79187974631951 - type: nauc_map_at_3_max value: 66.23256568210611 - type: nauc_map_at_3_std value: -17.894889918934112 - type: nauc_map_at_5_diff1 value: 74.63011328882517 - type: nauc_map_at_5_max value: 66.35411054978499 - type: nauc_map_at_5_std value: -17.50140342194211 - type: nauc_mrr_at_1000_diff1 value: 74.57520089771667 - type: nauc_mrr_at_1000_max value: 66.27270912845914 - type: nauc_mrr_at_1000_std value: -16.84012675362397 - type: nauc_mrr_at_100_diff1 value: 74.56070964572156 - type: nauc_mrr_at_100_max value: 66.2780701126926 - type: nauc_mrr_at_100_std value: -16.820035083069865 - type: nauc_mrr_at_10_diff1 value: 74.52455978435117 - type: nauc_mrr_at_10_max value: 66.28697244023137 - type: nauc_mrr_at_10_std value: -17.122477723330523 - type: nauc_mrr_at_1_diff1 value: 77.60643512422061 - type: nauc_mrr_at_1_max value: 64.21736966061896 - type: nauc_mrr_at_1_std value: -17.56627338275146 - type: nauc_mrr_at_20_diff1 value: 74.5099814266373 - type: nauc_mrr_at_20_max value: 66.29485560556576 - type: nauc_mrr_at_20_std value: -16.882350027335306 - type: nauc_mrr_at_3_diff1 value: 74.78132817375507 - type: nauc_mrr_at_3_max value: 66.24761860047623 - type: nauc_mrr_at_3_std value: -17.833128575678998 - type: nauc_mrr_at_5_diff1 value: 74.6193031207433 - type: nauc_mrr_at_5_max value: 66.36951764432901 - type: nauc_mrr_at_5_std value: -17.438203106324227 - type: nauc_ndcg_at_1000_diff1 value: 73.79386161629151 - type: nauc_ndcg_at_1000_max value: 66.84013038018082 - type: nauc_ndcg_at_1000_std value: -15.387358822700667 - type: nauc_ndcg_at_100_diff1 value: 73.36132885277745 - type: nauc_ndcg_at_100_max value: 67.04416926901568 - type: nauc_ndcg_at_100_std value: -14.503256942521972 - type: nauc_ndcg_at_10_diff1 value: 73.11847332785027 - type: nauc_ndcg_at_10_max value: 67.02149621303091 - type: nauc_ndcg_at_10_std value: -16.142234662067782 - type: nauc_ndcg_at_1_diff1 value: 77.60643512422061 - type: nauc_ndcg_at_1_max value: 64.21736966061896 - type: nauc_ndcg_at_1_std value: -17.56627338275146 - type: nauc_ndcg_at_20_diff1 value: 72.97961452569768 - type: nauc_ndcg_at_20_max value: 67.12369127081152 - type: nauc_ndcg_at_20_std value: -15.11921773223936 - type: nauc_ndcg_at_3_diff1 value: 73.77769312598772 - type: nauc_ndcg_at_3_max value: 66.94438755852309 - type: nauc_ndcg_at_3_std value: -17.75960443830741 - type: nauc_ndcg_at_5_diff1 value: 73.43991209562891 - type: nauc_ndcg_at_5_max value: 67.21682951737418 - type: nauc_ndcg_at_5_std value: -17.013510008231805 - type: nauc_precision_at_1000_diff1 value: 51.30633281948362 - type: nauc_precision_at_1000_max value: 76.78675288883846 - type: nauc_precision_at_1000_std value: 71.70041985304397 - type: nauc_precision_at_100_diff1 value: 59.86656455853326 - type: nauc_precision_at_100_max value: 
74.41958422732161 - type: nauc_precision_at_100_std value: 22.098920296069124 - type: nauc_precision_at_10_diff1 value: 66.4696166928741 - type: nauc_precision_at_10_max value: 69.88463108697104 - type: nauc_precision_at_10_std value: -10.707950954702742 - type: nauc_precision_at_1_diff1 value: 77.60643512422061 - type: nauc_precision_at_1_max value: 64.21736966061896 - type: nauc_precision_at_1_std value: -17.56627338275146 - type: nauc_precision_at_20_diff1 value: 63.45094585276983 - type: nauc_precision_at_20_max value: 71.57741245347195 - type: nauc_precision_at_20_std value: -2.2211545419051744 - type: nauc_precision_at_3_diff1 value: 70.28060818081384 - type: nauc_precision_at_3_max value: 69.22652927816439 - type: nauc_precision_at_3_std value: -17.158576243559434 - type: nauc_precision_at_5_diff1 value: 68.90765418427162 - type: nauc_precision_at_5_max value: 70.32585273389111 - type: nauc_precision_at_5_std value: -14.950363729664524 - type: nauc_recall_at_1000_diff1 value: 65.11255117927331 - type: nauc_recall_at_1000_max value: 88.35641213283338 - type: nauc_recall_at_1000_std value: 69.89792573640547 - type: nauc_recall_at_100_diff1 value: 61.46376457272238 - type: nauc_recall_at_100_max value: 75.48265142243015 - type: nauc_recall_at_100_std value: 21.223182712042178 - type: nauc_recall_at_10_diff1 value: 66.89353375308997 - type: nauc_recall_at_10_max value: 70.06655416883785 - type: nauc_recall_at_10_std value: -11.100871879439435 - type: nauc_recall_at_1_diff1 value: 77.63067209375254 - type: nauc_recall_at_1_max value: 64.17718675702672 - type: nauc_recall_at_1_std value: -17.639521106853717 - type: nauc_recall_at_20_diff1 value: 63.98532276331878 - type: nauc_recall_at_20_max value: 71.81562599791899 - type: nauc_recall_at_20_std value: -2.696537977147695 - type: nauc_recall_at_3_diff1 value: 70.4507655865698 - type: nauc_recall_at_3_max value: 69.25705030141037 - type: nauc_recall_at_3_std value: -17.299948348202836 - type: nauc_recall_at_5_diff1 value: 69.09152857901888 - type: nauc_recall_at_5_max value: 70.35609636026405 - type: nauc_recall_at_5_std value: -15.105012139255896 - type: ndcg_at_1 value: 55.883 - type: ndcg_at_10 value: 67.067 - type: ndcg_at_100 value: 70.07 - type: ndcg_at_1000 value: 70.875 - type: ndcg_at_20 value: 68.498 - type: ndcg_at_3 value: 63.666 - type: ndcg_at_5 value: 65.40599999999999 - type: precision_at_1 value: 55.883 - type: precision_at_10 value: 7.8549999999999995 - type: precision_at_100 value: 0.928 - type: precision_at_1000 value: 0.099 - type: precision_at_20 value: 4.2090000000000005 - type: precision_at_3 value: 23.052 - type: precision_at_5 value: 14.677999999999999 - type: recall_at_1 value: 55.861000000000004 - type: recall_at_10 value: 78.495 - type: recall_at_100 value: 92.688 - type: recall_at_1000 value: 99.02499999999999 - type: recall_at_20 value: 84.124 - type: recall_at_3 value: 69.123 - type: recall_at_5 value: 73.355 - task: type: Retrieval dataset: name: MTEB MLQARetrieval (spa-deu) type: facebook/mlqa config: spa-deu split: test revision: 397ed406c1a7902140303e7faf60fff35b58d285 metrics: - type: main_score value: 73.90299999999999 - type: map_at_1 value: 61.236000000000004 - type: map_at_10 value: 69.88799999999999 - type: map_at_100 value: 70.319 - type: map_at_1000 value: 70.341 - type: map_at_20 value: 70.16799999999999 - type: map_at_3 value: 68.104 - type: map_at_5 value: 69.164 - type: mrr_at_1 value: 61.2739571589628 - type: mrr_at_10 value: 69.92589162684993 - type: mrr_at_100 value: 70.35245455509234 - 
type: mrr_at_1000 value: 70.37438351396742 - type: mrr_at_20 value: 70.20247469915404 - type: mrr_at_3 value: 68.14167606163099 - type: mrr_at_5 value: 69.20142803457354 - type: nauc_map_at_1000_diff1 value: 74.70416754842327 - type: nauc_map_at_1000_max value: 65.86915994583384 - type: nauc_map_at_1000_std value: -19.04437483534443 - type: nauc_map_at_100_diff1 value: 74.70011798058674 - type: nauc_map_at_100_max value: 65.88507779167188 - type: nauc_map_at_100_std value: -19.018670970643786 - type: nauc_map_at_10_diff1 value: 74.6362126804427 - type: nauc_map_at_10_max value: 66.05733054427198 - type: nauc_map_at_10_std value: -19.034317737897354 - type: nauc_map_at_1_diff1 value: 77.24970536833601 - type: nauc_map_at_1_max value: 62.07820573048406 - type: nauc_map_at_1_std value: -20.917086586335078 - type: nauc_map_at_20_diff1 value: 74.64113920401083 - type: nauc_map_at_20_max value: 65.89991740166793 - type: nauc_map_at_20_std value: -19.09987515041243 - type: nauc_map_at_3_diff1 value: 74.6518162332119 - type: nauc_map_at_3_max value: 66.10312348194024 - type: nauc_map_at_3_std value: -18.95881457716116 - type: nauc_map_at_5_diff1 value: 74.55141020670321 - type: nauc_map_at_5_max value: 65.94345752979342 - type: nauc_map_at_5_std value: -19.453976877992304 - type: nauc_mrr_at_1000_diff1 value: 74.64458488344088 - type: nauc_mrr_at_1000_max value: 65.84575328456057 - type: nauc_mrr_at_1000_std value: -18.901614615119904 - type: nauc_mrr_at_100_diff1 value: 74.64058497924627 - type: nauc_mrr_at_100_max value: 65.86170461767928 - type: nauc_mrr_at_100_std value: -18.87601697091505 - type: nauc_mrr_at_10_diff1 value: 74.57266634464752 - type: nauc_mrr_at_10_max value: 66.03331587645152 - type: nauc_mrr_at_10_std value: -18.87888060105393 - type: nauc_mrr_at_1_diff1 value: 77.19578272647183 - type: nauc_mrr_at_1_max value: 62.05252035478773 - type: nauc_mrr_at_1_std value: -20.790530940625267 - type: nauc_mrr_at_20_diff1 value: 74.5808171250021 - type: nauc_mrr_at_20_max value: 65.87643606587798 - type: nauc_mrr_at_20_std value: -18.95476583474199 - type: nauc_mrr_at_3_diff1 value: 74.5917053289191 - type: nauc_mrr_at_3_max value: 66.08044079438714 - type: nauc_mrr_at_3_std value: -18.81168463163586 - type: nauc_mrr_at_5_diff1 value: 74.48934579694608 - type: nauc_mrr_at_5_max value: 65.91993162383771 - type: nauc_mrr_at_5_std value: -19.302710791338797 - type: nauc_ndcg_at_1000_diff1 value: 74.20191283992186 - type: nauc_ndcg_at_1000_max value: 66.60831175771229 - type: nauc_ndcg_at_1000_std value: -18.175208725175484 - type: nauc_ndcg_at_100_diff1 value: 74.07713451642955 - type: nauc_ndcg_at_100_max value: 67.02028626335476 - type: nauc_ndcg_at_100_std value: -17.36560972181693 - type: nauc_ndcg_at_10_diff1 value: 73.63235521598476 - type: nauc_ndcg_at_10_max value: 67.8118473312638 - type: nauc_ndcg_at_10_std value: -17.647560577355915 - type: nauc_ndcg_at_1_diff1 value: 77.19578272647183 - type: nauc_ndcg_at_1_max value: 62.05252035478773 - type: nauc_ndcg_at_1_std value: -20.790530940625267 - type: nauc_ndcg_at_20_diff1 value: 73.65300308228291 - type: nauc_ndcg_at_20_max value: 67.18353402731985 - type: nauc_ndcg_at_20_std value: -17.9240756389792 - type: nauc_ndcg_at_3_diff1 value: 73.73764900202292 - type: nauc_ndcg_at_3_max value: 67.60840957876889 - type: nauc_ndcg_at_3_std value: -17.962667543518933 - type: nauc_ndcg_at_5_diff1 value: 73.49040500302092 - type: nauc_ndcg_at_5_max value: 67.41251918514402 - type: nauc_ndcg_at_5_std value: -18.851877225955523 - type: 
nauc_precision_at_1000_diff1 value: -18.652906102973922 - type: nauc_precision_at_1000_max value: 2.1701672475574885 - type: nauc_precision_at_1000_std value: 61.713411950188835 - type: nauc_precision_at_100_diff1 value: 62.37565302288498 - type: nauc_precision_at_100_max value: 76.96921843049006 - type: nauc_precision_at_100_std value: 19.152009040219678 - type: nauc_precision_at_10_diff1 value: 68.14047344105212 - type: nauc_precision_at_10_max value: 77.7177273849099 - type: nauc_precision_at_10_std value: -9.124325941493698 - type: nauc_precision_at_1_diff1 value: 77.19578272647183 - type: nauc_precision_at_1_max value: 62.05252035478773 - type: nauc_precision_at_1_std value: -20.790530940625267 - type: nauc_precision_at_20_diff1 value: 65.38487456362745 - type: nauc_precision_at_20_max value: 74.61122933443669 - type: nauc_precision_at_20_std value: -8.129775929648341 - type: nauc_precision_at_3_diff1 value: 70.45937744142297 - type: nauc_precision_at_3_max value: 73.03004233073901 - type: nauc_precision_at_3_std value: -14.246554579025158 - type: nauc_precision_at_5_diff1 value: 69.02821772428955 - type: nauc_precision_at_5_max value: 73.52949774726446 - type: nauc_precision_at_5_std value: -16.355747231517757 - type: nauc_recall_at_1000_diff1 value: 35.804192824985755 - type: nauc_recall_at_1000_max value: 61.367785756485894 - type: nauc_recall_at_1000_std value: 54.01380822466869 - type: nauc_recall_at_100_diff1 value: 67.96210883597479 - type: nauc_recall_at_100_max value: 82.38124823732169 - type: nauc_recall_at_100_std value: 16.814922595309966 - type: nauc_recall_at_10_diff1 value: 68.21964459634341 - type: nauc_recall_at_10_max value: 77.68301934858845 - type: nauc_recall_at_10_std value: -9.430792913885066 - type: nauc_recall_at_1_diff1 value: 77.24970536833601 - type: nauc_recall_at_1_max value: 62.07820573048406 - type: nauc_recall_at_1_std value: -20.917086586335078 - type: nauc_recall_at_20_diff1 value: 66.60569906579487 - type: nauc_recall_at_20_max value: 75.66163186604354 - type: nauc_recall_at_20_std value: -9.09826205489828 - type: nauc_recall_at_3_diff1 value: 70.52323701841641 - type: nauc_recall_at_3_max value: 73.03478107411232 - type: nauc_recall_at_3_std value: -14.432325989967962 - type: nauc_recall_at_5_diff1 value: 69.08521261524373 - type: nauc_recall_at_5_max value: 73.51150270382094 - type: nauc_recall_at_5_std value: -16.569387503524368 - type: ndcg_at_1 value: 61.273999999999994 - type: ndcg_at_10 value: 73.90299999999999 - type: ndcg_at_100 value: 75.983 - type: ndcg_at_1000 value: 76.488 - type: ndcg_at_20 value: 74.921 - type: ndcg_at_3 value: 70.277 - type: ndcg_at_5 value: 72.172 - type: precision_at_1 value: 61.273999999999994 - type: precision_at_10 value: 8.641 - type: precision_at_100 value: 0.962 - type: precision_at_1000 value: 0.1 - type: precision_at_20 value: 4.524 - type: precision_at_3 value: 25.517 - type: precision_at_5 value: 16.223000000000003 - type: recall_at_1 value: 61.236000000000004 - type: recall_at_10 value: 86.37700000000001 - type: recall_at_100 value: 96.054 - type: recall_at_1000 value: 99.887 - type: recall_at_20 value: 90.398 - type: recall_at_3 value: 76.51299999999999 - type: recall_at_5 value: 81.07900000000001 - task: type: Retrieval dataset: name: MTEB MLQARetrieval (spa-spa) type: facebook/mlqa config: spa-spa split: test revision: 397ed406c1a7902140303e7faf60fff35b58d285 metrics: - type: main_score value: 68.632 - type: map_at_1 value: 57.046 - type: map_at_10 value: 64.869 - type: map_at_100 value: 65.384 - 
type: map_at_1000 value: 65.413 - type: map_at_20 value: 65.185 - type: map_at_3 value: 63.178 - type: map_at_5 value: 64.12 - type: mrr_at_1 value: 57.05579889544848 - type: mrr_at_10 value: 64.8806425382317 - type: mrr_at_100 value: 65.39469233244084 - type: mrr_at_1000 value: 65.42342199403159 - type: mrr_at_20 value: 65.19634815919534 - type: mrr_at_3 value: 63.18796419729591 - type: mrr_at_5 value: 64.13159398209874 - type: nauc_map_at_1000_diff1 value: 73.23803038674018 - type: nauc_map_at_1000_max value: 67.44156201421714 - type: nauc_map_at_1000_std value: -8.60143026450049 - type: nauc_map_at_100_diff1 value: 73.22575613034235 - type: nauc_map_at_100_max value: 67.44735143420195 - type: nauc_map_at_100_std value: -8.576905069492895 - type: nauc_map_at_10_diff1 value: 73.11950129610865 - type: nauc_map_at_10_max value: 67.45107232305055 - type: nauc_map_at_10_std value: -8.799837857015392 - type: nauc_map_at_1_diff1 value: 76.18354072047988 - type: nauc_map_at_1_max value: 65.03342186728786 - type: nauc_map_at_1_std value: -10.867650288695796 - type: nauc_map_at_20_diff1 value: 73.21570748770948 - type: nauc_map_at_20_max value: 67.50340321088724 - type: nauc_map_at_20_std value: -8.594057184944676 - type: nauc_map_at_3_diff1 value: 73.17239276163892 - type: nauc_map_at_3_max value: 67.06319504819103 - type: nauc_map_at_3_std value: -9.883216310270528 - type: nauc_map_at_5_diff1 value: 73.11913507367727 - type: nauc_map_at_5_max value: 67.27497019567078 - type: nauc_map_at_5_std value: -9.497714822103118 - type: nauc_mrr_at_1000_diff1 value: 73.22971233311306 - type: nauc_mrr_at_1000_max value: 67.42977229057223 - type: nauc_mrr_at_1000_std value: -8.550068702273297 - type: nauc_mrr_at_100_diff1 value: 73.21744467317815 - type: nauc_mrr_at_100_max value: 67.43557491068093 - type: nauc_mrr_at_100_std value: -8.52559275190607 - type: nauc_mrr_at_10_diff1 value: 73.11075619726137 - type: nauc_mrr_at_10_max value: 67.43889760205286 - type: nauc_mrr_at_10_std value: -8.74617232559183 - type: nauc_mrr_at_1_diff1 value: 76.17529975949547 - type: nauc_mrr_at_1_max value: 65.02401127001608 - type: nauc_mrr_at_1_std value: -10.817814457633952 - type: nauc_mrr_at_20_diff1 value: 73.20689275225138 - type: nauc_mrr_at_20_max value: 67.49111752272192 - type: nauc_mrr_at_20_std value: -8.539827528410353 - type: nauc_mrr_at_3_diff1 value: 73.16291729623958 - type: nauc_mrr_at_3_max value: 67.05300993427998 - type: nauc_mrr_at_3_std value: -9.827915885680811 - type: nauc_mrr_at_5_diff1 value: 73.11055686484109 - type: nauc_mrr_at_5_max value: 67.26299851089122 - type: nauc_mrr_at_5_std value: -9.445190276650903 - type: nauc_ndcg_at_1000_diff1 value: 72.58833638407177 - type: nauc_ndcg_at_1000_max value: 68.10447506371374 - type: nauc_ndcg_at_1000_std value: -6.910306241546282 - type: nauc_ndcg_at_100_diff1 value: 72.24524849631476 - type: nauc_ndcg_at_100_max value: 68.30659210081238 - type: nauc_ndcg_at_100_std value: -6.04305364268931 - type: nauc_ndcg_at_10_diff1 value: 71.87363502582961 - type: nauc_ndcg_at_10_max value: 68.5010009653693 - type: nauc_ndcg_at_10_std value: -7.021281296450588 - type: nauc_ndcg_at_1_diff1 value: 76.17529975949547 - type: nauc_ndcg_at_1_max value: 65.02401127001608 - type: nauc_ndcg_at_1_std value: -10.817814457633952 - type: nauc_ndcg_at_20_diff1 value: 72.21241010439327 - type: nauc_ndcg_at_20_max value: 68.71743274030551 - type: nauc_ndcg_at_20_std value: -6.186629577195946 - type: nauc_ndcg_at_3_diff1 value: 72.08204674794459 - type: nauc_ndcg_at_3_max value: 
67.5958365046156 - type: nauc_ndcg_at_3_std value: -9.576418336610345 - type: nauc_ndcg_at_5_diff1 value: 71.93179095844508 - type: nauc_ndcg_at_5_max value: 68.01914639754217 - type: nauc_ndcg_at_5_std value: -8.833768332910777 - type: nauc_precision_at_1000_diff1 value: 63.0051360227489 - type: nauc_precision_at_1000_max value: 79.93532442313229 - type: nauc_precision_at_1000_std value: 52.869517607133254 - type: nauc_precision_at_100_diff1 value: 62.43301501857154 - type: nauc_precision_at_100_max value: 75.57280416668183 - type: nauc_precision_at_100_std value: 26.758300486132747 - type: nauc_precision_at_10_diff1 value: 66.29806375971134 - type: nauc_precision_at_10_max value: 73.40301413754797 - type: nauc_precision_at_10_std value: 1.9858547295235462 - type: nauc_precision_at_1_diff1 value: 76.17529975949547 - type: nauc_precision_at_1_max value: 65.02401127001608 - type: nauc_precision_at_1_std value: -10.817814457633952 - type: nauc_precision_at_20_diff1 value: 67.05111836051105 - type: nauc_precision_at_20_max value: 76.09783190824155 - type: nauc_precision_at_20_std value: 9.906010659515564 - type: nauc_precision_at_3_diff1 value: 68.44186679250453 - type: nauc_precision_at_3_max value: 69.30301351119388 - type: nauc_precision_at_3_std value: -8.566522518882348 - type: nauc_precision_at_5_diff1 value: 67.51737199297388 - type: nauc_precision_at_5_max value: 70.75887601590472 - type: nauc_precision_at_5_std value: -6.278983102710238 - type: nauc_recall_at_1000_diff1 value: 65.12360093170948 - type: nauc_recall_at_1000_max value: 82.60209843191132 - type: nauc_recall_at_1000_std value: 51.740179583368636 - type: nauc_recall_at_100_diff1 value: 62.82007697326819 - type: nauc_recall_at_100_max value: 76.04844844677562 - type: nauc_recall_at_100_std value: 26.4678415019248 - type: nauc_recall_at_10_diff1 value: 66.28557566848767 - type: nauc_recall_at_10_max value: 73.40302709828738 - type: nauc_recall_at_10_std value: 1.9224272854613582 - type: nauc_recall_at_1_diff1 value: 76.18354072047988 - type: nauc_recall_at_1_max value: 65.03342186728786 - type: nauc_recall_at_1_std value: -10.867650288695796 - type: nauc_recall_at_20_diff1 value: 67.03430451094992 - type: nauc_recall_at_20_max value: 76.09474005171319 - type: nauc_recall_at_20_std value: 9.815888637851074 - type: nauc_recall_at_3_diff1 value: 68.44411411344718 - type: nauc_recall_at_3_max value: 69.30502737137265 - type: nauc_recall_at_3_std value: -8.629526329714132 - type: nauc_recall_at_5_diff1 value: 67.51469265953514 - type: nauc_recall_at_5_max value: 70.76969893818111 - type: nauc_recall_at_5_std value: -6.325600167105444 - type: ndcg_at_1 value: 57.056 - type: ndcg_at_10 value: 68.632 - type: ndcg_at_100 value: 71.202 - type: ndcg_at_1000 value: 71.97099999999999 - type: ndcg_at_20 value: 69.785 - type: ndcg_at_3 value: 65.131 - type: ndcg_at_5 value: 66.834 - type: precision_at_1 value: 57.056 - type: precision_at_10 value: 8.044 - type: precision_at_100 value: 0.9259999999999999 - type: precision_at_1000 value: 0.099 - type: precision_at_20 value: 4.251 - type: precision_at_3 value: 23.589 - type: precision_at_5 value: 14.984 - type: recall_at_1 value: 57.046 - type: recall_at_10 value: 80.423 - type: recall_at_100 value: 92.582 - type: recall_at_1000 value: 98.638 - type: recall_at_20 value: 84.993 - type: recall_at_3 value: 70.758 - type: recall_at_5 value: 74.9 - task: type: Retrieval dataset: name: MTEB MLQARetrieval (spa-eng) type: facebook/mlqa config: spa-eng split: test revision: 
397ed406c1a7902140303e7faf60fff35b58d285 metrics: - type: main_score value: 68.765 - type: map_at_1 value: 56.538999999999994 - type: map_at_10 value: 64.816 - type: map_at_100 value: 65.325 - type: map_at_1000 value: 65.352 - type: map_at_20 value: 65.113 - type: map_at_3 value: 62.934999999999995 - type: map_at_5 value: 64.063 - type: mrr_at_1 value: 56.539120502569965 - type: mrr_at_10 value: 64.81561556661505 - type: mrr_at_100 value: 65.32464238613954 - type: mrr_at_1000 value: 65.35206516602133 - type: mrr_at_20 value: 65.11270445292227 - type: mrr_at_3 value: 62.935465448315384 - type: mrr_at_5 value: 64.06339234723022 - type: nauc_map_at_1000_diff1 value: 73.20701050428072 - type: nauc_map_at_1000_max value: 67.32797480614404 - type: nauc_map_at_1000_std value: -6.211540626528362 - type: nauc_map_at_100_diff1 value: 73.19497683923063 - type: nauc_map_at_100_max value: 67.33392646467817 - type: nauc_map_at_100_std value: -6.196671563900051 - type: nauc_map_at_10_diff1 value: 73.16010547612956 - type: nauc_map_at_10_max value: 67.37793741307372 - type: nauc_map_at_10_std value: -6.3443240322521675 - type: nauc_map_at_1_diff1 value: 76.63696578575964 - type: nauc_map_at_1_max value: 65.08189618178105 - type: nauc_map_at_1_std value: -8.594195451782733 - type: nauc_map_at_20_diff1 value: 73.15233479381568 - type: nauc_map_at_20_max value: 67.3679607256072 - type: nauc_map_at_20_std value: -6.175928265286352 - type: nauc_map_at_3_diff1 value: 73.14853380980746 - type: nauc_map_at_3_max value: 67.10354198073468 - type: nauc_map_at_3_std value: -7.409679815529866 - type: nauc_map_at_5_diff1 value: 73.13425961877715 - type: nauc_map_at_5_max value: 67.22452899371224 - type: nauc_map_at_5_std value: -6.895257774506354 - type: nauc_mrr_at_1000_diff1 value: 73.20701050428072 - type: nauc_mrr_at_1000_max value: 67.32797480614404 - type: nauc_mrr_at_1000_std value: -6.211540626528362 - type: nauc_mrr_at_100_diff1 value: 73.19497683923063 - type: nauc_mrr_at_100_max value: 67.33392646467817 - type: nauc_mrr_at_100_std value: -6.196671563900051 - type: nauc_mrr_at_10_diff1 value: 73.16010547612956 - type: nauc_mrr_at_10_max value: 67.37793741307372 - type: nauc_mrr_at_10_std value: -6.3443240322521675 - type: nauc_mrr_at_1_diff1 value: 76.63696578575964 - type: nauc_mrr_at_1_max value: 65.08189618178105 - type: nauc_mrr_at_1_std value: -8.594195451782733 - type: nauc_mrr_at_20_diff1 value: 73.15233479381568 - type: nauc_mrr_at_20_max value: 67.3679607256072 - type: nauc_mrr_at_20_std value: -6.175928265286352 - type: nauc_mrr_at_3_diff1 value: 73.14853380980746 - type: nauc_mrr_at_3_max value: 67.10354198073468 - type: nauc_mrr_at_3_std value: -7.409679815529866 - type: nauc_mrr_at_5_diff1 value: 73.13425961877715 - type: nauc_mrr_at_5_max value: 67.22452899371224 - type: nauc_mrr_at_5_std value: -6.895257774506354 - type: nauc_ndcg_at_1000_diff1 value: 72.44364625096874 - type: nauc_ndcg_at_1000_max value: 67.93635761141552 - type: nauc_ndcg_at_1000_std value: -4.616429464350954 - type: nauc_ndcg_at_100_diff1 value: 72.11352383758482 - type: nauc_ndcg_at_100_max value: 68.1627312575955 - type: nauc_ndcg_at_100_std value: -3.894213672131282 - type: nauc_ndcg_at_10_diff1 value: 71.8526850770812 - type: nauc_ndcg_at_10_max value: 68.41366561888562 - type: nauc_ndcg_at_10_std value: -4.472146861145989 - type: nauc_ndcg_at_1_diff1 value: 76.63696578575964 - type: nauc_ndcg_at_1_max value: 65.08189618178105 - type: nauc_ndcg_at_1_std value: -8.594195451782733 - type: nauc_ndcg_at_20_diff1 value: 
71.76464418138866 - type: nauc_ndcg_at_20_max value: 68.41174963313698 - type: nauc_ndcg_at_20_std value: -3.7449762037540157 - type: nauc_ndcg_at_3_diff1 value: 71.93808990683131 - type: nauc_ndcg_at_3_max value: 67.7010029507334 - type: nauc_ndcg_at_3_std value: -6.971858419379321 - type: nauc_ndcg_at_5_diff1 value: 71.8505224811326 - type: nauc_ndcg_at_5_max value: 67.97139549500251 - type: nauc_ndcg_at_5_std value: -5.958491308070017 - type: nauc_precision_at_1000_diff1 value: 62.20956180320043 - type: nauc_precision_at_1000_max value: 82.53412670611299 - type: nauc_precision_at_1000_std value: 55.57278124999575 - type: nauc_precision_at_100_diff1 value: 62.03792857023201 - type: nauc_precision_at_100_max value: 76.77130713424538 - type: nauc_precision_at_100_std value: 26.674102719959564 - type: nauc_precision_at_10_diff1 value: 65.89798055049931 - type: nauc_precision_at_10_max value: 73.41908620140674 - type: nauc_precision_at_10_std value: 5.21818573283179 - type: nauc_precision_at_1_diff1 value: 76.63696578575964 - type: nauc_precision_at_1_max value: 65.08189618178105 - type: nauc_precision_at_1_std value: -8.594195451782733 - type: nauc_precision_at_20_diff1 value: 63.734308542647355 - type: nauc_precision_at_20_max value: 74.69578825096144 - type: nauc_precision_at_20_std value: 12.627842502659162 - type: nauc_precision_at_3_diff1 value: 67.91189666671904 - type: nauc_precision_at_3_max value: 69.64986036783209 - type: nauc_precision_at_3_std value: -5.505669087429055 - type: nauc_precision_at_5_diff1 value: 67.01880006360248 - type: nauc_precision_at_5_max value: 70.78916423358686 - type: nauc_precision_at_5_std value: -2.2273742736401045 - type: nauc_recall_at_1000_diff1 value: 62.20956180319936 - type: nauc_recall_at_1000_max value: 82.53412670611287 - type: nauc_recall_at_1000_std value: 55.57278124999549 - type: nauc_recall_at_100_diff1 value: 62.03792857023208 - type: nauc_recall_at_100_max value: 76.77130713424577 - type: nauc_recall_at_100_std value: 26.67410271995973 - type: nauc_recall_at_10_diff1 value: 65.8979805504994 - type: nauc_recall_at_10_max value: 73.41908620140678 - type: nauc_recall_at_10_std value: 5.2181857328318655 - type: nauc_recall_at_1_diff1 value: 76.63696578575964 - type: nauc_recall_at_1_max value: 65.08189618178105 - type: nauc_recall_at_1_std value: -8.594195451782733 - type: nauc_recall_at_20_diff1 value: 63.734308542647334 - type: nauc_recall_at_20_max value: 74.69578825096123 - type: nauc_recall_at_20_std value: 12.627842502658982 - type: nauc_recall_at_3_diff1 value: 67.91189666671897 - type: nauc_recall_at_3_max value: 69.64986036783203 - type: nauc_recall_at_3_std value: -5.505669087428989 - type: nauc_recall_at_5_diff1 value: 67.01880006360243 - type: nauc_recall_at_5_max value: 70.78916423358686 - type: nauc_recall_at_5_std value: -2.227374273640135 - type: ndcg_at_1 value: 56.538999999999994 - type: ndcg_at_10 value: 68.765 - type: ndcg_at_100 value: 71.314 - type: ndcg_at_1000 value: 72.038 - type: ndcg_at_20 value: 69.828 - type: ndcg_at_3 value: 64.937 - type: ndcg_at_5 value: 66.956 - type: precision_at_1 value: 56.538999999999994 - type: precision_at_10 value: 8.113 - type: precision_at_100 value: 0.932 - type: precision_at_1000 value: 0.099 - type: precision_at_20 value: 4.265 - type: precision_at_3 value: 23.567 - type: precision_at_5 value: 15.115 - type: recall_at_1 value: 56.538999999999994 - type: recall_at_10 value: 81.135 - type: recall_at_100 value: 93.223 - type: recall_at_1000 value: 98.896 - type: recall_at_20 value: 
85.304 - type: recall_at_3 value: 70.702 - type: recall_at_5 value: 75.576 - task: type: Retrieval dataset: name: MTEB MLQARetrieval (eng-deu) type: facebook/mlqa config: eng-deu split: test revision: 397ed406c1a7902140303e7faf60fff35b58d285 metrics: - type: main_score value: 69.298 - type: map_at_1 value: 58.553 - type: map_at_10 value: 65.769 - type: map_at_100 value: 66.298 - type: map_at_1000 value: 66.328 - type: map_at_20 value: 66.101 - type: map_at_3 value: 64.048 - type: map_at_5 value: 65.09 - type: mrr_at_1 value: 58.564148016840235 - type: mrr_at_10 value: 65.7685997066675 - type: mrr_at_100 value: 66.29874034432214 - type: mrr_at_1000 value: 66.32844979939088 - type: mrr_at_20 value: 66.10120513957821 - type: mrr_at_3 value: 64.04830489696437 - type: mrr_at_5 value: 65.08974074894746 - type: nauc_map_at_1000_diff1 value: 76.8409650183994 - type: nauc_map_at_1000_max value: 71.86367015521367 - type: nauc_map_at_1000_std value: -14.464881539957256 - type: nauc_map_at_100_diff1 value: 76.82536521842064 - type: nauc_map_at_100_max value: 71.86811127965429 - type: nauc_map_at_100_std value: -14.441105539722244 - type: nauc_map_at_10_diff1 value: 76.75522453447859 - type: nauc_map_at_10_max value: 71.87677500176706 - type: nauc_map_at_10_std value: -14.741331625103559 - type: nauc_map_at_1_diff1 value: 79.64060747740989 - type: nauc_map_at_1_max value: 69.84278563569617 - type: nauc_map_at_1_std value: -15.936904929655832 - type: nauc_map_at_20_diff1 value: 76.78894776059715 - type: nauc_map_at_20_max value: 71.89637938044827 - type: nauc_map_at_20_std value: -14.500564106990769 - type: nauc_map_at_3_diff1 value: 77.20562577450342 - type: nauc_map_at_3_max value: 71.80578229361525 - type: nauc_map_at_3_std value: -15.344134588512201 - type: nauc_map_at_5_diff1 value: 77.00480147367867 - type: nauc_map_at_5_max value: 71.98335924076163 - type: nauc_map_at_5_std value: -15.16537653041026 - type: nauc_mrr_at_1000_diff1 value: 76.84165367691193 - type: nauc_mrr_at_1000_max value: 71.8642679499795 - type: nauc_mrr_at_1000_std value: -14.461717954593158 - type: nauc_mrr_at_100_diff1 value: 76.8263363557998 - type: nauc_mrr_at_100_max value: 71.86874522368626 - type: nauc_mrr_at_100_std value: -14.437105168707426 - type: nauc_mrr_at_10_diff1 value: 76.75522453447859 - type: nauc_mrr_at_10_max value: 71.87677500176706 - type: nauc_mrr_at_10_std value: -14.741331625103559 - type: nauc_mrr_at_1_diff1 value: 79.65642669321981 - type: nauc_mrr_at_1_max value: 69.89135358784799 - type: nauc_mrr_at_1_std value: -15.919357002229589 - type: nauc_mrr_at_20_diff1 value: 76.78883171270601 - type: nauc_mrr_at_20_max value: 71.89806887245291 - type: nauc_mrr_at_20_std value: -14.497139746907905 - type: nauc_mrr_at_3_diff1 value: 77.20562577450342 - type: nauc_mrr_at_3_max value: 71.80578229361525 - type: nauc_mrr_at_3_std value: -15.344134588512201 - type: nauc_mrr_at_5_diff1 value: 77.00480147367867 - type: nauc_mrr_at_5_max value: 71.98335924076163 - type: nauc_mrr_at_5_std value: -15.16537653041026 - type: nauc_ndcg_at_1000_diff1 value: 76.07802417817047 - type: nauc_ndcg_at_1000_max value: 72.31792804426776 - type: nauc_ndcg_at_1000_std value: -13.049160715132244 - type: nauc_ndcg_at_100_diff1 value: 75.63343849116544 - type: nauc_ndcg_at_100_max value: 72.48362076101817 - type: nauc_ndcg_at_100_std value: -12.089600993516777 - type: nauc_ndcg_at_10_diff1 value: 75.23387929929208 - type: nauc_ndcg_at_10_max value: 72.51436288271807 - type: nauc_ndcg_at_10_std value: -13.624132103038104 - type: 
nauc_ndcg_at_1_diff1 value: 79.65642669321981 - type: nauc_ndcg_at_1_max value: 69.89135358784799 - type: nauc_ndcg_at_1_std value: -15.919357002229589 - type: nauc_ndcg_at_20_diff1 value: 75.32926047656296 - type: nauc_ndcg_at_20_max value: 72.61254165918145 - type: nauc_ndcg_at_20_std value: -12.683157599238701 - type: nauc_ndcg_at_3_diff1 value: 76.3089337665469 - type: nauc_ndcg_at_3_max value: 72.40014674426054 - type: nauc_ndcg_at_3_std value: -15.08624226353458 - type: nauc_ndcg_at_5_diff1 value: 75.88857331641834 - type: nauc_ndcg_at_5_max value: 72.7719386827224 - type: nauc_ndcg_at_5_std value: -14.70546521089236 - type: nauc_precision_at_1000_diff1 value: 59.66563879069911 - type: nauc_precision_at_1000_max value: 74.57123562956772 - type: nauc_precision_at_1000_std value: 58.61396866718965 - type: nauc_precision_at_100_diff1 value: 62.8695896550042 - type: nauc_precision_at_100_max value: 77.81408796785 - type: nauc_precision_at_100_std value: 23.819735672317826 - type: nauc_precision_at_10_diff1 value: 68.08051625224569 - type: nauc_precision_at_10_max value: 75.14432336036869 - type: nauc_precision_at_10_std value: -7.97602345252735 - type: nauc_precision_at_1_diff1 value: 79.65642669321981 - type: nauc_precision_at_1_max value: 69.89135358784799 - type: nauc_precision_at_1_std value: -15.919357002229589 - type: nauc_precision_at_20_diff1 value: 66.7168005185165 - type: nauc_precision_at_20_max value: 76.58522761697147 - type: nauc_precision_at_20_std value: -0.17923428317323292 - type: nauc_precision_at_3_diff1 value: 73.23394851561207 - type: nauc_precision_at_3_max value: 74.32517846819215 - type: nauc_precision_at_3_std value: -14.142301336188348 - type: nauc_precision_at_5_diff1 value: 71.5666882547012 - type: nauc_precision_at_5_max value: 75.71098205440033 - type: nauc_precision_at_5_std value: -12.808362513638052 - type: nauc_recall_at_1000_diff1 value: 71.73736112325805 - type: nauc_recall_at_1000_max value: 86.70743436225898 - type: nauc_recall_at_1000_std value: 54.45802578371167 - type: nauc_recall_at_100_diff1 value: 64.07053861428128 - type: nauc_recall_at_100_max value: 78.8348308099261 - type: nauc_recall_at_100_std value: 22.72263677785103 - type: nauc_recall_at_10_diff1 value: 68.20272901407903 - type: nauc_recall_at_10_max value: 75.16315335381938 - type: nauc_recall_at_10_std value: -8.060716748913386 - type: nauc_recall_at_1_diff1 value: 79.64060747740989 - type: nauc_recall_at_1_max value: 69.84278563569617 - type: nauc_recall_at_1_std value: -15.936904929655832 - type: nauc_recall_at_20_diff1 value: 66.88206981973654 - type: nauc_recall_at_20_max value: 76.54824917595687 - type: nauc_recall_at_20_std value: -0.40294589316962287 - type: nauc_recall_at_3_diff1 value: 73.33076087258938 - type: nauc_recall_at_3_max value: 74.33763112508771 - type: nauc_recall_at_3_std value: -14.213355414905399 - type: nauc_recall_at_5_diff1 value: 71.67487623469464 - type: nauc_recall_at_5_max value: 75.72770292516316 - type: nauc_recall_at_5_std value: -12.887572274644818 - type: ndcg_at_1 value: 58.56400000000001 - type: ndcg_at_10 value: 69.298 - type: ndcg_at_100 value: 71.95899999999999 - type: ndcg_at_1000 value: 72.735 - type: ndcg_at_20 value: 70.50699999999999 - type: ndcg_at_3 value: 65.81700000000001 - type: ndcg_at_5 value: 67.681 - type: precision_at_1 value: 58.56400000000001 - type: precision_at_10 value: 8.039 - type: precision_at_100 value: 0.931 - type: precision_at_1000 value: 0.099 - type: precision_at_20 value: 4.259 - type: precision_at_3 value: 23.65 
- type: precision_at_5 value: 15.09 - type: recall_at_1 value: 58.553 - type: recall_at_10 value: 80.368 - type: recall_at_100 value: 93.013 - type: recall_at_1000 value: 99.092 - type: recall_at_20 value: 85.143 - type: recall_at_3 value: 70.928 - type: recall_at_5 value: 75.42699999999999 - task: type: Retrieval dataset: name: MTEB MLQARetrieval (eng-spa) type: facebook/mlqa config: eng-spa split: test revision: 397ed406c1a7902140303e7faf60fff35b58d285 metrics: - type: main_score value: 66.374 - type: map_at_1 value: 55.494 - type: map_at_10 value: 62.763999999999996 - type: map_at_100 value: 63.33 - type: map_at_1000 value: 63.36000000000001 - type: map_at_20 value: 63.104000000000006 - type: map_at_3 value: 61.065000000000005 - type: map_at_5 value: 62.053000000000004 - type: mrr_at_1 value: 55.49419158255571 - type: mrr_at_10 value: 62.765195140457095 - type: mrr_at_100 value: 63.33083349354529 - type: mrr_at_1000 value: 63.3611897014839 - type: mrr_at_20 value: 63.10543590095977 - type: mrr_at_3 value: 61.06455913159412 - type: mrr_at_5 value: 62.052942296705474 - type: nauc_map_at_1000_diff1 value: 75.04200018088618 - type: nauc_map_at_1000_max value: 70.49937782771909 - type: nauc_map_at_1000_std value: -5.257206317083184 - type: nauc_map_at_100_diff1 value: 75.02786834256312 - type: nauc_map_at_100_max value: 70.5016476500189 - type: nauc_map_at_100_std value: -5.228770832077681 - type: nauc_map_at_10_diff1 value: 74.9626552701647 - type: nauc_map_at_10_max value: 70.56253732243214 - type: nauc_map_at_10_std value: -5.359037281768563 - type: nauc_map_at_1_diff1 value: 78.46858307815857 - type: nauc_map_at_1_max value: 69.03908373759435 - type: nauc_map_at_1_std value: -7.479412070736642 - type: nauc_map_at_20_diff1 value: 74.98121458084796 - type: nauc_map_at_20_max value: 70.51885366822565 - type: nauc_map_at_20_std value: -5.286051287133815 - type: nauc_map_at_3_diff1 value: 75.36078454383373 - type: nauc_map_at_3_max value: 70.34997144546014 - type: nauc_map_at_3_std value: -6.663517224039184 - type: nauc_map_at_5_diff1 value: 75.0274512828238 - type: nauc_map_at_5_max value: 70.45292551591874 - type: nauc_map_at_5_std value: -6.029224488640147 - type: nauc_mrr_at_1000_diff1 value: 75.04018768469983 - type: nauc_mrr_at_1000_max value: 70.49855509132635 - type: nauc_mrr_at_1000_std value: -5.258929961409948 - type: nauc_mrr_at_100_diff1 value: 75.02605732810112 - type: nauc_mrr_at_100_max value: 70.50082584929103 - type: nauc_mrr_at_100_std value: -5.2304917988542154 - type: nauc_mrr_at_10_diff1 value: 74.96079080525713 - type: nauc_mrr_at_10_max value: 70.56167294920391 - type: nauc_mrr_at_10_std value: -5.360650630655072 - type: nauc_mrr_at_1_diff1 value: 78.46858307815857 - type: nauc_mrr_at_1_max value: 69.03908373759435 - type: nauc_mrr_at_1_std value: -7.479412070736642 - type: nauc_mrr_at_20_diff1 value: 74.97939804960517 - type: nauc_mrr_at_20_max value: 70.51804078965411 - type: nauc_mrr_at_20_std value: -5.287681954889177 - type: nauc_mrr_at_3_diff1 value: 75.36078454383373 - type: nauc_mrr_at_3_max value: 70.34997144546014 - type: nauc_mrr_at_3_std value: -6.663517224039184 - type: nauc_mrr_at_5_diff1 value: 75.0274512828238 - type: nauc_mrr_at_5_max value: 70.45292551591874 - type: nauc_mrr_at_5_std value: -6.029224488640147 - type: nauc_ndcg_at_1000_diff1 value: 74.22106834748942 - type: nauc_ndcg_at_1000_max value: 70.93625922934912 - type: nauc_ndcg_at_1000_std value: -3.4878399005946017 - type: nauc_ndcg_at_100_diff1 value: 73.74068883646733 - type: 
nauc_ndcg_at_100_max value: 71.02357018347472 - type: nauc_ndcg_at_100_std value: -2.462293184201324 - type: nauc_ndcg_at_10_diff1 value: 73.40967965536565 - type: nauc_ndcg_at_10_max value: 71.29379828672067 - type: nauc_ndcg_at_10_std value: -3.295547756383108 - type: nauc_ndcg_at_1_diff1 value: 78.46858307815857 - type: nauc_ndcg_at_1_max value: 69.03908373759435 - type: nauc_ndcg_at_1_std value: -7.479412070736642 - type: nauc_ndcg_at_20_diff1 value: 73.45790057693699 - type: nauc_ndcg_at_20_max value: 71.16598432419126 - type: nauc_ndcg_at_20_std value: -2.962877157646097 - type: nauc_ndcg_at_3_diff1 value: 74.30696173964847 - type: nauc_ndcg_at_3_max value: 70.79878978459556 - type: nauc_ndcg_at_3_std value: -6.297286578628299 - type: nauc_ndcg_at_5_diff1 value: 73.65858211199816 - type: nauc_ndcg_at_5_max value: 71.01122417463776 - type: nauc_ndcg_at_5_std value: -5.075990882646765 - type: nauc_precision_at_1000_diff1 value: 68.71065091972568 - type: nauc_precision_at_1000_max value: 81.38173585624777 - type: nauc_precision_at_1000_std value: 58.035497889797895 - type: nauc_precision_at_100_diff1 value: 61.93634256957017 - type: nauc_precision_at_100_max value: 74.84191770203093 - type: nauc_precision_at_100_std value: 31.3325983123831 - type: nauc_precision_at_10_diff1 value: 66.68247010944937 - type: nauc_precision_at_10_max value: 74.48773524654571 - type: nauc_precision_at_10_std value: 6.560421880785153 - type: nauc_precision_at_1_diff1 value: 78.46858307815857 - type: nauc_precision_at_1_max value: 69.03908373759435 - type: nauc_precision_at_1_std value: -7.479412070736642 - type: nauc_precision_at_20_diff1 value: 65.51592872758067 - type: nauc_precision_at_20_max value: 74.50684066823096 - type: nauc_precision_at_20_std value: 10.830479877698208 - type: nauc_precision_at_3_diff1 value: 70.89587884861588 - type: nauc_precision_at_3_max value: 72.25310558370424 - type: nauc_precision_at_3_std value: -5.0796100900749765 - type: nauc_precision_at_5_diff1 value: 68.71885719845497 - type: nauc_precision_at_5_max value: 73.02601751485672 - type: nauc_precision_at_5_std value: -1.4382681421626857 - type: nauc_recall_at_1000_diff1 value: 71.95510299834734 - type: nauc_recall_at_1000_max value: 84.03647166092985 - type: nauc_recall_at_1000_std value: 56.87490604776847 - type: nauc_recall_at_100_diff1 value: 62.446624924715955 - type: nauc_recall_at_100_max value: 75.25666892464507 - type: nauc_recall_at_100_std value: 31.068789794554686 - type: nauc_recall_at_10_diff1 value: 66.70676336328988 - type: nauc_recall_at_10_max value: 74.4963699656397 - type: nauc_recall_at_10_std value: 6.57498399706916 - type: nauc_recall_at_1_diff1 value: 78.46858307815857 - type: nauc_recall_at_1_max value: 69.03908373759435 - type: nauc_recall_at_1_std value: -7.479412070736642 - type: nauc_recall_at_20_diff1 value: 65.54082767974772 - type: nauc_recall_at_20_max value: 74.5111529838772 - type: nauc_recall_at_20_std value: 10.84574829707354 - type: nauc_recall_at_3_diff1 value: 70.89587884861584 - type: nauc_recall_at_3_max value: 72.25310558370421 - type: nauc_recall_at_3_std value: -5.07961009007491 - type: nauc_recall_at_5_diff1 value: 68.71885719845501 - type: nauc_recall_at_5_max value: 73.02601751485666 - type: nauc_recall_at_5_std value: -1.4382681421626995 - type: ndcg_at_1 value: 55.494 - type: ndcg_at_10 value: 66.374 - type: ndcg_at_100 value: 69.254 - type: ndcg_at_1000 value: 70.136 - type: ndcg_at_20 value: 67.599 - type: ndcg_at_3 value: 62.863 - type: ndcg_at_5 value: 64.644 - type: 
precision_at_1 value: 55.494 - type: precision_at_10 value: 7.776 - type: precision_at_100 value: 0.9159999999999999 - type: precision_at_1000 value: 0.099 - type: precision_at_20 value: 4.1290000000000004 - type: precision_at_3 value: 22.688 - type: precision_at_5 value: 14.477 - type: recall_at_1 value: 55.494 - type: recall_at_10 value: 77.747 - type: recall_at_100 value: 91.535 - type: recall_at_1000 value: 98.619 - type: recall_at_20 value: 82.565 - type: recall_at_3 value: 68.063 - type: recall_at_5 value: 72.386 - task: type: Retrieval dataset: name: MTEB MLQARetrieval (eng-eng) type: facebook/mlqa config: eng-eng split: test revision: 397ed406c1a7902140303e7faf60fff35b58d285 metrics: - type: main_score value: 64.723 - type: map_at_1 value: 54.308 - type: map_at_10 value: 61.26200000000001 - type: map_at_100 value: 61.82299999999999 - type: map_at_1000 value: 61.856 - type: map_at_20 value: 61.575 - type: map_at_3 value: 59.565 - type: map_at_5 value: 60.561 - type: mrr_at_1 value: 54.31704368848212 - type: mrr_at_10 value: 61.26520216098834 - type: mrr_at_100 value: 61.82588321127103 - type: mrr_at_1000 value: 61.859333030574334 - type: mrr_at_20 value: 61.57780339921337 - type: mrr_at_3 value: 59.569446842801646 - type: mrr_at_5 value: 60.56323029989004 - type: nauc_map_at_1000_diff1 value: 74.21413722468635 - type: nauc_map_at_1000_max value: 70.41741227882316 - type: nauc_map_at_1000_std value: -2.5438707209848506 - type: nauc_map_at_100_diff1 value: 74.19812315947975 - type: nauc_map_at_100_max value: 70.41589146728445 - type: nauc_map_at_100_std value: -2.5336117059429553 - type: nauc_map_at_10_diff1 value: 74.21810561152937 - type: nauc_map_at_10_max value: 70.48816115200171 - type: nauc_map_at_10_std value: -2.7443834681406734 - type: nauc_map_at_1_diff1 value: 77.69378738778958 - type: nauc_map_at_1_max value: 68.64652310701173 - type: nauc_map_at_1_std value: -4.667071946448379 - type: nauc_map_at_20_diff1 value: 74.16105697562438 - type: nauc_map_at_20_max value: 70.42491994631179 - type: nauc_map_at_20_std value: -2.6070416022440472 - type: nauc_map_at_3_diff1 value: 74.60449392878863 - type: nauc_map_at_3_max value: 70.39888609914269 - type: nauc_map_at_3_std value: -3.5401151125723986 - type: nauc_map_at_5_diff1 value: 74.2423420992663 - type: nauc_map_at_5_max value: 70.36574501826757 - type: nauc_map_at_5_std value: -3.2707393116898964 - type: nauc_mrr_at_1000_diff1 value: 74.21029843731323 - type: nauc_mrr_at_1000_max value: 70.43020492688913 - type: nauc_mrr_at_1000_std value: -2.526895582202081 - type: nauc_mrr_at_100_diff1 value: 74.19440960479243 - type: nauc_mrr_at_100_max value: 70.4288998824232 - type: nauc_mrr_at_100_std value: -2.5160929945118107 - type: nauc_mrr_at_10_diff1 value: 74.2141357266166 - type: nauc_mrr_at_10_max value: 70.5005683347807 - type: nauc_mrr_at_10_std value: -2.727154557882168 - type: nauc_mrr_at_1_diff1 value: 77.69891248239793 - type: nauc_mrr_at_1_max value: 68.68255231164922 - type: nauc_mrr_at_1_std value: -4.630226727154317 - type: nauc_mrr_at_20_diff1 value: 74.15705434409723 - type: nauc_mrr_at_20_max value: 70.43741835972747 - type: nauc_mrr_at_20_std value: -2.5896756472464495 - type: nauc_mrr_at_3_diff1 value: 74.5981844349412 - type: nauc_mrr_at_3_max value: 70.41834937080564 - type: nauc_mrr_at_3_std value: -3.5161656408031163 - type: nauc_mrr_at_5_diff1 value: 74.23847535424844 - type: nauc_mrr_at_5_max value: 70.37763810013656 - type: nauc_mrr_at_5_std value: -3.2560955164581733 - type: nauc_ndcg_at_1000_diff1 value: 
73.20994496725493 - type: nauc_ndcg_at_1000_max value: 70.8903016277125 - type: nauc_ndcg_at_1000_std value: -0.625772298462309 - type: nauc_ndcg_at_100_diff1 value: 72.6847141682645 - type: nauc_ndcg_at_100_max value: 70.86564422034162 - type: nauc_ndcg_at_100_std value: -0.07195786766326141 - type: nauc_ndcg_at_10_diff1 value: 72.78806493754281 - type: nauc_ndcg_at_10_max value: 71.21957067926769 - type: nauc_ndcg_at_10_std value: -1.2760418313382227 - type: nauc_ndcg_at_1_diff1 value: 77.69891248239793 - type: nauc_ndcg_at_1_max value: 68.68255231164922 - type: nauc_ndcg_at_1_std value: -4.630226727154317 - type: nauc_ndcg_at_20_diff1 value: 72.52082440882546 - type: nauc_ndcg_at_20_max value: 70.98185004796734 - type: nauc_ndcg_at_20_std value: -0.6908280874815464 - type: nauc_ndcg_at_3_diff1 value: 73.59870660843939 - type: nauc_ndcg_at_3_max value: 70.94391957288654 - type: nauc_ndcg_at_3_std value: -3.147723179140428 - type: nauc_ndcg_at_5_diff1 value: 72.90122868193457 - type: nauc_ndcg_at_5_max value: 70.89376368965165 - type: nauc_ndcg_at_5_std value: -2.6451807385626744 - type: nauc_precision_at_1000_diff1 value: 58.14737201864067 - type: nauc_precision_at_1000_max value: 78.79011251144826 - type: nauc_precision_at_1000_std value: 59.98985420476577 - type: nauc_precision_at_100_diff1 value: 59.21069121644552 - type: nauc_precision_at_100_max value: 73.00557835912306 - type: nauc_precision_at_100_std value: 26.85027406282173 - type: nauc_precision_at_10_diff1 value: 66.8760831023675 - type: nauc_precision_at_10_max value: 74.21167950452596 - type: nauc_precision_at_10_std value: 5.453652499335947 - type: nauc_precision_at_1_diff1 value: 77.69891248239793 - type: nauc_precision_at_1_max value: 68.68255231164922 - type: nauc_precision_at_1_std value: -4.630226727154317 - type: nauc_precision_at_20_diff1 value: 64.3118559132602 - type: nauc_precision_at_20_max value: 73.33078184673825 - type: nauc_precision_at_20_std value: 9.993299523049402 - type: nauc_precision_at_3_diff1 value: 70.38667185155593 - type: nauc_precision_at_3_max value: 72.66495006030951 - type: nauc_precision_at_3_std value: -1.8532839591326276 - type: nauc_precision_at_5_diff1 value: 68.12161337583686 - type: nauc_precision_at_5_max value: 72.65644960375046 - type: nauc_precision_at_5_std value: -0.33317164167012875 - type: nauc_recall_at_1000_diff1 value: 61.63204394739985 - type: nauc_recall_at_1000_max value: 81.77241537319897 - type: nauc_recall_at_1000_std value: 58.44841544062308 - type: nauc_recall_at_100_diff1 value: 59.72072697224705 - type: nauc_recall_at_100_max value: 73.28519507061553 - type: nauc_recall_at_100_std value: 26.27318390763456 - type: nauc_recall_at_10_diff1 value: 66.9757135465418 - type: nauc_recall_at_10_max value: 74.21919493374149 - type: nauc_recall_at_10_std value: 5.323369605377166 - type: nauc_recall_at_1_diff1 value: 77.69378738778958 - type: nauc_recall_at_1_max value: 68.64652310701173 - type: nauc_recall_at_1_std value: -4.667071946448379 - type: nauc_recall_at_20_diff1 value: 64.42290081731899 - type: nauc_recall_at_20_max value: 73.3358289439033 - type: nauc_recall_at_20_std value: 9.846598361586073 - type: nauc_recall_at_3_diff1 value: 70.41211290964785 - type: nauc_recall_at_3_max value: 72.64451776775402 - type: nauc_recall_at_3_std value: -1.916280959835826 - type: nauc_recall_at_5_diff1 value: 68.20695272727916 - type: nauc_recall_at_5_max value: 72.66404224006101 - type: nauc_recall_at_5_std value: -0.431125323007886 - type: ndcg_at_1 value: 54.31700000000001 - type: 
ndcg_at_10 value: 64.723 - type: ndcg_at_100 value: 67.648 - type: ndcg_at_1000 value: 68.619 - type: ndcg_at_20 value: 65.85499999999999 - type: ndcg_at_3 value: 61.244 - type: ndcg_at_5 value: 63.038000000000004 - type: precision_at_1 value: 54.31700000000001 - type: precision_at_10 value: 7.564 - type: precision_at_100 value: 0.898 - type: precision_at_1000 value: 0.098 - type: precision_at_20 value: 4.005 - type: precision_at_3 value: 22.034000000000002 - type: precision_at_5 value: 14.093 - type: recall_at_1 value: 54.308 - type: recall_at_10 value: 75.622 - type: recall_at_100 value: 89.744 - type: recall_at_1000 value: 97.539 - type: recall_at_20 value: 80.085 - type: recall_at_3 value: 66.09 - type: recall_at_5 value: 70.446 - task: type: Clustering dataset: name: MTEB MLSUMClusteringP2P (de) type: reciTAL/mlsum config: de split: test revision: b5d54f8f3b61ae17845046286940f03c6bc79bc7 metrics: - type: main_score value: 41.267647761702854 - type: v_measure value: 41.267647761702854 - type: v_measure_std value: 10.93390895077248 - type: main_score value: 40.07927325071353 - type: v_measure value: 40.07927325071353 - type: v_measure_std value: 9.296680835266145 - task: type: Clustering dataset: name: MTEB MLSUMClusteringP2P (fr) type: reciTAL/mlsum config: fr split: test revision: b5d54f8f3b61ae17845046286940f03c6bc79bc7 metrics: - type: main_score value: 44.68714862333979 - type: v_measure value: 44.68714862333979 - type: v_measure_std value: 1.811036989797814 - type: main_score value: 44.88484854069901 - type: v_measure value: 44.88484854069901 - type: v_measure_std value: 2.3704247819781843 - task: type: Clustering dataset: name: MTEB MLSUMClusteringP2P (ru) type: reciTAL/mlsum config: ru split: test revision: b5d54f8f3b61ae17845046286940f03c6bc79bc7 metrics: - type: main_score value: 41.92518785753813 - type: v_measure value: 41.92518785753813 - type: v_measure_std value: 5.9356661900220775 - type: main_score value: 43.97657450929179 - type: v_measure value: 43.97657450929179 - type: v_measure_std value: 6.087547931333613 - task: type: Clustering dataset: name: MTEB MLSUMClusteringP2P (es) type: reciTAL/mlsum config: es split: test revision: b5d54f8f3b61ae17845046286940f03c6bc79bc7 metrics: - type: main_score value: 48.69875719812033 - type: v_measure value: 48.69875719812033 - type: v_measure_std value: 1.204253881950113 - type: main_score value: 48.41108671948728 - type: v_measure value: 48.41108671948728 - type: v_measure_std value: 1.3848320630151243 - task: type: Reranking dataset: name: MTEB MMarcoReranking (default) type: C-MTEB/Mmarco-reranking config: default split: dev revision: 8e0c766dbe9e16e1d221116a3f36795fbade07f6 metrics: - type: map value: 21.050447576170395 - type: mrr value: 20.201984126984126 - type: main_score value: 21.050447576170395 - task: type: Retrieval dataset: name: MTEB MMarcoRetrieval (default) type: C-MTEB/MMarcoRetrieval config: default split: dev revision: 539bbde593d947e2a124ba72651aafc09eb33fc2 metrics: - type: main_score value: 79.687 - type: map_at_1 value: 66.872 - type: map_at_10 value: 75.949 - type: map_at_100 value: 76.25 - type: map_at_1000 value: 76.259 - type: map_at_20 value: 76.145 - type: map_at_3 value: 74.01299999999999 - type: map_at_5 value: 75.232 - type: mrr_at_1 value: 69.18338108882521 - type: mrr_at_10 value: 76.5424227952881 - type: mrr_at_100 value: 76.8019342792628 - type: mrr_at_1000 value: 76.81002278342808 - type: mrr_at_20 value: 76.7115234815896 - type: mrr_at_3 value: 74.83046800382044 - type: mrr_at_5 value: 
75.88490926456515 - type: nauc_map_at_1000_diff1 value: 78.06933310424179 - type: nauc_map_at_1000_max value: 49.392948209665896 - type: nauc_map_at_1000_std value: -15.126109322591166 - type: nauc_map_at_100_diff1 value: 78.06612779298378 - type: nauc_map_at_100_max value: 49.40761618630397 - type: nauc_map_at_100_std value: -15.099282408159349 - type: nauc_map_at_10_diff1 value: 77.94565685470538 - type: nauc_map_at_10_max value: 49.50559610363201 - type: nauc_map_at_10_std value: -15.182130695916355 - type: nauc_map_at_1_diff1 value: 79.84814509858211 - type: nauc_map_at_1_max value: 40.78978466656547 - type: nauc_map_at_1_std value: -19.96189264026715 - type: nauc_map_at_20_diff1 value: 78.03597839981245 - type: nauc_map_at_20_max value: 49.49477427223376 - type: nauc_map_at_20_std value: -15.084990000838378 - type: nauc_map_at_3_diff1 value: 78.0637014655507 - type: nauc_map_at_3_max value: 48.63214001973341 - type: nauc_map_at_3_std value: -17.093950563306596 - type: nauc_map_at_5_diff1 value: 77.94068229240348 - type: nauc_map_at_5_max value: 49.38930719689204 - type: nauc_map_at_5_std value: -15.9919454201954 - type: nauc_mrr_at_1000_diff1 value: 78.34582398092816 - type: nauc_mrr_at_1000_max value: 49.623566992784156 - type: nauc_mrr_at_1000_std value: -14.381347765493265 - type: nauc_mrr_at_100_diff1 value: 78.3429966714221 - type: nauc_mrr_at_100_max value: 49.63684922240546 - type: nauc_mrr_at_100_std value: -14.354914066301236 - type: nauc_mrr_at_10_diff1 value: 78.2208070219624 - type: nauc_mrr_at_10_max value: 49.77720536573364 - type: nauc_mrr_at_10_std value: -14.316233764741812 - type: nauc_mrr_at_1_diff1 value: 80.22305496572142 - type: nauc_mrr_at_1_max value: 44.30231210192536 - type: nauc_mrr_at_1_std value: -18.942549914934492 - type: nauc_mrr_at_20_diff1 value: 78.31006724240147 - type: nauc_mrr_at_20_max value: 49.72338465276142 - type: nauc_mrr_at_20_std value: -14.30722621948953 - type: nauc_mrr_at_3_diff1 value: 78.39832634634523 - type: nauc_mrr_at_3_max value: 49.24985961036677 - type: nauc_mrr_at_3_std value: -15.966286866763191 - type: nauc_mrr_at_5_diff1 value: 78.2406507247798 - type: nauc_mrr_at_5_max value: 49.71276359754787 - type: nauc_mrr_at_5_std value: -14.979526226149698 - type: nauc_ndcg_at_1000_diff1 value: 77.74892471071016 - type: nauc_ndcg_at_1000_max value: 51.11543344053061 - type: nauc_ndcg_at_1000_std value: -12.208878737005096 - type: nauc_ndcg_at_100_diff1 value: 77.67462502211228 - type: nauc_ndcg_at_100_max value: 51.593977338939034 - type: nauc_ndcg_at_100_std value: -11.312126179513802 - type: nauc_ndcg_at_10_diff1 value: 77.0571291760012 - type: nauc_ndcg_at_10_max value: 52.35435572808972 - type: nauc_ndcg_at_10_std value: -11.33242546164059 - type: nauc_ndcg_at_1_diff1 value: 80.22305496572142 - type: nauc_ndcg_at_1_max value: 44.30231210192536 - type: nauc_ndcg_at_1_std value: -18.942549914934492 - type: nauc_ndcg_at_20_diff1 value: 77.4141216117471 - type: nauc_ndcg_at_20_max value: 52.340600871365375 - type: nauc_ndcg_at_20_std value: -10.989010161550912 - type: nauc_ndcg_at_3_diff1 value: 77.43971989259062 - type: nauc_ndcg_at_3_max value: 50.59251358320663 - type: nauc_ndcg_at_3_std value: -15.59337960636058 - type: nauc_ndcg_at_5_diff1 value: 77.12174287031847 - type: nauc_ndcg_at_5_max value: 51.97108510288907 - type: nauc_ndcg_at_5_std value: -13.474902612427167 - type: nauc_precision_at_1000_diff1 value: -19.36793534929367 - type: nauc_precision_at_1000_max value: 11.803383262344036 - type: nauc_precision_at_1000_std 
value: 24.304436015177046 - type: nauc_precision_at_100_diff1 value: -6.273790806909921 - type: nauc_precision_at_100_max value: 23.372606271300747 - type: nauc_precision_at_100_std value: 29.085768971612342 - type: nauc_precision_at_10_diff1 value: 21.67045907336595 - type: nauc_precision_at_10_max value: 41.68948432407223 - type: nauc_precision_at_10_std value: 17.837055074458092 - type: nauc_precision_at_1_diff1 value: 80.22305496572142 - type: nauc_precision_at_1_max value: 44.30231210192536 - type: nauc_precision_at_1_std value: -18.942549914934492 - type: nauc_precision_at_20_diff1 value: 12.577671896684803 - type: nauc_precision_at_20_max value: 37.44944702246691 - type: nauc_precision_at_20_std value: 23.635897665206087 - type: nauc_precision_at_3_diff1 value: 47.165335112814056 - type: nauc_precision_at_3_max value: 47.0458691263379 - type: nauc_precision_at_3_std value: -3.3181861146890217 - type: nauc_precision_at_5_diff1 value: 35.406205343514806 - type: nauc_precision_at_5_max value: 45.56549449285401 - type: nauc_precision_at_5_std value: 5.612378074562386 - type: nauc_recall_at_1000_diff1 value: 72.32762520815842 - type: nauc_recall_at_1000_max value: 85.64979256307343 - type: nauc_recall_at_1000_std value: 73.61925297037476 - type: nauc_recall_at_100_diff1 value: 72.31946328709962 - type: nauc_recall_at_100_max value: 83.76576070068353 - type: nauc_recall_at_100_std value: 57.39376538662535 - type: nauc_recall_at_10_diff1 value: 69.51307788072499 - type: nauc_recall_at_10_max value: 69.60124733654142 - type: nauc_recall_at_10_std value: 13.483540424716892 - type: nauc_recall_at_1_diff1 value: 79.84814509858211 - type: nauc_recall_at_1_max value: 40.78978466656547 - type: nauc_recall_at_1_std value: -19.96189264026715 - type: nauc_recall_at_20_diff1 value: 70.92168324710599 - type: nauc_recall_at_20_max value: 76.09106252420084 - type: nauc_recall_at_20_std value: 25.406842300761447 - type: nauc_recall_at_3_diff1 value: 74.1212680517145 - type: nauc_recall_at_3_max value: 56.24921832879403 - type: nauc_recall_at_3_std value: -11.55542913578436 - type: nauc_recall_at_5_diff1 value: 72.31262959872993 - type: nauc_recall_at_5_max value: 62.761214896697915 - type: nauc_recall_at_5_std value: -3.280167584070396 - type: ndcg_at_1 value: 69.18299999999999 - type: ndcg_at_10 value: 79.687 - type: ndcg_at_100 value: 81.062 - type: ndcg_at_1000 value: 81.312 - type: ndcg_at_20 value: 80.34599999999999 - type: ndcg_at_3 value: 75.98700000000001 - type: ndcg_at_5 value: 78.039 - type: precision_at_1 value: 69.18299999999999 - type: precision_at_10 value: 9.636 - type: precision_at_100 value: 1.0330000000000001 - type: precision_at_1000 value: 0.105 - type: precision_at_20 value: 4.958 - type: precision_at_3 value: 28.515 - type: precision_at_5 value: 18.201 - type: recall_at_1 value: 66.872 - type: recall_at_10 value: 90.688 - type: recall_at_100 value: 96.99 - type: recall_at_1000 value: 98.958 - type: recall_at_20 value: 93.21199999999999 - type: recall_at_3 value: 80.84599999999999 - type: recall_at_5 value: 85.732 - task: type: Retrieval dataset: name: MTEB MSMARCO (default) type: mteb/msmarco config: default split: dev revision: c5a29a104738b98a9e76336939199e264163d4a0 metrics: - type: map_at_1 value: 21.861 - type: map_at_10 value: 34.008 - type: map_at_100 value: 35.174 - type: map_at_1000 value: 35.224 - type: map_at_20 value: 34.705999999999996 - type: map_at_3 value: 30.209000000000003 - type: map_at_5 value: 32.351 - type: mrr_at_1 value: 22.493 - type: mrr_at_10 value: 
34.583999999999996 - type: mrr_at_100 value: 35.691 - type: mrr_at_1000 value: 35.736000000000004 - type: mrr_at_20 value: 35.257 - type: mrr_at_3 value: 30.85 - type: mrr_at_5 value: 32.962 - type: ndcg_at_1 value: 22.493 - type: ndcg_at_10 value: 40.815 - type: ndcg_at_100 value: 46.483999999999995 - type: ndcg_at_1000 value: 47.73 - type: ndcg_at_20 value: 43.302 - type: ndcg_at_3 value: 33.056000000000004 - type: ndcg_at_5 value: 36.879 - type: precision_at_1 value: 22.493 - type: precision_at_10 value: 6.465999999999999 - type: precision_at_100 value: 0.932 - type: precision_at_1000 value: 0.104 - type: precision_at_20 value: 3.752 - type: precision_at_3 value: 14.069 - type: precision_at_5 value: 10.384 - type: recall_at_1 value: 21.861 - type: recall_at_10 value: 61.781 - type: recall_at_100 value: 88.095 - type: recall_at_1000 value: 97.625 - type: recall_at_20 value: 71.44500000000001 - type: recall_at_3 value: 40.653 - type: recall_at_5 value: 49.841 - type: main_score value: 40.815 - task: type: Classification dataset: name: MTEB MTOPDomainClassification (en) type: mteb/mtop_domain config: en split: test revision: d80d48c1eb48d3562165c59d59d0034df9fff0bf metrics: - type: accuracy value: 97.4874601003192 - type: f1 value: 97.19067544931094 - type: f1_weighted value: 97.49331776181019 - type: main_score value: 97.4874601003192 - task: type: Classification dataset: name: MTEB MTOPDomainClassification (de) type: mteb/mtop_domain config: de split: test revision: d80d48c1eb48d3562165c59d59d0034df9fff0bf metrics: - type: accuracy value: 96.89489997182305 - type: f1 value: 96.51138586512977 - type: f1_weighted value: 96.89723065967186 - type: main_score value: 96.89489997182305 - task: type: Classification dataset: name: MTEB MTOPDomainClassification (es) type: mteb/mtop_domain config: es split: test revision: d80d48c1eb48d3562165c59d59d0034df9fff0bf metrics: - type: accuracy value: 97.17144763175452 - type: f1 value: 96.81785681878274 - type: f1_weighted value: 97.1778974586874 - type: main_score value: 97.17144763175452 - task: type: Classification dataset: name: MTEB MTOPDomainClassification (fr) type: mteb/mtop_domain config: fr split: test revision: d80d48c1eb48d3562165c59d59d0034df9fff0bf metrics: - type: accuracy value: 96.30128405887879 - type: f1 value: 95.94555923088487 - type: f1_weighted value: 96.30399416794926 - type: main_score value: 96.30128405887879 - task: type: Classification dataset: name: MTEB MTOPIntentClassification (en) type: mteb/mtop_intent config: en split: test revision: ae001d0e6b1228650b7bd1c2c65fb50ad11a8aba metrics: - type: accuracy value: 84.53488372093022 - type: f1 value: 61.77995074251401 - type: f1_weighted value: 86.8005170485101 - type: main_score value: 84.53488372093022 - task: type: Classification dataset: name: MTEB MTOPIntentClassification (de) type: mteb/mtop_intent config: de split: test revision: ae001d0e6b1228650b7bd1c2c65fb50ad11a8aba metrics: - type: accuracy value: 80.79459002535924 - type: f1 value: 56.08938302001448 - type: f1_weighted value: 83.66582131948252 - type: main_score value: 80.79459002535924 - task: type: Classification dataset: name: MTEB MTOPIntentClassification (es) type: mteb/mtop_intent config: es split: test revision: ae001d0e6b1228650b7bd1c2c65fb50ad11a8aba metrics: - type: accuracy value: 84.7765176784523 - type: f1 value: 61.39860057885528 - type: f1_weighted value: 86.94881745670745 - type: main_score value: 84.7765176784523 - task: type: Classification dataset: name: MTEB MTOPIntentClassification (fr) type: 
mteb/mtop_intent config: fr split: test revision: ae001d0e6b1228650b7bd1c2c65fb50ad11a8aba metrics: - type: accuracy value: 82.2079549013467 - type: f1 value: 59.90260478749016 - type: f1_weighted value: 84.36861708593257 - type: main_score value: 82.2079549013467 - task: type: Classification dataset: name: MTEB MasakhaNEWSClassification (eng) type: mteb/masakhanews config: eng split: test revision: 18193f187b92da67168c655c9973a165ed9593dd metrics: - type: accuracy value: 74.98945147679325 - type: f1 value: 74.3157483560261 - type: f1_weighted value: 75.01179008904884 - type: main_score value: 74.98945147679325 - task: type: Classification dataset: name: MTEB MasakhaNEWSClassification (fra) type: mteb/masakhanews config: fra split: test revision: 18193f187b92da67168c655c9973a165ed9593dd metrics: - type: accuracy value: 74.02843601895735 - type: f1 value: 70.40326349620732 - type: f1_weighted value: 74.6596277063484 - type: main_score value: 74.02843601895735 - task: type: Clustering dataset: name: MTEB MasakhaNEWSClusteringP2P (amh) type: masakhane/masakhanews config: amh split: test revision: 8ccc72e69e65f40c70e117d8b3c08306bb788b60 metrics: - type: main_score value: 69.45780291725053 - type: v_measure value: 69.45780291725053 - type: v_measure_std value: 36.54340055904091 - type: main_score value: 60.95132147787602 - type: v_measure value: 60.95132147787602 - type: v_measure_std value: 37.330148394033365 - task: type: Clustering dataset: name: MTEB MasakhaNEWSClusteringP2P (eng) type: masakhane/masakhanews config: eng split: test revision: 8ccc72e69e65f40c70e117d8b3c08306bb788b60 metrics: - type: main_score value: 64.88996119332239 - type: v_measure value: 64.88996119332239 - type: v_measure_std value: 30.017223408197268 - type: main_score value: 60.974810831426595 - type: v_measure value: 60.974810831426595 - type: v_measure_std value: 24.934675467507827 - task: type: Clustering dataset: name: MTEB MasakhaNEWSClusteringP2P (fra) type: masakhane/masakhanews config: fra split: test revision: 8ccc72e69e65f40c70e117d8b3c08306bb788b60 metrics: - type: main_score value: 42.362383958691666 - type: v_measure value: 42.362383958691666 - type: v_measure_std value: 37.61076788039063 - type: main_score value: 44.479206673553335 - type: v_measure value: 44.479206673553335 - type: v_measure_std value: 32.58254804499339 - task: type: Clustering dataset: name: MTEB MasakhaNEWSClusteringP2P (hau) type: masakhane/masakhanews config: hau split: test revision: 8ccc72e69e65f40c70e117d8b3c08306bb788b60 metrics: - type: main_score value: 43.29201252405562 - type: v_measure value: 43.29201252405562 - type: v_measure_std value: 34.31987945146255 - type: main_score value: 26.4742082741682 - type: v_measure value: 26.4742082741682 - type: v_measure_std value: 22.344929192323097 - task: type: Clustering dataset: name: MTEB MasakhaNEWSClusteringP2P (ibo) type: masakhane/masakhanews config: ibo split: test revision: 8ccc72e69e65f40c70e117d8b3c08306bb788b60 metrics: - type: main_score value: 33.59926542995238 - type: v_measure value: 33.59926542995238 - type: v_measure_std value: 35.70048601084112 - type: main_score value: 38.906129911741985 - type: v_measure value: 38.906129911741985 - type: v_measure_std value: 34.785601792668444 - task: type: Clustering dataset: name: MTEB MasakhaNEWSClusteringP2P (lin) type: masakhane/masakhanews config: lin split: test revision: 8ccc72e69e65f40c70e117d8b3c08306bb788b60 metrics: - type: main_score value: 67.58487601893106 - type: v_measure value: 67.58487601893106 - type: 
v_measure_std value: 35.16784970777931 - type: main_score value: 62.60982020876592 - type: v_measure value: 62.60982020876592 - type: v_measure_std value: 40.7368955715045 - task: type: Clustering dataset: name: MTEB MasakhaNEWSClusteringP2P (lug) type: masakhane/masakhanews config: lug split: test revision: 8ccc72e69e65f40c70e117d8b3c08306bb788b60 metrics: - type: main_score value: 50.01220872023533 - type: v_measure value: 50.01220872023533 - type: v_measure_std value: 41.87411574676182 - type: main_score value: 42.70424106365967 - type: v_measure value: 42.70424106365967 - type: v_measure_std value: 46.80946241135087 - task: type: Clustering dataset: name: MTEB MasakhaNEWSClusteringP2P (orm) type: masakhane/masakhanews config: orm split: test revision: 8ccc72e69e65f40c70e117d8b3c08306bb788b60 metrics: - type: main_score value: 29.007847502598317 - type: v_measure value: 29.007847502598317 - type: v_measure_std value: 38.374997395079994 - type: main_score value: 28.609942199922322 - type: v_measure value: 28.609942199922322 - type: v_measure_std value: 38.46685040191088 - task: type: Clustering dataset: name: MTEB MasakhaNEWSClusteringP2P (pcm) type: masakhane/masakhanews config: pcm split: test revision: 8ccc72e69e65f40c70e117d8b3c08306bb788b60 metrics: - type: main_score value: 79.13520228554611 - type: v_measure value: 79.13520228554611 - type: v_measure_std value: 18.501843848275183 - type: main_score value: 76.83901348810822 - type: v_measure value: 76.83901348810822 - type: v_measure_std value: 17.57617141269189 - task: type: Clustering dataset: name: MTEB MasakhaNEWSClusteringP2P (run) type: masakhane/masakhanews config: run split: test revision: 8ccc72e69e65f40c70e117d8b3c08306bb788b60 metrics: - type: main_score value: 60.317213909746656 - type: v_measure value: 60.317213909746656 - type: v_measure_std value: 36.500281823747386 - type: main_score value: 46.89757547846193 - type: v_measure value: 46.89757547846193 - type: v_measure_std value: 44.58903590203438 - task: type: Clustering dataset: name: MTEB MasakhaNEWSClusteringP2P (sna) type: masakhane/masakhanews config: sna split: test revision: 8ccc72e69e65f40c70e117d8b3c08306bb788b60 metrics: - type: main_score value: 59.395277358240946 - type: v_measure value: 59.395277358240946 - type: v_measure_std value: 37.500916816164654 - type: main_score value: 55.37185207068829 - type: v_measure value: 55.37185207068829 - type: v_measure_std value: 36.944574863543004 - task: type: Clustering dataset: name: MTEB MasakhaNEWSClusteringP2P (som) type: masakhane/masakhanews config: som split: test revision: 8ccc72e69e65f40c70e117d8b3c08306bb788b60 metrics: - type: main_score value: 38.18638688704302 - type: v_measure value: 38.18638688704302 - type: v_measure_std value: 35.453681137564466 - type: main_score value: 37.44211021681754 - type: v_measure value: 37.44211021681754 - type: v_measure_std value: 33.41469994463241 - task: type: Clustering dataset: name: MTEB MasakhaNEWSClusteringP2P (swa) type: masakhane/masakhanews config: swa split: test revision: 8ccc72e69e65f40c70e117d8b3c08306bb788b60 metrics: - type: main_score value: 29.49230755729658 - type: v_measure value: 29.49230755729658 - type: v_measure_std value: 28.284313285264645 - type: main_score value: 26.020680621216062 - type: v_measure value: 26.020680621216062 - type: v_measure_std value: 25.480037522570413 - task: type: Clustering dataset: name: MTEB MasakhaNEWSClusteringP2P (tir) type: masakhane/masakhanews config: tir split: test revision: 
8ccc72e69e65f40c70e117d8b3c08306bb788b60 metrics: - type: main_score value: 60.632258622750115 - type: v_measure value: 60.632258622750115 - type: v_measure_std value: 34.429711214740564 - type: main_score value: 63.74306846771303 - type: v_measure value: 63.74306846771303 - type: v_measure_std value: 32.19119631078685 - task: type: Clustering dataset: name: MTEB MasakhaNEWSClusteringP2P (xho) type: masakhane/masakhanews config: xho split: test revision: 8ccc72e69e65f40c70e117d8b3c08306bb788b60 metrics: - type: main_score value: 41.76322918806381 - type: v_measure value: 41.76322918806381 - type: v_measure_std value: 36.43245296200775 - type: main_score value: 24.580890519243777 - type: v_measure value: 24.580890519243777 - type: v_measure_std value: 37.941836363967106 - task: type: Clustering dataset: name: MTEB MasakhaNEWSClusteringP2P (yor) type: masakhane/masakhanews config: yor split: test revision: 8ccc72e69e65f40c70e117d8b3c08306bb788b60 metrics: - type: main_score value: 33.17083910808645 - type: v_measure value: 33.17083910808645 - type: v_measure_std value: 34.87547994284835 - type: main_score value: 43.63458888828314 - type: v_measure value: 43.63458888828314 - type: v_measure_std value: 31.28169350649098 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (pl) type: mteb/amazon_massive_intent config: pl split: test revision: 4672e20407010da34463acc759c162ca9734bca6 metrics: - type: accuracy value: 75.37323470073974 - type: f1 value: 71.1836877753734 - type: f1_weighted value: 75.72073213955457 - type: main_score value: 75.37323470073974 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (de) type: mteb/amazon_massive_intent config: de split: test revision: 4672e20407010da34463acc759c162ca9734bca6 metrics: - type: accuracy value: 74.83523873570948 - type: f1 value: 70.72375821116886 - type: f1_weighted value: 75.20800490010755 - type: main_score value: 74.83523873570948 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (es) type: mteb/amazon_massive_intent config: es split: test revision: 4672e20407010da34463acc759c162ca9734bca6 metrics: - type: accuracy value: 75.31607262945528 - type: f1 value: 72.06063554897662 - type: f1_weighted value: 75.72438161355252 - type: main_score value: 75.31607262945528 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (ru) type: mteb/amazon_massive_intent config: ru split: test revision: 4672e20407010da34463acc759c162ca9734bca6 metrics: - type: accuracy value: 76.7955615332885 - type: f1 value: 73.08099648499756 - type: f1_weighted value: 77.18482068239668 - type: main_score value: 76.7955615332885 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (en) type: mteb/amazon_massive_intent config: en split: test revision: 4672e20407010da34463acc759c162ca9734bca6 metrics: - type: accuracy value: 77.60591795561534 - type: f1 value: 74.46676705370395 - type: f1_weighted value: 77.69888062336614 - type: main_score value: 77.60591795561534 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (fr) type: mteb/amazon_massive_intent config: fr split: test revision: 4672e20407010da34463acc759c162ca9734bca6 metrics: - type: accuracy value: 76.32145258910558 - type: f1 value: 72.89824154178328 - type: f1_weighted value: 76.6539327979472 - type: main_score value: 76.32145258910558 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (zh-CN) type: mteb/amazon_massive_intent config: zh-CN 
split: test revision: 4672e20407010da34463acc759c162ca9734bca6 metrics: - type: accuracy value: 73.21788836583724 - type: f1 value: 70.45594512246377 - type: f1_weighted value: 73.67862536499393 - type: main_score value: 73.21788836583724 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (zh-CN) type: mteb/amazon_massive_scenario config: zh-CN split: test revision: fad2c6e8459f9e1c45d9315f4953d921437d70f8 metrics: - type: accuracy value: 80.82044384667114 - type: f1 value: 80.53217664465089 - type: f1_weighted value: 80.94535087010512 - type: main_score value: 80.82044384667114 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (pl) type: mteb/amazon_massive_scenario config: pl split: test revision: fad2c6e8459f9e1c45d9315f4953d921437d70f8 metrics: - type: accuracy value: 82.1049092131809 - type: f1 value: 81.55343463694733 - type: f1_weighted value: 82.33509098770782 - type: main_score value: 82.1049092131809 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (es) type: mteb/amazon_massive_scenario config: es split: test revision: fad2c6e8459f9e1c45d9315f4953d921437d70f8 metrics: - type: accuracy value: 82.58238063214526 - type: f1 value: 82.27974449333072 - type: f1_weighted value: 82.81337569618209 - type: main_score value: 82.58238063214526 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (de) type: mteb/amazon_massive_scenario config: de split: test revision: fad2c6e8459f9e1c45d9315f4953d921437d70f8 metrics: - type: accuracy value: 83.97108271687962 - type: f1 value: 83.56285606936076 - type: f1_weighted value: 84.10198745390771 - type: main_score value: 83.97108271687962 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (en) type: mteb/amazon_massive_scenario config: en split: test revision: fad2c6e8459f9e1c45d9315f4953d921437d70f8 metrics: - type: accuracy value: 84.71082716879623 - type: f1 value: 84.09447062371402 - type: f1_weighted value: 84.73765765551342 - type: main_score value: 84.71082716879623 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (fr) type: mteb/amazon_massive_scenario config: fr split: test revision: fad2c6e8459f9e1c45d9315f4953d921437d70f8 metrics: - type: accuracy value: 83.093476798924 - type: f1 value: 82.72656900752943 - type: f1_weighted value: 83.26606516503364 - type: main_score value: 83.093476798924 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (ru) type: mteb/amazon_massive_scenario config: ru split: test revision: fad2c6e8459f9e1c45d9315f4953d921437d70f8 metrics: - type: accuracy value: 84.05850706119705 - type: f1 value: 83.64234048881222 - type: f1_weighted value: 84.17315768381876 - type: main_score value: 84.05850706119705 - task: type: Retrieval dataset: name: MTEB MedicalRetrieval (default) type: C-MTEB/MedicalRetrieval config: default split: dev revision: 2039188fb5800a9803ba5048df7b76e6fb151fc6 metrics: - type: main_score value: 56.635999999999996 - type: map_at_1 value: 48.699999999999996 - type: map_at_10 value: 53.991 - type: map_at_100 value: 54.449999999999996 - type: map_at_1000 value: 54.515 - type: map_at_20 value: 54.212 - type: map_at_3 value: 52.833 - type: map_at_5 value: 53.503 - type: mrr_at_1 value: 48.699999999999996 - type: mrr_at_10 value: 53.991309523809505 - type: mrr_at_100 value: 54.45008993448266 - type: mrr_at_1000 value: 54.515253990549795 - type: mrr_at_20 value: 54.21201762247036 - type: mrr_at_3 value: 
52.8333333333333 - type: mrr_at_5 value: 53.50333333333328 - type: nauc_map_at_1000_diff1 value: 79.96867989401643 - type: nauc_map_at_1000_max value: 69.75230895599029 - type: nauc_map_at_1000_std value: 2.6418738289740213 - type: nauc_map_at_100_diff1 value: 79.95343709599133 - type: nauc_map_at_100_max value: 69.751282671507 - type: nauc_map_at_100_std value: 2.621719966106279 - type: nauc_map_at_10_diff1 value: 80.02875864565634 - type: nauc_map_at_10_max value: 69.80948662290187 - type: nauc_map_at_10_std value: 2.329151604733765 - type: nauc_map_at_1_diff1 value: 83.616940281383 - type: nauc_map_at_1_max value: 69.08142651929452 - type: nauc_map_at_1_std value: 1.9687791394035643 - type: nauc_map_at_20_diff1 value: 79.95555601275339 - type: nauc_map_at_20_max value: 69.76604695002925 - type: nauc_map_at_20_std value: 2.556184141901367 - type: nauc_map_at_3_diff1 value: 80.74790131023668 - type: nauc_map_at_3_max value: 70.57797991892402 - type: nauc_map_at_3_std value: 2.7115149849964117 - type: nauc_map_at_5_diff1 value: 80.31796539878381 - type: nauc_map_at_5_max value: 69.93573796420061 - type: nauc_map_at_5_std value: 2.0731614029506606 - type: nauc_mrr_at_1000_diff1 value: 79.96867999907981 - type: nauc_mrr_at_1000_max value: 69.57395578976896 - type: nauc_mrr_at_1000_std value: 2.46351945887829 - type: nauc_mrr_at_100_diff1 value: 79.95343709599133 - type: nauc_mrr_at_100_max value: 69.57322054130803 - type: nauc_mrr_at_100_std value: 2.4436578359073433 - type: nauc_mrr_at_10_diff1 value: 80.02875864565634 - type: nauc_mrr_at_10_max value: 69.63292630937411 - type: nauc_mrr_at_10_std value: 2.1525912912060012 - type: nauc_mrr_at_1_diff1 value: 83.616940281383 - type: nauc_mrr_at_1_max value: 68.74717310480305 - type: nauc_mrr_at_1_std value: 1.6345257249120868 - type: nauc_mrr_at_20_diff1 value: 79.95555601275339 - type: nauc_mrr_at_20_max value: 69.58883608470444 - type: nauc_mrr_at_20_std value: 2.378973276576547 - type: nauc_mrr_at_3_diff1 value: 80.74790131023668 - type: nauc_mrr_at_3_max value: 70.40430475488604 - type: nauc_mrr_at_3_std value: 2.5378398209583817 - type: nauc_mrr_at_5_diff1 value: 80.31796539878381 - type: nauc_mrr_at_5_max value: 69.7605991748183 - type: nauc_mrr_at_5_std value: 1.898022613568352 - type: nauc_ndcg_at_1000_diff1 value: 78.35504059321225 - type: nauc_ndcg_at_1000_max value: 69.06752522437093 - type: nauc_ndcg_at_1000_std value: 3.9624036886099265 - type: nauc_ndcg_at_100_diff1 value: 77.79729140249833 - type: nauc_ndcg_at_100_max value: 68.93113791506029 - type: nauc_ndcg_at_100_std value: 3.642178826886181 - type: nauc_ndcg_at_10_diff1 value: 78.160158293918 - type: nauc_ndcg_at_10_max value: 69.28122202281361 - type: nauc_ndcg_at_10_std value: 2.438976810940962 - type: nauc_ndcg_at_1_diff1 value: 83.616940281383 - type: nauc_ndcg_at_1_max value: 69.08142651929452 - type: nauc_ndcg_at_1_std value: 1.9687791394035643 - type: nauc_ndcg_at_20_diff1 value: 77.88514432874997 - type: nauc_ndcg_at_20_max value: 69.06148818508873 - type: nauc_ndcg_at_20_std value: 3.1800249272363676 - type: nauc_ndcg_at_3_diff1 value: 79.73510384405803 - type: nauc_ndcg_at_3_max value: 70.78000695123832 - type: nauc_ndcg_at_3_std value: 2.9041415468363274 - type: nauc_ndcg_at_5_diff1 value: 78.91872808866195 - type: nauc_ndcg_at_5_max value: 69.61478429620091 - type: nauc_ndcg_at_5_std value: 1.734699636301054 - type: nauc_precision_at_1000_diff1 value: 66.37858395390673 - type: nauc_precision_at_1000_max value: 60.651659037598534 - type: 
nauc_precision_at_1000_std value: 27.388353715469798 - type: nauc_precision_at_100_diff1 value: 66.34325807776025 - type: nauc_precision_at_100_max value: 63.63855305621111 - type: nauc_precision_at_100_std value: 10.641748149575351 - type: nauc_precision_at_10_diff1 value: 71.3784685491089 - type: nauc_precision_at_10_max value: 67.05313695174542 - type: nauc_precision_at_10_std value: 3.000406867930561 - type: nauc_precision_at_1_diff1 value: 83.616940281383 - type: nauc_precision_at_1_max value: 69.08142651929452 - type: nauc_precision_at_1_std value: 1.9687791394035643 - type: nauc_precision_at_20_diff1 value: 69.73407910977694 - type: nauc_precision_at_20_max value: 65.77426240320742 - type: nauc_precision_at_20_std value: 6.204416838482586 - type: nauc_precision_at_3_diff1 value: 76.63737537643107 - type: nauc_precision_at_3_max value: 71.29710200719668 - type: nauc_precision_at_3_std value: 3.47180961484546 - type: nauc_precision_at_5_diff1 value: 74.36945983536717 - type: nauc_precision_at_5_max value: 68.33292218003061 - type: nauc_precision_at_5_std value: 0.47128762620258075 - type: nauc_recall_at_1000_diff1 value: 66.37858395390681 - type: nauc_recall_at_1000_max value: 60.65165903759889 - type: nauc_recall_at_1000_std value: 27.388353715469822 - type: nauc_recall_at_100_diff1 value: 66.34325807776025 - type: nauc_recall_at_100_max value: 63.63855305621116 - type: nauc_recall_at_100_std value: 10.641748149575351 - type: nauc_recall_at_10_diff1 value: 71.37846854910892 - type: nauc_recall_at_10_max value: 67.05313695174546 - type: nauc_recall_at_10_std value: 3.000406867930663 - type: nauc_recall_at_1_diff1 value: 83.616940281383 - type: nauc_recall_at_1_max value: 69.08142651929452 - type: nauc_recall_at_1_std value: 1.9687791394035643 - type: nauc_recall_at_20_diff1 value: 69.73407910977691 - type: nauc_recall_at_20_max value: 65.77426240320746 - type: nauc_recall_at_20_std value: 6.204416838482536 - type: nauc_recall_at_3_diff1 value: 76.63737537643112 - type: nauc_recall_at_3_max value: 71.29710200719668 - type: nauc_recall_at_3_std value: 3.471809614845442 - type: nauc_recall_at_5_diff1 value: 74.36945983536715 - type: nauc_recall_at_5_max value: 68.33292218003065 - type: nauc_recall_at_5_std value: 0.4712876262026442 - type: ndcg_at_1 value: 48.699999999999996 - type: ndcg_at_10 value: 56.635999999999996 - type: ndcg_at_100 value: 59.193 - type: ndcg_at_1000 value: 60.97 - type: ndcg_at_20 value: 57.426 - type: ndcg_at_3 value: 54.186 - type: ndcg_at_5 value: 55.407 - type: precision_at_1 value: 48.699999999999996 - type: precision_at_10 value: 6.5 - type: precision_at_100 value: 0.777 - type: precision_at_1000 value: 0.092 - type: precision_at_20 value: 3.405 - type: precision_at_3 value: 19.367 - type: precision_at_5 value: 12.22 - type: recall_at_1 value: 48.699999999999996 - type: recall_at_10 value: 65.0 - type: recall_at_100 value: 77.7 - type: recall_at_1000 value: 91.8 - type: recall_at_20 value: 68.10000000000001 - type: recall_at_3 value: 58.099999999999994 - type: recall_at_5 value: 61.1 - task: type: Clustering dataset: name: MTEB MedrxivClusteringP2P (default) type: mteb/medrxiv-clustering-p2p config: default split: test revision: e7a26af6f3ae46b30dde8737f02c07b1505bcc73 metrics: - type: main_score value: 34.80188561439236 - type: v_measure value: 34.80188561439236 - type: v_measure_std value: 1.5703148841573102 - task: type: Clustering dataset: name: MTEB MedrxivClusteringS2S (default) type: mteb/medrxiv-clustering-s2s config: default split: test revision: 
35191c8c0dca72d8ff3efcd72aa802307d469663 metrics: - type: main_score value: 32.42285513996236 - type: v_measure value: 32.42285513996236 - type: v_measure_std value: 1.3769867487457566 - task: type: Retrieval dataset: name: MTEB MintakaRetrieval (de) type: jinaai/mintakaqa config: de split: test revision: efa78cc2f74bbcd21eff2261f9e13aebe40b814e metrics: - type: main_score value: 27.025 - type: map_at_1 value: 14.532 - type: map_at_10 value: 22.612 - type: map_at_100 value: 23.802 - type: map_at_1000 value: 23.9 - type: map_at_20 value: 23.275000000000002 - type: map_at_3 value: 20.226 - type: map_at_5 value: 21.490000000000002 - type: mrr_at_1 value: 14.532434709351305 - type: mrr_at_10 value: 22.612077265615575 - type: mrr_at_100 value: 23.801523356874675 - type: mrr_at_1000 value: 23.900118499340238 - type: mrr_at_20 value: 23.275466430108995 - type: mrr_at_3 value: 20.22606009547877 - type: mrr_at_5 value: 21.489750070204945 - type: nauc_map_at_1000_diff1 value: 14.148987799763596 - type: nauc_map_at_1000_max value: 44.70338461387784 - type: nauc_map_at_1000_std value: 15.868006767707637 - type: nauc_map_at_100_diff1 value: 14.11371769080442 - type: nauc_map_at_100_max value: 44.67995540936296 - type: nauc_map_at_100_std value: 15.890796502029076 - type: nauc_map_at_10_diff1 value: 14.29066834165688 - type: nauc_map_at_10_max value: 45.10997111765282 - type: nauc_map_at_10_std value: 15.508568918629864 - type: nauc_map_at_1_diff1 value: 23.473291302576396 - type: nauc_map_at_1_max value: 44.68942599764586 - type: nauc_map_at_1_std value: 12.424377262427253 - type: nauc_map_at_20_diff1 value: 14.112652046087831 - type: nauc_map_at_20_max value: 44.82014861413682 - type: nauc_map_at_20_std value: 15.739350613646385 - type: nauc_map_at_3_diff1 value: 16.119659221396347 - type: nauc_map_at_3_max value: 46.04766378953525 - type: nauc_map_at_3_std value: 13.969878046315925 - type: nauc_map_at_5_diff1 value: 15.095453434076184 - type: nauc_map_at_5_max value: 45.802128149314406 - type: nauc_map_at_5_std value: 14.957442173319949 - type: nauc_mrr_at_1000_diff1 value: 14.148987799763596 - type: nauc_mrr_at_1000_max value: 44.70338461387784 - type: nauc_mrr_at_1000_std value: 15.868006767707637 - type: nauc_mrr_at_100_diff1 value: 14.11371769080442 - type: nauc_mrr_at_100_max value: 44.67995540936296 - type: nauc_mrr_at_100_std value: 15.890796502029076 - type: nauc_mrr_at_10_diff1 value: 14.29066834165688 - type: nauc_mrr_at_10_max value: 45.10997111765282 - type: nauc_mrr_at_10_std value: 15.508568918629864 - type: nauc_mrr_at_1_diff1 value: 23.473291302576396 - type: nauc_mrr_at_1_max value: 44.68942599764586 - type: nauc_mrr_at_1_std value: 12.424377262427253 - type: nauc_mrr_at_20_diff1 value: 14.112652046087831 - type: nauc_mrr_at_20_max value: 44.82014861413682 - type: nauc_mrr_at_20_std value: 15.739350613646385 - type: nauc_mrr_at_3_diff1 value: 16.119659221396347 - type: nauc_mrr_at_3_max value: 46.04766378953525 - type: nauc_mrr_at_3_std value: 13.969878046315925 - type: nauc_mrr_at_5_diff1 value: 15.095453434076184 - type: nauc_mrr_at_5_max value: 45.802128149314406 - type: nauc_mrr_at_5_std value: 14.957442173319949 - type: nauc_ndcg_at_1000_diff1 value: 11.626606894574028 - type: nauc_ndcg_at_1000_max value: 43.328592841065536 - type: nauc_ndcg_at_1000_std value: 18.049446272245547 - type: nauc_ndcg_at_100_diff1 value: 10.485720606660239 - type: nauc_ndcg_at_100_max value: 42.405317674170966 - type: nauc_ndcg_at_100_std value: 19.107151641936987 - type: nauc_ndcg_at_10_diff1 value: 
11.029351078162982 - type: nauc_ndcg_at_10_max value: 44.36855031964681 - type: nauc_ndcg_at_10_std value: 17.302796171409305 - type: nauc_ndcg_at_1_diff1 value: 23.473291302576396 - type: nauc_ndcg_at_1_max value: 44.68942599764586 - type: nauc_ndcg_at_1_std value: 12.424377262427253 - type: nauc_ndcg_at_20_diff1 value: 10.356662718168412 - type: nauc_ndcg_at_20_max value: 43.31602680430083 - type: nauc_ndcg_at_20_std value: 18.162891267850316 - type: nauc_ndcg_at_3_diff1 value: 14.42844952297869 - type: nauc_ndcg_at_3_max value: 46.26603339466543 - type: nauc_ndcg_at_3_std value: 14.449362723887857 - type: nauc_ndcg_at_5_diff1 value: 12.783416563486396 - type: nauc_ndcg_at_5_max value: 45.852176479124424 - type: nauc_ndcg_at_5_std value: 16.11775016428085 - type: nauc_precision_at_1000_diff1 value: -8.045361059399795 - type: nauc_precision_at_1000_max value: 21.970273281738777 - type: nauc_precision_at_1000_std value: 49.564650488193266 - type: nauc_precision_at_100_diff1 value: -2.118628861593353 - type: nauc_precision_at_100_max value: 31.32498977104778 - type: nauc_precision_at_100_std value: 32.96087731883451 - type: nauc_precision_at_10_diff1 value: 3.0335517475367615 - type: nauc_precision_at_10_max value: 42.21620215030219 - type: nauc_precision_at_10_std value: 21.90159732315962 - type: nauc_precision_at_1_diff1 value: 23.473291302576396 - type: nauc_precision_at_1_max value: 44.68942599764586 - type: nauc_precision_at_1_std value: 12.424377262427253 - type: nauc_precision_at_20_diff1 value: 0.4087201843719047 - type: nauc_precision_at_20_max value: 38.485034773895734 - type: nauc_precision_at_20_std value: 25.077397979916682 - type: nauc_precision_at_3_diff1 value: 10.408327736589833 - type: nauc_precision_at_3_max value: 46.757216289175076 - type: nauc_precision_at_3_std value: 15.62594354926867 - type: nauc_precision_at_5_diff1 value: 7.326752744229544 - type: nauc_precision_at_5_max value: 45.89190518573553 - type: nauc_precision_at_5_std value: 19.01717163438957 - type: nauc_recall_at_1000_diff1 value: -8.045361059400387 - type: nauc_recall_at_1000_max value: 21.97027328173812 - type: nauc_recall_at_1000_std value: 49.56465048819266 - type: nauc_recall_at_100_diff1 value: -2.118628861593277 - type: nauc_recall_at_100_max value: 31.324989771047818 - type: nauc_recall_at_100_std value: 32.96087731883457 - type: nauc_recall_at_10_diff1 value: 3.0335517475367166 - type: nauc_recall_at_10_max value: 42.21620215030217 - type: nauc_recall_at_10_std value: 21.901597323159606 - type: nauc_recall_at_1_diff1 value: 23.473291302576396 - type: nauc_recall_at_1_max value: 44.68942599764586 - type: nauc_recall_at_1_std value: 12.424377262427253 - type: nauc_recall_at_20_diff1 value: 0.40872018437190905 - type: nauc_recall_at_20_max value: 38.485034773895734 - type: nauc_recall_at_20_std value: 25.077397979916693 - type: nauc_recall_at_3_diff1 value: 10.408327736589843 - type: nauc_recall_at_3_max value: 46.75721628917507 - type: nauc_recall_at_3_std value: 15.625943549268664 - type: nauc_recall_at_5_diff1 value: 7.326752744229548 - type: nauc_recall_at_5_max value: 45.89190518573557 - type: nauc_recall_at_5_std value: 19.01717163438958 - type: ndcg_at_1 value: 14.532 - type: ndcg_at_10 value: 27.025 - type: ndcg_at_100 value: 33.305 - type: ndcg_at_1000 value: 36.38 - type: ndcg_at_20 value: 29.443 - type: ndcg_at_3 value: 22.035 - type: ndcg_at_5 value: 24.319 - type: precision_at_1 value: 14.532 - type: precision_at_10 value: 4.115 - type: precision_at_100 value: 0.717 - type: 
precision_at_1000 value: 0.097 - type: precision_at_20 value: 2.536 - type: precision_at_3 value: 9.085 - type: precision_at_5 value: 6.563 - type: recall_at_1 value: 14.532 - type: recall_at_10 value: 41.154 - type: recall_at_100 value: 71.651 - type: recall_at_1000 value: 96.841 - type: recall_at_20 value: 50.71600000000001 - type: recall_at_3 value: 27.254 - type: recall_at_5 value: 32.814 - task: type: Retrieval dataset: name: MTEB MintakaRetrieval (es) type: jinaai/mintakaqa config: es split: test revision: efa78cc2f74bbcd21eff2261f9e13aebe40b814e metrics: - type: main_score value: 26.912000000000003 - type: map_at_1 value: 14.686 - type: map_at_10 value: 22.569 - type: map_at_100 value: 23.679 - type: map_at_1000 value: 23.777 - type: map_at_20 value: 23.169 - type: map_at_3 value: 20.201 - type: map_at_5 value: 21.566 - type: mrr_at_1 value: 14.686468646864686 - type: mrr_at_10 value: 22.569346220336296 - type: mrr_at_100 value: 23.678819125817146 - type: mrr_at_1000 value: 23.77713511338264 - type: mrr_at_20 value: 23.16850858443442 - type: mrr_at_3 value: 20.200770077007665 - type: mrr_at_5 value: 21.56628162816276 - type: nauc_map_at_1000_diff1 value: 14.129007578838381 - type: nauc_map_at_1000_max value: 44.4255501141499 - type: nauc_map_at_1000_std value: 19.95906154868176 - type: nauc_map_at_100_diff1 value: 14.09071870575231 - type: nauc_map_at_100_max value: 44.403179928955566 - type: nauc_map_at_100_std value: 20.00413657519976 - type: nauc_map_at_10_diff1 value: 14.149535953153688 - type: nauc_map_at_10_max value: 44.66529917634685 - type: nauc_map_at_10_std value: 19.580235989479394 - type: nauc_map_at_1_diff1 value: 23.489813522176636 - type: nauc_map_at_1_max value: 46.54578639925787 - type: nauc_map_at_1_std value: 16.39083721709994 - type: nauc_map_at_20_diff1 value: 14.021560420656181 - type: nauc_map_at_20_max value: 44.4825455452467 - type: nauc_map_at_20_std value: 19.886927750826878 - type: nauc_map_at_3_diff1 value: 16.182977890477723 - type: nauc_map_at_3_max value: 46.1840554029258 - type: nauc_map_at_3_std value: 18.735671900228958 - type: nauc_map_at_5_diff1 value: 14.779126395472833 - type: nauc_map_at_5_max value: 45.23237213817556 - type: nauc_map_at_5_std value: 19.348508580412872 - type: nauc_mrr_at_1000_diff1 value: 14.129007578838381 - type: nauc_mrr_at_1000_max value: 44.4255501141499 - type: nauc_mrr_at_1000_std value: 19.95906154868176 - type: nauc_mrr_at_100_diff1 value: 14.09071870575231 - type: nauc_mrr_at_100_max value: 44.403179928955566 - type: nauc_mrr_at_100_std value: 20.00413657519976 - type: nauc_mrr_at_10_diff1 value: 14.149535953153688 - type: nauc_mrr_at_10_max value: 44.66529917634685 - type: nauc_mrr_at_10_std value: 19.580235989479394 - type: nauc_mrr_at_1_diff1 value: 23.489813522176636 - type: nauc_mrr_at_1_max value: 46.54578639925787 - type: nauc_mrr_at_1_std value: 16.39083721709994 - type: nauc_mrr_at_20_diff1 value: 14.021560420656181 - type: nauc_mrr_at_20_max value: 44.4825455452467 - type: nauc_mrr_at_20_std value: 19.886927750826878 - type: nauc_mrr_at_3_diff1 value: 16.182977890477723 - type: nauc_mrr_at_3_max value: 46.1840554029258 - type: nauc_mrr_at_3_std value: 18.735671900228958 - type: nauc_mrr_at_5_diff1 value: 14.779126395472833 - type: nauc_mrr_at_5_max value: 45.23237213817556 - type: nauc_mrr_at_5_std value: 19.348508580412872 - type: nauc_ndcg_at_1000_diff1 value: 11.762470380481101 - type: nauc_ndcg_at_1000_max value: 42.8233203033089 - type: nauc_ndcg_at_1000_std value: 21.78503705117719 - type: 
nauc_ndcg_at_100_diff1 value: 10.45886076220022 - type: nauc_ndcg_at_100_max value: 41.85472899256818 - type: nauc_ndcg_at_100_std value: 23.20955486335138 - type: nauc_ndcg_at_10_diff1 value: 10.605912468659469 - type: nauc_ndcg_at_10_max value: 43.150942448104715 - type: nauc_ndcg_at_10_std value: 21.120035764826085 - type: nauc_ndcg_at_1_diff1 value: 23.489813522176636 - type: nauc_ndcg_at_1_max value: 46.54578639925787 - type: nauc_ndcg_at_1_std value: 16.39083721709994 - type: nauc_ndcg_at_20_diff1 value: 10.11291783888644 - type: nauc_ndcg_at_20_max value: 42.51260678842788 - type: nauc_ndcg_at_20_std value: 22.1744949382252 - type: nauc_ndcg_at_3_diff1 value: 14.25625326760802 - type: nauc_ndcg_at_3_max value: 45.96162916377383 - type: nauc_ndcg_at_3_std value: 19.557832728215523 - type: nauc_ndcg_at_5_diff1 value: 11.956317653823053 - type: nauc_ndcg_at_5_max value: 44.35971268886807 - type: nauc_ndcg_at_5_std value: 20.581696730374233 - type: nauc_precision_at_1000_diff1 value: 5.132291843566577 - type: nauc_precision_at_1000_max value: 25.293354576835263 - type: nauc_precision_at_1000_std value: 40.36005126087624 - type: nauc_precision_at_100_diff1 value: -1.5252854375008238 - type: nauc_precision_at_100_max value: 31.007586474495984 - type: nauc_precision_at_100_std value: 37.297552993548386 - type: nauc_precision_at_10_diff1 value: 1.9663657370770737 - type: nauc_precision_at_10_max value: 39.194092293625125 - type: nauc_precision_at_10_std value: 24.956542621999542 - type: nauc_precision_at_1_diff1 value: 23.489813522176636 - type: nauc_precision_at_1_max value: 46.54578639925787 - type: nauc_precision_at_1_std value: 16.39083721709994 - type: nauc_precision_at_20_diff1 value: 0.011112090390932373 - type: nauc_precision_at_20_max value: 36.9357074392519 - type: nauc_precision_at_20_std value: 28.611387115093876 - type: nauc_precision_at_3_diff1 value: 9.596831091013703 - type: nauc_precision_at_3_max value: 45.3905541893809 - type: nauc_precision_at_3_std value: 21.599314388526945 - type: nauc_precision_at_5_diff1 value: 5.175887949900142 - type: nauc_precision_at_5_max value: 42.129467510414464 - type: nauc_precision_at_5_std value: 23.607251548776677 - type: nauc_recall_at_1000_diff1 value: 5.132291843566257 - type: nauc_recall_at_1000_max value: 25.29335457683396 - type: nauc_recall_at_1000_std value: 40.36005126087638 - type: nauc_recall_at_100_diff1 value: -1.5252854375008988 - type: nauc_recall_at_100_max value: 31.00758647449594 - type: nauc_recall_at_100_std value: 37.29755299354834 - type: nauc_recall_at_10_diff1 value: 1.9663657370770793 - type: nauc_recall_at_10_max value: 39.19409229362512 - type: nauc_recall_at_10_std value: 24.956542621999546 - type: nauc_recall_at_1_diff1 value: 23.489813522176636 - type: nauc_recall_at_1_max value: 46.54578639925787 - type: nauc_recall_at_1_std value: 16.39083721709994 - type: nauc_recall_at_20_diff1 value: 0.011112090390923075 - type: nauc_recall_at_20_max value: 36.93570743925189 - type: nauc_recall_at_20_std value: 28.611387115093883 - type: nauc_recall_at_3_diff1 value: 9.596831091013714 - type: nauc_recall_at_3_max value: 45.39055418938087 - type: nauc_recall_at_3_std value: 21.599314388526956 - type: nauc_recall_at_5_diff1 value: 5.17588794990012 - type: nauc_recall_at_5_max value: 42.12946751041448 - type: nauc_recall_at_5_std value: 23.607251548776695 - type: ndcg_at_1 value: 14.686 - type: ndcg_at_10 value: 26.912000000000003 - type: ndcg_at_100 value: 32.919 - type: ndcg_at_1000 value: 36.119 - type: ndcg_at_20 value: 
29.079 - type: ndcg_at_3 value: 21.995 - type: ndcg_at_5 value: 24.474999999999998 - type: precision_at_1 value: 14.686 - type: precision_at_10 value: 4.08 - type: precision_at_100 value: 0.703 - type: precision_at_1000 value: 0.097 - type: precision_at_20 value: 2.467 - type: precision_at_3 value: 9.062000000000001 - type: precision_at_5 value: 6.65 - type: recall_at_1 value: 14.686 - type: recall_at_10 value: 40.8 - type: recall_at_100 value: 70.338 - type: recall_at_1000 value: 96.82300000000001 - type: recall_at_20 value: 49.34 - type: recall_at_3 value: 27.186 - type: recall_at_5 value: 33.251 - task: type: Retrieval dataset: name: MTEB MintakaRetrieval (fr) type: jinaai/mintakaqa config: fr split: test revision: efa78cc2f74bbcd21eff2261f9e13aebe40b814e metrics: - type: main_score value: 26.909 - type: map_at_1 value: 14.701 - type: map_at_10 value: 22.613 - type: map_at_100 value: 23.729 - type: map_at_1000 value: 23.837 - type: map_at_20 value: 23.262 - type: map_at_3 value: 20.236 - type: map_at_5 value: 21.673000000000002 - type: mrr_at_1 value: 14.7010647010647 - type: mrr_at_10 value: 22.613165113165113 - type: mrr_at_100 value: 23.72877605989423 - type: mrr_at_1000 value: 23.837150802746805 - type: mrr_at_20 value: 23.261627081110596 - type: mrr_at_3 value: 20.2361452361452 - type: mrr_at_5 value: 21.673491673491625 - type: nauc_map_at_1000_diff1 value: 17.08927788889635 - type: nauc_map_at_1000_max value: 47.240929150603336 - type: nauc_map_at_1000_std value: 20.559244258100275 - type: nauc_map_at_100_diff1 value: 17.029461792796777 - type: nauc_map_at_100_max value: 47.207381115550696 - type: nauc_map_at_100_std value: 20.581498156895265 - type: nauc_map_at_10_diff1 value: 17.351456007804536 - type: nauc_map_at_10_max value: 47.815880040221344 - type: nauc_map_at_10_std value: 20.292999107555794 - type: nauc_map_at_1_diff1 value: 27.297525357600776 - type: nauc_map_at_1_max value: 47.18835074959486 - type: nauc_map_at_1_std value: 18.304203168281834 - type: nauc_map_at_20_diff1 value: 17.157460199542136 - type: nauc_map_at_20_max value: 47.4776610667456 - type: nauc_map_at_20_std value: 20.499186342964478 - type: nauc_map_at_3_diff1 value: 19.393119961356277 - type: nauc_map_at_3_max value: 49.02841822452882 - type: nauc_map_at_3_std value: 19.293122796321292 - type: nauc_map_at_5_diff1 value: 17.76275044752008 - type: nauc_map_at_5_max value: 48.01292548040298 - type: nauc_map_at_5_std value: 19.928449977400504 - type: nauc_mrr_at_1000_diff1 value: 17.08927788889635 - type: nauc_mrr_at_1000_max value: 47.240929150603336 - type: nauc_mrr_at_1000_std value: 20.559244258100275 - type: nauc_mrr_at_100_diff1 value: 17.029461792796777 - type: nauc_mrr_at_100_max value: 47.207381115550696 - type: nauc_mrr_at_100_std value: 20.581498156895265 - type: nauc_mrr_at_10_diff1 value: 17.351456007804536 - type: nauc_mrr_at_10_max value: 47.815880040221344 - type: nauc_mrr_at_10_std value: 20.292999107555794 - type: nauc_mrr_at_1_diff1 value: 27.297525357600776 - type: nauc_mrr_at_1_max value: 47.18835074959486 - type: nauc_mrr_at_1_std value: 18.304203168281834 - type: nauc_mrr_at_20_diff1 value: 17.157460199542136 - type: nauc_mrr_at_20_max value: 47.4776610667456 - type: nauc_mrr_at_20_std value: 20.499186342964478 - type: nauc_mrr_at_3_diff1 value: 19.393119961356277 - type: nauc_mrr_at_3_max value: 49.02841822452882 - type: nauc_mrr_at_3_std value: 19.293122796321292 - type: nauc_mrr_at_5_diff1 value: 17.76275044752008 - type: nauc_mrr_at_5_max value: 48.01292548040298 - type: 
nauc_mrr_at_5_std value: 19.928449977400504 - type: nauc_ndcg_at_1000_diff1 value: 13.989496006047975 - type: nauc_ndcg_at_1000_max value: 45.626323944336114 - type: nauc_ndcg_at_1000_std value: 22.125600410796515 - type: nauc_ndcg_at_100_diff1 value: 12.302204843705244 - type: nauc_ndcg_at_100_max value: 44.46856314559079 - type: nauc_ndcg_at_100_std value: 23.084984546328677 - type: nauc_ndcg_at_10_diff1 value: 14.001226213368275 - type: nauc_ndcg_at_10_max value: 47.37780636546918 - type: nauc_ndcg_at_10_std value: 21.702709032840637 - type: nauc_ndcg_at_1_diff1 value: 27.297525357600776 - type: nauc_ndcg_at_1_max value: 47.18835074959486 - type: nauc_ndcg_at_1_std value: 18.304203168281834 - type: nauc_ndcg_at_20_diff1 value: 13.317759910171056 - type: nauc_ndcg_at_20_max value: 46.25171251043813 - type: nauc_ndcg_at_20_std value: 22.309331575402595 - type: nauc_ndcg_at_3_diff1 value: 17.555381234893872 - type: nauc_ndcg_at_3_max value: 49.48635590260059 - type: nauc_ndcg_at_3_std value: 19.734570962933674 - type: nauc_ndcg_at_5_diff1 value: 14.844841165765061 - type: nauc_ndcg_at_5_max value: 47.76437065028708 - type: nauc_ndcg_at_5_std value: 20.816034479453954 - type: nauc_precision_at_1000_diff1 value: -15.591898698252546 - type: nauc_precision_at_1000_max value: 20.545984285353892 - type: nauc_precision_at_1000_std value: 38.9013414992826 - type: nauc_precision_at_100_diff1 value: -5.290395978742176 - type: nauc_precision_at_100_max value: 31.340480360546845 - type: nauc_precision_at_100_std value: 33.6897935720505 - type: nauc_precision_at_10_diff1 value: 5.965001997926562 - type: nauc_precision_at_10_max value: 46.12515296162247 - type: nauc_precision_at_10_std value: 25.409433135253558 - type: nauc_precision_at_1_diff1 value: 27.297525357600776 - type: nauc_precision_at_1_max value: 47.18835074959486 - type: nauc_precision_at_1_std value: 18.304203168281834 - type: nauc_precision_at_20_diff1 value: 3.4438127279827744 - type: nauc_precision_at_20_max value: 42.36095587714494 - type: nauc_precision_at_20_std value: 27.367900512797906 - type: nauc_precision_at_3_diff1 value: 13.165017224718916 - type: nauc_precision_at_3_max value: 50.58931825484506 - type: nauc_precision_at_3_std value: 20.852009214609442 - type: nauc_precision_at_5_diff1 value: 7.840087177549876 - type: nauc_precision_at_5_max value: 46.99388755575109 - type: nauc_precision_at_5_std value: 23.048702393099834 - type: nauc_recall_at_1000_diff1 value: -15.591898698252932 - type: nauc_recall_at_1000_max value: 20.5459842853537 - type: nauc_recall_at_1000_std value: 38.901341499282395 - type: nauc_recall_at_100_diff1 value: -5.290395978742165 - type: nauc_recall_at_100_max value: 31.340480360546863 - type: nauc_recall_at_100_std value: 33.68979357205046 - type: nauc_recall_at_10_diff1 value: 5.96500199792656 - type: nauc_recall_at_10_max value: 46.1251529616225 - type: nauc_recall_at_10_std value: 25.409433135253543 - type: nauc_recall_at_1_diff1 value: 27.297525357600776 - type: nauc_recall_at_1_max value: 47.18835074959486 - type: nauc_recall_at_1_std value: 18.304203168281834 - type: nauc_recall_at_20_diff1 value: 3.4438127279827833 - type: nauc_recall_at_20_max value: 42.36095587714498 - type: nauc_recall_at_20_std value: 27.36790051279787 - type: nauc_recall_at_3_diff1 value: 13.165017224718916 - type: nauc_recall_at_3_max value: 50.589318254845054 - type: nauc_recall_at_3_std value: 20.852009214609435 - type: nauc_recall_at_5_diff1 value: 7.840087177549891 - type: nauc_recall_at_5_max value: 46.99388755575112 - 
type: nauc_recall_at_5_std value: 23.048702393099845 - type: ndcg_at_1 value: 14.701 - type: ndcg_at_10 value: 26.909 - type: ndcg_at_100 value: 32.727000000000004 - type: ndcg_at_1000 value: 36.086 - type: ndcg_at_20 value: 29.236 - type: ndcg_at_3 value: 22.004 - type: ndcg_at_5 value: 24.615000000000002 - type: precision_at_1 value: 14.701 - type: precision_at_10 value: 4.062 - type: precision_at_100 value: 0.688 - type: precision_at_1000 value: 0.096 - type: precision_at_20 value: 2.488 - type: precision_at_3 value: 9.036 - type: precision_at_5 value: 6.699 - type: recall_at_1 value: 14.701 - type: recall_at_10 value: 40.622 - type: recall_at_100 value: 68.796 - type: recall_at_1000 value: 96.314 - type: recall_at_20 value: 49.754 - type: recall_at_3 value: 27.108999999999998 - type: recall_at_5 value: 33.497 - task: type: Classification dataset: name: MTEB MultilingualSentiment (default) type: C-MTEB/MultilingualSentiment-classification config: default split: test revision: 46958b007a63fdbf239b7672c25d0bea67b5ea1a metrics: - type: accuracy value: 73.20999999999998 - type: f1 value: 73.18755986777474 - type: f1_weighted value: 73.18755986777475 - type: main_score value: 73.20999999999998 - task: type: Retrieval dataset: name: MTEB NFCorpus (default) type: mteb/nfcorpus config: default split: test revision: ec0fa4fe99da2ff19ca1214b7966684033a58814 metrics: - type: map_at_1 value: 4.822 - type: map_at_10 value: 13.144 - type: map_at_100 value: 17.254 - type: map_at_1000 value: 18.931 - type: map_at_20 value: 14.834 - type: map_at_3 value: 8.975 - type: map_at_5 value: 10.922 - type: mrr_at_1 value: 47.059 - type: mrr_at_10 value: 55.806999999999995 - type: mrr_at_100 value: 56.286 - type: mrr_at_1000 value: 56.327000000000005 - type: mrr_at_20 value: 56.00000000000001 - type: mrr_at_3 value: 54.17999999999999 - type: mrr_at_5 value: 55.155 - type: ndcg_at_1 value: 44.427 - type: ndcg_at_10 value: 36.623 - type: ndcg_at_100 value: 33.664 - type: ndcg_at_1000 value: 42.538 - type: ndcg_at_20 value: 34.066 - type: ndcg_at_3 value: 41.118 - type: ndcg_at_5 value: 39.455 - type: precision_at_1 value: 46.44 - type: precision_at_10 value: 28.607 - type: precision_at_100 value: 9.189 - type: precision_at_1000 value: 2.261 - type: precision_at_20 value: 21.238 - type: precision_at_3 value: 39.628 - type: precision_at_5 value: 35.604 - type: recall_at_1 value: 4.822 - type: recall_at_10 value: 17.488999999999997 - type: recall_at_100 value: 35.052 - type: recall_at_1000 value: 66.67999999999999 - type: recall_at_20 value: 21.343999999999998 - type: recall_at_3 value: 10.259 - type: recall_at_5 value: 13.406 - type: main_score value: 36.623 - task: type: Retrieval dataset: name: MTEB NQ (default) type: mteb/nq config: default split: test revision: b774495ed302d8c44a3a7ea25c90dbce03968f31 metrics: - type: map_at_1 value: 41.411 - type: map_at_10 value: 57.179 - type: map_at_100 value: 57.945 - type: map_at_1000 value: 57.967999999999996 - type: map_at_20 value: 57.687 - type: map_at_3 value: 53.46300000000001 - type: map_at_5 value: 55.696999999999996 - type: mrr_at_1 value: 46.233999999999995 - type: mrr_at_10 value: 59.831999999999994 - type: mrr_at_100 value: 60.33500000000001 - type: mrr_at_1000 value: 60.348 - type: mrr_at_20 value: 60.167 - type: mrr_at_3 value: 56.972 - type: mrr_at_5 value: 58.74 - type: ndcg_at_1 value: 46.205 - type: ndcg_at_10 value: 64.23100000000001 - type: ndcg_at_100 value: 67.242 - type: ndcg_at_1000 value: 67.72500000000001 - type: ndcg_at_20 value: 
65.77300000000001 - type: ndcg_at_3 value: 57.516 - type: ndcg_at_5 value: 61.11600000000001 - type: precision_at_1 value: 46.205 - type: precision_at_10 value: 9.873 - type: precision_at_100 value: 1.158 - type: precision_at_1000 value: 0.12 - type: precision_at_20 value: 5.319 - type: precision_at_3 value: 25.424999999999997 - type: precision_at_5 value: 17.375 - type: recall_at_1 value: 41.411 - type: recall_at_10 value: 82.761 - type: recall_at_100 value: 95.52199999999999 - type: recall_at_1000 value: 99.02499999999999 - type: recall_at_20 value: 88.34 - type: recall_at_3 value: 65.73 - type: recall_at_5 value: 73.894 - type: main_score value: 64.23100000000001 - task: type: PairClassification dataset: name: MTEB Ocnli (default) type: C-MTEB/OCNLI config: default split: validation revision: 66e76a618a34d6d565d5538088562851e6daa7ec metrics: - type: cosine_accuracy value: 62.3714131023281 - type: cosine_accuracy_threshold value: 79.70921993255615 - type: cosine_ap value: 66.41380155495659 - type: cosine_f1 value: 68.89547185780786 - type: cosine_f1_threshold value: 72.91591167449951 - type: cosine_precision value: 57.485875706214685 - type: cosine_recall value: 85.95564941921859 - type: dot_accuracy value: 60.47644829453167 - type: dot_accuracy_threshold value: 36627.362060546875 - type: dot_ap value: 63.696303449293204 - type: dot_f1 value: 68.3986041101202 - type: dot_f1_threshold value: 30452.72216796875 - type: dot_precision value: 54.04411764705882 - type: dot_recall value: 93.13621964097149 - type: euclidean_accuracy value: 63.02111532214402 - type: euclidean_accuracy_threshold value: 1392.76762008667 - type: euclidean_ap value: 66.65907089443218 - type: euclidean_f1 value: 69.05036524413688 - type: euclidean_f1_threshold value: 1711.5310668945312 - type: euclidean_precision value: 54.29262394195889 - type: euclidean_recall value: 94.82576557550159 - type: main_score value: 63.02111532214402 - type: manhattan_accuracy value: 62.75040606388739 - type: manhattan_accuracy_threshold value: 32475.347900390625 - type: manhattan_ap value: 66.50943585125434 - type: manhattan_f1 value: 69.08382066276802 - type: manhattan_f1_threshold value: 41238.470458984375 - type: manhattan_precision value: 54.75896168108776 - type: manhattan_recall value: 93.55860612460401 - type: max_accuracy value: 63.02111532214402 - type: max_ap value: 66.65907089443218 - type: max_f1 value: 69.08382066276802 - type: max_precision value: 57.485875706214685 - type: max_recall value: 94.82576557550159 - type: similarity_accuracy value: 62.3714131023281 - type: similarity_accuracy_threshold value: 79.70921993255615 - type: similarity_ap value: 66.41380155495659 - type: similarity_f1 value: 68.89547185780786 - type: similarity_f1_threshold value: 72.91591167449951 - type: similarity_precision value: 57.485875706214685 - type: similarity_recall value: 85.95564941921859 - task: type: Classification dataset: name: MTEB OnlineShopping (default) type: C-MTEB/OnlineShopping-classification config: default split: test revision: e610f2ebd179a8fda30ae534c3878750a96db120 metrics: - type: accuracy value: 91.88000000000001 - type: ap value: 89.52463684448476 - type: ap_weighted value: 89.52463684448476 - type: f1 value: 91.86313022306673 - type: f1_weighted value: 91.87806318146912 - type: main_score value: 91.88000000000001 - task: type: PairClassification dataset: name: MTEB OpusparcusPC (en) type: GEM/opusparcus config: en split: test.full revision: 9e9b1f8ef51616073f47f306f7f47dd91663f86a metrics: - type: cosine_accuracy value: 
92.65578635014838 - type: cosine_accuracy_threshold value: 74.02530312538147 - type: cosine_ap value: 98.3834226153613 - type: cosine_f1 value: 94.92567913890312 - type: cosine_f1_threshold value: 74.02530312538147 - type: cosine_precision value: 95.562435500516 - type: cosine_recall value: 94.29735234215886 - type: dot_accuracy value: 91.54302670623146 - type: dot_accuracy_threshold value: 34452.29187011719 - type: dot_ap value: 98.1237257754439 - type: dot_f1 value: 94.22400803616273 - type: dot_f1_threshold value: 33670.41931152344 - type: dot_precision value: 92.9633300297324 - type: dot_recall value: 95.5193482688391 - type: euclidean_accuracy value: 92.28486646884274 - type: euclidean_accuracy_threshold value: 1602.8022766113281 - type: euclidean_ap value: 98.3099021504706 - type: euclidean_f1 value: 94.75277497477296 - type: euclidean_f1_threshold value: 1604.7462463378906 - type: euclidean_precision value: 93.89999999999999 - type: euclidean_recall value: 95.62118126272912 - type: main_score value: 98.3834226153613 - type: manhattan_accuracy value: 92.2106824925816 - type: manhattan_accuracy_threshold value: 38872.90954589844 - type: manhattan_ap value: 98.28694101230218 - type: manhattan_f1 value: 94.67815509376584 - type: manhattan_f1_threshold value: 38872.90954589844 - type: manhattan_precision value: 94.24823410696267 - type: manhattan_recall value: 95.11201629327903 - type: max_accuracy value: 92.65578635014838 - type: max_ap value: 98.3834226153613 - type: max_f1 value: 94.92567913890312 - type: max_precision value: 95.562435500516 - type: max_recall value: 95.62118126272912 - type: similarity_accuracy value: 92.65578635014838 - type: similarity_accuracy_threshold value: 74.02530312538147 - type: similarity_ap value: 98.3834226153613 - type: similarity_f1 value: 94.92567913890312 - type: similarity_f1_threshold value: 74.02530312538147 - type: similarity_precision value: 95.562435500516 - type: similarity_recall value: 94.29735234215886 - task: type: PairClassification dataset: name: MTEB OpusparcusPC (de) type: GEM/opusparcus config: de split: test.full revision: 9e9b1f8ef51616073f47f306f7f47dd91663f86a metrics: - type: cosine_accuracy value: 87.72178850248403 - type: cosine_accuracy_threshold value: 73.33863377571106 - type: cosine_ap value: 96.98901408834976 - type: cosine_f1 value: 91.89944134078212 - type: cosine_f1_threshold value: 71.45810127258301 - type: cosine_precision value: 89.64577656675749 - type: cosine_recall value: 94.26934097421203 - type: dot_accuracy value: 86.30234208658624 - type: dot_accuracy_threshold value: 32027.130126953125 - type: dot_ap value: 96.12260574893256 - type: dot_f1 value: 91.31602506714414 - type: dot_f1_threshold value: 30804.376220703125 - type: dot_precision value: 85.93091828138164 - type: dot_recall value: 97.42120343839542 - type: euclidean_accuracy value: 87.9347054648687 - type: euclidean_accuracy_threshold value: 1609.6670150756836 - type: euclidean_ap value: 97.00238860358252 - type: euclidean_f1 value: 92.1089063221043 - type: euclidean_f1_threshold value: 1641.8487548828125 - type: euclidean_precision value: 89.10714285714286 - type: euclidean_recall value: 95.31996179560649 - type: main_score value: 97.00238860358252 - type: manhattan_accuracy value: 87.72178850248403 - type: manhattan_accuracy_threshold value: 40137.060546875 - type: manhattan_ap value: 96.98653728159941 - type: manhattan_f1 value: 92.03865623561896 - type: manhattan_f1_threshold value: 40137.060546875 - type: manhattan_precision value: 88.80994671403198 
- type: manhattan_recall value: 95.51098376313276 - type: max_accuracy value: 87.9347054648687 - type: max_ap value: 97.00238860358252 - type: max_f1 value: 92.1089063221043 - type: max_precision value: 89.64577656675749 - type: max_recall value: 97.42120343839542 - type: similarity_accuracy value: 87.72178850248403 - type: similarity_accuracy_threshold value: 73.33863377571106 - type: similarity_ap value: 96.98901408834976 - type: similarity_f1 value: 91.89944134078212 - type: similarity_f1_threshold value: 71.45810127258301 - type: similarity_precision value: 89.64577656675749 - type: similarity_recall value: 94.26934097421203 - task: type: PairClassification dataset: name: MTEB OpusparcusPC (fr) type: GEM/opusparcus config: fr split: test.full revision: 9e9b1f8ef51616073f47f306f7f47dd91663f86a metrics: - type: cosine_accuracy value: 80.92643051771117 - type: cosine_accuracy_threshold value: 76.68856382369995 - type: cosine_ap value: 93.74622381534307 - type: cosine_f1 value: 87.12328767123287 - type: cosine_f1_threshold value: 71.64022922515869 - type: cosine_precision value: 80.64243448858834 - type: cosine_recall value: 94.73684210526315 - type: dot_accuracy value: 80.858310626703 - type: dot_accuracy_threshold value: 34028.3935546875 - type: dot_ap value: 91.18448457633308 - type: dot_f1 value: 86.82606657290202 - type: dot_f1_threshold value: 34028.3935546875 - type: dot_precision value: 82.2380106571936 - type: dot_recall value: 91.9563058589871 - type: euclidean_accuracy value: 80.858310626703 - type: euclidean_accuracy_threshold value: 1595.7651138305664 - type: euclidean_ap value: 93.8182717829648 - type: euclidean_f1 value: 87.04044117647058 - type: euclidean_f1_threshold value: 1609.2475891113281 - type: euclidean_precision value: 81.00940975192472 - type: euclidean_recall value: 94.04170804369414 - type: main_score value: 93.8182717829648 - type: manhattan_accuracy value: 80.99455040871935 - type: manhattan_accuracy_threshold value: 38092.132568359375 - type: manhattan_ap value: 93.77563401151711 - type: manhattan_f1 value: 86.91983122362869 - type: manhattan_f1_threshold value: 38092.132568359375 - type: manhattan_precision value: 82.32682060390763 - type: manhattan_recall value: 92.05561072492551 - type: max_accuracy value: 80.99455040871935 - type: max_ap value: 93.8182717829648 - type: max_f1 value: 87.12328767123287 - type: max_precision value: 82.32682060390763 - type: max_recall value: 94.73684210526315 - type: similarity_accuracy value: 80.92643051771117 - type: similarity_accuracy_threshold value: 76.68856382369995 - type: similarity_ap value: 93.74622381534307 - type: similarity_f1 value: 87.12328767123287 - type: similarity_f1_threshold value: 71.64022922515869 - type: similarity_precision value: 80.64243448858834 - type: similarity_recall value: 94.73684210526315 - task: type: PairClassification dataset: name: MTEB OpusparcusPC (ru) type: GEM/opusparcus config: ru split: test.full revision: 9e9b1f8ef51616073f47f306f7f47dd91663f86a metrics: - type: cosine_accuracy value: 76.83823529411765 - type: cosine_accuracy_threshold value: 72.70769476890564 - type: cosine_ap value: 89.56692049908222 - type: cosine_f1 value: 83.99832003359934 - type: cosine_f1_threshold value: 70.9052324295044 - type: cosine_precision value: 76.16146230007617 - type: cosine_recall value: 93.63295880149812 - type: dot_accuracy value: 76.28676470588235 - type: dot_accuracy_threshold value: 33740.68908691406 - type: dot_ap value: 87.77185177141567 - type: dot_f1 value: 83.62251375370292 - type: 
dot_f1_threshold value: 32726.611328125 - type: dot_precision value: 76.29343629343629 - type: dot_recall value: 92.50936329588015 - type: euclidean_accuracy value: 77.32843137254902 - type: euclidean_accuracy_threshold value: 1566.510009765625 - type: euclidean_ap value: 89.60605626791111 - type: euclidean_f1 value: 84.06546080964686 - type: euclidean_f1_threshold value: 1576.4202117919922 - type: euclidean_precision value: 77.83094098883574 - type: euclidean_recall value: 91.38576779026218 - type: main_score value: 89.60605626791111 - type: manhattan_accuracy value: 76.89950980392157 - type: manhattan_accuracy_threshold value: 38202.215576171875 - type: manhattan_ap value: 89.55766894104868 - type: manhattan_f1 value: 83.80462724935732 - type: manhattan_f1_threshold value: 38934.375 - type: manhattan_precision value: 77.25118483412322 - type: manhattan_recall value: 91.57303370786516 - type: max_accuracy value: 77.32843137254902 - type: max_ap value: 89.60605626791111 - type: max_f1 value: 84.06546080964686 - type: max_precision value: 77.83094098883574 - type: max_recall value: 93.63295880149812 - type: similarity_accuracy value: 76.83823529411765 - type: similarity_accuracy_threshold value: 72.70769476890564 - type: similarity_ap value: 89.56692049908222 - type: similarity_f1 value: 83.99832003359934 - type: similarity_f1_threshold value: 70.9052324295044 - type: similarity_precision value: 76.16146230007617 - type: similarity_recall value: 93.63295880149812 - task: type: Classification dataset: name: MTEB PAC (default) type: laugustyniak/abusive-clauses-pl config: default split: test revision: fc69d1c153a8ccdcf1eef52f4e2a27f88782f543 metrics: - type: accuracy value: 68.39559803069794 - type: ap value: 77.68074206719457 - type: ap_weighted value: 77.68074206719457 - type: f1 value: 66.23485605467732 - type: f1_weighted value: 69.03201442129347 - type: main_score value: 68.39559803069794 - task: type: STS dataset: name: MTEB PAWSX (default) type: C-MTEB/PAWSX config: default split: test revision: 9c6a90e430ac22b5779fb019a23e820b11a8b5e1 metrics: - type: cosine_pearson value: 13.161523266433587 - type: cosine_spearman value: 15.557333873773386 - type: euclidean_pearson value: 17.147508431907525 - type: euclidean_spearman value: 15.664112857732146 - type: main_score value: 15.557333873773386 - type: manhattan_pearson value: 17.130875906264386 - type: manhattan_spearman value: 15.624397342229637 - type: pearson value: 13.161523266433587 - type: spearman value: 15.557333873773386 - task: type: PairClassification dataset: name: MTEB PSC (default) type: PL-MTEB/psc-pairclassification config: default split: test revision: d05a294af9e1d3ff2bfb6b714e08a24a6cabc669 metrics: - type: cosine_accuracy value: 97.86641929499072 - type: cosine_accuracy_threshold value: 79.0391206741333 - type: cosine_ap value: 99.19403807771533 - type: cosine_f1 value: 96.45608628659475 - type: cosine_f1_threshold value: 79.0391206741333 - type: cosine_precision value: 97.50778816199377 - type: cosine_recall value: 95.42682926829268 - type: dot_accuracy value: 98.14471243042672 - type: dot_accuracy_threshold value: 29808.1787109375 - type: dot_ap value: 99.331999859971 - type: dot_f1 value: 97.01492537313433 - type: dot_f1_threshold value: 29808.1787109375 - type: dot_precision value: 95.02923976608187 - type: dot_recall value: 99.08536585365853 - type: euclidean_accuracy value: 97.49536178107606 - type: euclidean_accuracy_threshold value: 1276.227855682373 - type: euclidean_ap value: 98.91056467717377 - type: 
euclidean_f1 value: 95.83975346687212 - type: euclidean_f1_threshold value: 1276.227855682373 - type: euclidean_precision value: 96.88473520249221 - type: euclidean_recall value: 94.8170731707317 - type: main_score value: 99.331999859971 - type: manhattan_accuracy value: 97.49536178107606 - type: manhattan_accuracy_threshold value: 31097.674560546875 - type: manhattan_ap value: 98.95694691792707 - type: manhattan_f1 value: 95.83975346687212 - type: manhattan_f1_threshold value: 31097.674560546875 - type: manhattan_precision value: 96.88473520249221 - type: manhattan_recall value: 94.8170731707317 - type: max_accuracy value: 98.14471243042672 - type: max_ap value: 99.331999859971 - type: max_f1 value: 97.01492537313433 - type: max_precision value: 97.50778816199377 - type: max_recall value: 99.08536585365853 - type: similarity_accuracy value: 97.86641929499072 - type: similarity_accuracy_threshold value: 79.0391206741333 - type: similarity_ap value: 99.19403807771533 - type: similarity_f1 value: 96.45608628659475 - type: similarity_f1_threshold value: 79.0391206741333 - type: similarity_precision value: 97.50778816199377 - type: similarity_recall value: 95.42682926829268 - task: type: PairClassification dataset: name: MTEB PawsXPairClassification (en) type: google-research-datasets/paws-x config: en split: test revision: 8a04d940a42cd40658986fdd8e3da561533a3646 metrics: - type: cosine_accuracy value: 61.8 - type: cosine_accuracy_threshold value: 99.5664119720459 - type: cosine_ap value: 60.679317786040585 - type: cosine_f1 value: 63.17354143441101 - type: cosine_f1_threshold value: 97.22164869308472 - type: cosine_precision value: 47.6457399103139 - type: cosine_recall value: 93.71554575523705 - type: dot_accuracy value: 55.7 - type: dot_accuracy_threshold value: 48353.62548828125 - type: dot_ap value: 48.53805970536875 - type: dot_f1 value: 62.42214532871972 - type: dot_f1_threshold value: 38215.53955078125 - type: dot_precision value: 45.48663640948058 - type: dot_recall value: 99.44873208379272 - type: euclidean_accuracy value: 61.75000000000001 - type: euclidean_accuracy_threshold value: 189.0761137008667 - type: euclidean_ap value: 60.55517418691518 - type: euclidean_f1 value: 63.07977736549165 - type: euclidean_f1_threshold value: 504.3168067932129 - type: euclidean_precision value: 47.53914988814318 - type: euclidean_recall value: 93.71554575523705 - type: main_score value: 60.679317786040585 - type: manhattan_accuracy value: 61.9 - type: manhattan_accuracy_threshold value: 4695.778274536133 - type: manhattan_ap value: 60.48686620413608 - type: manhattan_f1 value: 62.92880855772778 - type: manhattan_f1_threshold value: 12542.36831665039 - type: manhattan_precision value: 47.28381374722838 - type: manhattan_recall value: 94.04630650496141 - type: max_accuracy value: 61.9 - type: max_ap value: 60.679317786040585 - type: max_f1 value: 63.17354143441101 - type: max_precision value: 47.6457399103139 - type: max_recall value: 99.44873208379272 - type: similarity_accuracy value: 61.8 - type: similarity_accuracy_threshold value: 99.5664119720459 - type: similarity_ap value: 60.679317786040585 - type: similarity_f1 value: 63.17354143441101 - type: similarity_f1_threshold value: 97.22164869308472 - type: similarity_precision value: 47.6457399103139 - type: similarity_recall value: 93.71554575523705 - task: type: PairClassification dataset: name: MTEB PawsXPairClassification (de) type: google-research-datasets/paws-x config: de split: test revision: 8a04d940a42cd40658986fdd8e3da561533a3646 
metrics: - type: cosine_accuracy value: 60.25 - type: cosine_accuracy_threshold value: 99.54338073730469 - type: cosine_ap value: 56.7863613689054 - type: cosine_f1 value: 62.23499820337766 - type: cosine_f1_threshold value: 89.95014429092407 - type: cosine_precision value: 45.86864406779661 - type: cosine_recall value: 96.75977653631284 - type: dot_accuracy value: 56.8 - type: dot_accuracy_threshold value: 47349.78332519531 - type: dot_ap value: 49.7857806061729 - type: dot_f1 value: 62.31225986727209 - type: dot_f1_threshold value: 30143.206787109375 - type: dot_precision value: 45.32520325203252 - type: dot_recall value: 99.66480446927373 - type: euclidean_accuracy value: 60.3 - type: euclidean_accuracy_threshold value: 219.78106498718262 - type: euclidean_ap value: 56.731544327179606 - type: euclidean_f1 value: 62.19895287958115 - type: euclidean_f1_threshold value: 1792.1623229980469 - type: euclidean_precision value: 45.22842639593909 - type: euclidean_recall value: 99.55307262569832 - type: main_score value: 56.7863613689054 - type: manhattan_accuracy value: 60.150000000000006 - type: manhattan_accuracy_threshold value: 5104.503631591797 - type: manhattan_ap value: 56.70304479768734 - type: manhattan_f1 value: 62.22067039106145 - type: manhattan_f1_threshold value: 42839.471435546875 - type: manhattan_precision value: 45.2513966480447 - type: manhattan_recall value: 99.55307262569832 - type: max_accuracy value: 60.3 - type: max_ap value: 56.7863613689054 - type: max_f1 value: 62.31225986727209 - type: max_precision value: 45.86864406779661 - type: max_recall value: 99.66480446927373 - type: similarity_accuracy value: 60.25 - type: similarity_accuracy_threshold value: 99.54338073730469 - type: similarity_ap value: 56.7863613689054 - type: similarity_f1 value: 62.23499820337766 - type: similarity_f1_threshold value: 89.95014429092407 - type: similarity_precision value: 45.86864406779661 - type: similarity_recall value: 96.75977653631284 - task: type: PairClassification dataset: name: MTEB PawsXPairClassification (es) type: google-research-datasets/paws-x config: es split: test revision: 8a04d940a42cd40658986fdd8e3da561533a3646 metrics: - type: cosine_accuracy value: 59.699999999999996 - type: cosine_accuracy_threshold value: 99.55930709838867 - type: cosine_ap value: 57.31662248806265 - type: cosine_f1 value: 62.444061962134256 - type: cosine_f1_threshold value: 74.75898265838623 - type: cosine_precision value: 45.3953953953954 - type: cosine_recall value: 100.0 - type: dot_accuracy value: 55.900000000000006 - type: dot_accuracy_threshold value: 47512.90283203125 - type: dot_ap value: 49.39339147787568 - type: dot_f1 value: 62.487082328625554 - type: dot_f1_threshold value: 34989.03503417969 - type: dot_precision value: 45.44088176352705 - type: dot_recall value: 100.0 - type: euclidean_accuracy value: 59.599999999999994 - type: euclidean_accuracy_threshold value: 200.82547664642334 - type: euclidean_ap value: 57.19737488445163 - type: euclidean_f1 value: 62.444061962134256 - type: euclidean_f1_threshold value: 1538.8837814331055 - type: euclidean_precision value: 45.3953953953954 - type: euclidean_recall value: 100.0 - type: main_score value: 57.31662248806265 - type: manhattan_accuracy value: 59.550000000000004 - type: manhattan_accuracy_threshold value: 5016.501617431641 - type: manhattan_ap value: 57.089959907945065 - type: manhattan_f1 value: 62.444061962134256 - type: manhattan_f1_threshold value: 37523.53515625 - type: manhattan_precision value: 45.3953953953954 - type: 
manhattan_recall value: 100.0 - type: max_accuracy value: 59.699999999999996 - type: max_ap value: 57.31662248806265 - type: max_f1 value: 62.487082328625554 - type: max_precision value: 45.44088176352705 - type: max_recall value: 100.0 - type: similarity_accuracy value: 59.699999999999996 - type: similarity_accuracy_threshold value: 99.55930709838867 - type: similarity_ap value: 57.31662248806265 - type: similarity_f1 value: 62.444061962134256 - type: similarity_f1_threshold value: 74.75898265838623 - type: similarity_precision value: 45.3953953953954 - type: similarity_recall value: 100.0 - task: type: PairClassification dataset: name: MTEB PawsXPairClassification (fr) type: google-research-datasets/paws-x config: fr split: test revision: 8a04d940a42cd40658986fdd8e3da561533a3646 metrics: - type: cosine_accuracy value: 61.150000000000006 - type: cosine_accuracy_threshold value: 99.36153888702393 - type: cosine_ap value: 59.43845317938599 - type: cosine_f1 value: 62.51298026998961 - type: cosine_f1_threshold value: 76.77866220474243 - type: cosine_precision value: 45.468277945619334 - type: cosine_recall value: 100.0 - type: dot_accuracy value: 55.75 - type: dot_accuracy_threshold value: 48931.55212402344 - type: dot_ap value: 50.15949290538757 - type: dot_f1 value: 62.53462603878117 - type: dot_f1_threshold value: 34415.7958984375 - type: dot_precision value: 45.4911838790932 - type: dot_recall value: 100.0 - type: euclidean_accuracy value: 61.050000000000004 - type: euclidean_accuracy_threshold value: 240.8097267150879 - type: euclidean_ap value: 59.367971294226216 - type: euclidean_f1 value: 62.51298026998961 - type: euclidean_f1_threshold value: 1444.132423400879 - type: euclidean_precision value: 45.468277945619334 - type: euclidean_recall value: 100.0 - type: main_score value: 59.43845317938599 - type: manhattan_accuracy value: 60.95 - type: manhattan_accuracy_threshold value: 5701.206207275391 - type: manhattan_ap value: 59.30094096378774 - type: manhattan_f1 value: 62.53462603878117 - type: manhattan_f1_threshold value: 33445.672607421875 - type: manhattan_precision value: 45.4911838790932 - type: manhattan_recall value: 100.0 - type: max_accuracy value: 61.150000000000006 - type: max_ap value: 59.43845317938599 - type: max_f1 value: 62.53462603878117 - type: max_precision value: 45.4911838790932 - type: max_recall value: 100.0 - type: similarity_accuracy value: 61.150000000000006 - type: similarity_accuracy_threshold value: 99.36153888702393 - type: similarity_ap value: 59.43845317938599 - type: similarity_f1 value: 62.51298026998961 - type: similarity_f1_threshold value: 76.77866220474243 - type: similarity_precision value: 45.468277945619334 - type: similarity_recall value: 100.0 - task: type: PairClassification dataset: name: MTEB PawsXPairClassification (zh) type: google-research-datasets/paws-x config: zh split: test revision: 8a04d940a42cd40658986fdd8e3da561533a3646 metrics: - type: cosine_accuracy value: 58.85 - type: cosine_accuracy_threshold value: 99.73838329315186 - type: cosine_ap value: 54.66913160570546 - type: cosine_f1 value: 62.32136632973162 - type: cosine_f1_threshold value: 76.4499306678772 - type: cosine_precision value: 45.265822784810126 - type: cosine_recall value: 100.0 - type: dot_accuracy value: 56.25 - type: dot_accuracy_threshold value: 47351.9287109375 - type: dot_ap value: 48.5266232989438 - type: dot_f1 value: 62.277951933124356 - type: dot_f1_threshold value: 31325.28076171875 - type: dot_precision value: 45.220030349013655 - type: dot_recall 
value: 100.0 - type: euclidean_accuracy value: 58.9 - type: euclidean_accuracy_threshold value: 144.24468278884888 - type: euclidean_ap value: 54.66981490353506 - type: euclidean_f1 value: 62.32136632973162 - type: euclidean_f1_threshold value: 1484.908676147461 - type: euclidean_precision value: 45.265822784810126 - type: euclidean_recall value: 100.0 - type: main_score value: 54.66981490353506 - type: manhattan_accuracy value: 58.9 - type: manhattan_accuracy_threshold value: 3586.785125732422 - type: manhattan_ap value: 54.668355260247736 - type: manhattan_f1 value: 62.32136632973162 - type: manhattan_f1_threshold value: 36031.22863769531 - type: manhattan_precision value: 45.265822784810126 - type: manhattan_recall value: 100.0 - type: max_accuracy value: 58.9 - type: max_ap value: 54.66981490353506 - type: max_f1 value: 62.32136632973162 - type: max_precision value: 45.265822784810126 - type: max_recall value: 100.0 - type: similarity_accuracy value: 58.85 - type: similarity_accuracy_threshold value: 99.73838329315186 - type: similarity_ap value: 54.66913160570546 - type: similarity_f1 value: 62.32136632973162 - type: similarity_f1_threshold value: 76.4499306678772 - type: similarity_precision value: 45.265822784810126 - type: similarity_recall value: 100.0 - task: type: Classification dataset: name: MTEB PolEmo2.0-IN (default) type: PL-MTEB/polemo2_in config: default split: test revision: d90724373c70959f17d2331ad51fb60c71176b03 metrics: - type: accuracy value: 83.75346260387812 - type: f1 value: 81.98304891214909 - type: f1_weighted value: 84.29623200830078 - type: main_score value: 83.75346260387812 - task: type: Classification dataset: name: MTEB PolEmo2.0-OUT (default) type: PL-MTEB/polemo2_out config: default split: test revision: 6a21ab8716e255ab1867265f8b396105e8aa63d4 metrics: - type: accuracy value: 66.53846153846153 - type: f1 value: 52.71826064368638 - type: f1_weighted value: 69.10010124630334 - type: main_score value: 66.53846153846153 - task: type: PairClassification dataset: name: MTEB PPC type: PL-MTEB/ppc-pairclassification config: default split: test revision: None metrics: - type: cosine_accuracy value: 81.8 - type: cosine_accuracy_threshold value: 90.47793745994568 - type: cosine_ap value: 91.42490266080884 - type: cosine_f1 value: 85.4632587859425 - type: cosine_f1_threshold value: 90.47793745994568 - type: cosine_precision value: 82.56172839506173 - type: cosine_recall value: 88.57615894039735 - type: dot_accuracy value: 74.6 - type: dot_accuracy_threshold value: 42102.23693847656 - type: dot_ap value: 86.20060009096979 - type: dot_f1 value: 80.02842928216063 - type: dot_f1_threshold value: 38970.16906738281 - type: dot_precision value: 70.1120797011208 - type: dot_recall value: 93.21192052980133 - type: euclidean_accuracy value: 81.5 - type: euclidean_accuracy_threshold value: 880.433464050293 - type: euclidean_ap value: 91.33143477982087 - type: euclidean_f1 value: 85.44600938967135 - type: euclidean_f1_threshold value: 964.0384674072266 - type: euclidean_precision value: 81.00890207715133 - type: euclidean_recall value: 90.39735099337747 - type: main_score value: 91.42490266080884 - type: manhattan_accuracy value: 81.3 - type: manhattan_accuracy_threshold value: 22100.830078125 - type: manhattan_ap value: 91.25996158651282 - type: manhattan_f1 value: 85.38102643856921 - type: manhattan_f1_threshold value: 24043.515014648438 - type: manhattan_precision value: 80.49853372434018 - type: manhattan_recall value: 90.89403973509934 - type: max_accuracy value: 81.8 - 
type: max_ap value: 91.42490266080884 - type: max_f1 value: 85.4632587859425 - type: max_precision value: 82.56172839506173 - type: max_recall value: 93.21192052980133 - type: similarity_accuracy value: 81.8 - type: similarity_accuracy_threshold value: 90.47793745994568 - type: similarity_ap value: 91.42490266080884 - type: similarity_f1 value: 85.4632587859425 - type: similarity_f1_threshold value: 90.47793745994568 - type: similarity_precision value: 82.56172839506173 - type: similarity_recall value: 88.57615894039735 - task: type: Retrieval dataset: name: MTEB QuoraRetrieval (default) type: mteb/quora config: default split: test revision: e4e08e0b7dbe3c8700f0daef558ff32256715259 metrics: - type: map_at_1 value: 71.419 - type: map_at_10 value: 85.542 - type: map_at_100 value: 86.161 - type: map_at_1000 value: 86.175 - type: map_at_20 value: 85.949 - type: map_at_3 value: 82.623 - type: map_at_5 value: 84.5 - type: mrr_at_1 value: 82.27 - type: mrr_at_10 value: 88.21900000000001 - type: mrr_at_100 value: 88.313 - type: mrr_at_1000 value: 88.31400000000001 - type: mrr_at_20 value: 88.286 - type: mrr_at_3 value: 87.325 - type: mrr_at_5 value: 87.97500000000001 - type: ndcg_at_1 value: 82.3 - type: ndcg_at_10 value: 89.088 - type: ndcg_at_100 value: 90.217 - type: ndcg_at_1000 value: 90.29700000000001 - type: ndcg_at_20 value: 89.697 - type: ndcg_at_3 value: 86.435 - type: ndcg_at_5 value: 87.966 - type: precision_at_1 value: 82.3 - type: precision_at_10 value: 13.527000000000001 - type: precision_at_100 value: 1.537 - type: precision_at_1000 value: 0.157 - type: precision_at_20 value: 7.165000000000001 - type: precision_at_3 value: 37.92 - type: precision_at_5 value: 24.914 - type: recall_at_1 value: 71.419 - type: recall_at_10 value: 95.831 - type: recall_at_100 value: 99.64 - type: recall_at_1000 value: 99.988 - type: recall_at_20 value: 97.76599999999999 - type: recall_at_3 value: 88.081 - type: recall_at_5 value: 92.50500000000001 - type: main_score value: 89.088 - task: type: STS dataset: name: MTEB RUParaPhraserSTS (default) type: merionum/ru_paraphraser config: default split: test revision: 43265056790b8f7c59e0139acb4be0a8dad2c8f4 metrics: - type: cosine_pearson value: 67.91177744712421 - type: cosine_spearman value: 76.77113726753656 - type: euclidean_pearson value: 73.81454206068638 - type: euclidean_spearman value: 76.92529493599028 - type: main_score value: 76.77113726753656 - type: manhattan_pearson value: 73.81690454439168 - type: manhattan_spearman value: 76.87333776705002 - type: pearson value: 67.91177744712421 - type: spearman value: 76.77113726753656 - task: type: Clustering dataset: name: MTEB RedditClustering (default) type: mteb/reddit-clustering config: default split: test revision: 24640382cdbf8abc73003fb0fa6d111a705499eb metrics: - type: main_score value: 55.39924225216962 - type: v_measure value: 55.39924225216962 - type: v_measure_std value: 4.723802279292467 - task: type: Clustering dataset: name: MTEB RedditClusteringP2P (default) type: mteb/reddit-clustering-p2p config: default split: test revision: 385e3cb46b4cfa89021f56c4380204149d0efe33 metrics: - type: main_score value: 62.87465161304012 - type: v_measure value: 62.87465161304012 - type: v_measure_std value: 12.082670914488473 - task: type: Retrieval dataset: name: MTEB RiaNewsRetrieval (default) type: ai-forever/ria-news-retrieval config: default split: test revision: 82374b0bbacda6114f39ff9c5b925fa1512ca5d7 metrics: - type: main_score value: 79.209 - type: map_at_1 value: 67.33 - type: map_at_10 value: 
75.633 - type: map_at_100 value: 75.897 - type: map_at_1000 value: 75.907 - type: map_at_20 value: 75.804 - type: map_at_3 value: 74.2 - type: map_at_5 value: 75.13300000000001 - type: mrr_at_1 value: 67.31 - type: mrr_at_10 value: 75.62709126984095 - type: mrr_at_100 value: 75.89105697041113 - type: mrr_at_1000 value: 75.90115653883124 - type: mrr_at_20 value: 75.79802332308172 - type: mrr_at_3 value: 74.19499999999961 - type: mrr_at_5 value: 75.12849999999939 - type: nauc_map_at_1000_diff1 value: 74.30304869630591 - type: nauc_map_at_1000_max value: 36.477146725784046 - type: nauc_map_at_1000_std value: -20.862772498461723 - type: nauc_map_at_100_diff1 value: 74.29833058090355 - type: nauc_map_at_100_max value: 36.483678619667884 - type: nauc_map_at_100_std value: -20.856274849980135 - type: nauc_map_at_10_diff1 value: 74.20729220697967 - type: nauc_map_at_10_max value: 36.56543146170092 - type: nauc_map_at_10_std value: -20.991081015484728 - type: nauc_map_at_1_diff1 value: 77.38899022125185 - type: nauc_map_at_1_max value: 32.45918619669731 - type: nauc_map_at_1_std value: -22.149586336167324 - type: nauc_map_at_20_diff1 value: 74.2447573558587 - type: nauc_map_at_20_max value: 36.50383130240387 - type: nauc_map_at_20_std value: -20.87013743041831 - type: nauc_map_at_3_diff1 value: 74.3054577294586 - type: nauc_map_at_3_max value: 36.484530586652724 - type: nauc_map_at_3_std value: -21.90543024607988 - type: nauc_map_at_5_diff1 value: 74.21062368961503 - type: nauc_map_at_5_max value: 36.55670532498779 - type: nauc_map_at_5_std value: -21.488786900676942 - type: nauc_mrr_at_1000_diff1 value: 74.31619177956684 - type: nauc_mrr_at_1000_max value: 36.53498918453189 - type: nauc_mrr_at_1000_std value: -20.75986704931237 - type: nauc_mrr_at_100_diff1 value: 74.31146790382356 - type: nauc_mrr_at_100_max value: 36.54149252857106 - type: nauc_mrr_at_100_std value: -20.75341959250079 - type: nauc_mrr_at_10_diff1 value: 74.22027806145095 - type: nauc_mrr_at_10_max value: 36.622542969971725 - type: nauc_mrr_at_10_std value: -20.889417384064117 - type: nauc_mrr_at_1_diff1 value: 77.4306709551449 - type: nauc_mrr_at_1_max value: 32.57259463438259 - type: nauc_mrr_at_1_std value: -21.964402859613937 - type: nauc_mrr_at_20_diff1 value: 74.25784396230718 - type: nauc_mrr_at_20_max value: 36.561412224507336 - type: nauc_mrr_at_20_std value: -20.767665000065723 - type: nauc_mrr_at_3_diff1 value: 74.31423253547214 - type: nauc_mrr_at_3_max value: 36.537745749488906 - type: nauc_mrr_at_3_std value: -21.81259529019546 - type: nauc_mrr_at_5_diff1 value: 74.22404613312771 - type: nauc_mrr_at_5_max value: 36.60743768455219 - type: nauc_mrr_at_5_std value: -21.39479216331971 - type: nauc_ndcg_at_1000_diff1 value: 73.48182819705742 - type: nauc_ndcg_at_1000_max value: 37.86991608461793 - type: nauc_ndcg_at_1000_std value: -19.021499322688904 - type: nauc_ndcg_at_100_diff1 value: 73.34941250585759 - type: nauc_ndcg_at_100_max value: 38.11150275625829 - type: nauc_ndcg_at_100_std value: -18.70624087206104 - type: nauc_ndcg_at_10_diff1 value: 72.82520265115987 - type: nauc_ndcg_at_10_max value: 38.43323357650525 - type: nauc_ndcg_at_10_std value: -19.410953792830878 - type: nauc_ndcg_at_1_diff1 value: 77.38899022125185 - type: nauc_ndcg_at_1_max value: 32.45918619669731 - type: nauc_ndcg_at_1_std value: -22.149586336167324 - type: nauc_ndcg_at_20_diff1 value: 72.93309285256507 - type: nauc_ndcg_at_20_max value: 38.217372819067755 - type: nauc_ndcg_at_20_std value: -18.864113576359333 - type: nauc_ndcg_at_3_diff1 
value: 73.18253776744112 - type: nauc_ndcg_at_3_max value: 38.008109328364 - type: nauc_ndcg_at_3_std value: -21.68785687594153 - type: nauc_ndcg_at_5_diff1 value: 72.90474739784793 - type: nauc_ndcg_at_5_max value: 38.29483039202184 - type: nauc_ndcg_at_5_std value: -20.833049811453474 - type: nauc_precision_at_1000_diff1 value: 59.306217613750334 - type: nauc_precision_at_1000_max value: 72.20747948302262 - type: nauc_precision_at_1000_std value: 45.58837180096227 - type: nauc_precision_at_100_diff1 value: 62.87286844562389 - type: nauc_precision_at_100_max value: 61.33108214045868 - type: nauc_precision_at_100_std value: 20.67481963545654 - type: nauc_precision_at_10_diff1 value: 64.11222984256685 - type: nauc_precision_at_10_max value: 50.323697746037496 - type: nauc_precision_at_10_std value: -7.9994544634332625 - type: nauc_precision_at_1_diff1 value: 77.38899022125185 - type: nauc_precision_at_1_max value: 32.45918619669731 - type: nauc_precision_at_1_std value: -22.149586336167324 - type: nauc_precision_at_20_diff1 value: 62.30228127286973 - type: nauc_precision_at_20_max value: 52.02090746208407 - type: nauc_precision_at_20_std value: 0.7629898806370331 - type: nauc_precision_at_3_diff1 value: 68.82856645994157 - type: nauc_precision_at_3_max value: 43.94171571306625 - type: nauc_precision_at_3_std value: -20.78595255410148 - type: nauc_precision_at_5_diff1 value: 66.62157622497887 - type: nauc_precision_at_5_max value: 46.69398173603811 - type: nauc_precision_at_5_std value: -17.412423571163057 - type: nauc_recall_at_1000_diff1 value: 59.30621761375148 - type: nauc_recall_at_1000_max value: 72.20747948302191 - type: nauc_recall_at_1000_std value: 45.588371800962655 - type: nauc_recall_at_100_diff1 value: 62.872868445623894 - type: nauc_recall_at_100_max value: 61.33108214045813 - type: nauc_recall_at_100_std value: 20.67481963545666 - type: nauc_recall_at_10_diff1 value: 64.11222984256698 - type: nauc_recall_at_10_max value: 50.32369774603755 - type: nauc_recall_at_10_std value: -7.999454463433321 - type: nauc_recall_at_1_diff1 value: 77.38899022125185 - type: nauc_recall_at_1_max value: 32.45918619669731 - type: nauc_recall_at_1_std value: -22.149586336167324 - type: nauc_recall_at_20_diff1 value: 62.3022812728695 - type: nauc_recall_at_20_max value: 52.02090746208397 - type: nauc_recall_at_20_std value: 0.7629898806369458 - type: nauc_recall_at_3_diff1 value: 68.82856645994157 - type: nauc_recall_at_3_max value: 43.94171571306612 - type: nauc_recall_at_3_std value: -20.78595255410157 - type: nauc_recall_at_5_diff1 value: 66.62157622497897 - type: nauc_recall_at_5_max value: 46.693981736038246 - type: nauc_recall_at_5_std value: -17.412423571162954 - type: ndcg_at_1 value: 67.33 - type: ndcg_at_10 value: 79.209 - type: ndcg_at_100 value: 80.463 - type: ndcg_at_1000 value: 80.74799999999999 - type: ndcg_at_20 value: 79.81899999999999 - type: ndcg_at_3 value: 76.335 - type: ndcg_at_5 value: 78.011 - type: precision_at_1 value: 67.33 - type: precision_at_10 value: 9.020999999999999 - type: precision_at_100 value: 0.96 - type: precision_at_1000 value: 0.098 - type: precision_at_20 value: 4.63 - type: precision_at_3 value: 27.493000000000002 - type: precision_at_5 value: 17.308 - type: recall_at_1 value: 67.33 - type: recall_at_10 value: 90.21000000000001 - type: recall_at_100 value: 96.00999999999999 - type: recall_at_1000 value: 98.29 - type: recall_at_20 value: 92.60000000000001 - type: recall_at_3 value: 82.48 - type: recall_at_5 value: 86.53999999999999 - task: type: Reranking 
dataset: name: MTEB RuBQReranking (default) type: ai-forever/rubq-reranking config: default split: test revision: 2e96b8f098fa4b0950fc58eacadeb31c0d0c7fa2 metrics: - type: main_score value: 65.57453932493252 - type: map value: 65.57453932493252 - type: mrr value: 70.51408205663526 - type: nAUC_map_diff1 value: 26.69583260609023 - type: nAUC_map_max value: 12.928262749610663 - type: nAUC_map_std value: 11.702468857903128 - type: nAUC_mrr_diff1 value: 28.5206955462174 - type: nAUC_mrr_max value: 14.207162454694227 - type: nAUC_mrr_std value: 10.725721001555296 - task: type: Retrieval dataset: name: MTEB RuBQRetrieval (default) type: ai-forever/rubq-retrieval config: default split: test revision: e19b6ffa60b3bc248e0b41f4cc37c26a55c2a67b metrics: - type: main_score value: 72.306 - type: map_at_1 value: 44.187 - type: map_at_10 value: 64.836 - type: map_at_100 value: 65.771 - type: map_at_1000 value: 65.8 - type: map_at_20 value: 65.497 - type: map_at_3 value: 59.692 - type: map_at_5 value: 63.105 - type: mrr_at_1 value: 62.23404255319149 - type: mrr_at_10 value: 73.40810161732159 - type: mrr_at_100 value: 73.67949305473395 - type: mrr_at_1000 value: 73.68707852294746 - type: mrr_at_20 value: 73.60429051697479 - type: mrr_at_3 value: 71.47360126083535 - type: mrr_at_5 value: 72.8447596532704 - type: nauc_map_at_1000_diff1 value: 39.838449035736886 - type: nauc_map_at_1000_max value: 32.29962306877408 - type: nauc_map_at_1000_std value: -6.324859592714388 - type: nauc_map_at_100_diff1 value: 39.824361938745426 - type: nauc_map_at_100_max value: 32.32055222704763 - type: nauc_map_at_100_std value: -6.301641111869559 - type: nauc_map_at_10_diff1 value: 39.50155328718487 - type: nauc_map_at_10_max value: 31.745730244960672 - type: nauc_map_at_10_std value: -6.867215137329693 - type: nauc_map_at_1_diff1 value: 47.66181128677822 - type: nauc_map_at_1_max value: 21.75204233166764 - type: nauc_map_at_1_std value: -8.06951079061697 - type: nauc_map_at_20_diff1 value: 39.78364637902108 - type: nauc_map_at_20_max value: 32.39065528029405 - type: nauc_map_at_20_std value: -6.368994332729006 - type: nauc_map_at_3_diff1 value: 39.51829474433183 - type: nauc_map_at_3_max value: 28.633292697821673 - type: nauc_map_at_3_std value: -7.2561170814963925 - type: nauc_map_at_5_diff1 value: 39.288433237676266 - type: nauc_map_at_5_max value: 31.007702201615515 - type: nauc_map_at_5_std value: -7.235131195162474 - type: nauc_mrr_at_1000_diff1 value: 49.599102391215226 - type: nauc_mrr_at_1000_max value: 38.25521825911133 - type: nauc_mrr_at_1000_std value: -10.448180939809435 - type: nauc_mrr_at_100_diff1 value: 49.5957067716212 - type: nauc_mrr_at_100_max value: 38.26760703964535 - type: nauc_mrr_at_100_std value: -10.438443051971081 - type: nauc_mrr_at_10_diff1 value: 49.35269710190271 - type: nauc_mrr_at_10_max value: 38.43782589127069 - type: nauc_mrr_at_10_std value: -10.404402063509815 - type: nauc_mrr_at_1_diff1 value: 53.32206103688421 - type: nauc_mrr_at_1_max value: 33.52402390241035 - type: nauc_mrr_at_1_std value: -12.73473393949936 - type: nauc_mrr_at_20_diff1 value: 49.550630850826636 - type: nauc_mrr_at_20_max value: 38.35964703941151 - type: nauc_mrr_at_20_std value: -10.444577766284766 - type: nauc_mrr_at_3_diff1 value: 49.12029127633829 - type: nauc_mrr_at_3_max value: 38.01631275124067 - type: nauc_mrr_at_3_std value: -10.523724301481309 - type: nauc_mrr_at_5_diff1 value: 49.04606949432458 - type: nauc_mrr_at_5_max value: 38.33647550077891 - type: nauc_mrr_at_5_std value: -10.47076409263114 - type: 
nauc_ndcg_at_1000_diff1 value: 41.342785916264226 - type: nauc_ndcg_at_1000_max value: 35.75731064862711 - type: nauc_ndcg_at_1000_std value: -5.45573422899229 - type: nauc_ndcg_at_100_diff1 value: 40.972974559636086 - type: nauc_ndcg_at_100_max value: 36.32938573321036 - type: nauc_ndcg_at_100_std value: -4.749631537590004 - type: nauc_ndcg_at_10_diff1 value: 39.67813474464166 - type: nauc_ndcg_at_10_max value: 35.480200504848966 - type: nauc_ndcg_at_10_std value: -6.318561293935512 - type: nauc_ndcg_at_1_diff1 value: 53.45970160222764 - type: nauc_ndcg_at_1_max value: 33.14759013278075 - type: nauc_ndcg_at_1_std value: -12.579833891774847 - type: nauc_ndcg_at_20_diff1 value: 40.67492861219249 - type: nauc_ndcg_at_20_max value: 36.84960799838019 - type: nauc_ndcg_at_20_std value: -5.202530835850179 - type: nauc_ndcg_at_3_diff1 value: 39.574906207408844 - type: nauc_ndcg_at_3_max value: 31.76512164509258 - type: nauc_ndcg_at_3_std value: -7.656143208565999 - type: nauc_ndcg_at_5_diff1 value: 39.096348529742095 - type: nauc_ndcg_at_5_max value: 34.075926475544165 - type: nauc_ndcg_at_5_std value: -7.238045445366631 - type: nauc_precision_at_1000_diff1 value: -14.283799754212609 - type: nauc_precision_at_1000_max value: 6.449741756717101 - type: nauc_precision_at_1000_std value: 4.862828679759048 - type: nauc_precision_at_100_diff1 value: -13.23173132700258 - type: nauc_precision_at_100_max value: 11.058898534529195 - type: nauc_precision_at_100_std value: 7.343683941814956 - type: nauc_precision_at_10_diff1 value: -7.202951643546464 - type: nauc_precision_at_10_max value: 17.499446869433278 - type: nauc_precision_at_10_std value: 2.8367985220406307 - type: nauc_precision_at_1_diff1 value: 53.45970160222764 - type: nauc_precision_at_1_max value: 33.14759013278075 - type: nauc_precision_at_1_std value: -12.579833891774847 - type: nauc_precision_at_20_diff1 value: -9.477122699154124 - type: nauc_precision_at_20_max value: 16.80556031564312 - type: nauc_precision_at_20_std value: 6.420218284416923 - type: nauc_precision_at_3_diff1 value: 5.5276143574150245 - type: nauc_precision_at_3_max value: 23.65952688481666 - type: nauc_precision_at_3_std value: -1.8730348729295785 - type: nauc_precision_at_5_diff1 value: -2.4537029093721308 - type: nauc_precision_at_5_max value: 21.41469327545133 - type: nauc_precision_at_5_std value: 0.1543890645722277 - type: nauc_recall_at_1000_diff1 value: -1.7474947956413491 - type: nauc_recall_at_1000_max value: 46.22670991970479 - type: nauc_recall_at_1000_std value: 62.582840705588794 - type: nauc_recall_at_100_diff1 value: 16.116089801097345 - type: nauc_recall_at_100_max value: 52.54794580975103 - type: nauc_recall_at_100_std value: 33.720245696003246 - type: nauc_recall_at_10_diff1 value: 23.134924318655482 - type: nauc_recall_at_10_max value: 38.73754275649077 - type: nauc_recall_at_10_std value: 0.6137471711639239 - type: nauc_recall_at_1_diff1 value: 47.66181128677822 - type: nauc_recall_at_1_max value: 21.75204233166764 - type: nauc_recall_at_1_std value: -8.06951079061697 - type: nauc_recall_at_20_diff1 value: 24.130616271355017 - type: nauc_recall_at_20_max value: 48.306178640146136 - type: nauc_recall_at_20_std value: 9.290819557000022 - type: nauc_recall_at_3_diff1 value: 29.767415016250226 - type: nauc_recall_at_3_max value: 28.54289782140701 - type: nauc_recall_at_3_std value: -5.1395675072005576 - type: nauc_recall_at_5_diff1 value: 25.410613126870174 - type: nauc_recall_at_5_max value: 33.24658754857624 - type: nauc_recall_at_5_std value: 
-4.211226036746632 - type: ndcg_at_1 value: 62.175000000000004 - type: ndcg_at_10 value: 72.306 - type: ndcg_at_100 value: 75.074 - type: ndcg_at_1000 value: 75.581 - type: ndcg_at_20 value: 73.875 - type: ndcg_at_3 value: 65.641 - type: ndcg_at_5 value: 69.48299999999999 - type: precision_at_1 value: 62.175000000000004 - type: precision_at_10 value: 13.907 - type: precision_at_100 value: 1.591 - type: precision_at_1000 value: 0.166 - type: precision_at_20 value: 7.446999999999999 - type: precision_at_3 value: 35.619 - type: precision_at_5 value: 24.917 - type: recall_at_1 value: 44.187 - type: recall_at_10 value: 85.10600000000001 - type: recall_at_100 value: 95.488 - type: recall_at_1000 value: 98.831 - type: recall_at_20 value: 90.22200000000001 - type: recall_at_3 value: 68.789 - type: recall_at_5 value: 77.85499999999999 - task: type: Classification dataset: name: MTEB RuReviewsClassification (default) type: ai-forever/ru-reviews-classification config: default split: test revision: f6d2c31f4dc6b88f468552750bfec05b4b41b05a metrics: - type: accuracy value: 67.5830078125 - type: f1 value: 67.56931936632446 - type: f1_weighted value: 67.57137733752779 - type: main_score value: 67.5830078125 - task: type: STS dataset: name: MTEB RuSTSBenchmarkSTS (default) type: ai-forever/ru-stsbenchmark-sts config: default split: test revision: 7cf24f325c6da6195df55bef3d86b5e0616f3018 metrics: - type: cosine_pearson value: 85.90493484626788 - type: cosine_spearman value: 86.21965691667411 - type: euclidean_pearson value: 86.07499842984909 - type: euclidean_spearman value: 86.55506818735688 - type: main_score value: 86.21965691667411 - type: manhattan_pearson value: 85.95976420231729 - type: manhattan_spearman value: 86.48604243661234 - type: pearson value: 85.90493484626788 - type: spearman value: 86.21965691667411 - task: type: Classification dataset: name: MTEB RuSciBenchGRNTIClassification (default) type: ai-forever/ru-scibench-grnti-classification config: default split: test revision: 673a610d6d3dd91a547a0d57ae1b56f37ebbf6a1 metrics: - type: accuracy value: 59.1943359375 - type: f1 value: 58.894480861440414 - type: f1_weighted value: 58.903615560240866 - type: main_score value: 59.1943359375 - task: type: Clustering dataset: name: MTEB RuSciBenchGRNTIClusteringP2P (default) type: ai-forever/ru-scibench-grnti-classification config: default split: test revision: 673a610d6d3dd91a547a0d57ae1b56f37ebbf6a1 metrics: - type: main_score value: 57.99209448663228 - type: v_measure value: 57.99209448663228 - type: v_measure_std value: 1.0381163861993816 - task: type: Classification dataset: name: MTEB RuSciBenchOECDClassification (default) type: ai-forever/ru-scibench-oecd-classification config: default split: test revision: 26c88e99dcaba32bb45d0e1bfc21902337f6d471 metrics: - type: accuracy value: 45.556640625 - type: f1 value: 45.159163104085906 - type: f1_weighted value: 45.16098316398626 - type: main_score value: 45.556640625 - task: type: Clustering dataset: name: MTEB RuSciBenchOECDClusteringP2P (default) type: ai-forever/ru-scibench-oecd-classification config: default split: test revision: 26c88e99dcaba32bb45d0e1bfc21902337f6d471 metrics: - type: main_score value: 50.787548070488974 - type: v_measure value: 50.787548070488974 - type: v_measure_std value: 0.8569958168946827 - task: type: Retrieval dataset: name: MTEB SCIDOCS (default) type: mteb/scidocs config: default split: test revision: f8c2fcf00f625baaa80f62ec5bd9e1fff3b8ae88 metrics: - type: map_at_1 value: 4.843 - type: map_at_10 value: 11.752 - 
type: map_at_100 value: 13.919 - type: map_at_1000 value: 14.198 - type: map_at_20 value: 12.898000000000001 - type: map_at_3 value: 8.603 - type: map_at_5 value: 10.069 - type: mrr_at_1 value: 23.799999999999997 - type: mrr_at_10 value: 34.449999999999996 - type: mrr_at_100 value: 35.64 - type: mrr_at_1000 value: 35.691 - type: mrr_at_20 value: 35.213 - type: mrr_at_3 value: 31.383 - type: mrr_at_5 value: 33.062999999999995 - type: ndcg_at_1 value: 23.799999999999997 - type: ndcg_at_10 value: 19.811 - type: ndcg_at_100 value: 28.108 - type: ndcg_at_1000 value: 33.1 - type: ndcg_at_20 value: 22.980999999999998 - type: ndcg_at_3 value: 19.153000000000002 - type: ndcg_at_5 value: 16.408 - type: precision_at_1 value: 23.799999999999997 - type: precision_at_10 value: 10.16 - type: precision_at_100 value: 2.1999999999999997 - type: precision_at_1000 value: 0.34099999999999997 - type: precision_at_20 value: 6.915 - type: precision_at_3 value: 17.8 - type: precision_at_5 value: 14.14 - type: recall_at_1 value: 4.843 - type: recall_at_10 value: 20.595 - type: recall_at_100 value: 44.66 - type: recall_at_1000 value: 69.152 - type: recall_at_20 value: 28.04 - type: recall_at_3 value: 10.833 - type: recall_at_5 value: 14.346999999999998 - type: main_score value: 19.811 - task: type: PairClassification dataset: name: MTEB SICK-E-PL (default) type: PL-MTEB/sicke-pl-pairclassification config: default split: test revision: 71bba34b0ece6c56dfcf46d9758a27f7a90f17e9 metrics: - type: cosine_accuracy value: 80.90093762739502 - type: cosine_accuracy_threshold value: 94.40930485725403 - type: cosine_ap value: 71.15400909912427 - type: cosine_f1 value: 66.8213457076566 - type: cosine_f1_threshold value: 91.53673648834229 - type: cosine_precision value: 62.4922504649721 - type: cosine_recall value: 71.7948717948718 - type: dot_accuracy value: 78.41418671015083 - type: dot_accuracy_threshold value: 42924.45068359375 - type: dot_ap value: 63.34003025365763 - type: dot_f1 value: 62.518258837277244 - type: dot_f1_threshold value: 40900.738525390625 - type: dot_precision value: 52.99653293709758 - type: dot_recall value: 76.21082621082621 - type: euclidean_accuracy value: 80.67672238075826 - type: euclidean_accuracy_threshold value: 696.0524559020996 - type: euclidean_ap value: 70.88762835990224 - type: euclidean_f1 value: 66.711051930759 - type: euclidean_f1_threshold value: 878.5581588745117 - type: euclidean_precision value: 62.625 - type: euclidean_recall value: 71.36752136752136 - type: main_score value: 71.15400909912427 - type: manhattan_accuracy value: 80.65633917651854 - type: manhattan_accuracy_threshold value: 17277.72674560547 - type: manhattan_ap value: 70.67105336611716 - type: manhattan_f1 value: 66.51346027577151 - type: manhattan_f1_threshold value: 21687.957763671875 - type: manhattan_precision value: 61.69305724725944 - type: manhattan_recall value: 72.15099715099716 - type: max_accuracy value: 80.90093762739502 - type: max_ap value: 71.15400909912427 - type: max_f1 value: 66.8213457076566 - type: max_precision value: 62.625 - type: max_recall value: 76.21082621082621 - type: similarity_accuracy value: 80.90093762739502 - type: similarity_accuracy_threshold value: 94.40930485725403 - type: similarity_ap value: 71.15400909912427 - type: similarity_f1 value: 66.8213457076566 - type: similarity_f1_threshold value: 91.53673648834229 - type: similarity_precision value: 62.4922504649721 - type: similarity_recall value: 71.7948717948718 - task: type: STS dataset: name: MTEB SICK-R (default) type: 
mteb/sickr-sts config: default split: test revision: 20a6d6f312dd54037fe07a32d58e5e168867909d metrics: - type: cosine_pearson value: 92.3339946866199 - type: cosine_spearman value: 89.61697355115497 - type: euclidean_pearson value: 90.3264916449669 - type: euclidean_spearman value: 89.36270451308866 - type: main_score value: 89.61697355115497 - type: manhattan_pearson value: 90.18909339052534 - type: manhattan_spearman value: 89.28337093097377 - type: pearson value: 92.3339946866199 - type: spearman value: 89.61697355115497 - task: type: STS dataset: name: MTEB SICK-R-PL (default) type: PL-MTEB/sickr-pl-sts config: default split: test revision: fd5c2441b7eeff8676768036142af4cfa42c1339 metrics: - type: cosine_pearson value: 85.27883048457821 - type: cosine_spearman value: 80.53204892678619 - type: euclidean_pearson value: 82.78520705216168 - type: euclidean_spearman value: 80.27848359873212 - type: main_score value: 80.53204892678619 - type: manhattan_pearson value: 82.63270640583454 - type: manhattan_spearman value: 80.21507977473146 - type: pearson value: 85.27883048457821 - type: spearman value: 80.53204892678619 - task: type: STS dataset: name: MTEB SICKFr (default) type: Lajavaness/SICK-fr config: default split: test revision: e077ab4cf4774a1e36d86d593b150422fafd8e8a metrics: - type: cosine_pearson value: 88.77029361817212 - type: cosine_spearman value: 83.9453600346894 - type: euclidean_pearson value: 85.85331086208573 - type: euclidean_spearman value: 83.70852031985308 - type: main_score value: 83.9453600346894 - type: manhattan_pearson value: 85.66222265885914 - type: manhattan_spearman value: 83.60833111525962 - type: pearson value: 88.77029361817212 - type: spearman value: 83.9453600346894 - task: type: STS dataset: name: MTEB STS12 (default) type: mteb/sts12-sts config: default split: test revision: a0d554a64d88156834ff5ae9920b964011b16384 metrics: - type: cosine_pearson value: 88.76435859522375 - type: cosine_spearman value: 82.43768167804375 - type: euclidean_pearson value: 87.43566183874832 - type: euclidean_spearman value: 82.82166873757507 - type: main_score value: 82.43768167804375 - type: manhattan_pearson value: 87.39450871380951 - type: manhattan_spearman value: 82.89253043430163 - type: pearson value: 88.76435859522375 - type: spearman value: 82.43768167804375 - task: type: STS dataset: name: MTEB STS13 (default) type: mteb/sts13-sts config: default split: test revision: 7e90230a92c190f1bf69ae9002b8cea547a64cca metrics: - type: cosine_pearson value: 88.86627241652141 - type: cosine_spearman value: 89.49011599120688 - type: euclidean_pearson value: 89.3314120073772 - type: euclidean_spearman value: 89.8226502776963 - type: main_score value: 89.49011599120688 - type: manhattan_pearson value: 89.2252179076963 - type: manhattan_spearman value: 89.74573844021225 - type: pearson value: 88.86627241652141 - type: spearman value: 89.49011599120688 - task: type: STS dataset: name: MTEB STS14 (default) type: mteb/sts14-sts config: default split: test revision: 6031580fec1f6af667f0bd2da0a551cf4f0b2375 metrics: - type: cosine_pearson value: 87.22891405215968 - type: cosine_spearman value: 84.9467188157614 - type: euclidean_pearson value: 87.20330004726237 - type: euclidean_spearman value: 85.34806059461808 - type: main_score value: 84.9467188157614 - type: manhattan_pearson value: 87.15224666107623 - type: manhattan_spearman value: 85.34596898699708 - type: pearson value: 87.22891405215968 - type: spearman value: 84.9467188157614 - task: type: STS dataset: name: MTEB STS15 (default) 
type: mteb/sts15-sts config: default split: test revision: ae752c7c21bf194d8b67fd573edf7ae58183cbe3 metrics: - type: cosine_pearson value: 88.14066430111033 - type: cosine_spearman value: 89.31337445552545 - type: euclidean_pearson value: 89.08039335366983 - type: euclidean_spearman value: 89.6658762856415 - type: main_score value: 89.31337445552545 - type: manhattan_pearson value: 89.08057438154486 - type: manhattan_spearman value: 89.68673984203022 - type: pearson value: 88.14066430111033 - type: spearman value: 89.31337445552545 - task: type: STS dataset: name: MTEB STS16 (default) type: mteb/sts16-sts config: default split: test revision: 4d8694f8f0e0100860b497b999b3dbed754a0513 metrics: - type: cosine_pearson value: 85.14908856657084 - type: cosine_spearman value: 86.84648320786727 - type: euclidean_pearson value: 86.11454713131947 - type: euclidean_spearman value: 86.77738862047961 - type: main_score value: 86.84648320786727 - type: manhattan_pearson value: 86.07804821916372 - type: manhattan_spearman value: 86.78676064310474 - type: pearson value: 85.14908856657084 - type: spearman value: 86.84648320786727 - task: type: STS dataset: name: MTEB STS17 (en-en) type: mteb/sts17-crosslingual-sts config: en-en split: test revision: faeb762787bd10488a50c8b5be4a3b82e411949c metrics: - type: cosine_pearson value: 89.61633502468356 - type: cosine_spearman value: 89.99772663224805 - type: euclidean_pearson value: 90.14056501501044 - type: euclidean_spearman value: 90.04496896837503 - type: main_score value: 89.99772663224805 - type: manhattan_pearson value: 90.08964860311801 - type: manhattan_spearman value: 90.00091712362196 - type: pearson value: 89.61633502468356 - type: spearman value: 89.99772663224805 - task: type: STS dataset: name: MTEB STS17 (es-en) type: mteb/sts17-crosslingual-sts config: es-en split: test revision: faeb762787bd10488a50c8b5be4a3b82e411949c metrics: - type: cosine_pearson value: 86.44548026840202 - type: cosine_spearman value: 87.26263108768539 - type: euclidean_pearson value: 86.42844593583838 - type: euclidean_spearman value: 86.89388428664364 - type: main_score value: 87.26263108768539 - type: manhattan_pearson value: 86.47186940800881 - type: manhattan_spearman value: 87.02163091089946 - type: pearson value: 86.44548026840202 - type: spearman value: 87.26263108768539 - task: type: STS dataset: name: MTEB STS17 (en-de) type: mteb/sts17-crosslingual-sts config: en-de split: test revision: faeb762787bd10488a50c8b5be4a3b82e411949c metrics: - type: cosine_pearson value: 87.89345132532758 - type: cosine_spearman value: 87.96246221327699 - type: euclidean_pearson value: 88.49013032701419 - type: euclidean_spearman value: 87.81981265317344 - type: main_score value: 87.96246221327699 - type: manhattan_pearson value: 88.31360914178538 - type: manhattan_spearman value: 87.62734530005075 - type: pearson value: 87.89345132532758 - type: spearman value: 87.96246221327699 - task: type: STS dataset: name: MTEB STS17 (es-es) type: mteb/sts17-crosslingual-sts config: es-es split: test revision: faeb762787bd10488a50c8b5be4a3b82e411949c metrics: - type: cosine_pearson value: 88.4084678497171 - type: cosine_spearman value: 88.77640638748285 - type: euclidean_pearson value: 89.60124312475843 - type: euclidean_spearman value: 88.4321442688528 - type: main_score value: 88.77640638748285 - type: manhattan_pearson value: 89.62375118021299 - type: manhattan_spearman value: 88.46998118661577 - type: pearson value: 88.4084678497171 - type: spearman value: 88.77640638748285 - task: type: STS 
dataset: name: MTEB STS17 (fr-en) type: mteb/sts17-crosslingual-sts config: fr-en split: test revision: faeb762787bd10488a50c8b5be4a3b82e411949c metrics: - type: cosine_pearson value: 87.30688801326498 - type: cosine_spearman value: 87.55684697258378 - type: euclidean_pearson value: 87.89672951056794 - type: euclidean_spearman value: 87.28050429201674 - type: main_score value: 87.55684697258378 - type: manhattan_pearson value: 87.74292745320572 - type: manhattan_spearman value: 87.16383993876582 - type: pearson value: 87.30688801326498 - type: spearman value: 87.55684697258378 - task: type: STS dataset: name: MTEB STS22 (zh-en) type: mteb/sts22-crosslingual-sts config: zh-en split: test revision: de9d86b3b84231dc21f76c7b7af1f28e2f57f6e3 metrics: - type: cosine_pearson value: 73.46180375170147 - type: cosine_spearman value: 73.39559590127081 - type: euclidean_pearson value: 73.72613901293681 - type: euclidean_spearman value: 71.85465165176795 - type: main_score value: 73.39559590127081 - type: manhattan_pearson value: 73.07859140869076 - type: manhattan_spearman value: 71.22047343718893 - type: pearson value: 73.46180375170147 - type: spearman value: 73.39559590127081 - task: type: STS dataset: name: MTEB STS22 (zh) type: mteb/sts22-crosslingual-sts config: zh split: test revision: de9d86b3b84231dc21f76c7b7af1f28e2f57f6e3 metrics: - type: cosine_pearson value: 62.47531620842637 - type: cosine_spearman value: 66.22504667157702 - type: euclidean_pearson value: 66.76201254783692 - type: euclidean_spearman value: 66.86115760269463 - type: main_score value: 66.22504667157702 - type: manhattan_pearson value: 66.73847836793489 - type: manhattan_spearman value: 66.7677116377695 - type: pearson value: 62.47531620842637 - type: spearman value: 66.22504667157702 - task: type: STS dataset: name: MTEB STS22 (es) type: mteb/sts22-crosslingual-sts config: es split: test revision: de9d86b3b84231dc21f76c7b7af1f28e2f57f6e3 metrics: - type: cosine_pearson value: 69.89707002436481 - type: cosine_spearman value: 72.2054865735116 - type: euclidean_pearson value: 71.81856615570756 - type: euclidean_spearman value: 72.72593304629407 - type: main_score value: 72.2054865735116 - type: manhattan_pearson value: 72.00362684700072 - type: manhattan_spearman value: 72.62783534769964 - type: pearson value: 69.89707002436481 - type: spearman value: 72.2054865735116 - task: type: STS dataset: name: MTEB STS22 (fr) type: mteb/sts22-crosslingual-sts config: fr split: test revision: de9d86b3b84231dc21f76c7b7af1f28e2f57f6e3 metrics: - type: cosine_pearson value: 81.59623734395916 - type: cosine_spearman value: 83.28946105111358 - type: euclidean_pearson value: 79.377330171466 - type: euclidean_spearman value: 81.81029781662205 - type: main_score value: 83.28946105111358 - type: manhattan_pearson value: 78.96970881689698 - type: manhattan_spearman value: 81.91773236079703 - type: pearson value: 81.59623734395916 - type: spearman value: 83.28946105111358 - task: type: STS dataset: name: MTEB STS22 (de-fr) type: mteb/sts22-crosslingual-sts config: de-fr split: test revision: de9d86b3b84231dc21f76c7b7af1f28e2f57f6e3 metrics: - type: cosine_pearson value: 55.03825643126142 - type: cosine_spearman value: 58.25792501780429 - type: euclidean_pearson value: 50.38007603973409 - type: euclidean_spearman value: 59.39961789383097 - type: main_score value: 58.25792501780429 - type: manhattan_pearson value: 50.518568927999155 - type: manhattan_spearman value: 59.84185466003894 - type: pearson value: 55.03825643126142 - type: spearman value: 
58.25792501780429 - task: type: STS dataset: name: MTEB STS22 (pl-en) type: mteb/sts22-crosslingual-sts config: pl-en split: test revision: de9d86b3b84231dc21f76c7b7af1f28e2f57f6e3 metrics: - type: cosine_pearson value: 77.77233721490776 - type: cosine_spearman value: 76.17596588017625 - type: euclidean_pearson value: 74.47600468156611 - type: euclidean_spearman value: 72.61278728057012 - type: main_score value: 76.17596588017625 - type: manhattan_pearson value: 74.48118910099699 - type: manhattan_spearman value: 73.33167419101696 - type: pearson value: 77.77233721490776 - type: spearman value: 76.17596588017625 - task: type: STS dataset: name: MTEB STS22 (pl) type: mteb/sts22-crosslingual-sts config: pl split: test revision: de9d86b3b84231dc21f76c7b7af1f28e2f57f6e3 metrics: - type: cosine_pearson value: 42.87453608131507 - type: cosine_spearman value: 45.137849894401185 - type: euclidean_pearson value: 31.66964197694796 - type: euclidean_spearman value: 44.1014900837869 - type: main_score value: 45.137849894401185 - type: manhattan_pearson value: 31.007199259384745 - type: manhattan_spearman value: 43.48181523288926 - type: pearson value: 42.87453608131507 - type: spearman value: 45.137849894401185 - task: type: STS dataset: name: MTEB STS22 (en) type: mteb/sts22-crosslingual-sts config: en split: test revision: de9d86b3b84231dc21f76c7b7af1f28e2f57f6e3 metrics: - type: cosine_pearson value: 66.87400150638176 - type: cosine_spearman value: 67.27861354834066 - type: euclidean_pearson value: 66.81789582140216 - type: euclidean_spearman value: 66.44220479858708 - type: main_score value: 67.27861354834066 - type: manhattan_pearson value: 66.92509859033235 - type: manhattan_spearman value: 66.46841124185076 - type: pearson value: 66.87400150638176 - type: spearman value: 67.27861354834066 - task: type: STS dataset: name: MTEB STS22 (ru) type: mteb/sts22-crosslingual-sts config: ru split: test revision: de9d86b3b84231dc21f76c7b7af1f28e2f57f6e3 metrics: - type: cosine_pearson value: 61.819804551576084 - type: cosine_spearman value: 65.0864146772135 - type: euclidean_pearson value: 62.518151090361876 - type: euclidean_spearman value: 65.13608138548017 - type: main_score value: 65.0864146772135 - type: manhattan_pearson value: 62.51413246915267 - type: manhattan_spearman value: 65.19077543064323 - type: pearson value: 61.819804551576084 - type: spearman value: 65.0864146772135 - task: type: STS dataset: name: MTEB STS22 (de) type: mteb/sts22-crosslingual-sts config: de split: test revision: de9d86b3b84231dc21f76c7b7af1f28e2f57f6e3 metrics: - type: cosine_pearson value: 54.85728696035389 - type: cosine_spearman value: 61.60906359227576 - type: euclidean_pearson value: 52.57582587901851 - type: euclidean_spearman value: 61.41823097598308 - type: main_score value: 61.60906359227576 - type: manhattan_pearson value: 52.500978361080506 - type: manhattan_spearman value: 61.30365596659758 - type: pearson value: 54.85728696035389 - type: spearman value: 61.60906359227576 - task: type: STS dataset: name: MTEB STS22 (fr-pl) type: mteb/sts22-crosslingual-sts config: fr-pl split: test revision: de9d86b3b84231dc21f76c7b7af1f28e2f57f6e3 metrics: - type: cosine_pearson value: 67.68016005631422 - type: cosine_spearman value: 84.51542547285167 - type: euclidean_pearson value: 66.19871164667245 - type: euclidean_spearman value: 73.24670207647144 - type: main_score value: 84.51542547285167 - type: manhattan_pearson value: 67.0443525268974 - type: manhattan_spearman value: 73.24670207647144 - type: pearson value: 
67.68016005631422 - type: spearman value: 84.51542547285167 - task: type: STS dataset: name: MTEB STS22 (de-pl) type: mteb/sts22-crosslingual-sts config: de-pl split: test revision: de9d86b3b84231dc21f76c7b7af1f28e2f57f6e3 metrics: - type: cosine_pearson value: 47.49467414030747 - type: cosine_spearman value: 56.81512095681289 - type: euclidean_pearson value: 48.42860221765214 - type: euclidean_spearman value: 58.63197306329092 - type: main_score value: 56.81512095681289 - type: manhattan_pearson value: 48.39594959260441 - type: manhattan_spearman value: 58.63197306329092 - type: pearson value: 47.49467414030747 - type: spearman value: 56.81512095681289 - task: type: STS dataset: name: MTEB STS22 (es-en) type: mteb/sts22-crosslingual-sts config: es-en split: test revision: de9d86b3b84231dc21f76c7b7af1f28e2f57f6e3 metrics: - type: cosine_pearson value: 76.8364678896155 - type: cosine_spearman value: 78.45516413087114 - type: euclidean_pearson value: 78.62779318576634 - type: euclidean_spearman value: 78.88760695649488 - type: main_score value: 78.45516413087114 - type: manhattan_pearson value: 78.62131335760031 - type: manhattan_spearman value: 78.81861844200388 - type: pearson value: 76.8364678896155 - type: spearman value: 78.45516413087114 - task: type: STS dataset: name: MTEB STS22 (de-en) type: mteb/sts22-crosslingual-sts config: de-en split: test revision: de9d86b3b84231dc21f76c7b7af1f28e2f57f6e3 metrics: - type: cosine_pearson value: 65.16640313911604 - type: cosine_spearman value: 60.887608967403914 - type: euclidean_pearson value: 67.49902244990913 - type: euclidean_spearman value: 59.2458787136538 - type: main_score value: 60.887608967403914 - type: manhattan_pearson value: 67.34313506388378 - type: manhattan_spearman value: 59.05283429200166 - type: pearson value: 65.16640313911604 - type: spearman value: 60.887608967403914 - task: type: STS dataset: name: MTEB QBQTC (default) type: C-MTEB/QBQTC config: default split: test revision: 790b0510dc52b1553e8c49f3d2afb48c0e5c48b7 metrics: - type: cosine_pearson value: 34.20049144526891 - type: cosine_spearman value: 36.41802814113771 - type: euclidean_pearson value: 34.56994213959062 - type: euclidean_spearman value: 36.06141660786936 - type: main_score value: 36.41802814113771 - type: manhattan_pearson value: 34.537041543916004 - type: manhattan_spearman value: 36.03341892777382 - type: pearson value: 34.20049144526891 - type: spearman value: 36.41802814113771 - task: type: STS dataset: name: MTEB STSB (default) type: C-MTEB/STSB config: default split: test revision: 0cde68302b3541bb8b3c340dc0644b0b745b3dc0 metrics: - type: cosine_pearson value: 81.5092853013241 - type: cosine_spearman value: 83.54005474244292 - type: euclidean_pearson value: 83.7246578378554 - type: euclidean_spearman value: 84.46767551087716 - type: main_score value: 83.54005474244292 - type: manhattan_pearson value: 83.65922665594636 - type: manhattan_spearman value: 84.42431449101848 - type: pearson value: 81.5092853013241 - type: spearman value: 83.54005474244292 - task: type: STS dataset: name: MTEB STSBenchmark (default) type: mteb/stsbenchmark-sts config: default split: test revision: b0fddb56ed78048fa8b90373c8a3cfc37b684831 metrics: - type: cosine_pearson value: 87.70246866744966 - type: cosine_spearman value: 89.44070045346106 - type: euclidean_pearson value: 89.56956519641007 - type: euclidean_spearman value: 89.95830112784283 - type: main_score value: 89.44070045346106 - type: manhattan_pearson value: 89.48264471425145 - type: manhattan_spearman value: 
89.87900732483114 - type: pearson value: 87.70246866744966 - type: spearman value: 89.44070045346106 - task: type: STS dataset: name: MTEB STSBenchmarkMultilingualSTS (de) type: mteb/stsb_multi_mt config: de split: test revision: 29afa2569dcedaaa2fe6a3dcfebab33d28b82e8c metrics: - type: cosine_pearson value: 86.83701990805217 - type: cosine_spearman value: 87.80280785492258 - type: euclidean_pearson value: 87.77325330043514 - type: euclidean_spearman value: 88.3564607283144 - type: main_score value: 87.80280785492258 - type: manhattan_pearson value: 87.6745449945946 - type: manhattan_spearman value: 88.30660465978795 - type: pearson value: 86.83701990805217 - type: spearman value: 87.80280785492258 - task: type: STS dataset: name: MTEB STSBenchmarkMultilingualSTS (zh) type: mteb/stsb_multi_mt config: zh split: test revision: 29afa2569dcedaaa2fe6a3dcfebab33d28b82e8c metrics: - type: cosine_pearson value: 84.27751020600267 - type: cosine_spearman value: 85.63500407412486 - type: euclidean_pearson value: 85.21829891649696 - type: euclidean_spearman value: 85.9384575715382 - type: main_score value: 85.63500407412486 - type: manhattan_pearson value: 85.10797194089801 - type: manhattan_spearman value: 85.8770162042784 - type: pearson value: 84.27751020600267 - type: spearman value: 85.63500407412486 - task: type: STS dataset: name: MTEB STSBenchmarkMultilingualSTS (fr) type: mteb/stsb_multi_mt config: fr split: test revision: 29afa2569dcedaaa2fe6a3dcfebab33d28b82e8c metrics: - type: cosine_pearson value: 86.56833656723254 - type: cosine_spearman value: 87.4393978501382 - type: euclidean_pearson value: 87.45171512751267 - type: euclidean_spearman value: 88.13106516566947 - type: main_score value: 87.4393978501382 - type: manhattan_pearson value: 87.33010961793333 - type: manhattan_spearman value: 88.06707425102182 - type: pearson value: 86.56833656723254 - type: spearman value: 87.4393978501382 - task: type: STS dataset: name: MTEB STSBenchmarkMultilingualSTS (pl) type: mteb/stsb_multi_mt config: pl split: test revision: 29afa2569dcedaaa2fe6a3dcfebab33d28b82e8c metrics: - type: cosine_pearson value: 85.45065540325523 - type: cosine_spearman value: 85.47881076789359 - type: euclidean_pearson value: 85.1999493863155 - type: euclidean_spearman value: 85.7874947669187 - type: main_score value: 85.47881076789359 - type: manhattan_pearson value: 85.06075305990376 - type: manhattan_spearman value: 85.71563015639558 - type: pearson value: 85.45065540325523 - type: spearman value: 85.47881076789359 - task: type: STS dataset: name: MTEB STSBenchmarkMultilingualSTS (es) type: mteb/stsb_multi_mt config: es split: test revision: 29afa2569dcedaaa2fe6a3dcfebab33d28b82e8c metrics: - type: cosine_pearson value: 87.11952824079832 - type: cosine_spearman value: 87.9643473573153 - type: euclidean_pearson value: 88.11750364639971 - type: euclidean_spearman value: 88.63695109016498 - type: main_score value: 87.9643473573153 - type: manhattan_pearson value: 88.00294453126699 - type: manhattan_spearman value: 88.53750241758391 - type: pearson value: 87.11952824079832 - type: spearman value: 87.9643473573153 - task: type: STS dataset: name: MTEB STSBenchmarkMultilingualSTS (ru) type: mteb/stsb_multi_mt config: ru split: test revision: 29afa2569dcedaaa2fe6a3dcfebab33d28b82e8c metrics: - type: cosine_pearson value: 85.99804354414991 - type: cosine_spearman value: 86.30252111551002 - type: euclidean_pearson value: 86.1880652037762 - type: euclidean_spearman value: 86.69556223944502 - type: main_score value: 
86.30252111551002 - type: manhattan_pearson value: 86.0736400320898 - type: manhattan_spearman value: 86.61747927593393 - type: pearson value: 85.99804354414991 - type: spearman value: 86.30252111551002 - task: type: STS dataset: name: MTEB STSBenchmarkMultilingualSTS (en) type: mteb/stsb_multi_mt config: en split: test revision: 29afa2569dcedaaa2fe6a3dcfebab33d28b82e8c metrics: - type: cosine_pearson value: 87.70246861738103 - type: cosine_spearman value: 89.44070045346106 - type: euclidean_pearson value: 89.56956518833663 - type: euclidean_spearman value: 89.95830112784283 - type: main_score value: 89.44070045346106 - type: manhattan_pearson value: 89.48264470792915 - type: manhattan_spearman value: 89.87900732483114 - type: pearson value: 87.70246861738103 - type: spearman value: 89.44070045346106 - task: type: Reranking dataset: name: MTEB SciDocsRR (default) type: mteb/scidocs-reranking config: default split: test revision: d3c5e1fc0b855ab6097bf1cda04dd73947d7caab metrics: - type: map value: 84.88064122814694 - type: mrr value: 95.84832651009123 - type: main_score value: 84.88064122814694 - task: type: Retrieval dataset: name: MTEB SciFact (default) type: mteb/scifact config: default split: test revision: 0228b52cf27578f30900b9e5271d331663a030d7 metrics: - type: map_at_1 value: 57.289 - type: map_at_10 value: 67.88499999999999 - type: map_at_100 value: 68.477 - type: map_at_1000 value: 68.50500000000001 - type: map_at_20 value: 68.33500000000001 - type: map_at_3 value: 65.08 - type: map_at_5 value: 67.001 - type: mrr_at_1 value: 59.667 - type: mrr_at_10 value: 68.626 - type: mrr_at_100 value: 69.082 - type: mrr_at_1000 value: 69.108 - type: mrr_at_20 value: 68.958 - type: mrr_at_3 value: 66.667 - type: mrr_at_5 value: 67.983 - type: ndcg_at_1 value: 59.667 - type: ndcg_at_10 value: 72.309 - type: ndcg_at_100 value: 74.58399999999999 - type: ndcg_at_1000 value: 75.25500000000001 - type: ndcg_at_20 value: 73.656 - type: ndcg_at_3 value: 67.791 - type: ndcg_at_5 value: 70.45 - type: precision_at_1 value: 59.667 - type: precision_at_10 value: 9.567 - type: precision_at_100 value: 1.073 - type: precision_at_1000 value: 0.11299999999999999 - type: precision_at_20 value: 5.083 - type: precision_at_3 value: 26.333000000000002 - type: precision_at_5 value: 17.666999999999998 - type: recall_at_1 value: 57.289 - type: recall_at_10 value: 84.756 - type: recall_at_100 value: 94.5 - type: recall_at_1000 value: 99.667 - type: recall_at_20 value: 89.7 - type: recall_at_3 value: 73.22800000000001 - type: recall_at_5 value: 79.444 - type: main_score value: 72.309 - task: type: Clustering dataset: name: MTEB SpanishNewsClusteringP2P (default) type: jinaai/spanish_news_clustering config: default split: test revision: bf8ca8ddc5b7da4f7004720ddf99bbe0483480e6 metrics: - type: main_score value: 45.04477709795154 - type: v_measure value: 45.04477709795154 - type: v_measure_std value: 0.0 - task: type: Retrieval dataset: name: MTEB SpanishPassageRetrievalS2S (default) type: jinaai/spanish_passage_retrieval config: default split: test revision: 9cddf2ce5209ade52c2115ccfa00eb22c6d3a837 metrics: - type: main_score value: 69.83 - type: map_at_1 value: 15.736 - type: map_at_10 value: 52.027 - type: map_at_100 value: 65.08800000000001 - type: map_at_1000 value: 65.08800000000001 - type: map_at_20 value: 60.79900000000001 - type: map_at_3 value: 32.869 - type: map_at_5 value: 41.436 - type: mrr_at_1 value: 75.44910179640718 - type: mrr_at_10 value: 84.43446440452426 - type: mrr_at_100 value: 84.48052612723271 - 
type: mrr_at_1000 value: 84.48052612723271 - type: mrr_at_20 value: 84.48052612723271 - type: mrr_at_3 value: 83.13373253493013 - type: mrr_at_5 value: 84.3013972055888 - type: nauc_map_at_1000_diff1 value: 50.611540149694356 - type: nauc_map_at_1000_max value: 2.1102430434260238 - type: nauc_map_at_1000_std value: -18.88993521335793 - type: nauc_map_at_100_diff1 value: 50.611540149694356 - type: nauc_map_at_100_max value: 2.1102430434260238 - type: nauc_map_at_100_std value: -18.88993521335793 - type: nauc_map_at_10_diff1 value: 59.13518981755268 - type: nauc_map_at_10_max value: -9.810386627392807 - type: nauc_map_at_10_std value: -38.31810152345078 - type: nauc_map_at_1_diff1 value: 74.96782567287174 - type: nauc_map_at_1_max value: -29.648279252607875 - type: nauc_map_at_1_std value: -54.017459339141595 - type: nauc_map_at_20_diff1 value: 55.26694458629849 - type: nauc_map_at_20_max value: -1.9490244535020729 - type: nauc_map_at_20_std value: -25.22211659104076 - type: nauc_map_at_3_diff1 value: 71.67607885031732 - type: nauc_map_at_3_max value: -25.078101661694507 - type: nauc_map_at_3_std value: -50.55408861920259 - type: nauc_map_at_5_diff1 value: 61.50111515417668 - type: nauc_map_at_5_max value: -16.4114670513168 - type: nauc_map_at_5_std value: -44.391416134859135 - type: nauc_mrr_at_1000_diff1 value: 74.18848063283234 - type: nauc_mrr_at_1000_max value: 21.929205946778005 - type: nauc_mrr_at_1000_std value: -36.27399268489433 - type: nauc_mrr_at_100_diff1 value: 74.18848063283234 - type: nauc_mrr_at_100_max value: 21.929205946778005 - type: nauc_mrr_at_100_std value: -36.27399268489433 - type: nauc_mrr_at_10_diff1 value: 74.27231582268745 - type: nauc_mrr_at_10_max value: 21.481133301135337 - type: nauc_mrr_at_10_std value: -36.72070854872902 - type: nauc_mrr_at_1_diff1 value: 76.54855950439561 - type: nauc_mrr_at_1_max value: 26.99938321212366 - type: nauc_mrr_at_1_std value: -33.098742603429635 - type: nauc_mrr_at_20_diff1 value: 74.18848063283234 - type: nauc_mrr_at_20_max value: 21.929205946778005 - type: nauc_mrr_at_20_std value: -36.27399268489433 - type: nauc_mrr_at_3_diff1 value: 72.05379526740143 - type: nauc_mrr_at_3_max value: 18.875831185752528 - type: nauc_mrr_at_3_std value: -37.27302006456391 - type: nauc_mrr_at_5_diff1 value: 74.25342356682029 - type: nauc_mrr_at_5_max value: 20.756340085088738 - type: nauc_mrr_at_5_std value: -37.99507208540703 - type: nauc_ndcg_at_1000_diff1 value: 53.259363764380275 - type: nauc_ndcg_at_1000_max value: 12.936954959423218 - type: nauc_ndcg_at_1000_std value: -16.953898675672153 - type: nauc_ndcg_at_100_diff1 value: 53.259363764380275 - type: nauc_ndcg_at_100_max value: 12.936954959423218 - type: nauc_ndcg_at_100_std value: -16.953898675672153 - type: nauc_ndcg_at_10_diff1 value: 53.70942345413554 - type: nauc_ndcg_at_10_max value: -3.8465093347016186 - type: nauc_ndcg_at_10_std value: -31.208127919994755 - type: nauc_ndcg_at_1_diff1 value: 75.30551289259554 - type: nauc_ndcg_at_1_max value: 25.53292054129834 - type: nauc_ndcg_at_1_std value: -33.285498788395145 - type: nauc_ndcg_at_20_diff1 value: 57.62409278278133 - type: nauc_ndcg_at_20_max value: 2.8040586426056233 - type: nauc_ndcg_at_20_std value: -26.270875776221704 - type: nauc_ndcg_at_3_diff1 value: 48.42294834754225 - type: nauc_ndcg_at_3_max value: 16.912467881065822 - type: nauc_ndcg_at_3_std value: -13.324841189277873 - type: nauc_ndcg_at_5_diff1 value: 47.512819802794596 - type: nauc_ndcg_at_5_max value: 14.645518203506594 - type: nauc_ndcg_at_5_std value: 
-17.641450435599275 - type: nauc_precision_at_1000_diff1 value: -34.43320975829637 - type: nauc_precision_at_1000_max value: 29.08585622578186 - type: nauc_precision_at_1000_std value: 46.55117940162061 - type: nauc_precision_at_100_diff1 value: -34.433209758296364 - type: nauc_precision_at_100_max value: 29.085856225781885 - type: nauc_precision_at_100_std value: 46.55117940162065 - type: nauc_precision_at_10_diff1 value: -21.895306304096902 - type: nauc_precision_at_10_max value: 33.190476527593745 - type: nauc_precision_at_10_std value: 37.64916268614298 - type: nauc_precision_at_1_diff1 value: 75.30551289259554 - type: nauc_precision_at_1_max value: 25.53292054129834 - type: nauc_precision_at_1_std value: -33.285498788395145 - type: nauc_precision_at_20_diff1 value: -27.63076748060466 - type: nauc_precision_at_20_max value: 30.689810416086154 - type: nauc_precision_at_20_std value: 46.164191636131626 - type: nauc_precision_at_3_diff1 value: 20.547345067837288 - type: nauc_precision_at_3_max value: 26.177050942827528 - type: nauc_precision_at_3_std value: 5.960466052973099 - type: nauc_precision_at_5_diff1 value: -8.928755534002669 - type: nauc_precision_at_5_max value: 40.83262650073459 - type: nauc_precision_at_5_std value: 26.158537031161494 - type: nauc_recall_at_1000_diff1 value: .nan - type: nauc_recall_at_1000_max value: .nan - type: nauc_recall_at_1000_std value: .nan - type: nauc_recall_at_100_diff1 value: .nan - type: nauc_recall_at_100_max value: .nan - type: nauc_recall_at_100_std value: .nan - type: nauc_recall_at_10_diff1 value: 53.08654386169444 - type: nauc_recall_at_10_max value: -23.276269379519356 - type: nauc_recall_at_10_std value: -50.80707792706157 - type: nauc_recall_at_1_diff1 value: 74.96782567287174 - type: nauc_recall_at_1_max value: -29.648279252607875 - type: nauc_recall_at_1_std value: -54.017459339141595 - type: nauc_recall_at_20_diff1 value: 51.60121897059633 - type: nauc_recall_at_20_max value: -14.241779530735387 - type: nauc_recall_at_20_std value: -37.877451525215456 - type: nauc_recall_at_3_diff1 value: 66.99474984329694 - type: nauc_recall_at_3_max value: -30.802787353187966 - type: nauc_recall_at_3_std value: -53.58737792129713 - type: nauc_recall_at_5_diff1 value: 54.64214444958567 - type: nauc_recall_at_5_max value: -23.341309362104703 - type: nauc_recall_at_5_std value: -51.381363923145265 - type: ndcg_at_1 value: 76.048 - type: ndcg_at_10 value: 69.83 - type: ndcg_at_100 value: 82.11500000000001 - type: ndcg_at_1000 value: 82.11500000000001 - type: ndcg_at_20 value: 75.995 - type: ndcg_at_3 value: 69.587 - type: ndcg_at_5 value: 69.062 - type: precision_at_1 value: 76.048 - type: precision_at_10 value: 43.653 - type: precision_at_100 value: 7.718999999999999 - type: precision_at_1000 value: 0.772 - type: precision_at_20 value: 31.108000000000004 - type: precision_at_3 value: 63.87199999999999 - type: precision_at_5 value: 56.407 - type: recall_at_1 value: 15.736 - type: recall_at_10 value: 66.873 - type: recall_at_100 value: 100.0 - type: recall_at_1000 value: 100.0 - type: recall_at_20 value: 85.01100000000001 - type: recall_at_3 value: 36.441 - type: recall_at_5 value: 49.109 - task: type: PairClassification dataset: name: MTEB SprintDuplicateQuestions (default) type: mteb/sprintduplicatequestions-pairclassification config: default split: test revision: d66bd1f72af766a5cc4b0ca5e00c162f89e8cc46 metrics: - type: cosine_accuracy value: 99.87326732673267 - type: cosine_accuracy_threshold value: 86.0752820968628 - type: cosine_ap value: 
96.98758090713252 - type: cosine_f1 value: 93.52881698685542 - type: cosine_f1_threshold value: 86.0752820968628 - type: cosine_precision value: 94.58077709611452 - type: cosine_recall value: 92.5 - type: dot_accuracy value: 99.82574257425742 - type: dot_accuracy_threshold value: 40484.73815917969 - type: dot_ap value: 95.68959907254845 - type: dot_f1 value: 91.31293188548865 - type: dot_f1_threshold value: 40336.810302734375 - type: dot_precision value: 90.15594541910332 - type: dot_recall value: 92.5 - type: euclidean_accuracy value: 99.87128712871286 - type: euclidean_accuracy_threshold value: 1162.5749588012695 - type: euclidean_ap value: 96.92640435656577 - type: euclidean_f1 value: 93.4475806451613 - type: euclidean_f1_threshold value: 1162.5749588012695 - type: euclidean_precision value: 94.20731707317073 - type: euclidean_recall value: 92.7 - type: main_score value: 96.98758090713252 - type: manhattan_accuracy value: 99.86930693069307 - type: manhattan_accuracy_threshold value: 28348.71826171875 - type: manhattan_ap value: 96.93832673967925 - type: manhattan_f1 value: 93.33333333333333 - type: manhattan_f1_threshold value: 28348.71826171875 - type: manhattan_precision value: 94.28571428571428 - type: manhattan_recall value: 92.4 - type: max_accuracy value: 99.87326732673267 - type: max_ap value: 96.98758090713252 - type: max_f1 value: 93.52881698685542 - type: max_precision value: 94.58077709611452 - type: max_recall value: 92.7 - type: similarity_accuracy value: 99.87326732673267 - type: similarity_accuracy_threshold value: 86.0752820968628 - type: similarity_ap value: 96.98758090713252 - type: similarity_f1 value: 93.52881698685542 - type: similarity_f1_threshold value: 86.0752820968628 - type: similarity_precision value: 94.58077709611452 - type: similarity_recall value: 92.5 - task: type: Clustering dataset: name: MTEB StackExchangeClustering (default) type: mteb/stackexchange-clustering config: default split: test revision: 6cbc1f7b2bc0622f2e39d2c77fa502909748c259 metrics: - type: main_score value: 65.6560129719848 - type: v_measure value: 65.6560129719848 - type: v_measure_std value: 4.781229811487539 - task: type: Clustering dataset: name: MTEB StackExchangeClusteringP2P (default) type: mteb/stackexchange-clustering-p2p config: default split: test revision: 815ca46b2622cec33ccafc3735d572c266efdb44 metrics: - type: main_score value: 35.07546243853692 - type: v_measure value: 35.07546243853692 - type: v_measure_std value: 1.1978740356240998 - task: type: Reranking dataset: name: MTEB StackOverflowDupQuestions (default) type: mteb/stackoverflowdupquestions-reranking config: default split: test revision: e185fbe320c72810689fc5848eb6114e1ef5ec69 metrics: - type: map value: 51.771005199508835 - type: mrr value: 52.65443298531534 - type: main_score value: 51.771005199508835 - task: type: Summarization dataset: name: MTEB SummEval (default) type: mteb/summeval config: default split: test revision: cda12ad7615edc362dbf25a00fdd61d3b1eaf93c metrics: - type: cosine_pearson value: 29.48686238342228 - type: cosine_spearman value: 29.706543509170054 - type: dot_pearson value: 27.95853155597859 - type: dot_spearman value: 27.604287986935162 - type: main_score value: 29.706543509170054 - type: pearson value: 29.48686238342228 - type: spearman value: 29.706543509170054 - task: type: Summarization dataset: name: MTEB SummEvalFr (default) type: lyon-nlp/summarization-summeval-fr-p2p config: default split: test revision: b385812de6a9577b6f4d0f88c6a6e35395a94054 metrics: - type: cosine_pearson 
value: 31.551301434917868 - type: cosine_spearman value: 30.709049789175186 - type: dot_pearson value: 27.77050901756549 - type: dot_spearman value: 26.715505953561795 - type: main_score value: 30.709049789175186 - type: pearson value: 31.551301434917868 - type: spearman value: 30.709049789175186 - task: type: Reranking dataset: name: MTEB SyntecReranking (default) type: lyon-nlp/mteb-fr-reranking-syntec-s2p config: default split: test revision: b205c5084a0934ce8af14338bf03feb19499c84d metrics: - type: map value: 73.31666666666666 - type: mrr value: 73.31666666666666 - type: main_score value: 73.31666666666666 - task: type: Retrieval dataset: name: MTEB SyntecRetrieval (default) type: lyon-nlp/mteb-fr-retrieval-syntec-s2p config: default split: test revision: 19661ccdca4dfc2d15122d776b61685f48c68ca9 metrics: - type: main_score value: 83.851 - type: map_at_1 value: 68.0 - type: map_at_10 value: 79.187 - type: map_at_100 value: 79.32900000000001 - type: map_at_1000 value: 79.32900000000001 - type: map_at_20 value: 79.32900000000001 - type: map_at_3 value: 77.333 - type: map_at_5 value: 78.93299999999999 - type: mrr_at_1 value: 68.0 - type: mrr_at_10 value: 79.18730158730159 - type: mrr_at_100 value: 79.32945845004669 - type: mrr_at_1000 value: 79.32945845004669 - type: mrr_at_20 value: 79.32945845004669 - type: mrr_at_3 value: 77.33333333333333 - type: mrr_at_5 value: 78.93333333333332 - type: nauc_map_at_1000_diff1 value: 63.31103256935259 - type: nauc_map_at_1000_max value: 11.073749121365623 - type: nauc_map_at_1000_std value: 7.4973309839738 - type: nauc_map_at_100_diff1 value: 63.31103256935259 - type: nauc_map_at_100_max value: 11.073749121365623 - type: nauc_map_at_100_std value: 7.4973309839738 - type: nauc_map_at_10_diff1 value: 62.91585737195978 - type: nauc_map_at_10_max value: 11.770664508983133 - type: nauc_map_at_10_std value: 8.179883948527962 - type: nauc_map_at_1_diff1 value: 66.1236265634718 - type: nauc_map_at_1_max value: 7.000207311173955 - type: nauc_map_at_1_std value: 6.54412272821497 - type: nauc_map_at_20_diff1 value: 63.31103256935259 - type: nauc_map_at_20_max value: 11.073749121365623 - type: nauc_map_at_20_std value: 7.4973309839738 - type: nauc_map_at_3_diff1 value: 62.14039574010254 - type: nauc_map_at_3_max value: 11.06996398110187 - type: nauc_map_at_3_std value: 7.288759297085769 - type: nauc_map_at_5_diff1 value: 63.0401271126211 - type: nauc_map_at_5_max value: 10.779317801858609 - type: nauc_map_at_5_std value: 6.476660484760681 - type: nauc_mrr_at_1000_diff1 value: 63.31103256935259 - type: nauc_mrr_at_1000_max value: 11.073749121365623 - type: nauc_mrr_at_1000_std value: 7.4973309839738 - type: nauc_mrr_at_100_diff1 value: 63.31103256935259 - type: nauc_mrr_at_100_max value: 11.073749121365623 - type: nauc_mrr_at_100_std value: 7.4973309839738 - type: nauc_mrr_at_10_diff1 value: 62.91585737195978 - type: nauc_mrr_at_10_max value: 11.770664508983133 - type: nauc_mrr_at_10_std value: 8.179883948527962 - type: nauc_mrr_at_1_diff1 value: 66.1236265634718 - type: nauc_mrr_at_1_max value: 7.000207311173955 - type: nauc_mrr_at_1_std value: 6.54412272821497 - type: nauc_mrr_at_20_diff1 value: 63.31103256935259 - type: nauc_mrr_at_20_max value: 11.073749121365623 - type: nauc_mrr_at_20_std value: 7.4973309839738 - type: nauc_mrr_at_3_diff1 value: 62.14039574010254 - type: nauc_mrr_at_3_max value: 11.06996398110187 - type: nauc_mrr_at_3_std value: 7.288759297085769 - type: nauc_mrr_at_5_diff1 value: 63.0401271126211 - type: nauc_mrr_at_5_max value: 
10.779317801858609 - type: nauc_mrr_at_5_std value: 6.476660484760681 - type: nauc_ndcg_at_1000_diff1 value: 62.9544299483241 - type: nauc_ndcg_at_1000_max value: 11.577079766964538 - type: nauc_ndcg_at_1000_std value: 7.703856790100716 - type: nauc_ndcg_at_100_diff1 value: 62.9544299483241 - type: nauc_ndcg_at_100_max value: 11.577079766964538 - type: nauc_ndcg_at_100_std value: 7.703856790100716 - type: nauc_ndcg_at_10_diff1 value: 61.29907952217381 - type: nauc_ndcg_at_10_max value: 14.760627422715425 - type: nauc_ndcg_at_10_std value: 10.805573898143368 - type: nauc_ndcg_at_1_diff1 value: 66.1236265634718 - type: nauc_ndcg_at_1_max value: 7.000207311173955 - type: nauc_ndcg_at_1_std value: 6.54412272821497 - type: nauc_ndcg_at_20_diff1 value: 62.9544299483241 - type: nauc_ndcg_at_20_max value: 11.577079766964538 - type: nauc_ndcg_at_20_std value: 7.703856790100716 - type: nauc_ndcg_at_3_diff1 value: 60.25643527856101 - type: nauc_ndcg_at_3_max value: 12.236302709487546 - type: nauc_ndcg_at_3_std value: 7.36883189112067 - type: nauc_ndcg_at_5_diff1 value: 61.65220590318238 - type: nauc_ndcg_at_5_max value: 11.39969101913945 - type: nauc_ndcg_at_5_std value: 5.406207922379402 - type: nauc_precision_at_1000_diff1 value: .nan - type: nauc_precision_at_1000_max value: .nan - type: nauc_precision_at_1000_std value: .nan - type: nauc_precision_at_100_diff1 value: .nan - type: nauc_precision_at_100_max value: .nan - type: nauc_precision_at_100_std value: .nan - type: nauc_precision_at_10_diff1 value: 19.14098972922579 - type: nauc_precision_at_10_max value: 100.0 - type: nauc_precision_at_10_std value: 93.46405228758135 - type: nauc_precision_at_1_diff1 value: 66.1236265634718 - type: nauc_precision_at_1_max value: 7.000207311173955 - type: nauc_precision_at_1_std value: 6.54412272821497 - type: nauc_precision_at_20_diff1 value: 100.0 - type: nauc_precision_at_20_max value: 100.0 - type: nauc_precision_at_20_std value: 100.0 - type: nauc_precision_at_3_diff1 value: 50.29636629155561 - type: nauc_precision_at_3_max value: 18.00532600292076 - type: nauc_precision_at_3_std value: 7.649686453053768 - type: nauc_precision_at_5_diff1 value: 43.522408963585356 - type: nauc_precision_at_5_max value: 16.923436041082983 - type: nauc_precision_at_5_std value: -10.854341736694092 - type: nauc_recall_at_1000_diff1 value: .nan - type: nauc_recall_at_1000_max value: .nan - type: nauc_recall_at_1000_std value: .nan - type: nauc_recall_at_100_diff1 value: .nan - type: nauc_recall_at_100_max value: .nan - type: nauc_recall_at_100_std value: .nan - type: nauc_recall_at_10_diff1 value: 19.1409897292252 - type: nauc_recall_at_10_max value: 100.0 - type: nauc_recall_at_10_std value: 93.46405228758134 - type: nauc_recall_at_1_diff1 value: 66.1236265634718 - type: nauc_recall_at_1_max value: 7.000207311173955 - type: nauc_recall_at_1_std value: 6.54412272821497 - type: nauc_recall_at_20_diff1 value: .nan - type: nauc_recall_at_20_max value: .nan - type: nauc_recall_at_20_std value: .nan - type: nauc_recall_at_3_diff1 value: 50.29636629155569 - type: nauc_recall_at_3_max value: 18.005326002920754 - type: nauc_recall_at_3_std value: 7.649686453053851 - type: nauc_recall_at_5_diff1 value: 43.5224089635856 - type: nauc_recall_at_5_max value: 16.92343604108335 - type: nauc_recall_at_5_std value: -10.854341736694499 - type: ndcg_at_1 value: 68.0 - type: ndcg_at_10 value: 83.851 - type: ndcg_at_100 value: 84.36099999999999 - type: ndcg_at_1000 value: 84.36099999999999 - type: ndcg_at_20 value: 84.36099999999999 - type: 
ndcg_at_3 value: 80.333 - type: ndcg_at_5 value: 83.21600000000001 - type: precision_at_1 value: 68.0 - type: precision_at_10 value: 9.8 - type: precision_at_100 value: 1.0 - type: precision_at_1000 value: 0.1 - type: precision_at_20 value: 5.0 - type: precision_at_3 value: 29.666999999999998 - type: precision_at_5 value: 19.2 - type: recall_at_1 value: 68.0 - type: recall_at_10 value: 98.0 - type: recall_at_100 value: 100.0 - type: recall_at_1000 value: 100.0 - type: recall_at_20 value: 100.0 - type: recall_at_3 value: 89.0 - type: recall_at_5 value: 96.0 - task: type: Reranking dataset: name: MTEB T2Reranking (default) type: C-MTEB/T2Reranking config: default split: dev revision: 76631901a18387f85eaa53e5450019b87ad58ef9 metrics: - type: map value: 65.3088203970324 - type: mrr value: 74.79505862376546 - type: main_score value: 65.3088203970324 - task: type: Retrieval dataset: name: MTEB T2Retrieval (default) type: C-MTEB/T2Retrieval config: default split: dev revision: 8731a845f1bf500a4f111cf1070785c793d10e64 metrics: - type: main_score value: 83.163 - type: map_at_1 value: 26.875 - type: map_at_10 value: 75.454 - type: map_at_100 value: 79.036 - type: map_at_1000 value: 79.111 - type: map_at_20 value: 78.145 - type: map_at_3 value: 53.181 - type: map_at_5 value: 65.362 - type: mrr_at_1 value: 88.90057864281957 - type: mrr_at_10 value: 91.53186397301344 - type: mrr_at_100 value: 91.62809075510003 - type: mrr_at_1000 value: 91.63198173030787 - type: mrr_at_20 value: 91.59414668799909 - type: mrr_at_3 value: 91.0792565316499 - type: mrr_at_5 value: 91.35718043135199 - type: nauc_map_at_1000_diff1 value: 12.364843957982409 - type: nauc_map_at_1000_max value: 52.07043464458799 - type: nauc_map_at_1000_std value: 16.040095055100494 - type: nauc_map_at_100_diff1 value: 12.370621073823022 - type: nauc_map_at_100_max value: 51.960738727635636 - type: nauc_map_at_100_std value: 15.935832440430747 - type: nauc_map_at_10_diff1 value: 16.852819486606585 - type: nauc_map_at_10_max value: 40.11184760756059 - type: nauc_map_at_10_std value: 0.9306648364102376 - type: nauc_map_at_1_diff1 value: 52.87356542654683 - type: nauc_map_at_1_max value: -22.210039746171255 - type: nauc_map_at_1_std value: -38.11345358035342 - type: nauc_map_at_20_diff1 value: 13.045089059562837 - type: nauc_map_at_20_max value: 49.591383082160036 - type: nauc_map_at_20_std value: 12.54330050352008 - type: nauc_map_at_3_diff1 value: 38.08172234377615 - type: nauc_map_at_3_max value: -6.868621684867697 - type: nauc_map_at_3_std value: -35.4712388845996 - type: nauc_map_at_5_diff1 value: 29.665551705577474 - type: nauc_map_at_5_max value: 10.958628576519045 - type: nauc_map_at_5_std value: -25.113120842097057 - type: nauc_mrr_at_1000_diff1 value: 47.39372999496945 - type: nauc_mrr_at_1000_max value: 83.11274997493808 - type: nauc_mrr_at_1000_std value: 39.74195374546631 - type: nauc_mrr_at_100_diff1 value: 47.396678946057676 - type: nauc_mrr_at_100_max value: 83.1192584274415 - type: nauc_mrr_at_100_std value: 39.75840860374685 - type: nauc_mrr_at_10_diff1 value: 47.35365644138715 - type: nauc_mrr_at_10_max value: 83.189165639531 - type: nauc_mrr_at_10_std value: 39.83653157887758 - type: nauc_mrr_at_1_diff1 value: 47.98740362820094 - type: nauc_mrr_at_1_max value: 80.32340034580369 - type: nauc_mrr_at_1_std value: 34.57857131423388 - type: nauc_mrr_at_20_diff1 value: 47.399132055537194 - type: nauc_mrr_at_20_max value: 83.16329919869686 - type: nauc_mrr_at_20_std value: 39.84204692042734 - type: nauc_mrr_at_3_diff1 value: 
47.09295580511751 - type: nauc_mrr_at_3_max value: 82.95831045602642 - type: nauc_mrr_at_3_std value: 38.98036804692351 - type: nauc_mrr_at_5_diff1 value: 47.20100268549764 - type: nauc_mrr_at_5_max value: 83.16652480381642 - type: nauc_mrr_at_5_std value: 39.55690491560902 - type: nauc_ndcg_at_1000_diff1 value: 17.201962509184547 - type: nauc_ndcg_at_1000_max value: 63.75820559259539 - type: nauc_ndcg_at_1000_std value: 29.28676096486067 - type: nauc_ndcg_at_100_diff1 value: 16.76847216096811 - type: nauc_ndcg_at_100_max value: 62.646517934470744 - type: nauc_ndcg_at_100_std value: 28.7441617667637 - type: nauc_ndcg_at_10_diff1 value: 16.559511980751886 - type: nauc_ndcg_at_10_max value: 54.35027464277944 - type: nauc_ndcg_at_10_std value: 16.98089333577716 - type: nauc_ndcg_at_1_diff1 value: 47.98740362820094 - type: nauc_ndcg_at_1_max value: 80.32340034580369 - type: nauc_ndcg_at_1_std value: 34.57857131423388 - type: nauc_ndcg_at_20_diff1 value: 16.721525245428243 - type: nauc_ndcg_at_20_max value: 57.683661870555724 - type: nauc_ndcg_at_20_std value: 21.736044200026853 - type: nauc_ndcg_at_3_diff1 value: 12.488009696556192 - type: nauc_ndcg_at_3_max value: 69.2365575305502 - type: nauc_ndcg_at_3_std value: 30.622418945055323 - type: nauc_ndcg_at_5_diff1 value: 12.364114556230609 - type: nauc_ndcg_at_5_max value: 62.33360746285387 - type: nauc_ndcg_at_5_std value: 24.898000803570227 - type: nauc_precision_at_1000_diff1 value: -35.14745130154524 - type: nauc_precision_at_1000_max value: 48.811507982849065 - type: nauc_precision_at_1000_std value: 62.43036496029399 - type: nauc_precision_at_100_diff1 value: -35.15276411320076 - type: nauc_precision_at_100_max value: 50.87010333741109 - type: nauc_precision_at_100_std value: 63.418221030407175 - type: nauc_precision_at_10_diff1 value: -34.84255710936113 - type: nauc_precision_at_10_max value: 56.588401051428825 - type: nauc_precision_at_10_std value: 57.4763370653757 - type: nauc_precision_at_1_diff1 value: 47.98740362820094 - type: nauc_precision_at_1_max value: 80.32340034580369 - type: nauc_precision_at_1_std value: 34.57857131423388 - type: nauc_precision_at_20_diff1 value: -35.165762365233505 - type: nauc_precision_at_20_max value: 54.148762449660424 - type: nauc_precision_at_20_std value: 61.569719669368716 - type: nauc_precision_at_3_diff1 value: -28.63023175340299 - type: nauc_precision_at_3_max value: 68.69825987618499 - type: nauc_precision_at_3_std value: 48.15479495755423 - type: nauc_precision_at_5_diff1 value: -34.13811355456687 - type: nauc_precision_at_5_max value: 62.369363941490604 - type: nauc_precision_at_5_std value: 52.282904411187914 - type: nauc_recall_at_1000_diff1 value: 8.686444579162663 - type: nauc_recall_at_1000_max value: 59.58864478011338 - type: nauc_recall_at_1000_std value: 56.692774954297455 - type: nauc_recall_at_100_diff1 value: 8.820596225758342 - type: nauc_recall_at_100_max value: 53.15048885657892 - type: nauc_recall_at_100_std value: 39.78931159236714 - type: nauc_recall_at_10_diff1 value: 16.022301106315027 - type: nauc_recall_at_10_max value: 29.83242342459543 - type: nauc_recall_at_10_std value: -4.805965555875844 - type: nauc_recall_at_1_diff1 value: 52.87356542654683 - type: nauc_recall_at_1_max value: -22.210039746171255 - type: nauc_recall_at_1_std value: -38.11345358035342 - type: nauc_recall_at_20_diff1 value: 10.35772828627265 - type: nauc_recall_at_20_max value: 43.06420839754062 - type: nauc_recall_at_20_std value: 15.040522218235692 - type: nauc_recall_at_3_diff1 value: 
36.23953684770224 - type: nauc_recall_at_3_max value: -11.709269151700374 - type: nauc_recall_at_3_std value: -38.13943178150384 - type: nauc_recall_at_5_diff1 value: 28.644872415763384 - type: nauc_recall_at_5_max value: 2.062151266111129 - type: nauc_recall_at_5_std value: -30.81114034774277 - type: ndcg_at_1 value: 88.901 - type: ndcg_at_10 value: 83.163 - type: ndcg_at_100 value: 86.854 - type: ndcg_at_1000 value: 87.602 - type: ndcg_at_20 value: 84.908 - type: ndcg_at_3 value: 84.848 - type: ndcg_at_5 value: 83.372 - type: precision_at_1 value: 88.901 - type: precision_at_10 value: 41.343 - type: precision_at_100 value: 4.957000000000001 - type: precision_at_1000 value: 0.513 - type: precision_at_20 value: 22.955000000000002 - type: precision_at_3 value: 74.29599999999999 - type: precision_at_5 value: 62.251999999999995 - type: recall_at_1 value: 26.875 - type: recall_at_10 value: 81.902 - type: recall_at_100 value: 93.988 - type: recall_at_1000 value: 97.801 - type: recall_at_20 value: 87.809 - type: recall_at_3 value: 54.869 - type: recall_at_5 value: 68.728 - task: type: PairClassification dataset: name: MTEB TERRa (default) type: ai-forever/terra-pairclassification config: default split: dev revision: 7b58f24536063837d644aab9a023c62199b2a612 metrics: - type: cosine_accuracy value: 60.586319218241044 - type: cosine_accuracy_threshold value: 82.49806761741638 - type: cosine_ap value: 58.73198048427448 - type: cosine_f1 value: 67.37967914438502 - type: cosine_f1_threshold value: 77.46461033821106 - type: cosine_precision value: 57.01357466063348 - type: cosine_recall value: 82.35294117647058 - type: dot_accuracy value: 60.26058631921825 - type: dot_accuracy_threshold value: 35627.020263671875 - type: dot_ap value: 57.418783612898224 - type: dot_f1 value: 66.51982378854623 - type: dot_f1_threshold value: 27620.843505859375 - type: dot_precision value: 50.16611295681063 - type: dot_recall value: 98.69281045751634 - type: euclidean_accuracy value: 60.26058631921825 - type: euclidean_accuracy_threshold value: 1255.4466247558594 - type: euclidean_ap value: 58.748656145387955 - type: euclidean_f1 value: 66.99029126213591 - type: euclidean_f1_threshold value: 1565.1330947875977 - type: euclidean_precision value: 53.28185328185329 - type: euclidean_recall value: 90.19607843137256 - type: main_score value: 58.8479126365766 - type: manhattan_accuracy value: 59.934853420195445 - type: manhattan_accuracy_threshold value: 29897.271728515625 - type: manhattan_ap value: 58.8479126365766 - type: manhattan_f1 value: 66.81318681318683 - type: manhattan_f1_threshold value: 46291.802978515625 - type: manhattan_precision value: 50.331125827814574 - type: manhattan_recall value: 99.34640522875817 - type: max_accuracy value: 60.586319218241044 - type: max_ap value: 58.8479126365766 - type: max_f1 value: 67.37967914438502 - type: max_precision value: 57.01357466063348 - type: max_recall value: 99.34640522875817 - type: similarity_accuracy value: 60.586319218241044 - type: similarity_accuracy_threshold value: 82.49806761741638 - type: similarity_ap value: 58.73198048427448 - type: similarity_f1 value: 67.37967914438502 - type: similarity_f1_threshold value: 77.46461033821106 - type: similarity_precision value: 57.01357466063348 - type: similarity_recall value: 82.35294117647058 - task: type: Classification dataset: name: MTEB TNews (default) type: C-MTEB/TNews-classification config: default split: validation revision: 317f262bf1e6126357bbe89e875451e4b0938fe4 metrics: - type: accuracy value: 
45.967999999999996 - type: f1 value: 44.699306100915706 - type: f1_weighted value: 46.03730319014832 - type: main_score value: 45.967999999999996 - task: type: Retrieval dataset: name: MTEB TRECCOVID (default) type: mteb/trec-covid config: default split: test revision: bb9466bac8153a0349341eb1b22e06409e78ef4e metrics: - type: map_at_1 value: 0.251 - type: map_at_10 value: 1.9480000000000002 - type: map_at_100 value: 11.082 - type: map_at_1000 value: 26.700000000000003 - type: map_at_20 value: 3.3529999999999998 - type: map_at_3 value: 0.679 - type: map_at_5 value: 1.079 - type: mrr_at_1 value: 94.0 - type: mrr_at_10 value: 95.786 - type: mrr_at_100 value: 95.786 - type: mrr_at_1000 value: 95.786 - type: mrr_at_20 value: 95.786 - type: mrr_at_3 value: 95.0 - type: mrr_at_5 value: 95.5 - type: ndcg_at_1 value: 91.0 - type: ndcg_at_10 value: 77.71900000000001 - type: ndcg_at_100 value: 57.726 - type: ndcg_at_1000 value: 52.737 - type: ndcg_at_20 value: 72.54 - type: ndcg_at_3 value: 83.397 - type: ndcg_at_5 value: 80.806 - type: precision_at_1 value: 94.0 - type: precision_at_10 value: 81.0 - type: precision_at_100 value: 59.199999999999996 - type: precision_at_1000 value: 23.244 - type: precision_at_20 value: 75.2 - type: precision_at_3 value: 88.0 - type: precision_at_5 value: 84.8 - type: recall_at_1 value: 0.251 - type: recall_at_10 value: 2.1229999999999998 - type: recall_at_100 value: 14.496999999999998 - type: recall_at_1000 value: 50.09 - type: recall_at_20 value: 3.8309999999999995 - type: recall_at_3 value: 0.696 - type: recall_at_5 value: 1.1400000000000001 - type: main_score value: 77.71900000000001 - task: type: Clustering dataset: name: MTEB TenKGnadClusteringP2P (default) type: slvnwhrl/tenkgnad-clustering-p2p config: default split: test revision: 5c59e41555244b7e45c9a6be2d720ab4bafae558 metrics: - type: main_score value: 43.763609722295215 - type: v_measure value: 43.763609722295215 - type: v_measure_std value: 2.8751199473862457 - task: type: Clustering dataset: name: MTEB TenKGnadClusteringS2S (default) type: slvnwhrl/tenkgnad-clustering-s2s config: default split: test revision: 6cddbe003f12b9b140aec477b583ac4191f01786 metrics: - type: main_score value: 39.762424448504355 - type: v_measure value: 39.762424448504355 - type: v_measure_std value: 3.30146124979502 - task: type: Clustering dataset: name: MTEB ThuNewsClusteringP2P (default) type: C-MTEB/ThuNewsClusteringP2P config: default split: test revision: 5798586b105c0434e4f0fe5e767abe619442cf93 metrics: - type: main_score value: 63.133819258289456 - type: v_measure value: 63.133819258289456 - type: v_measure_std value: 1.8854253356479695 - task: type: Clustering dataset: name: MTEB ThuNewsClusteringS2S (default) type: C-MTEB/ThuNewsClusteringS2S config: default split: test revision: 8a8b2caeda43f39e13c4bc5bea0f8a667896e10d metrics: - type: main_score value: 58.98195851785808 - type: v_measure value: 58.98195851785808 - type: v_measure_std value: 1.6237600076393737 - task: type: Retrieval dataset: name: MTEB Touche2020 (default) type: mteb/touche2020 config: default split: test revision: a34f9a33db75fa0cbb21bb5cfc3dae8dc8bec93f metrics: - type: map_at_1 value: 3.3550000000000004 - type: map_at_10 value: 10.08 - type: map_at_100 value: 16.136 - type: map_at_1000 value: 17.605 - type: map_at_20 value: 12.561 - type: map_at_3 value: 5.641 - type: map_at_5 value: 7.3260000000000005 - type: mrr_at_1 value: 46.939 - type: mrr_at_10 value: 58.152 - type: mrr_at_100 value: 58.594 - type: mrr_at_1000 value: 58.601000000000006 - type: 
mrr_at_20 value: 58.279 - type: mrr_at_3 value: 55.102 - type: mrr_at_5 value: 56.531 - type: ndcg_at_1 value: 44.897999999999996 - type: ndcg_at_10 value: 26.298 - type: ndcg_at_100 value: 37.596000000000004 - type: ndcg_at_1000 value: 49.424 - type: ndcg_at_20 value: 27.066000000000003 - type: ndcg_at_3 value: 31.528 - type: ndcg_at_5 value: 28.219 - type: precision_at_1 value: 46.939 - type: precision_at_10 value: 22.245 - type: precision_at_100 value: 7.531000000000001 - type: precision_at_1000 value: 1.5350000000000001 - type: precision_at_20 value: 17.041 - type: precision_at_3 value: 30.612000000000002 - type: precision_at_5 value: 26.122 - type: recall_at_1 value: 3.3550000000000004 - type: recall_at_10 value: 16.41 - type: recall_at_100 value: 47.272 - type: recall_at_1000 value: 83.584 - type: recall_at_20 value: 24.091 - type: recall_at_3 value: 6.8180000000000005 - type: recall_at_5 value: 9.677 - type: main_score value: 26.298 - task: type: Classification dataset: name: MTEB ToxicConversationsClassification (default) type: mteb/toxic_conversations_50k config: default split: test revision: edfaf9da55d3dd50d43143d90c1ac476895ae6de metrics: - type: accuracy value: 91.2890625 - type: ap value: 33.95547153875715 - type: ap_weighted value: 33.95547153875715 - type: f1 value: 75.10768597556462 - type: f1_weighted value: 92.00161208992606 - type: main_score value: 91.2890625 - task: type: Classification dataset: name: MTEB TweetSentimentExtractionClassification (default) type: mteb/tweet_sentiment_extraction config: default split: test revision: d604517c81ca91fe16a244d1248fc021f9ecee7a metrics: - type: accuracy value: 71.3978494623656 - type: f1 value: 71.7194818511814 - type: f1_weighted value: 71.13860187349744 - type: main_score value: 71.3978494623656 - task: type: Clustering dataset: name: MTEB TwentyNewsgroupsClustering (default) type: mteb/twentynewsgroups-clustering config: default split: test revision: 6125ec4e24fa026cec8a478383ee943acfbd5449 metrics: - type: main_score value: 52.4921688720602 - type: v_measure value: 52.4921688720602 - type: v_measure_std value: 0.992768152658908 - task: type: PairClassification dataset: name: MTEB TwitterSemEval2015 (default) type: mteb/twittersemeval2015-pairclassification config: default split: test revision: 70970daeab8776df92f5ea462b6173c0b46fd2d1 metrics: - type: cosine_accuracy value: 85.11652858079513 - type: cosine_accuracy_threshold value: 87.90839910507202 - type: cosine_ap value: 70.90459908851724 - type: cosine_f1 value: 65.66581227877457 - type: cosine_f1_threshold value: 85.13308763504028 - type: cosine_precision value: 61.094708153531684 - type: cosine_recall value: 70.97625329815304 - type: dot_accuracy value: 83.41181379269239 - type: dot_accuracy_threshold value: 43110.113525390625 - type: dot_ap value: 65.64869491143095 - type: dot_f1 value: 62.05308447460914 - type: dot_f1_threshold value: 41412.542724609375 - type: dot_precision value: 57.38623626989464 - type: dot_recall value: 67.54617414248021 - type: euclidean_accuracy value: 85.15229182809799 - type: euclidean_accuracy_threshold value: 1043.08500289917 - type: euclidean_ap value: 70.71204383269375 - type: euclidean_f1 value: 65.20304568527919 - type: euclidean_f1_threshold value: 1179.2595863342285 - type: euclidean_precision value: 62.81173594132029 - type: euclidean_recall value: 67.78364116094987 - type: main_score value: 70.90459908851724 - type: manhattan_accuracy value: 85.1820945341837 - type: manhattan_accuracy_threshold value: 26115.0390625 - type: 
manhattan_ap value: 70.66113937117431 - type: manhattan_f1 value: 65.33383628819313 - type: manhattan_f1_threshold value: 29105.181884765625 - type: manhattan_precision value: 62.40691808791736 - type: manhattan_recall value: 68.54881266490766 - type: max_accuracy value: 85.1820945341837 - type: max_ap value: 70.90459908851724 - type: max_f1 value: 65.66581227877457 - type: max_precision value: 62.81173594132029 - type: max_recall value: 70.97625329815304 - type: similarity_accuracy value: 85.11652858079513 - type: similarity_accuracy_threshold value: 87.90839910507202 - type: similarity_ap value: 70.90459908851724 - type: similarity_f1 value: 65.66581227877457 - type: similarity_f1_threshold value: 85.13308763504028 - type: similarity_precision value: 61.094708153531684 - type: similarity_recall value: 70.97625329815304 - task: type: PairClassification dataset: name: MTEB TwitterURLCorpus (default) type: mteb/twitterurlcorpus-pairclassification config: default split: test revision: 8b6510b0b1fa4e4c4f879467980e9be563ec1cdf metrics: - type: cosine_accuracy value: 88.10299996119068 - type: cosine_accuracy_threshold value: 84.34982895851135 - type: cosine_ap value: 84.13755787769226 - type: cosine_f1 value: 76.0967548076923 - type: cosine_f1_threshold value: 82.8936219215393 - type: cosine_precision value: 74.28864769727193 - type: cosine_recall value: 77.99507237449954 - type: dot_accuracy value: 86.64182869561843 - type: dot_accuracy_threshold value: 38794.677734375 - type: dot_ap value: 80.20301567411457 - type: dot_f1 value: 73.50650291634967 - type: dot_f1_threshold value: 37447.23205566406 - type: dot_precision value: 69.41498460485802 - type: dot_recall value: 78.11056359716662 - type: euclidean_accuracy value: 87.9361198432103 - type: euclidean_accuracy_threshold value: 1184.421157836914 - type: euclidean_ap value: 83.79582690117218 - type: euclidean_f1 value: 75.81431709042175 - type: euclidean_f1_threshold value: 1258.2727432250977 - type: euclidean_precision value: 73.39099099099099 - type: euclidean_recall value: 78.40314136125654 - type: main_score value: 84.13755787769226 - type: manhattan_accuracy value: 87.96134590755618 - type: manhattan_accuracy_threshold value: 29077.291870117188 - type: manhattan_ap value: 83.79487172269923 - type: manhattan_f1 value: 75.82421603424935 - type: manhattan_f1_threshold value: 31224.124145507812 - type: manhattan_precision value: 72.24740255212329 - type: manhattan_recall value: 79.77363720357253 - type: max_accuracy value: 88.10299996119068 - type: max_ap value: 84.13755787769226 - type: max_f1 value: 76.0967548076923 - type: max_precision value: 74.28864769727193 - type: max_recall value: 79.77363720357253 - type: similarity_accuracy value: 88.10299996119068 - type: similarity_accuracy_threshold value: 84.34982895851135 - type: similarity_ap value: 84.13755787769226 - type: similarity_f1 value: 76.0967548076923 - type: similarity_f1_threshold value: 82.8936219215393 - type: similarity_precision value: 74.28864769727193 - type: similarity_recall value: 77.99507237449954 - task: type: Retrieval dataset: name: MTEB VideoRetrieval (default) type: C-MTEB/VideoRetrieval config: default split: dev revision: 58c2597a5943a2ba48f4668c3b90d796283c5639 metrics: - type: main_score value: 70.433 - type: map_at_1 value: 55.7 - type: map_at_10 value: 66.013 - type: map_at_100 value: 66.534 - type: map_at_1000 value: 66.547 - type: map_at_20 value: 66.334 - type: map_at_3 value: 64.2 - type: map_at_5 value: 65.445 - type: mrr_at_1 value: 55.7 - type: 
mrr_at_10 value: 66.01329365079364 - type: mrr_at_100 value: 66.53350061744233 - type: mrr_at_1000 value: 66.54744831962995 - type: mrr_at_20 value: 66.3335147364675 - type: mrr_at_3 value: 64.2 - type: mrr_at_5 value: 65.44500000000002 - type: nauc_map_at_1000_diff1 value: 76.26428836976245 - type: nauc_map_at_1000_max value: 35.41847367373575 - type: nauc_map_at_1000_std value: -33.04639860831992 - type: nauc_map_at_100_diff1 value: 76.25793229023193 - type: nauc_map_at_100_max value: 35.43663260110076 - type: nauc_map_at_100_std value: -33.04238139882945 - type: nauc_map_at_10_diff1 value: 76.2108281297711 - type: nauc_map_at_10_max value: 35.59442419423183 - type: nauc_map_at_10_std value: -33.32346518997277 - type: nauc_map_at_1_diff1 value: 79.17728405262736 - type: nauc_map_at_1_max value: 31.880738163589527 - type: nauc_map_at_1_std value: -30.891888718004584 - type: nauc_map_at_20_diff1 value: 76.2181333410193 - type: nauc_map_at_20_max value: 35.43448818430876 - type: nauc_map_at_20_std value: -33.35682442863193 - type: nauc_map_at_3_diff1 value: 76.10046541433466 - type: nauc_map_at_3_max value: 34.6831278555291 - type: nauc_map_at_3_std value: -34.030826044831116 - type: nauc_map_at_5_diff1 value: 75.96513023582064 - type: nauc_map_at_5_max value: 34.66920832438069 - type: nauc_map_at_5_std value: -33.79799777830796 - type: nauc_mrr_at_1000_diff1 value: 76.26428836976245 - type: nauc_mrr_at_1000_max value: 35.41847367373575 - type: nauc_mrr_at_1000_std value: -33.04639860831992 - type: nauc_mrr_at_100_diff1 value: 76.25793229023193 - type: nauc_mrr_at_100_max value: 35.43663260110076 - type: nauc_mrr_at_100_std value: -33.04238139882945 - type: nauc_mrr_at_10_diff1 value: 76.2108281297711 - type: nauc_mrr_at_10_max value: 35.59442419423183 - type: nauc_mrr_at_10_std value: -33.32346518997277 - type: nauc_mrr_at_1_diff1 value: 79.17728405262736 - type: nauc_mrr_at_1_max value: 31.880738163589527 - type: nauc_mrr_at_1_std value: -30.891888718004584 - type: nauc_mrr_at_20_diff1 value: 76.2181333410193 - type: nauc_mrr_at_20_max value: 35.43448818430876 - type: nauc_mrr_at_20_std value: -33.35682442863193 - type: nauc_mrr_at_3_diff1 value: 76.10046541433466 - type: nauc_mrr_at_3_max value: 34.6831278555291 - type: nauc_mrr_at_3_std value: -34.030826044831116 - type: nauc_mrr_at_5_diff1 value: 75.96513023582064 - type: nauc_mrr_at_5_max value: 34.66920832438069 - type: nauc_mrr_at_5_std value: -33.79799777830796 - type: nauc_ndcg_at_1000_diff1 value: 75.68118206798317 - type: nauc_ndcg_at_1000_max value: 37.12252980787349 - type: nauc_ndcg_at_1000_std value: -31.457578337430505 - type: nauc_ndcg_at_100_diff1 value: 75.46730761564156 - type: nauc_ndcg_at_100_max value: 37.549890025544265 - type: nauc_ndcg_at_100_std value: -31.35066985945112 - type: nauc_ndcg_at_10_diff1 value: 75.09890404887037 - type: nauc_ndcg_at_10_max value: 38.024147790014204 - type: nauc_ndcg_at_10_std value: -33.67408368593356 - type: nauc_ndcg_at_1_diff1 value: 79.17728405262736 - type: nauc_ndcg_at_1_max value: 31.880738163589527 - type: nauc_ndcg_at_1_std value: -30.891888718004584 - type: nauc_ndcg_at_20_diff1 value: 75.12977548171354 - type: nauc_ndcg_at_20_max value: 37.524926748917956 - type: nauc_ndcg_at_20_std value: -33.771344674947485 - type: nauc_ndcg_at_3_diff1 value: 74.94037476984154 - type: nauc_ndcg_at_3_max value: 35.60345554050552 - type: nauc_ndcg_at_3_std value: -35.256991346321854 - type: nauc_ndcg_at_5_diff1 value: 74.54265907753783 - type: nauc_ndcg_at_5_max value: 35.57662819978585 - 
type: nauc_ndcg_at_5_std value: -34.879794448418465 - type: nauc_precision_at_1000_diff1 value: 74.52277207179142 - type: nauc_precision_at_1000_max value: 94.25510945118707 - type: nauc_precision_at_1000_std value: 91.6874157070222 - type: nauc_precision_at_100_diff1 value: 65.98346655735419 - type: nauc_precision_at_100_max value: 78.81168727653687 - type: nauc_precision_at_100_std value: 27.241465691967708 - type: nauc_precision_at_10_diff1 value: 69.55050319096688 - type: nauc_precision_at_10_max value: 51.827749140893374 - type: nauc_precision_at_10_std value: -34.60818605792837 - type: nauc_precision_at_1_diff1 value: 79.17728405262736 - type: nauc_precision_at_1_max value: 31.880738163589527 - type: nauc_precision_at_1_std value: -30.891888718004584 - type: nauc_precision_at_20_diff1 value: 68.08078305042736 - type: nauc_precision_at_20_max value: 52.83318878288501 - type: nauc_precision_at_20_std value: -35.46070292817927 - type: nauc_precision_at_3_diff1 value: 70.76249609881901 - type: nauc_precision_at_3_max value: 38.86561868624655 - type: nauc_precision_at_3_std value: -39.68917853446992 - type: nauc_precision_at_5_diff1 value: 68.39110629013278 - type: nauc_precision_at_5_max value: 39.28677163904683 - type: nauc_precision_at_5_std value: -39.39101423819562 - type: nauc_recall_at_1000_diff1 value: 74.52277207179175 - type: nauc_recall_at_1000_max value: 94.25510945118776 - type: nauc_recall_at_1000_std value: 91.68741570702382 - type: nauc_recall_at_100_diff1 value: 65.9834665573548 - type: nauc_recall_at_100_max value: 78.81168727653679 - type: nauc_recall_at_100_std value: 27.241465691967598 - type: nauc_recall_at_10_diff1 value: 69.55050319096708 - type: nauc_recall_at_10_max value: 51.82774914089347 - type: nauc_recall_at_10_std value: -34.6081860579283 - type: nauc_recall_at_1_diff1 value: 79.17728405262736 - type: nauc_recall_at_1_max value: 31.880738163589527 - type: nauc_recall_at_1_std value: -30.891888718004584 - type: nauc_recall_at_20_diff1 value: 68.08078305042746 - type: nauc_recall_at_20_max value: 52.833188782885244 - type: nauc_recall_at_20_std value: -35.46070292817895 - type: nauc_recall_at_3_diff1 value: 70.76249609881896 - type: nauc_recall_at_3_max value: 38.865618686246464 - type: nauc_recall_at_3_std value: -39.68917853446999 - type: nauc_recall_at_5_diff1 value: 68.39110629013274 - type: nauc_recall_at_5_max value: 39.28677163904688 - type: nauc_recall_at_5_std value: -39.39101423819562 - type: ndcg_at_1 value: 55.7 - type: ndcg_at_10 value: 70.433 - type: ndcg_at_100 value: 72.975 - type: ndcg_at_1000 value: 73.283 - type: ndcg_at_20 value: 71.58 - type: ndcg_at_3 value: 66.83099999999999 - type: ndcg_at_5 value: 69.085 - type: precision_at_1 value: 55.7 - type: precision_at_10 value: 8.4 - type: precision_at_100 value: 0.959 - type: precision_at_1000 value: 0.098 - type: precision_at_20 value: 4.425 - type: precision_at_3 value: 24.8 - type: precision_at_5 value: 15.98 - type: recall_at_1 value: 55.7 - type: recall_at_10 value: 84.0 - type: recall_at_100 value: 95.89999999999999 - type: recall_at_1000 value: 98.2 - type: recall_at_20 value: 88.5 - type: recall_at_3 value: 74.4 - type: recall_at_5 value: 79.9 - task: type: Classification dataset: name: MTEB Waimai (default) type: C-MTEB/waimai-classification config: default split: test revision: 339287def212450dcaa9df8c22bf93e9980c7023 metrics: - type: accuracy value: 86.58999999999999 - type: ap value: 70.02619249927523 - type: ap_weighted value: 70.02619249927523 - type: f1 value: 84.97572770889423 
- type: f1_weighted value: 86.6865713531272 - type: main_score value: 86.58999999999999 - task: type: Retrieval dataset: name: MTEB XMarket (en) type: jinaai/xmarket_ml config: en split: test revision: dfe57acff5b62c23732a7b7d3e3fb84ff501708b metrics: - type: main_score value: 34.772999999999996 - type: map_at_1 value: 7.2620000000000005 - type: map_at_10 value: 17.98 - type: map_at_100 value: 24.828 - type: map_at_1000 value: 26.633000000000003 - type: map_at_20 value: 20.699 - type: map_at_3 value: 12.383 - type: map_at_5 value: 14.871 - type: mrr_at_1 value: 34.718100890207715 - type: mrr_at_10 value: 43.9336827525092 - type: mrr_at_100 value: 44.66474011066837 - type: mrr_at_1000 value: 44.7075592197356 - type: mrr_at_20 value: 44.35984436569346 - type: mrr_at_3 value: 41.73901893981052 - type: mrr_at_5 value: 43.025973550207134 - type: nauc_map_at_1000_diff1 value: 13.899869081196364 - type: nauc_map_at_1000_max value: 46.60452816386231 - type: nauc_map_at_1000_std value: 24.87925799401773 - type: nauc_map_at_100_diff1 value: 16.164805650871084 - type: nauc_map_at_100_max value: 44.720912958558095 - type: nauc_map_at_100_std value: 20.236734536210477 - type: nauc_map_at_10_diff1 value: 23.58580520913581 - type: nauc_map_at_10_max value: 31.276151869914216 - type: nauc_map_at_10_std value: -0.1833326246041355 - type: nauc_map_at_1_diff1 value: 37.02663305598722 - type: nauc_map_at_1_max value: 14.931071531116528 - type: nauc_map_at_1_std value: -12.478790028708453 - type: nauc_map_at_20_diff1 value: 20.718297881540593 - type: nauc_map_at_20_max value: 36.62264094841859 - type: nauc_map_at_20_std value: 6.658514770057742 - type: nauc_map_at_3_diff1 value: 29.379034581120006 - type: nauc_map_at_3_max value: 21.387214269548803 - type: nauc_map_at_3_std value: -9.3404121914247 - type: nauc_map_at_5_diff1 value: 26.627169792839485 - type: nauc_map_at_5_max value: 25.393331109666388 - type: nauc_map_at_5_std value: -6.023485287246353 - type: nauc_mrr_at_1000_diff1 value: 12.047232036652295 - type: nauc_mrr_at_1000_max value: 46.611862580860645 - type: nauc_mrr_at_1000_std value: 27.89146066442305 - type: nauc_mrr_at_100_diff1 value: 12.05261747449997 - type: nauc_mrr_at_100_max value: 46.61328535381203 - type: nauc_mrr_at_100_std value: 27.886145596874535 - type: nauc_mrr_at_10_diff1 value: 12.006935553036941 - type: nauc_mrr_at_10_max value: 46.53351686240496 - type: nauc_mrr_at_10_std value: 27.708742470257462 - type: nauc_mrr_at_1_diff1 value: 13.323408127738782 - type: nauc_mrr_at_1_max value: 43.78884661002012 - type: nauc_mrr_at_1_std value: 25.164417588165673 - type: nauc_mrr_at_20_diff1 value: 12.036022973968011 - type: nauc_mrr_at_20_max value: 46.56537838037131 - type: nauc_mrr_at_20_std value: 27.78189157249635 - type: nauc_mrr_at_3_diff1 value: 11.943896700976381 - type: nauc_mrr_at_3_max value: 46.33644663073225 - type: nauc_mrr_at_3_std value: 27.523915405053845 - type: nauc_mrr_at_5_diff1 value: 12.03108009033769 - type: nauc_mrr_at_5_max value: 46.49103616896692 - type: nauc_mrr_at_5_std value: 27.630879129863366 - type: nauc_ndcg_at_1000_diff1 value: 9.766823796017324 - type: nauc_ndcg_at_1000_max value: 52.85844801910602 - type: nauc_ndcg_at_1000_std value: 36.43271437761207 - type: nauc_ndcg_at_100_diff1 value: 12.035059298282036 - type: nauc_ndcg_at_100_max value: 50.05520240705682 - type: nauc_ndcg_at_100_std value: 29.87678724506636 - type: nauc_ndcg_at_10_diff1 value: 10.281893031139424 - type: nauc_ndcg_at_10_max value: 47.02153679426017 - type: nauc_ndcg_at_10_std 
value: 26.624948330369126 - type: nauc_ndcg_at_1_diff1 value: 13.323408127738782 - type: nauc_ndcg_at_1_max value: 43.78884661002012 - type: nauc_ndcg_at_1_std value: 25.164417588165673 - type: nauc_ndcg_at_20_diff1 value: 11.463524849646598 - type: nauc_ndcg_at_20_max value: 47.415073186019704 - type: nauc_ndcg_at_20_std value: 26.359019620164307 - type: nauc_ndcg_at_3_diff1 value: 9.689199913805394 - type: nauc_ndcg_at_3_max value: 45.68151849572808 - type: nauc_ndcg_at_3_std value: 26.559193219799486 - type: nauc_ndcg_at_5_diff1 value: 9.448823370356575 - type: nauc_ndcg_at_5_max value: 46.19999662690141 - type: nauc_ndcg_at_5_std value: 26.8411706726069 - type: nauc_precision_at_1000_diff1 value: -20.379065598727024 - type: nauc_precision_at_1000_max value: 13.162562437268427 - type: nauc_precision_at_1000_std value: 22.658226157785812 - type: nauc_precision_at_100_diff1 value: -16.458155977309282 - type: nauc_precision_at_100_max value: 35.97956789169889 - type: nauc_precision_at_100_std value: 48.878375009979194 - type: nauc_precision_at_10_diff1 value: -7.810992317607771 - type: nauc_precision_at_10_max value: 49.307339277444754 - type: nauc_precision_at_10_std value: 42.82533951854582 - type: nauc_precision_at_1_diff1 value: 13.323408127738782 - type: nauc_precision_at_1_max value: 43.78884661002012 - type: nauc_precision_at_1_std value: 25.164417588165673 - type: nauc_precision_at_20_diff1 value: -11.43933465149542 - type: nauc_precision_at_20_max value: 46.93722753460038 - type: nauc_precision_at_20_std value: 47.36223769029678 - type: nauc_precision_at_3_diff1 value: 1.3230178593599737 - type: nauc_precision_at_3_max value: 48.49039534395576 - type: nauc_precision_at_3_std value: 33.161384183129194 - type: nauc_precision_at_5_diff1 value: -3.185516457926519 - type: nauc_precision_at_5_max value: 49.5814309394308 - type: nauc_precision_at_5_std value: 37.57637865900281 - type: nauc_recall_at_1000_diff1 value: 7.839499443984168 - type: nauc_recall_at_1000_max value: 52.67165467640894 - type: nauc_recall_at_1000_std value: 48.85318316702583 - type: nauc_recall_at_100_diff1 value: 14.117557049589418 - type: nauc_recall_at_100_max value: 40.59046301348715 - type: nauc_recall_at_100_std value: 24.379680901739505 - type: nauc_recall_at_10_diff1 value: 20.04536052614054 - type: nauc_recall_at_10_max value: 25.54148839721574 - type: nauc_recall_at_10_std value: -1.938182527562211 - type: nauc_recall_at_1_diff1 value: 37.02663305598722 - type: nauc_recall_at_1_max value: 14.931071531116528 - type: nauc_recall_at_1_std value: -12.478790028708453 - type: nauc_recall_at_20_diff1 value: 17.959977483235566 - type: nauc_recall_at_20_max value: 29.88502687870809 - type: nauc_recall_at_20_std value: 4.26527395196852 - type: nauc_recall_at_3_diff1 value: 26.297810954500456 - type: nauc_recall_at_3_max value: 18.819406079307402 - type: nauc_recall_at_3_std value: -10.002237229729081 - type: nauc_recall_at_5_diff1 value: 22.739080899568485 - type: nauc_recall_at_5_max value: 21.0322968243985 - type: nauc_recall_at_5_std value: -6.927749435306422 - type: ndcg_at_1 value: 34.717999999999996 - type: ndcg_at_10 value: 34.772999999999996 - type: ndcg_at_100 value: 39.407 - type: ndcg_at_1000 value: 44.830999999999996 - type: ndcg_at_20 value: 35.667 - type: ndcg_at_3 value: 34.332 - type: ndcg_at_5 value: 34.408 - type: precision_at_1 value: 34.717999999999996 - type: precision_at_10 value: 23.430999999999997 - type: precision_at_100 value: 9.31 - type: precision_at_1000 value: 2.259 - type: 
precision_at_20 value: 18.826999999999998 - type: precision_at_3 value: 30.553 - type: precision_at_5 value: 27.792 - type: recall_at_1 value: 7.2620000000000005 - type: recall_at_10 value: 26.384 - type: recall_at_100 value: 52.506 - type: recall_at_1000 value: 73.38 - type: recall_at_20 value: 34.032000000000004 - type: recall_at_3 value: 14.821000000000002 - type: recall_at_5 value: 19.481 - task: type: Retrieval dataset: name: MTEB XMarket (de) type: jinaai/xmarket_ml config: de split: test revision: dfe57acff5b62c23732a7b7d3e3fb84ff501708b metrics: - type: main_score value: 28.316000000000003 - type: map_at_1 value: 8.667 - type: map_at_10 value: 17.351 - type: map_at_100 value: 21.02 - type: map_at_1000 value: 21.951 - type: map_at_20 value: 18.994 - type: map_at_3 value: 13.23 - type: map_at_5 value: 15.17 - type: mrr_at_1 value: 27.27272727272727 - type: mrr_at_10 value: 36.10858487561485 - type: mrr_at_100 value: 36.92033814316568 - type: mrr_at_1000 value: 36.972226653870365 - type: mrr_at_20 value: 36.58914906427944 - type: mrr_at_3 value: 33.642969201552305 - type: mrr_at_5 value: 35.13417554289494 - type: nauc_map_at_1000_diff1 value: 23.345116790998063 - type: nauc_map_at_1000_max value: 44.447240670835725 - type: nauc_map_at_1000_std value: 18.34636500680144 - type: nauc_map_at_100_diff1 value: 24.458120909292347 - type: nauc_map_at_100_max value: 43.31851431140378 - type: nauc_map_at_100_std value: 15.654778355549965 - type: nauc_map_at_10_diff1 value: 29.376508937265044 - type: nauc_map_at_10_max value: 36.650196725140795 - type: nauc_map_at_10_std value: 4.682465435374843 - type: nauc_map_at_1_diff1 value: 40.382365672683214 - type: nauc_map_at_1_max value: 22.894341150096785 - type: nauc_map_at_1_std value: -5.610725673968323 - type: nauc_map_at_20_diff1 value: 27.197033425732908 - type: nauc_map_at_20_max value: 39.71672400647207 - type: nauc_map_at_20_std value: 8.944436813309933 - type: nauc_map_at_3_diff1 value: 34.49739294661502 - type: nauc_map_at_3_max value: 29.006972420735284 - type: nauc_map_at_3_std value: -3.0372650571243986 - type: nauc_map_at_5_diff1 value: 32.764901537277105 - type: nauc_map_at_5_max value: 32.658533295918154 - type: nauc_map_at_5_std value: 0.029626452286996906 - type: nauc_mrr_at_1000_diff1 value: 19.521229956280603 - type: nauc_mrr_at_1000_max value: 44.39409866211472 - type: nauc_mrr_at_1000_std value: 23.580697307036058 - type: nauc_mrr_at_100_diff1 value: 19.51312676591073 - type: nauc_mrr_at_100_max value: 44.39559153963895 - type: nauc_mrr_at_100_std value: 23.57913711397437 - type: nauc_mrr_at_10_diff1 value: 19.584635617935145 - type: nauc_mrr_at_10_max value: 44.44842226236198 - type: nauc_mrr_at_10_std value: 23.382684909390434 - type: nauc_mrr_at_1_diff1 value: 20.92594790923806 - type: nauc_mrr_at_1_max value: 40.593939625252816 - type: nauc_mrr_at_1_std value: 20.37467598073644 - type: nauc_mrr_at_20_diff1 value: 19.590641822115725 - type: nauc_mrr_at_20_max value: 44.42512299604718 - type: nauc_mrr_at_20_std value: 23.45564260800024 - type: nauc_mrr_at_3_diff1 value: 20.005307129527232 - type: nauc_mrr_at_3_max value: 43.68300366192776 - type: nauc_mrr_at_3_std value: 22.297190480842005 - type: nauc_mrr_at_5_diff1 value: 19.852896386271716 - type: nauc_mrr_at_5_max value: 44.20641808920062 - type: nauc_mrr_at_5_std value: 22.966517330852895 - type: nauc_ndcg_at_1000_diff1 value: 17.800116251376103 - type: nauc_ndcg_at_1000_max value: 50.98332718061365 - type: nauc_ndcg_at_1000_std value: 31.464484658102577 - type: 
nauc_ndcg_at_100_diff1 value: 19.555159680541088 - type: nauc_ndcg_at_100_max value: 48.56377130899141 - type: nauc_ndcg_at_100_std value: 25.77572748714817 - type: nauc_ndcg_at_10_diff1 value: 20.003008726679415 - type: nauc_ndcg_at_10_max value: 45.1293725480628 - type: nauc_ndcg_at_10_std value: 21.149213260765872 - type: nauc_ndcg_at_1_diff1 value: 21.00986278773023 - type: nauc_ndcg_at_1_max value: 40.524637076774894 - type: nauc_ndcg_at_1_std value: 20.29682194006685 - type: nauc_ndcg_at_20_diff1 value: 20.659734137312284 - type: nauc_ndcg_at_20_max value: 45.73108736599869 - type: nauc_ndcg_at_20_std value: 21.200736170346133 - type: nauc_ndcg_at_3_diff1 value: 19.200120542882544 - type: nauc_ndcg_at_3_max value: 42.89772612963168 - type: nauc_ndcg_at_3_std value: 20.713292754978983 - type: nauc_ndcg_at_5_diff1 value: 19.96329647992544 - type: nauc_ndcg_at_5_max value: 44.296627037787324 - type: nauc_ndcg_at_5_std value: 21.200135784971973 - type: nauc_precision_at_1000_diff1 value: -11.543221249009427 - type: nauc_precision_at_1000_max value: 9.132801614448221 - type: nauc_precision_at_1000_std value: 21.203720655381055 - type: nauc_precision_at_100_diff1 value: -12.510945425786039 - type: nauc_precision_at_100_max value: 31.42530963666252 - type: nauc_precision_at_100_std value: 44.99672783467617 - type: nauc_precision_at_10_diff1 value: -4.025802651746804 - type: nauc_precision_at_10_max value: 47.50967924227793 - type: nauc_precision_at_10_std value: 41.1558559268985 - type: nauc_precision_at_1_diff1 value: 21.00986278773023 - type: nauc_precision_at_1_max value: 40.524637076774894 - type: nauc_precision_at_1_std value: 20.29682194006685 - type: nauc_precision_at_20_diff1 value: -8.059482951110002 - type: nauc_precision_at_20_max value: 44.28832115946278 - type: nauc_precision_at_20_std value: 45.2005585353651 - type: nauc_precision_at_3_diff1 value: 8.53530005716248 - type: nauc_precision_at_3_max value: 46.48353678905102 - type: nauc_precision_at_3_std value: 28.868791323881972 - type: nauc_precision_at_5_diff1 value: 3.093619954821814 - type: nauc_precision_at_5_max value: 48.43294475817019 - type: nauc_precision_at_5_std value: 34.83430452745434 - type: nauc_recall_at_1000_diff1 value: 9.93680206699751 - type: nauc_recall_at_1000_max value: 52.97840222394363 - type: nauc_recall_at_1000_std value: 46.370023604436255 - type: nauc_recall_at_100_diff1 value: 14.100542445524972 - type: nauc_recall_at_100_max value: 42.853775131475224 - type: nauc_recall_at_100_std value: 26.93029971231028 - type: nauc_recall_at_10_diff1 value: 22.774547475714716 - type: nauc_recall_at_10_max value: 33.984586405015044 - type: nauc_recall_at_10_std value: 5.332325172373655 - type: nauc_recall_at_1_diff1 value: 40.382365672683214 - type: nauc_recall_at_1_max value: 22.894341150096785 - type: nauc_recall_at_1_std value: -5.610725673968323 - type: nauc_recall_at_20_diff1 value: 19.751060483835936 - type: nauc_recall_at_20_max value: 36.18774034635102 - type: nauc_recall_at_20_std value: 10.362242090308577 - type: nauc_recall_at_3_diff1 value: 30.29462372902671 - type: nauc_recall_at_3_max value: 27.377175450099635 - type: nauc_recall_at_3_std value: -3.015752705993425 - type: nauc_recall_at_5_diff1 value: 28.096893312615723 - type: nauc_recall_at_5_max value: 30.485075571512425 - type: nauc_recall_at_5_std value: 0.09106417003502826 - type: ndcg_at_1 value: 27.248 - type: ndcg_at_10 value: 28.316000000000003 - type: ndcg_at_100 value: 33.419 - type: ndcg_at_1000 value: 38.134 - type: ndcg_at_20 value: 
29.707 - type: ndcg_at_3 value: 26.93 - type: ndcg_at_5 value: 27.363 - type: precision_at_1 value: 27.248 - type: precision_at_10 value: 15.073 - type: precision_at_100 value: 5.061 - type: precision_at_1000 value: 1.325 - type: precision_at_20 value: 11.407 - type: precision_at_3 value: 21.823 - type: precision_at_5 value: 18.984 - type: recall_at_1 value: 8.667 - type: recall_at_10 value: 26.984 - type: recall_at_100 value: 49.753 - type: recall_at_1000 value: 70.354 - type: recall_at_20 value: 33.955999999999996 - type: recall_at_3 value: 16.086 - type: recall_at_5 value: 20.544999999999998 - task: type: Retrieval dataset: name: MTEB XMarket (es) type: jinaai/xmarket_ml config: es split: test revision: dfe57acff5b62c23732a7b7d3e3fb84ff501708b metrics: - type: main_score value: 26.592 - type: map_at_1 value: 8.081000000000001 - type: map_at_10 value: 16.486 - type: map_at_100 value: 19.996 - type: map_at_1000 value: 20.889 - type: map_at_20 value: 18.088 - type: map_at_3 value: 12.864 - type: map_at_5 value: 14.515 - type: mrr_at_1 value: 24.643356643356643 - type: mrr_at_10 value: 33.755599955599926 - type: mrr_at_100 value: 34.55914769326114 - type: mrr_at_1000 value: 34.614384237219745 - type: mrr_at_20 value: 34.228909650276194 - type: mrr_at_3 value: 31.445221445221456 - type: mrr_at_5 value: 32.71375291375297 - type: nauc_map_at_1000_diff1 value: 19.17751654240679 - type: nauc_map_at_1000_max value: 43.493743561136434 - type: nauc_map_at_1000_std value: 21.14477911550252 - type: nauc_map_at_100_diff1 value: 20.259227234415395 - type: nauc_map_at_100_max value: 42.510860292169106 - type: nauc_map_at_100_std value: 18.63085160442346 - type: nauc_map_at_10_diff1 value: 24.12419385640694 - type: nauc_map_at_10_max value: 35.99892932069915 - type: nauc_map_at_10_std value: 8.488520124325058 - type: nauc_map_at_1_diff1 value: 35.09239143996649 - type: nauc_map_at_1_max value: 23.72498533914286 - type: nauc_map_at_1_std value: -4.164387883546102 - type: nauc_map_at_20_diff1 value: 22.411418237320817 - type: nauc_map_at_20_max value: 39.12496266094892 - type: nauc_map_at_20_std value: 12.371656353894227 - type: nauc_map_at_3_diff1 value: 28.106972376813506 - type: nauc_map_at_3_max value: 29.57824316865409 - type: nauc_map_at_3_std value: 1.8928791254813127 - type: nauc_map_at_5_diff1 value: 26.4958239149419 - type: nauc_map_at_5_max value: 32.45906016649239 - type: nauc_map_at_5_std value: 4.612735963224018 - type: nauc_mrr_at_1000_diff1 value: 17.614812607094446 - type: nauc_mrr_at_1000_max value: 41.13031556228715 - type: nauc_mrr_at_1000_std value: 22.564112871230318 - type: nauc_mrr_at_100_diff1 value: 17.614044568011085 - type: nauc_mrr_at_100_max value: 41.129436273086796 - type: nauc_mrr_at_100_std value: 22.566763500658766 - type: nauc_mrr_at_10_diff1 value: 17.61869494452089 - type: nauc_mrr_at_10_max value: 41.091542329381426 - type: nauc_mrr_at_10_std value: 22.370473458633594 - type: nauc_mrr_at_1_diff1 value: 20.321421442201913 - type: nauc_mrr_at_1_max value: 38.36531448180009 - type: nauc_mrr_at_1_std value: 18.422203207777688 - type: nauc_mrr_at_20_diff1 value: 17.614767736091625 - type: nauc_mrr_at_20_max value: 41.11221420736687 - type: nauc_mrr_at_20_std value: 22.44271891522012 - type: nauc_mrr_at_3_diff1 value: 17.98184651584625 - type: nauc_mrr_at_3_max value: 40.424293610470144 - type: nauc_mrr_at_3_std value: 21.554750947206706 - type: nauc_mrr_at_5_diff1 value: 17.72088314927416 - type: nauc_mrr_at_5_max value: 40.662724739072694 - type: nauc_mrr_at_5_std value: 
21.822957528431928 - type: nauc_ndcg_at_1000_diff1 value: 15.310699428328398 - type: nauc_ndcg_at_1000_max value: 48.83921393349997 - type: nauc_ndcg_at_1000_std value: 32.22600294110774 - type: nauc_ndcg_at_100_diff1 value: 16.62672763977423 - type: nauc_ndcg_at_100_max value: 47.36060653537392 - type: nauc_ndcg_at_100_std value: 27.879865162871575 - type: nauc_ndcg_at_10_diff1 value: 16.436684176028116 - type: nauc_ndcg_at_10_max value: 43.00026520872974 - type: nauc_ndcg_at_10_std value: 22.507354939162806 - type: nauc_ndcg_at_1_diff1 value: 20.321421442201913 - type: nauc_ndcg_at_1_max value: 38.36531448180009 - type: nauc_ndcg_at_1_std value: 18.422203207777688 - type: nauc_ndcg_at_20_diff1 value: 17.127747123248835 - type: nauc_ndcg_at_20_max value: 44.57322943752733 - type: nauc_ndcg_at_20_std value: 23.146541187377036 - type: nauc_ndcg_at_3_diff1 value: 16.372742984728514 - type: nauc_ndcg_at_3_max value: 40.91938017883993 - type: nauc_ndcg_at_3_std value: 21.50917089194154 - type: nauc_ndcg_at_5_diff1 value: 16.40486505525073 - type: nauc_ndcg_at_5_max value: 41.94597203181329 - type: nauc_ndcg_at_5_std value: 22.068260809047562 - type: nauc_precision_at_1000_diff1 value: -15.9415313729527 - type: nauc_precision_at_1000_max value: 12.653329948983643 - type: nauc_precision_at_1000_std value: 26.371820703256173 - type: nauc_precision_at_100_diff1 value: -11.851070166675289 - type: nauc_precision_at_100_max value: 32.164365923950115 - type: nauc_precision_at_100_std value: 45.930226426725426 - type: nauc_precision_at_10_diff1 value: -3.1352660378259163 - type: nauc_precision_at_10_max value: 45.48359878733272 - type: nauc_precision_at_10_std value: 40.2917038044196 - type: nauc_precision_at_1_diff1 value: 20.321421442201913 - type: nauc_precision_at_1_max value: 38.36531448180009 - type: nauc_precision_at_1_std value: 18.422203207777688 - type: nauc_precision_at_20_diff1 value: -7.087513342144751 - type: nauc_precision_at_20_max value: 43.66272019058357 - type: nauc_precision_at_20_std value: 44.22863351071686 - type: nauc_precision_at_3_diff1 value: 7.836185032609045 - type: nauc_precision_at_3_max value: 44.85412904097269 - type: nauc_precision_at_3_std value: 30.209139149500057 - type: nauc_precision_at_5_diff1 value: 3.028150537253791 - type: nauc_precision_at_5_max value: 45.73661708882973 - type: nauc_precision_at_5_std value: 34.65500311185052 - type: nauc_recall_at_1000_diff1 value: 9.526124668370704 - type: nauc_recall_at_1000_max value: 51.4190208452196 - type: nauc_recall_at_1000_std value: 45.694891695646426 - type: nauc_recall_at_100_diff1 value: 12.68466215400009 - type: nauc_recall_at_100_max value: 42.79112054268112 - type: nauc_recall_at_100_std value: 28.61954251400998 - type: nauc_recall_at_10_diff1 value: 17.95124413416829 - type: nauc_recall_at_10_max value: 33.1192036755167 - type: nauc_recall_at_10_std value: 9.3588175959525 - type: nauc_recall_at_1_diff1 value: 35.09239143996649 - type: nauc_recall_at_1_max value: 23.72498533914286 - type: nauc_recall_at_1_std value: -4.164387883546102 - type: nauc_recall_at_20_diff1 value: 16.24916980445646 - type: nauc_recall_at_20_max value: 36.51316122236076 - type: nauc_recall_at_20_std value: 13.641588062425736 - type: nauc_recall_at_3_diff1 value: 23.263199724138786 - type: nauc_recall_at_3_max value: 27.67354561610614 - type: nauc_recall_at_3_std value: 3.103127242654415 - type: nauc_recall_at_5_diff1 value: 20.719704839229635 - type: nauc_recall_at_5_max value: 29.66480839111333 - type: nauc_recall_at_5_std value: 
5.514884455797986 - type: ndcg_at_1 value: 24.643 - type: ndcg_at_10 value: 26.592 - type: ndcg_at_100 value: 31.887 - type: ndcg_at_1000 value: 36.695 - type: ndcg_at_20 value: 28.166000000000004 - type: ndcg_at_3 value: 25.238 - type: ndcg_at_5 value: 25.545 - type: precision_at_1 value: 24.643 - type: precision_at_10 value: 13.730999999999998 - type: precision_at_100 value: 4.744000000000001 - type: precision_at_1000 value: 1.167 - type: precision_at_20 value: 10.562000000000001 - type: precision_at_3 value: 20.288999999999998 - type: precision_at_5 value: 17.337 - type: recall_at_1 value: 8.081000000000001 - type: recall_at_10 value: 25.911 - type: recall_at_100 value: 48.176 - type: recall_at_1000 value: 69.655 - type: recall_at_20 value: 32.924 - type: recall_at_3 value: 16.125 - type: recall_at_5 value: 19.988 - task: type: Retrieval dataset: name: MTEB XPQARetrieval (deu-deu) type: jinaai/xpqa config: deu-deu split: test revision: c99d599f0a6ab9b85b065da6f9d94f9cf731679f metrics: - type: main_score value: 84.552 - type: map_at_1 value: 59.023 - type: map_at_10 value: 81.051 - type: map_at_100 value: 81.539 - type: map_at_1000 value: 81.54299999999999 - type: map_at_20 value: 81.401 - type: map_at_3 value: 76.969 - type: map_at_5 value: 80.07600000000001 - type: mrr_at_1 value: 77.67624020887729 - type: mrr_at_10 value: 83.30509967259314 - type: mrr_at_100 value: 83.58599391639456 - type: mrr_at_1000 value: 83.58970114722587 - type: mrr_at_20 value: 83.50275980440317 - type: mrr_at_3 value: 82.07136640557006 - type: mrr_at_5 value: 82.94604003481287 - type: nauc_map_at_1000_diff1 value: 63.12885104269942 - type: nauc_map_at_1000_max value: 57.7017996674959 - type: nauc_map_at_1000_std value: -24.951068985070513 - type: nauc_map_at_100_diff1 value: 63.12866509393162 - type: nauc_map_at_100_max value: 57.70176426013332 - type: nauc_map_at_100_std value: -24.96012290790273 - type: nauc_map_at_10_diff1 value: 62.847709436211204 - type: nauc_map_at_10_max value: 57.408873624779524 - type: nauc_map_at_10_std value: -25.635130363219062 - type: nauc_map_at_1_diff1 value: 71.89683981857102 - type: nauc_map_at_1_max value: 20.204460967432645 - type: nauc_map_at_1_std value: -23.07894656629493 - type: nauc_map_at_20_diff1 value: 63.00504457011043 - type: nauc_map_at_20_max value: 57.66009512514262 - type: nauc_map_at_20_std value: -25.100138593754885 - type: nauc_map_at_3_diff1 value: 63.199874607788274 - type: nauc_map_at_3_max value: 47.54482033763308 - type: nauc_map_at_3_std value: -27.714557098916963 - type: nauc_map_at_5_diff1 value: 63.01006523518669 - type: nauc_map_at_5_max value: 56.501965964288495 - type: nauc_map_at_5_std value: -25.367825762790925 - type: nauc_mrr_at_1000_diff1 value: 66.24988063948112 - type: nauc_mrr_at_1000_max value: 63.56921667744273 - type: nauc_mrr_at_1000_std value: -22.073973768031863 - type: nauc_mrr_at_100_diff1 value: 66.24919554296275 - type: nauc_mrr_at_100_max value: 63.57382447608361 - type: nauc_mrr_at_100_std value: -22.084627248538187 - type: nauc_mrr_at_10_diff1 value: 66.0143885124066 - type: nauc_mrr_at_10_max value: 63.51277586011898 - type: nauc_mrr_at_10_std value: -22.477523960705454 - type: nauc_mrr_at_1_diff1 value: 68.25415199323474 - type: nauc_mrr_at_1_max value: 63.069019003272416 - type: nauc_mrr_at_1_std value: -18.77085924093244 - type: nauc_mrr_at_20_diff1 value: 66.16203167351055 - type: nauc_mrr_at_20_max value: 63.607477776215845 - type: nauc_mrr_at_20_std value: -22.15083176017266 - type: nauc_mrr_at_3_diff1 value: 
66.39368842782302 - type: nauc_mrr_at_3_max value: 63.11411066585295 - type: nauc_mrr_at_3_std value: -22.63174342814071 - type: nauc_mrr_at_5_diff1 value: 66.17932562332354 - type: nauc_mrr_at_5_max value: 63.70434825329594 - type: nauc_mrr_at_5_std value: -21.704012812430438 - type: nauc_ndcg_at_1000_diff1 value: 63.958010361549356 - type: nauc_ndcg_at_1000_max value: 60.516445000134624 - type: nauc_ndcg_at_1000_std value: -24.264672248289923 - type: nauc_ndcg_at_100_diff1 value: 63.97654644758022 - type: nauc_ndcg_at_100_max value: 60.62187552803407 - type: nauc_ndcg_at_100_std value: -24.317149225778312 - type: nauc_ndcg_at_10_diff1 value: 62.505321221321566 - type: nauc_ndcg_at_10_max value: 59.77891112351258 - type: nauc_ndcg_at_10_std value: -26.90910005589911 - type: nauc_ndcg_at_1_diff1 value: 68.25415199323474 - type: nauc_ndcg_at_1_max value: 63.069019003272416 - type: nauc_ndcg_at_1_std value: -18.77085924093244 - type: nauc_ndcg_at_20_diff1 value: 63.04281805056225 - type: nauc_ndcg_at_20_max value: 60.600957307444226 - type: nauc_ndcg_at_20_std value: -24.954862079889203 - type: nauc_ndcg_at_3_diff1 value: 62.970441139740316 - type: nauc_ndcg_at_3_max value: 57.543715669055295 - type: nauc_ndcg_at_3_std value: -25.659388431714703 - type: nauc_ndcg_at_5_diff1 value: 62.82652127664541 - type: nauc_ndcg_at_5_max value: 58.6970443258532 - type: nauc_ndcg_at_5_std value: -25.66329354851023 - type: nauc_precision_at_1000_diff1 value: -33.38530947486223 - type: nauc_precision_at_1000_max value: 25.972468024345414 - type: nauc_precision_at_1000_std value: 17.460222955117978 - type: nauc_precision_at_100_diff1 value: -32.45175999251703 - type: nauc_precision_at_100_max value: 26.367996120487337 - type: nauc_precision_at_100_std value: 17.097957946391208 - type: nauc_precision_at_10_diff1 value: -26.97411235289487 - type: nauc_precision_at_10_max value: 31.504961687240762 - type: nauc_precision_at_10_std value: 11.125341183874687 - type: nauc_precision_at_1_diff1 value: 68.25415199323474 - type: nauc_precision_at_1_max value: 63.069019003272416 - type: nauc_precision_at_1_std value: -18.77085924093244 - type: nauc_precision_at_20_diff1 value: -29.8678078736273 - type: nauc_precision_at_20_max value: 29.031222186584504 - type: nauc_precision_at_20_std value: 14.943600563087928 - type: nauc_precision_at_3_diff1 value: -15.92947221299854 - type: nauc_precision_at_3_max value: 37.73833494235097 - type: nauc_precision_at_3_std value: 3.1573228443500847 - type: nauc_precision_at_5_diff1 value: -22.269156821101642 - type: nauc_precision_at_5_max value: 35.65821838116355 - type: nauc_precision_at_5_std value: 9.265930386198972 - type: nauc_recall_at_1000_diff1 value: .nan - type: nauc_recall_at_1000_max value: .nan - type: nauc_recall_at_1000_std value: .nan - type: nauc_recall_at_100_diff1 value: 66.17058859539249 - type: nauc_recall_at_100_max value: 78.066942935192 - type: nauc_recall_at_100_std value: -22.213377762074686 - type: nauc_recall_at_10_diff1 value: 50.82149700700275 - type: nauc_recall_at_10_max value: 56.68053325008221 - type: nauc_recall_at_10_std value: -41.81657941433277 - type: nauc_recall_at_1_diff1 value: 71.89683981857102 - type: nauc_recall_at_1_max value: 20.204460967432645 - type: nauc_recall_at_1_std value: -23.07894656629493 - type: nauc_recall_at_20_diff1 value: 48.28076011857885 - type: nauc_recall_at_20_max value: 63.29641555519295 - type: nauc_recall_at_20_std value: -32.953559708819405 - type: nauc_recall_at_3_diff1 value: 58.15516956312558 - type: 
nauc_recall_at_3_max value: 42.66315890283056 - type: nauc_recall_at_3_std value: -32.16572530544806 - type: nauc_recall_at_5_diff1 value: 55.900844052439766 - type: nauc_recall_at_5_max value: 55.23702018862884 - type: nauc_recall_at_5_std value: -30.105929528165 - type: ndcg_at_1 value: 77.676 - type: ndcg_at_10 value: 84.552 - type: ndcg_at_100 value: 86.232 - type: ndcg_at_1000 value: 86.33800000000001 - type: ndcg_at_20 value: 85.515 - type: ndcg_at_3 value: 81.112 - type: ndcg_at_5 value: 82.943 - type: precision_at_1 value: 77.676 - type: precision_at_10 value: 15.17 - type: precision_at_100 value: 1.6230000000000002 - type: precision_at_1000 value: 0.163 - type: precision_at_20 value: 7.858999999999999 - type: precision_at_3 value: 42.994 - type: precision_at_5 value: 28.747 - type: recall_at_1 value: 59.023 - type: recall_at_10 value: 92.465 - type: recall_at_100 value: 99.18400000000001 - type: recall_at_1000 value: 100.0 - type: recall_at_20 value: 95.844 - type: recall_at_3 value: 81.826 - type: recall_at_5 value: 88.22 - task: type: Retrieval dataset: name: MTEB XPQARetrieval (deu-eng) type: jinaai/xpqa config: deu-eng split: test revision: c99d599f0a6ab9b85b065da6f9d94f9cf731679f metrics: - type: main_score value: 82.149 - type: map_at_1 value: 56.277 - type: map_at_10 value: 78.36999999999999 - type: map_at_100 value: 78.94 - type: map_at_1000 value: 78.95 - type: map_at_20 value: 78.818 - type: map_at_3 value: 74.25 - type: map_at_5 value: 77.11099999999999 - type: mrr_at_1 value: 74.28198433420366 - type: mrr_at_10 value: 80.57487877657589 - type: mrr_at_100 value: 80.94025764149008 - type: mrr_at_1000 value: 80.94608738871234 - type: mrr_at_20 value: 80.86240675885023 - type: mrr_at_3 value: 79.4604003481288 - type: mrr_at_5 value: 80.10008703220191 - type: nauc_map_at_1000_diff1 value: 60.44369249057189 - type: nauc_map_at_1000_max value: 49.822240441830246 - type: nauc_map_at_1000_std value: -27.34026380762817 - type: nauc_map_at_100_diff1 value: 60.44635668050401 - type: nauc_map_at_100_max value: 49.838675926660684 - type: nauc_map_at_100_std value: -27.310365556055583 - type: nauc_map_at_10_diff1 value: 60.18546951726522 - type: nauc_map_at_10_max value: 49.72075398096832 - type: nauc_map_at_10_std value: -27.86056102461558 - type: nauc_map_at_1_diff1 value: 71.2906657099758 - type: nauc_map_at_1_max value: 18.970399251589 - type: nauc_map_at_1_std value: -27.260776614286602 - type: nauc_map_at_20_diff1 value: 60.3525975566164 - type: nauc_map_at_20_max value: 49.852487866710646 - type: nauc_map_at_20_std value: -27.305173830170332 - type: nauc_map_at_3_diff1 value: 60.66803500571236 - type: nauc_map_at_3_max value: 41.18191941521972 - type: nauc_map_at_3_std value: -28.71383593401732 - type: nauc_map_at_5_diff1 value: 60.57216514504887 - type: nauc_map_at_5_max value: 47.99837400446299 - type: nauc_map_at_5_std value: -28.756183015949986 - type: nauc_mrr_at_1000_diff1 value: 63.77031955602516 - type: nauc_mrr_at_1000_max value: 54.26907383811417 - type: nauc_mrr_at_1000_std value: -26.227442087164714 - type: nauc_mrr_at_100_diff1 value: 63.77196650108669 - type: nauc_mrr_at_100_max value: 54.281801457913126 - type: nauc_mrr_at_100_std value: -26.216077891830793 - type: nauc_mrr_at_10_diff1 value: 63.50095284903051 - type: nauc_mrr_at_10_max value: 54.3186301730016 - type: nauc_mrr_at_10_std value: -26.29570241722173 - type: nauc_mrr_at_1_diff1 value: 65.15855770999057 - type: nauc_mrr_at_1_max value: 53.213286738515066 - type: nauc_mrr_at_1_std value: 
-24.683178252901943 - type: nauc_mrr_at_20_diff1 value: 63.74936550280859 - type: nauc_mrr_at_20_max value: 54.355343751439065 - type: nauc_mrr_at_20_std value: -26.197316900009817 - type: nauc_mrr_at_3_diff1 value: 63.912612979082695 - type: nauc_mrr_at_3_max value: 53.75399024225975 - type: nauc_mrr_at_3_std value: -27.194143264554675 - type: nauc_mrr_at_5_diff1 value: 63.72491059053639 - type: nauc_mrr_at_5_max value: 53.66107604019352 - type: nauc_mrr_at_5_std value: -26.92281560584754 - type: nauc_ndcg_at_1000_diff1 value: 61.304218998714354 - type: nauc_ndcg_at_1000_max value: 52.409135743660386 - type: nauc_ndcg_at_1000_std value: -26.539796489464056 - type: nauc_ndcg_at_100_diff1 value: 61.40355045085304 - type: nauc_ndcg_at_100_max value: 52.79402259608008 - type: nauc_ndcg_at_100_std value: -25.927273456979965 - type: nauc_ndcg_at_10_diff1 value: 59.93675608684116 - type: nauc_ndcg_at_10_max value: 52.617848197542706 - type: nauc_ndcg_at_10_std value: -27.314820020095887 - type: nauc_ndcg_at_1_diff1 value: 65.15855770999057 - type: nauc_ndcg_at_1_max value: 53.213286738515066 - type: nauc_ndcg_at_1_std value: -24.683178252901943 - type: nauc_ndcg_at_20_diff1 value: 60.85093704358376 - type: nauc_ndcg_at_20_max value: 53.14529242671602 - type: nauc_ndcg_at_20_std value: -25.93187916231906 - type: nauc_ndcg_at_3_diff1 value: 60.42301123518882 - type: nauc_ndcg_at_3_max value: 49.59021992975956 - type: nauc_ndcg_at_3_std value: -27.397117967810363 - type: nauc_ndcg_at_5_diff1 value: 60.78655153154219 - type: nauc_ndcg_at_5_max value: 49.54194799556953 - type: nauc_ndcg_at_5_std value: -29.467910172913413 - type: nauc_precision_at_1000_diff1 value: -34.35027108027456 - type: nauc_precision_at_1000_max value: 23.762671066858815 - type: nauc_precision_at_1000_std value: 16.1704780298982 - type: nauc_precision_at_100_diff1 value: -32.66610016754961 - type: nauc_precision_at_100_max value: 25.504044603109588 - type: nauc_precision_at_100_std value: 16.932402988816786 - type: nauc_precision_at_10_diff1 value: -25.720903145017342 - type: nauc_precision_at_10_max value: 30.37029690599926 - type: nauc_precision_at_10_std value: 10.560753160200314 - type: nauc_precision_at_1_diff1 value: 65.15855770999057 - type: nauc_precision_at_1_max value: 53.213286738515066 - type: nauc_precision_at_1_std value: -24.683178252901943 - type: nauc_precision_at_20_diff1 value: -29.577582332619084 - type: nauc_precision_at_20_max value: 27.984145595920417 - type: nauc_precision_at_20_std value: 15.083711704044727 - type: nauc_precision_at_3_diff1 value: -14.736267532892697 - type: nauc_precision_at_3_max value: 36.12211021824307 - type: nauc_precision_at_3_std value: 3.068643876519412 - type: nauc_precision_at_5_diff1 value: -19.846707283120825 - type: nauc_precision_at_5_max value: 33.573804532177896 - type: nauc_precision_at_5_std value: 5.700545622744924 - type: nauc_recall_at_1000_diff1 value: .nan - type: nauc_recall_at_1000_max value: .nan - type: nauc_recall_at_1000_std value: .nan - type: nauc_recall_at_100_diff1 value: 68.24749796604452 - type: nauc_recall_at_100_max value: 83.30024864929815 - type: nauc_recall_at_100_std value: 21.23763053711522 - type: nauc_recall_at_10_diff1 value: 50.704049683241436 - type: nauc_recall_at_10_max value: 57.64578984555556 - type: nauc_recall_at_10_std value: -26.632759037746073 - type: nauc_recall_at_1_diff1 value: 71.2906657099758 - type: nauc_recall_at_1_max value: 18.970399251589 - type: nauc_recall_at_1_std value: -27.260776614286602 - type: 
nauc_recall_at_20_diff1 value: 54.124480837579505 - type: nauc_recall_at_20_max value: 66.4641515433479 - type: nauc_recall_at_20_std value: -14.615911455379393 - type: nauc_recall_at_3_diff1 value: 56.54358788321059 - type: nauc_recall_at_3_max value: 37.765735322465744 - type: nauc_recall_at_3_std value: -30.824147408598574 - type: nauc_recall_at_5_diff1 value: 56.392894535029214 - type: nauc_recall_at_5_max value: 45.959268387521554 - type: nauc_recall_at_5_std value: -33.58175576925282 - type: ndcg_at_1 value: 74.28200000000001 - type: ndcg_at_10 value: 82.149 - type: ndcg_at_100 value: 84.129 - type: ndcg_at_1000 value: 84.307 - type: ndcg_at_20 value: 83.39999999999999 - type: ndcg_at_3 value: 78.583 - type: ndcg_at_5 value: 80.13900000000001 - type: precision_at_1 value: 74.28200000000001 - type: precision_at_10 value: 14.960999999999999 - type: precision_at_100 value: 1.6119999999999999 - type: precision_at_1000 value: 0.163 - type: precision_at_20 value: 7.813000000000001 - type: precision_at_3 value: 41.819 - type: precision_at_5 value: 27.911 - type: recall_at_1 value: 56.277 - type: recall_at_10 value: 90.729 - type: recall_at_100 value: 98.792 - type: recall_at_1000 value: 100.0 - type: recall_at_20 value: 95.148 - type: recall_at_3 value: 79.989 - type: recall_at_5 value: 85.603 - task: type: Retrieval dataset: name: MTEB XPQARetrieval (eng-deu) type: jinaai/xpqa config: eng-deu split: test revision: c99d599f0a6ab9b85b065da6f9d94f9cf731679f metrics: - type: main_score value: 60.428000000000004 - type: map_at_1 value: 33.453 - type: map_at_10 value: 54.217000000000006 - type: map_at_100 value: 55.832 - type: map_at_1000 value: 55.884 - type: map_at_20 value: 55.236 - type: map_at_3 value: 48.302 - type: map_at_5 value: 51.902 - type: mrr_at_1 value: 53.916449086161876 - type: mrr_at_10 value: 61.4685647975465 - type: mrr_at_100 value: 62.13718159287348 - type: mrr_at_1000 value: 62.15799113826325 - type: mrr_at_20 value: 61.885388764243544 - type: mrr_at_3 value: 59.44299390774582 - type: mrr_at_5 value: 60.26544821583981 - type: nauc_map_at_1000_diff1 value: 39.824412602121804 - type: nauc_map_at_1000_max value: 39.49332709959374 - type: nauc_map_at_1000_std value: -17.27462623749702 - type: nauc_map_at_100_diff1 value: 39.80528910003463 - type: nauc_map_at_100_max value: 39.51471609156093 - type: nauc_map_at_100_std value: -17.275536933094937 - type: nauc_map_at_10_diff1 value: 39.28558292349772 - type: nauc_map_at_10_max value: 38.13220294838968 - type: nauc_map_at_10_std value: -18.235985574392863 - type: nauc_map_at_1_diff1 value: 43.68892397816937 - type: nauc_map_at_1_max value: 14.478978190224353 - type: nauc_map_at_1_std value: -18.435031919225477 - type: nauc_map_at_20_diff1 value: 39.8733530971344 - type: nauc_map_at_20_max value: 39.30513202591992 - type: nauc_map_at_20_std value: -17.62362848144766 - type: nauc_map_at_3_diff1 value: 40.31116611188815 - type: nauc_map_at_3_max value: 31.107314675202165 - type: nauc_map_at_3_std value: -19.52930881946966 - type: nauc_map_at_5_diff1 value: 39.1241499095765 - type: nauc_map_at_5_max value: 37.330543901034055 - type: nauc_map_at_5_std value: -17.893862772447548 - type: nauc_mrr_at_1000_diff1 value: 43.07490530140024 - type: nauc_mrr_at_1000_max value: 42.28469195779226 - type: nauc_mrr_at_1000_std value: -15.583217110180737 - type: nauc_mrr_at_100_diff1 value: 43.068836494603886 - type: nauc_mrr_at_100_max value: 42.29612450479168 - type: nauc_mrr_at_100_std value: -15.57218089438229 - type: nauc_mrr_at_10_diff1 value: 
42.88685919151777 - type: nauc_mrr_at_10_max value: 41.89944452003811 - type: nauc_mrr_at_10_std value: -15.909673572763165 - type: nauc_mrr_at_1_diff1 value: 45.67646898532131 - type: nauc_mrr_at_1_max value: 43.0541870425035 - type: nauc_mrr_at_1_std value: -15.597124291613563 - type: nauc_mrr_at_20_diff1 value: 43.14141873150977 - type: nauc_mrr_at_20_max value: 42.33063543184022 - type: nauc_mrr_at_20_std value: -15.607612016107304 - type: nauc_mrr_at_3_diff1 value: 43.18370928261982 - type: nauc_mrr_at_3_max value: 42.18529980773961 - type: nauc_mrr_at_3_std value: -15.900151400673629 - type: nauc_mrr_at_5_diff1 value: 42.43443044877765 - type: nauc_mrr_at_5_max value: 42.05818605278972 - type: nauc_mrr_at_5_std value: -15.436502733299893 - type: nauc_ndcg_at_1000_diff1 value: 40.60606676178781 - type: nauc_ndcg_at_1000_max value: 41.71923393878376 - type: nauc_ndcg_at_1000_std value: -15.694740326899556 - type: nauc_ndcg_at_100_diff1 value: 40.15270376312309 - type: nauc_ndcg_at_100_max value: 42.234126305709225 - type: nauc_ndcg_at_100_std value: -15.436051984708952 - type: nauc_ndcg_at_10_diff1 value: 39.142259831299455 - type: nauc_ndcg_at_10_max value: 38.61470104273746 - type: nauc_ndcg_at_10_std value: -18.577452829132742 - type: nauc_ndcg_at_1_diff1 value: 45.67646898532131 - type: nauc_ndcg_at_1_max value: 43.0541870425035 - type: nauc_ndcg_at_1_std value: -15.597124291613563 - type: nauc_ndcg_at_20_diff1 value: 40.805159395901306 - type: nauc_ndcg_at_20_max value: 41.58685629374952 - type: nauc_ndcg_at_20_std value: -16.862408156222592 - type: nauc_ndcg_at_3_diff1 value: 39.12028215488432 - type: nauc_ndcg_at_3_max value: 39.70580596343164 - type: nauc_ndcg_at_3_std value: -16.705546903936213 - type: nauc_ndcg_at_5_diff1 value: 38.42075404927361 - type: nauc_ndcg_at_5_max value: 38.064219879504385 - type: nauc_ndcg_at_5_std value: -17.20282111665876 - type: nauc_precision_at_1000_diff1 value: -4.419224540552891 - type: nauc_precision_at_1000_max value: 35.686022591225246 - type: nauc_precision_at_1000_std value: 15.023520191032972 - type: nauc_precision_at_100_diff1 value: -2.9027602601603895 - type: nauc_precision_at_100_max value: 39.99864013028808 - type: nauc_precision_at_100_std value: 13.863497117255525 - type: nauc_precision_at_10_diff1 value: 5.539104839809501 - type: nauc_precision_at_10_max value: 42.41625740557432 - type: nauc_precision_at_10_std value: 1.0894693748662556 - type: nauc_precision_at_1_diff1 value: 45.67646898532131 - type: nauc_precision_at_1_max value: 43.0541870425035 - type: nauc_precision_at_1_std value: -15.597124291613563 - type: nauc_precision_at_20_diff1 value: 4.734562571681868 - type: nauc_precision_at_20_max value: 44.35081213316202 - type: nauc_precision_at_20_std value: 6.642891478284595 - type: nauc_precision_at_3_diff1 value: 13.936559341472101 - type: nauc_precision_at_3_max value: 45.426668552497524 - type: nauc_precision_at_3_std value: -5.219785419247125 - type: nauc_precision_at_5_diff1 value: 8.366706789546015 - type: nauc_precision_at_5_max value: 46.161942989326896 - type: nauc_precision_at_5_std value: -0.193140343545876 - type: nauc_recall_at_1000_diff1 value: 45.61785312444842 - type: nauc_recall_at_1000_max value: 75.68258976531774 - type: nauc_recall_at_1000_std value: 37.469059422121575 - type: nauc_recall_at_100_diff1 value: 26.798748531805096 - type: nauc_recall_at_100_max value: 54.72134095197765 - type: nauc_recall_at_100_std value: -1.5967608233799417 - type: nauc_recall_at_10_diff1 value: 32.13211696200521 - type: 
nauc_recall_at_10_max value: 31.13866254975895 - type: nauc_recall_at_10_std value: -22.31404161136118 - type: nauc_recall_at_1_diff1 value: 43.68892397816937 - type: nauc_recall_at_1_max value: 14.478978190224353 - type: nauc_recall_at_1_std value: -18.435031919225477 - type: nauc_recall_at_20_diff1 value: 38.597996930461385 - type: nauc_recall_at_20_max value: 42.49849027366794 - type: nauc_recall_at_20_std value: -16.536471900752154 - type: nauc_recall_at_3_diff1 value: 35.343730012759266 - type: nauc_recall_at_3_max value: 26.898722085043392 - type: nauc_recall_at_3_std value: -19.4459792273884 - type: nauc_recall_at_5_diff1 value: 31.8310298012186 - type: nauc_recall_at_5_max value: 32.67800489655844 - type: nauc_recall_at_5_std value: -16.800929103347283 - type: ndcg_at_1 value: 53.916 - type: ndcg_at_10 value: 60.428000000000004 - type: ndcg_at_100 value: 65.95 - type: ndcg_at_1000 value: 66.88 - type: ndcg_at_20 value: 62.989 - type: ndcg_at_3 value: 55.204 - type: ndcg_at_5 value: 56.42700000000001 - type: precision_at_1 value: 53.916 - type: precision_at_10 value: 14.346999999999998 - type: precision_at_100 value: 1.849 - type: precision_at_1000 value: 0.196 - type: precision_at_20 value: 8.022 - type: precision_at_3 value: 34.552 - type: precision_at_5 value: 24.569 - type: recall_at_1 value: 33.453 - type: recall_at_10 value: 71.07900000000001 - type: recall_at_100 value: 93.207 - type: recall_at_1000 value: 99.60799999999999 - type: recall_at_20 value: 79.482 - type: recall_at_3 value: 53.98 - type: recall_at_5 value: 60.781 - task: type: Retrieval dataset: name: MTEB XPQARetrieval (eng-pol) type: jinaai/xpqa config: eng-pol split: test revision: c99d599f0a6ab9b85b065da6f9d94f9cf731679f metrics: - type: main_score value: 34.042 - type: map_at_1 value: 13.236 - type: map_at_10 value: 27.839999999999996 - type: map_at_100 value: 30.171999999999997 - type: map_at_1000 value: 30.349999999999998 - type: map_at_20 value: 29.044999999999998 - type: map_at_3 value: 22.58 - type: map_at_5 value: 25.83 - type: mrr_at_1 value: 30.318471337579616 - type: mrr_at_10 value: 37.4983823678091 - type: mrr_at_100 value: 38.5784523175009 - type: mrr_at_1000 value: 38.63608698968148 - type: mrr_at_20 value: 38.02996157871825 - type: mrr_at_3 value: 34.798301486199584 - type: mrr_at_5 value: 36.39702760084925 - type: nauc_map_at_1000_diff1 value: 21.07199789609177 - type: nauc_map_at_1000_max value: 25.959233507893277 - type: nauc_map_at_1000_std value: -28.011925372852826 - type: nauc_map_at_100_diff1 value: 21.086788412737548 - type: nauc_map_at_100_max value: 25.8611620203686 - type: nauc_map_at_100_std value: -28.179239912057515 - type: nauc_map_at_10_diff1 value: 21.23841745922078 - type: nauc_map_at_10_max value: 25.44290342378288 - type: nauc_map_at_10_std value: -28.75578689110275 - type: nauc_map_at_1_diff1 value: 28.87454015638211 - type: nauc_map_at_1_max value: 17.50681123879997 - type: nauc_map_at_1_std value: -30.382831850562432 - type: nauc_map_at_20_diff1 value: 21.076559713540455 - type: nauc_map_at_20_max value: 25.538154202494535 - type: nauc_map_at_20_std value: -28.518764617658555 - type: nauc_map_at_3_diff1 value: 22.159185358766468 - type: nauc_map_at_3_max value: 23.01652660927249 - type: nauc_map_at_3_std value: -29.567722713221862 - type: nauc_map_at_5_diff1 value: 21.35578810370897 - type: nauc_map_at_5_max value: 25.550550437767395 - type: nauc_map_at_5_std value: -28.7889035461355 - type: nauc_mrr_at_1000_diff1 value: 22.28633009221923 - type: nauc_mrr_at_1000_max 
value: 26.920205393136392 - type: nauc_mrr_at_1000_std value: -25.887791634977642 - type: nauc_mrr_at_100_diff1 value: 22.2754975739755 - type: nauc_mrr_at_100_max value: 26.90235716615346 - type: nauc_mrr_at_100_std value: -25.891596020584345 - type: nauc_mrr_at_10_diff1 value: 22.415076305593534 - type: nauc_mrr_at_10_max value: 26.504643796222222 - type: nauc_mrr_at_10_std value: -26.6046081215833 - type: nauc_mrr_at_1_diff1 value: 23.406748619244368 - type: nauc_mrr_at_1_max value: 29.058228240823553 - type: nauc_mrr_at_1_std value: -26.450169820901078 - type: nauc_mrr_at_20_diff1 value: 22.29233141817678 - type: nauc_mrr_at_20_max value: 26.69021351064081 - type: nauc_mrr_at_20_std value: -26.086596227376656 - type: nauc_mrr_at_3_diff1 value: 22.20746187500145 - type: nauc_mrr_at_3_max value: 27.143725946169457 - type: nauc_mrr_at_3_std value: -26.7017708594376 - type: nauc_mrr_at_5_diff1 value: 22.71898965233195 - type: nauc_mrr_at_5_max value: 26.932386658571662 - type: nauc_mrr_at_5_std value: -26.725541058780234 - type: nauc_ndcg_at_1000_diff1 value: 20.541734305148466 - type: nauc_ndcg_at_1000_max value: 27.180534238090758 - type: nauc_ndcg_at_1000_std value: -23.74197745177845 - type: nauc_ndcg_at_100_diff1 value: 20.570052839937468 - type: nauc_ndcg_at_100_max value: 26.21605034405486 - type: nauc_ndcg_at_100_std value: -25.359817188805028 - type: nauc_ndcg_at_10_diff1 value: 21.241423075073467 - type: nauc_ndcg_at_10_max value: 24.599199195239475 - type: nauc_ndcg_at_10_std value: -28.404540333309008 - type: nauc_ndcg_at_1_diff1 value: 23.406748619244368 - type: nauc_ndcg_at_1_max value: 29.058228240823553 - type: nauc_ndcg_at_1_std value: -26.450169820901078 - type: nauc_ndcg_at_20_diff1 value: 20.740460046196873 - type: nauc_ndcg_at_20_max value: 24.82380195169634 - type: nauc_ndcg_at_20_std value: -27.376298834244313 - type: nauc_ndcg_at_3_diff1 value: 19.994948682426504 - type: nauc_ndcg_at_3_max value: 26.153790759405105 - type: nauc_ndcg_at_3_std value: -27.194548404540885 - type: nauc_ndcg_at_5_diff1 value: 21.48414272096384 - type: nauc_ndcg_at_5_max value: 25.239652015076373 - type: nauc_ndcg_at_5_std value: -28.2620160957961 - type: nauc_precision_at_1000_diff1 value: -0.7557639926687744 - type: nauc_precision_at_1000_max value: 24.265591636994436 - type: nauc_precision_at_1000_std value: 16.833104654292654 - type: nauc_precision_at_100_diff1 value: 4.647847665941115 - type: nauc_precision_at_100_max value: 24.42192644844434 - type: nauc_precision_at_100_std value: 0.2718848568876648 - type: nauc_precision_at_10_diff1 value: 9.465969286722654 - type: nauc_precision_at_10_max value: 27.448993150448043 - type: nauc_precision_at_10_std value: -16.519099596502212 - type: nauc_precision_at_1_diff1 value: 23.406748619244368 - type: nauc_precision_at_1_max value: 29.058228240823553 - type: nauc_precision_at_1_std value: -26.450169820901078 - type: nauc_precision_at_20_diff1 value: 8.021421615668114 - type: nauc_precision_at_20_max value: 26.18556481398635 - type: nauc_precision_at_20_std value: -12.207152108668367 - type: nauc_precision_at_3_diff1 value: 11.783572803634241 - type: nauc_precision_at_3_max value: 29.259715774978893 - type: nauc_precision_at_3_std value: -20.407524967717425 - type: nauc_precision_at_5_diff1 value: 10.371728615220821 - type: nauc_precision_at_5_max value: 30.270642833482864 - type: nauc_precision_at_5_std value: -18.407334880575494 - type: nauc_recall_at_1000_diff1 value: 6.008969959111555 - type: nauc_recall_at_1000_max value: 
39.79691734058127 - type: nauc_recall_at_1000_std value: 32.43591825510109 - type: nauc_recall_at_100_diff1 value: 15.2374566058917 - type: nauc_recall_at_100_max value: 23.058785539503717 - type: nauc_recall_at_100_std value: -15.962888794058165 - type: nauc_recall_at_10_diff1 value: 19.46184821807753 - type: nauc_recall_at_10_max value: 19.001003513986866 - type: nauc_recall_at_10_std value: -27.753332786663876 - type: nauc_recall_at_1_diff1 value: 28.87454015638211 - type: nauc_recall_at_1_max value: 17.50681123879997 - type: nauc_recall_at_1_std value: -30.382831850562432 - type: nauc_recall_at_20_diff1 value: 17.237090858517405 - type: nauc_recall_at_20_max value: 18.42118474134871 - type: nauc_recall_at_20_std value: -24.862787724031957 - type: nauc_recall_at_3_diff1 value: 18.813019521758577 - type: nauc_recall_at_3_max value: 19.198572333053544 - type: nauc_recall_at_3_std value: -28.5644958605618 - type: nauc_recall_at_5_diff1 value: 20.247501986329482 - type: nauc_recall_at_5_max value: 21.121526202170358 - type: nauc_recall_at_5_std value: -27.220378617864853 - type: ndcg_at_1 value: 30.318 - type: ndcg_at_10 value: 34.042 - type: ndcg_at_100 value: 42.733 - type: ndcg_at_1000 value: 46.015 - type: ndcg_at_20 value: 37.053999999999995 - type: ndcg_at_3 value: 29.254 - type: ndcg_at_5 value: 30.514000000000003 - type: precision_at_1 value: 30.318 - type: precision_at_10 value: 10.981 - type: precision_at_100 value: 1.889 - type: precision_at_1000 value: 0.234 - type: precision_at_20 value: 6.643000000000001 - type: precision_at_3 value: 22.166 - type: precision_at_5 value: 17.477999999999998 - type: recall_at_1 value: 13.236 - type: recall_at_10 value: 41.461 - type: recall_at_100 value: 75.008 - type: recall_at_1000 value: 96.775 - type: recall_at_20 value: 50.754 - type: recall_at_3 value: 26.081 - type: recall_at_5 value: 33.168 - task: type: Retrieval dataset: name: MTEB XPQARetrieval (eng-cmn) type: jinaai/xpqa config: eng-cmn split: test revision: c99d599f0a6ab9b85b065da6f9d94f9cf731679f metrics: - type: main_score value: 37.504 - type: map_at_1 value: 16.019 - type: map_at_10 value: 30.794 - type: map_at_100 value: 33.157 - type: map_at_1000 value: 33.324999999999996 - type: map_at_20 value: 32.161 - type: map_at_3 value: 25.372 - type: map_at_5 value: 28.246 - type: mrr_at_1 value: 30.461165048543688 - type: mrr_at_10 value: 39.393107566651224 - type: mrr_at_100 value: 40.570039540602295 - type: mrr_at_1000 value: 40.6306116407744 - type: mrr_at_20 value: 40.09428159978876 - type: mrr_at_3 value: 37.176375404530745 - type: mrr_at_5 value: 38.09870550161812 - type: nauc_map_at_1000_diff1 value: 30.82306881892873 - type: nauc_map_at_1000_max value: 5.877636000666466 - type: nauc_map_at_1000_std value: -30.7140513386797 - type: nauc_map_at_100_diff1 value: 30.85192449151961 - type: nauc_map_at_100_max value: 5.809195131550909 - type: nauc_map_at_100_std value: -30.838556702972063 - type: nauc_map_at_10_diff1 value: 30.50359163635058 - type: nauc_map_at_10_max value: 6.373491595869303 - type: nauc_map_at_10_std value: -29.89368007827676 - type: nauc_map_at_1_diff1 value: 38.60240510083884 - type: nauc_map_at_1_max value: 10.407392664609139 - type: nauc_map_at_1_std value: -17.76327278732833 - type: nauc_map_at_20_diff1 value: 30.897489125753598 - type: nauc_map_at_20_max value: 5.9303381898248 - type: nauc_map_at_20_std value: -30.863345188760515 - type: nauc_map_at_3_diff1 value: 32.8150951852729 - type: nauc_map_at_3_max value: 7.671931402215177 - type: nauc_map_at_3_std 
value: -25.654809758216533 - type: nauc_map_at_5_diff1 value: 31.19558194781019 - type: nauc_map_at_5_max value: 6.426885613116939 - type: nauc_map_at_5_std value: -28.609027858850016 - type: nauc_mrr_at_1000_diff1 value: 30.7596332048733 - type: nauc_mrr_at_1000_max value: 1.1970748115580212 - type: nauc_mrr_at_1000_std value: -34.647570668150216 - type: nauc_mrr_at_100_diff1 value: 30.74693370788581 - type: nauc_mrr_at_100_max value: 1.1673272262754841 - type: nauc_mrr_at_100_std value: -34.67761028542745 - type: nauc_mrr_at_10_diff1 value: 30.537820575183076 - type: nauc_mrr_at_10_max value: 1.0261868725502707 - type: nauc_mrr_at_10_std value: -34.999990560631204 - type: nauc_mrr_at_1_diff1 value: 35.51868580113285 - type: nauc_mrr_at_1_max value: 5.117103773147307 - type: nauc_mrr_at_1_std value: -30.633913466736956 - type: nauc_mrr_at_20_diff1 value: 30.67318175430903 - type: nauc_mrr_at_20_max value: 1.0979983974981327 - type: nauc_mrr_at_20_std value: -34.8388339739997 - type: nauc_mrr_at_3_diff1 value: 30.884642006045702 - type: nauc_mrr_at_3_max value: 1.7970996544095983 - type: nauc_mrr_at_3_std value: -34.290172894906085 - type: nauc_mrr_at_5_diff1 value: 30.89687518368571 - type: nauc_mrr_at_5_max value: 1.2123714988495347 - type: nauc_mrr_at_5_std value: -35.01704580471926 - type: nauc_ndcg_at_1000_diff1 value: 29.214476799077342 - type: nauc_ndcg_at_1000_max value: 3.6379035546112872 - type: nauc_ndcg_at_1000_std value: -32.35757522049194 - type: nauc_ndcg_at_100_diff1 value: 29.130004541376298 - type: nauc_ndcg_at_100_max value: 2.9580589185293045 - type: nauc_ndcg_at_100_std value: -33.26884643871724 - type: nauc_ndcg_at_10_diff1 value: 28.521001084366393 - type: nauc_ndcg_at_10_max value: 3.630223957267483 - type: nauc_ndcg_at_10_std value: -33.14524140940815 - type: nauc_ndcg_at_1_diff1 value: 35.51868580113285 - type: nauc_ndcg_at_1_max value: 5.117103773147307 - type: nauc_ndcg_at_1_std value: -30.633913466736956 - type: nauc_ndcg_at_20_diff1 value: 29.194462756848782 - type: nauc_ndcg_at_20_max value: 2.61162903136461 - type: nauc_ndcg_at_20_std value: -34.59161403211834 - type: nauc_ndcg_at_3_diff1 value: 30.183555327135203 - type: nauc_ndcg_at_3_max value: 5.61949040917093 - type: nauc_ndcg_at_3_std value: -30.350117794058175 - type: nauc_ndcg_at_5_diff1 value: 29.74420394139971 - type: nauc_ndcg_at_5_max value: 3.952183813937688 - type: nauc_ndcg_at_5_std value: -31.807833795302038 - type: nauc_precision_at_1000_diff1 value: -5.467049121617333 - type: nauc_precision_at_1000_max value: -3.993986884198271 - type: nauc_precision_at_1000_std value: -13.703967324212224 - type: nauc_precision_at_100_diff1 value: 1.5585428307943647 - type: nauc_precision_at_100_max value: -4.250455723613214 - type: nauc_precision_at_100_std value: -22.294689856776493 - type: nauc_precision_at_10_diff1 value: 11.076036917255259 - type: nauc_precision_at_10_max value: -1.5859394644365377 - type: nauc_precision_at_10_std value: -34.94912594413202 - type: nauc_precision_at_1_diff1 value: 35.51868580113285 - type: nauc_precision_at_1_max value: 5.117103773147307 - type: nauc_precision_at_1_std value: -30.633913466736956 - type: nauc_precision_at_20_diff1 value: 9.311484455773828 - type: nauc_precision_at_20_max value: -3.678383428592432 - type: nauc_precision_at_20_std value: -33.700002761401635 - type: nauc_precision_at_3_diff1 value: 19.2787260874381 - type: nauc_precision_at_3_max value: 0.18292109396940018 - type: nauc_precision_at_3_std value: -35.23939824276542 - type: 
nauc_precision_at_5_diff1 value: 14.97930592298584 - type: nauc_precision_at_5_max value: -1.63540635880963 - type: nauc_precision_at_5_std value: -35.908283558321315 - type: nauc_recall_at_1000_diff1 value: 26.63056473607804 - type: nauc_recall_at_1000_max value: 62.7304558520689 - type: nauc_recall_at_1000_std value: 58.12421701377561 - type: nauc_recall_at_100_diff1 value: 21.42127379898579 - type: nauc_recall_at_100_max value: 1.4748203516921914 - type: nauc_recall_at_100_std value: -27.56467339041136 - type: nauc_recall_at_10_diff1 value: 21.20479652609812 - type: nauc_recall_at_10_max value: 1.7394881489709888 - type: nauc_recall_at_10_std value: -32.15116902585072 - type: nauc_recall_at_1_diff1 value: 38.60240510083884 - type: nauc_recall_at_1_max value: 10.407392664609139 - type: nauc_recall_at_1_std value: -17.76327278732833 - type: nauc_recall_at_20_diff1 value: 23.049652721582632 - type: nauc_recall_at_20_max value: -1.7715787106286838 - type: nauc_recall_at_20_std value: -36.14203686002867 - type: nauc_recall_at_3_diff1 value: 26.522179829461873 - type: nauc_recall_at_3_max value: 6.078208732431124 - type: nauc_recall_at_3_std value: -25.02625711226274 - type: nauc_recall_at_5_diff1 value: 24.19538553561693 - type: nauc_recall_at_5_max value: 2.4963810785503524 - type: nauc_recall_at_5_std value: -30.449635496921257 - type: ndcg_at_1 value: 30.461 - type: ndcg_at_10 value: 37.504 - type: ndcg_at_100 value: 46.156000000000006 - type: ndcg_at_1000 value: 48.985 - type: ndcg_at_20 value: 41.025 - type: ndcg_at_3 value: 32.165 - type: ndcg_at_5 value: 33.072 - type: precision_at_1 value: 30.461 - type: precision_at_10 value: 11.032 - type: precision_at_100 value: 1.8870000000000002 - type: precision_at_1000 value: 0.22499999999999998 - type: precision_at_20 value: 6.833 - type: precision_at_3 value: 22.532 - type: precision_at_5 value: 16.966 - type: recall_at_1 value: 16.019 - type: recall_at_10 value: 47.557 - type: recall_at_100 value: 80.376 - type: recall_at_1000 value: 98.904 - type: recall_at_20 value: 58.48100000000001 - type: recall_at_3 value: 30.682 - type: recall_at_5 value: 36.714999999999996 - task: type: Retrieval dataset: name: MTEB XPQARetrieval (eng-spa) type: jinaai/xpqa config: eng-spa split: test revision: c99d599f0a6ab9b85b065da6f9d94f9cf731679f metrics: - type: main_score value: 53.359 - type: map_at_1 value: 22.892000000000003 - type: map_at_10 value: 45.773 - type: map_at_100 value: 47.778999999999996 - type: map_at_1000 value: 47.882999999999996 - type: map_at_20 value: 46.869 - type: map_at_3 value: 37.643 - type: map_at_5 value: 43.120999999999995 - type: mrr_at_1 value: 47.28877679697352 - type: mrr_at_10 value: 56.95890630316857 - type: mrr_at_100 value: 57.71103367009639 - type: mrr_at_1000 value: 57.73661441948852 - type: mrr_at_20 value: 57.37701091311334 - type: mrr_at_3 value: 54.74989491382929 - type: mrr_at_5 value: 56.08659100462372 - type: nauc_map_at_1000_diff1 value: 27.8347129954991 - type: nauc_map_at_1000_max value: 38.04300600762859 - type: nauc_map_at_1000_std value: -18.294653328262868 - type: nauc_map_at_100_diff1 value: 27.818449297770858 - type: nauc_map_at_100_max value: 38.03533462156633 - type: nauc_map_at_100_std value: -18.332989980880644 - type: nauc_map_at_10_diff1 value: 27.520664180018358 - type: nauc_map_at_10_max value: 37.67109855753314 - type: nauc_map_at_10_std value: -18.496721673888683 - type: nauc_map_at_1_diff1 value: 37.56020148060502 - type: nauc_map_at_1_max value: 10.298394230150745 - type: nauc_map_at_1_std 
value: -20.41359936101547 - type: nauc_map_at_20_diff1 value: 27.615023038189722 - type: nauc_map_at_20_max value: 37.808525116320254 - type: nauc_map_at_20_std value: -18.49235775420803 - type: nauc_map_at_3_diff1 value: 30.797347567428424 - type: nauc_map_at_3_max value: 29.374407828869497 - type: nauc_map_at_3_std value: -19.75905772914969 - type: nauc_map_at_5_diff1 value: 28.431802888884803 - type: nauc_map_at_5_max value: 35.57723911610521 - type: nauc_map_at_5_std value: -19.093588845366824 - type: nauc_mrr_at_1000_diff1 value: 33.263611009054586 - type: nauc_mrr_at_1000_max value: 40.620639901613664 - type: nauc_mrr_at_1000_std value: -17.083016011032036 - type: nauc_mrr_at_100_diff1 value: 33.25375012559163 - type: nauc_mrr_at_100_max value: 40.62376205172005 - type: nauc_mrr_at_100_std value: -17.091930575226684 - type: nauc_mrr_at_10_diff1 value: 33.05787202690095 - type: nauc_mrr_at_10_max value: 40.4516362611674 - type: nauc_mrr_at_10_std value: -17.088910666499892 - type: nauc_mrr_at_1_diff1 value: 36.424151087824555 - type: nauc_mrr_at_1_max value: 40.955715626650445 - type: nauc_mrr_at_1_std value: -16.56636409111209 - type: nauc_mrr_at_20_diff1 value: 33.12029456858138 - type: nauc_mrr_at_20_max value: 40.56409347292635 - type: nauc_mrr_at_20_std value: -17.102034817242068 - type: nauc_mrr_at_3_diff1 value: 33.52377926814156 - type: nauc_mrr_at_3_max value: 40.824911575046876 - type: nauc_mrr_at_3_std value: -16.855935748811092 - type: nauc_mrr_at_5_diff1 value: 33.08646471768442 - type: nauc_mrr_at_5_max value: 40.59323589955881 - type: nauc_mrr_at_5_std value: -16.77829710500156 - type: nauc_ndcg_at_1000_diff1 value: 28.741186244590207 - type: nauc_ndcg_at_1000_max value: 40.0113825410539 - type: nauc_ndcg_at_1000_std value: -17.15655081742458 - type: nauc_ndcg_at_100_diff1 value: 28.680521359782972 - type: nauc_ndcg_at_100_max value: 39.94751899984445 - type: nauc_ndcg_at_100_std value: -17.82813814043932 - type: nauc_ndcg_at_10_diff1 value: 27.22858072673168 - type: nauc_ndcg_at_10_max value: 38.600188968554725 - type: nauc_ndcg_at_10_std value: -18.517203924893614 - type: nauc_ndcg_at_1_diff1 value: 36.424151087824555 - type: nauc_ndcg_at_1_max value: 40.955715626650445 - type: nauc_ndcg_at_1_std value: -16.56636409111209 - type: nauc_ndcg_at_20_diff1 value: 27.56875900623774 - type: nauc_ndcg_at_20_max value: 38.95264310199067 - type: nauc_ndcg_at_20_std value: -18.709973965688445 - type: nauc_ndcg_at_3_diff1 value: 28.682842749851574 - type: nauc_ndcg_at_3_max value: 38.361215408395964 - type: nauc_ndcg_at_3_std value: -16.800291231827515 - type: nauc_ndcg_at_5_diff1 value: 28.178239259093484 - type: nauc_ndcg_at_5_max value: 36.77096292606479 - type: nauc_ndcg_at_5_std value: -18.718861696641145 - type: nauc_precision_at_1000_diff1 value: -7.3686253252869305 - type: nauc_precision_at_1000_max value: 31.98896996987639 - type: nauc_precision_at_1000_std value: 13.125659676392267 - type: nauc_precision_at_100_diff1 value: -2.8239113056969156 - type: nauc_precision_at_100_max value: 36.95062472971812 - type: nauc_precision_at_100_std value: 7.230228733647562 - type: nauc_precision_at_10_diff1 value: 2.5515545798843555 - type: nauc_precision_at_10_max value: 45.46146019314904 - type: nauc_precision_at_10_std value: -1.3249340536211553 - type: nauc_precision_at_1_diff1 value: 36.424151087824555 - type: nauc_precision_at_1_max value: 40.955715626650445 - type: nauc_precision_at_1_std value: -16.56636409111209 - type: nauc_precision_at_20_diff1 value: 0.7202861770489576 - 
type: nauc_precision_at_20_max value: 41.9937596214609 - type: nauc_precision_at_20_std value: 0.2756400069730064 - type: nauc_precision_at_3_diff1 value: 12.89221206929447 - type: nauc_precision_at_3_max value: 48.57775126381142 - type: nauc_precision_at_3_std value: -8.042242254131068 - type: nauc_precision_at_5_diff1 value: 7.063616193387763 - type: nauc_precision_at_5_max value: 47.26496887331675 - type: nauc_precision_at_5_std value: -4.735805200913049 - type: nauc_recall_at_1000_diff1 value: 2.6650052980682224 - type: nauc_recall_at_1000_max value: 81.94826279951472 - type: nauc_recall_at_1000_std value: 48.46012388224573 - type: nauc_recall_at_100_diff1 value: 24.516371948375827 - type: nauc_recall_at_100_max value: 39.17639620389552 - type: nauc_recall_at_100_std value: -17.884197602579533 - type: nauc_recall_at_10_diff1 value: 19.93892097640112 - type: nauc_recall_at_10_max value: 33.079079440022106 - type: nauc_recall_at_10_std value: -20.22227622801884 - type: nauc_recall_at_1_diff1 value: 37.56020148060502 - type: nauc_recall_at_1_max value: 10.298394230150745 - type: nauc_recall_at_1_std value: -20.41359936101547 - type: nauc_recall_at_20_diff1 value: 20.363784035670633 - type: nauc_recall_at_20_max value: 33.39352971625336 - type: nauc_recall_at_20_std value: -21.712050932168875 - type: nauc_recall_at_3_diff1 value: 26.220072121604655 - type: nauc_recall_at_3_max value: 25.853218030218507 - type: nauc_recall_at_3_std value: -17.830613372910907 - type: nauc_recall_at_5_diff1 value: 22.25850162680252 - type: nauc_recall_at_5_max value: 30.89620539042785 - type: nauc_recall_at_5_std value: -19.16786434439169 - type: ndcg_at_1 value: 47.288999999999994 - type: ndcg_at_10 value: 53.359 - type: ndcg_at_100 value: 60.25899999999999 - type: ndcg_at_1000 value: 61.902 - type: ndcg_at_20 value: 56.025000000000006 - type: ndcg_at_3 value: 47.221999999999994 - type: ndcg_at_5 value: 49.333 - type: precision_at_1 value: 47.288999999999994 - type: precision_at_10 value: 16.003 - type: precision_at_100 value: 2.221 - type: precision_at_1000 value: 0.246 - type: precision_at_20 value: 8.985 - type: precision_at_3 value: 34.510000000000005 - type: precision_at_5 value: 26.961000000000002 - type: recall_at_1 value: 22.892000000000003 - type: recall_at_10 value: 62.928 - type: recall_at_100 value: 89.105 - type: recall_at_1000 value: 99.319 - type: recall_at_20 value: 71.387 - type: recall_at_3 value: 43.492999999999995 - type: recall_at_5 value: 53.529 - task: type: Retrieval dataset: name: MTEB XPQARetrieval (eng-fra) type: jinaai/xpqa config: eng-fra split: test revision: c99d599f0a6ab9b85b065da6f9d94f9cf731679f metrics: - type: main_score value: 54.888000000000005 - type: map_at_1 value: 26.079 - type: map_at_10 value: 47.434 - type: map_at_100 value: 49.376 - type: map_at_1000 value: 49.461 - type: map_at_20 value: 48.634 - type: map_at_3 value: 40.409 - type: map_at_5 value: 44.531 - type: mrr_at_1 value: 46.86248331108144 - type: mrr_at_10 value: 56.45506177548896 - type: mrr_at_100 value: 57.20360629445577 - type: mrr_at_1000 value: 57.227004696897986 - type: mrr_at_20 value: 56.905302765737865 - type: mrr_at_3 value: 54.09434801958164 - type: mrr_at_5 value: 55.40943480195811 - type: nauc_map_at_1000_diff1 value: 37.739936045535885 - type: nauc_map_at_1000_max value: 35.92625003516368 - type: nauc_map_at_1000_std value: -15.825119611638398 - type: nauc_map_at_100_diff1 value: 37.71697833661983 - type: nauc_map_at_100_max value: 35.91174068136317 - type: nauc_map_at_100_std value: 
-15.838841891589006 - type: nauc_map_at_10_diff1 value: 37.52309268219689 - type: nauc_map_at_10_max value: 35.4887130483351 - type: nauc_map_at_10_std value: -16.61132378136234 - type: nauc_map_at_1_diff1 value: 42.705087329207984 - type: nauc_map_at_1_max value: 12.047671550242974 - type: nauc_map_at_1_std value: -17.156030827065834 - type: nauc_map_at_20_diff1 value: 37.59446680137666 - type: nauc_map_at_20_max value: 35.80559546695052 - type: nauc_map_at_20_std value: -16.158338316249786 - type: nauc_map_at_3_diff1 value: 38.618415267131816 - type: nauc_map_at_3_max value: 27.030227996183925 - type: nauc_map_at_3_std value: -18.962500694157857 - type: nauc_map_at_5_diff1 value: 37.980845601534256 - type: nauc_map_at_5_max value: 32.82374761283266 - type: nauc_map_at_5_std value: -17.856875825229565 - type: nauc_mrr_at_1000_diff1 value: 40.26059509279346 - type: nauc_mrr_at_1000_max value: 39.28453752990871 - type: nauc_mrr_at_1000_std value: -13.306217279524212 - type: nauc_mrr_at_100_diff1 value: 40.23390833398881 - type: nauc_mrr_at_100_max value: 39.26041461025653 - type: nauc_mrr_at_100_std value: -13.317700798873153 - type: nauc_mrr_at_10_diff1 value: 40.163737640180145 - type: nauc_mrr_at_10_max value: 39.27138538165913 - type: nauc_mrr_at_10_std value: -13.472971360323038 - type: nauc_mrr_at_1_diff1 value: 42.95339241383707 - type: nauc_mrr_at_1_max value: 40.62982307619158 - type: nauc_mrr_at_1_std value: -10.429597045942748 - type: nauc_mrr_at_20_diff1 value: 40.23703505923782 - type: nauc_mrr_at_20_max value: 39.27051308063652 - type: nauc_mrr_at_20_std value: -13.390197643922038 - type: nauc_mrr_at_3_diff1 value: 40.5721313555661 - type: nauc_mrr_at_3_max value: 39.254774354468594 - type: nauc_mrr_at_3_std value: -13.773803807863827 - type: nauc_mrr_at_5_diff1 value: 40.41081287079734 - type: nauc_mrr_at_5_max value: 39.515241132077335 - type: nauc_mrr_at_5_std value: -13.306544090087336 - type: nauc_ndcg_at_1000_diff1 value: 38.04772268296103 - type: nauc_ndcg_at_1000_max value: 38.03364565521176 - type: nauc_ndcg_at_1000_std value: -14.203182726102263 - type: nauc_ndcg_at_100_diff1 value: 37.51752795463643 - type: nauc_ndcg_at_100_max value: 37.809671511710604 - type: nauc_ndcg_at_100_std value: -13.880578225081408 - type: nauc_ndcg_at_10_diff1 value: 36.78438984005559 - type: nauc_ndcg_at_10_max value: 36.98105155993232 - type: nauc_ndcg_at_10_std value: -16.886308645939113 - type: nauc_ndcg_at_1_diff1 value: 42.95339241383707 - type: nauc_ndcg_at_1_max value: 40.62982307619158 - type: nauc_ndcg_at_1_std value: -10.429597045942748 - type: nauc_ndcg_at_20_diff1 value: 36.94164323893683 - type: nauc_ndcg_at_20_max value: 37.333583379288285 - type: nauc_ndcg_at_20_std value: -15.853318071434716 - type: nauc_ndcg_at_3_diff1 value: 36.905604845477384 - type: nauc_ndcg_at_3_max value: 35.10252586688781 - type: nauc_ndcg_at_3_std value: -17.128435988977742 - type: nauc_ndcg_at_5_diff1 value: 37.96742463612705 - type: nauc_ndcg_at_5_max value: 34.65945109443365 - type: nauc_ndcg_at_5_std value: -17.916428667861183 - type: nauc_precision_at_1000_diff1 value: -3.740861894117653 - type: nauc_precision_at_1000_max value: 31.993854396874177 - type: nauc_precision_at_1000_std value: 17.445629474196448 - type: nauc_precision_at_100_diff1 value: -0.4825948747911606 - type: nauc_precision_at_100_max value: 35.834638448782954 - type: nauc_precision_at_100_std value: 16.82718796079511 - type: nauc_precision_at_10_diff1 value: 8.285949866268147 - type: nauc_precision_at_10_max value: 
45.3292519726866 - type: nauc_precision_at_10_std value: 4.5574850748441555 - type: nauc_precision_at_1_diff1 value: 42.95339241383707 - type: nauc_precision_at_1_max value: 40.62982307619158 - type: nauc_precision_at_1_std value: -10.429597045942748 - type: nauc_precision_at_20_diff1 value: 4.890590733611442 - type: nauc_precision_at_20_max value: 41.83051757078859 - type: nauc_precision_at_20_std value: 9.197347125630467 - type: nauc_precision_at_3_diff1 value: 17.79940075411976 - type: nauc_precision_at_3_max value: 45.224103632426946 - type: nauc_precision_at_3_std value: -5.017203435609909 - type: nauc_precision_at_5_diff1 value: 13.548063145911929 - type: nauc_precision_at_5_max value: 46.84837547409909 - type: nauc_precision_at_5_std value: -0.8925939386354484 - type: nauc_recall_at_1000_diff1 value: 74.48441717138078 - type: nauc_recall_at_1000_max value: 74.66717137705027 - type: nauc_recall_at_1000_std value: 0.24030117471512125 - type: nauc_recall_at_100_diff1 value: 22.553777341988656 - type: nauc_recall_at_100_max value: 31.67861029246527 - type: nauc_recall_at_100_std value: 0.2707450517253687 - type: nauc_recall_at_10_diff1 value: 28.490866614443235 - type: nauc_recall_at_10_max value: 31.722970141434352 - type: nauc_recall_at_10_std value: -21.97893365028007 - type: nauc_recall_at_1_diff1 value: 42.705087329207984 - type: nauc_recall_at_1_max value: 12.047671550242974 - type: nauc_recall_at_1_std value: -17.156030827065834 - type: nauc_recall_at_20_diff1 value: 27.44043454173112 - type: nauc_recall_at_20_max value: 31.454281772040716 - type: nauc_recall_at_20_std value: -20.1735695305415 - type: nauc_recall_at_3_diff1 value: 34.08447534706394 - type: nauc_recall_at_3_max value: 21.793973773840865 - type: nauc_recall_at_3_std value: -22.753978372378906 - type: nauc_recall_at_5_diff1 value: 33.59686526199479 - type: nauc_recall_at_5_max value: 29.188889073761302 - type: nauc_recall_at_5_std value: -21.96156333744562 - type: ndcg_at_1 value: 46.861999999999995 - type: ndcg_at_10 value: 54.888000000000005 - type: ndcg_at_100 value: 61.477000000000004 - type: ndcg_at_1000 value: 62.768 - type: ndcg_at_20 value: 57.812 - type: ndcg_at_3 value: 48.721 - type: ndcg_at_5 value: 50.282000000000004 - type: precision_at_1 value: 46.861999999999995 - type: precision_at_10 value: 15.167 - type: precision_at_100 value: 2.072 - type: precision_at_1000 value: 0.22499999999999998 - type: precision_at_20 value: 8.672 - type: precision_at_3 value: 33.066 - type: precision_at_5 value: 24.726 - type: recall_at_1 value: 26.079 - type: recall_at_10 value: 66.095 - type: recall_at_100 value: 91.65299999999999 - type: recall_at_1000 value: 99.83999999999999 - type: recall_at_20 value: 75.28 - type: recall_at_3 value: 46.874 - type: recall_at_5 value: 55.062 - task: type: Retrieval dataset: name: MTEB XPQARetrieval (pol-eng) type: jinaai/xpqa config: pol-eng split: test revision: c99d599f0a6ab9b85b065da6f9d94f9cf731679f metrics: - type: main_score value: 50.831 - type: map_at_1 value: 25.549 - type: map_at_10 value: 44.432 - type: map_at_100 value: 46.431 - type: map_at_1000 value: 46.525 - type: map_at_20 value: 45.595 - type: map_at_3 value: 38.574000000000005 - type: map_at_5 value: 42.266999999999996 - type: mrr_at_1 value: 43.5006435006435 - type: mrr_at_10 value: 51.561255132683684 - type: mrr_at_100 value: 52.59912482635216 - type: mrr_at_1000 value: 52.631337587043056 - type: mrr_at_20 value: 52.23234440063273 - type: mrr_at_3 value: 48.97039897039895 - type: mrr_at_5 value: 50.31531531531527 
- type: nauc_map_at_1000_diff1 value: 35.907901295900174 - type: nauc_map_at_1000_max value: 24.573763602041687 - type: nauc_map_at_1000_std value: -29.524077960309313 - type: nauc_map_at_100_diff1 value: 35.86869121827827 - type: nauc_map_at_100_max value: 24.532343818487494 - type: nauc_map_at_100_std value: -29.613979124488864 - type: nauc_map_at_10_diff1 value: 35.90171794022391 - type: nauc_map_at_10_max value: 23.90914892943268 - type: nauc_map_at_10_std value: -30.43698820061533 - type: nauc_map_at_1_diff1 value: 50.80313333312038 - type: nauc_map_at_1_max value: 16.649890421888156 - type: nauc_map_at_1_std value: -22.323989416471683 - type: nauc_map_at_20_diff1 value: 35.77755470212964 - type: nauc_map_at_20_max value: 24.199895270297034 - type: nauc_map_at_20_std value: -30.223411960170647 - type: nauc_map_at_3_diff1 value: 38.964124882315936 - type: nauc_map_at_3_max value: 21.187432510177167 - type: nauc_map_at_3_std value: -28.976663506389887 - type: nauc_map_at_5_diff1 value: 36.04644236616672 - type: nauc_map_at_5_max value: 23.501186429317094 - type: nauc_map_at_5_std value: -30.068144596060748 - type: nauc_mrr_at_1000_diff1 value: 41.36555452105447 - type: nauc_mrr_at_1000_max value: 26.376799280402867 - type: nauc_mrr_at_1000_std value: -30.008603028757424 - type: nauc_mrr_at_100_diff1 value: 41.35523965220727 - type: nauc_mrr_at_100_max value: 26.402612115967706 - type: nauc_mrr_at_100_std value: -29.991754627128024 - type: nauc_mrr_at_10_diff1 value: 41.001395127259315 - type: nauc_mrr_at_10_max value: 26.104860505051384 - type: nauc_mrr_at_10_std value: -30.38420449487516 - type: nauc_mrr_at_1_diff1 value: 44.882846373248206 - type: nauc_mrr_at_1_max value: 26.61905322890808 - type: nauc_mrr_at_1_std value: -28.724565662206153 - type: nauc_mrr_at_20_diff1 value: 41.278009142648834 - type: nauc_mrr_at_20_max value: 26.284565529087295 - type: nauc_mrr_at_20_std value: -30.19549140549242 - type: nauc_mrr_at_3_diff1 value: 41.74663893951077 - type: nauc_mrr_at_3_max value: 26.263048464325884 - type: nauc_mrr_at_3_std value: -30.676733442965688 - type: nauc_mrr_at_5_diff1 value: 41.11461477846568 - type: nauc_mrr_at_5_max value: 25.94713927964926 - type: nauc_mrr_at_5_std value: -30.317066480767817 - type: nauc_ndcg_at_1000_diff1 value: 36.34161052445199 - type: nauc_ndcg_at_1000_max value: 26.321036033696206 - type: nauc_ndcg_at_1000_std value: -27.59146917115399 - type: nauc_ndcg_at_100_diff1 value: 35.66557800007035 - type: nauc_ndcg_at_100_max value: 26.282211208336136 - type: nauc_ndcg_at_100_std value: -27.905634124461333 - type: nauc_ndcg_at_10_diff1 value: 35.34872687407275 - type: nauc_ndcg_at_10_max value: 24.018561915792272 - type: nauc_ndcg_at_10_std value: -31.57712772869015 - type: nauc_ndcg_at_1_diff1 value: 44.882846373248206 - type: nauc_ndcg_at_1_max value: 26.865602442152554 - type: nauc_ndcg_at_1_std value: -28.509295454329152 - type: nauc_ndcg_at_20_diff1 value: 35.46177768045546 - type: nauc_ndcg_at_20_max value: 24.921273675141542 - type: nauc_ndcg_at_20_std value: -30.84348812979793 - type: nauc_ndcg_at_3_diff1 value: 36.84688489063923 - type: nauc_ndcg_at_3_max value: 24.088513229463736 - type: nauc_ndcg_at_3_std value: -30.05640995379297 - type: nauc_ndcg_at_5_diff1 value: 35.623143276796185 - type: nauc_ndcg_at_5_max value: 23.76654250474061 - type: nauc_ndcg_at_5_std value: -30.87847710074466 - type: nauc_precision_at_1000_diff1 value: -16.270532533886932 - type: nauc_precision_at_1000_max value: 17.37365042394671 - type: 
nauc_precision_at_1000_std value: 16.27166715693082 - type: nauc_precision_at_100_diff1 value: -13.175264889436313 - type: nauc_precision_at_100_max value: 19.488571046893963 - type: nauc_precision_at_100_std value: 9.055429698007798 - type: nauc_precision_at_10_diff1 value: 0.6806938753592942 - type: nauc_precision_at_10_max value: 21.933083960522616 - type: nauc_precision_at_10_std value: -18.2147036942157 - type: nauc_precision_at_1_diff1 value: 44.882846373248206 - type: nauc_precision_at_1_max value: 26.865602442152554 - type: nauc_precision_at_1_std value: -28.509295454329152 - type: nauc_precision_at_20_diff1 value: -4.318119150162302 - type: nauc_precision_at_20_max value: 21.089702301041687 - type: nauc_precision_at_20_std value: -10.333077681479546 - type: nauc_precision_at_3_diff1 value: 11.496076462671107 - type: nauc_precision_at_3_max value: 23.018301549827008 - type: nauc_precision_at_3_std value: -23.98652995416454 - type: nauc_precision_at_5_diff1 value: 4.271050668117355 - type: nauc_precision_at_5_max value: 23.61051327966779 - type: nauc_precision_at_5_std value: -21.557618503107847 - type: nauc_recall_at_1000_diff1 value: 62.23955911850697 - type: nauc_recall_at_1000_max value: 83.20491723365542 - type: nauc_recall_at_1000_std value: 66.5173462601958 - type: nauc_recall_at_100_diff1 value: 20.503778602988177 - type: nauc_recall_at_100_max value: 29.379026288767506 - type: nauc_recall_at_100_std value: -16.139120874540573 - type: nauc_recall_at_10_diff1 value: 27.659110249896557 - type: nauc_recall_at_10_max value: 19.69557968026332 - type: nauc_recall_at_10_std value: -33.95657132767551 - type: nauc_recall_at_1_diff1 value: 50.80313333312038 - type: nauc_recall_at_1_max value: 16.649890421888156 - type: nauc_recall_at_1_std value: -22.323989416471683 - type: nauc_recall_at_20_diff1 value: 27.084453724565176 - type: nauc_recall_at_20_max value: 21.40080632474994 - type: nauc_recall_at_20_std value: -32.83683639340239 - type: nauc_recall_at_3_diff1 value: 34.32950941333572 - type: nauc_recall_at_3_max value: 18.55616615958199 - type: nauc_recall_at_3_std value: -30.375983327454076 - type: nauc_recall_at_5_diff1 value: 29.44516734974564 - type: nauc_recall_at_5_max value: 20.630543534300312 - type: nauc_recall_at_5_std value: -31.30763062499127 - type: ndcg_at_1 value: 43.501 - type: ndcg_at_10 value: 50.831 - type: ndcg_at_100 value: 58.17099999999999 - type: ndcg_at_1000 value: 59.705 - type: ndcg_at_20 value: 54.047999999999995 - type: ndcg_at_3 value: 44.549 - type: ndcg_at_5 value: 46.861000000000004 - type: precision_at_1 value: 43.501 - type: precision_at_10 value: 12.895999999999999 - type: precision_at_100 value: 1.9 - type: precision_at_1000 value: 0.21 - type: precision_at_20 value: 7.593 - type: precision_at_3 value: 29.215000000000003 - type: precision_at_5 value: 21.57 - type: recall_at_1 value: 25.549 - type: recall_at_10 value: 61.795 - type: recall_at_100 value: 90.019 - type: recall_at_1000 value: 99.807 - type: recall_at_20 value: 72.096 - type: recall_at_3 value: 43.836999999999996 - type: recall_at_5 value: 51.714000000000006 - task: type: Retrieval dataset: name: MTEB XPQARetrieval (pol-pol) type: jinaai/xpqa config: pol-pol split: test revision: c99d599f0a6ab9b85b065da6f9d94f9cf731679f metrics: - type: main_score value: 53.70399999999999 - type: map_at_1 value: 27.739000000000004 - type: map_at_10 value: 47.469 - type: map_at_100 value: 49.392 - type: map_at_1000 value: 49.483 - type: map_at_20 value: 48.646 - type: map_at_3 value: 41.467 - type: 
map_at_5 value: 45.467 - type: mrr_at_1 value: 47.00636942675159 - type: mrr_at_10 value: 54.63699322616519 - type: mrr_at_100 value: 55.54525182833755 - type: mrr_at_1000 value: 55.581331515356155 - type: mrr_at_20 value: 55.22918377451415 - type: mrr_at_3 value: 52.03821656050952 - type: mrr_at_5 value: 53.38216560509549 - type: nauc_map_at_1000_diff1 value: 45.03530825034854 - type: nauc_map_at_1000_max value: 34.22740272603397 - type: nauc_map_at_1000_std value: -30.428880484199244 - type: nauc_map_at_100_diff1 value: 44.978704455592805 - type: nauc_map_at_100_max value: 34.20908357964765 - type: nauc_map_at_100_std value: -30.47325365059666 - type: nauc_map_at_10_diff1 value: 44.9560579177672 - type: nauc_map_at_10_max value: 33.70097588985278 - type: nauc_map_at_10_std value: -31.205563222357885 - type: nauc_map_at_1_diff1 value: 57.94711780881773 - type: nauc_map_at_1_max value: 21.60278071836319 - type: nauc_map_at_1_std value: -23.273741268035923 - type: nauc_map_at_20_diff1 value: 44.97859054699532 - type: nauc_map_at_20_max value: 34.153729150181846 - type: nauc_map_at_20_std value: -30.97482545902907 - type: nauc_map_at_3_diff1 value: 47.52016138686765 - type: nauc_map_at_3_max value: 30.176197065298417 - type: nauc_map_at_3_std value: -29.90628984041898 - type: nauc_map_at_5_diff1 value: 45.36581638257985 - type: nauc_map_at_5_max value: 33.697200263698036 - type: nauc_map_at_5_std value: -31.165331120088453 - type: nauc_mrr_at_1000_diff1 value: 53.32889526818364 - type: nauc_mrr_at_1000_max value: 36.104118340589736 - type: nauc_mrr_at_1000_std value: -31.321132494516984 - type: nauc_mrr_at_100_diff1 value: 53.30695875258367 - type: nauc_mrr_at_100_max value: 36.114890079024455 - type: nauc_mrr_at_100_std value: -31.291749322117447 - type: nauc_mrr_at_10_diff1 value: 53.189084772141435 - type: nauc_mrr_at_10_max value: 35.939061062282484 - type: nauc_mrr_at_10_std value: -31.502185884653645 - type: nauc_mrr_at_1_diff1 value: 56.89368291041337 - type: nauc_mrr_at_1_max value: 36.07581125496313 - type: nauc_mrr_at_1_std value: -29.703764232519475 - type: nauc_mrr_at_20_diff1 value: 53.23955737199497 - type: nauc_mrr_at_20_max value: 36.068824838215676 - type: nauc_mrr_at_20_std value: -31.420039428197594 - type: nauc_mrr_at_3_diff1 value: 53.74385074861207 - type: nauc_mrr_at_3_max value: 35.57054587735015 - type: nauc_mrr_at_3_std value: -32.356894834537684 - type: nauc_mrr_at_5_diff1 value: 53.66669556981826 - type: nauc_mrr_at_5_max value: 36.02102289605049 - type: nauc_mrr_at_5_std value: -32.030437067359124 - type: nauc_ndcg_at_1000_diff1 value: 46.34900536768847 - type: nauc_ndcg_at_1000_max value: 35.6314995837715 - type: nauc_ndcg_at_1000_std value: -28.965103958822624 - type: nauc_ndcg_at_100_diff1 value: 45.1587893788861 - type: nauc_ndcg_at_100_max value: 35.62430753595297 - type: nauc_ndcg_at_100_std value: -28.77303405812772 - type: nauc_ndcg_at_10_diff1 value: 44.928781590765965 - type: nauc_ndcg_at_10_max value: 34.315200006430366 - type: nauc_ndcg_at_10_std value: -32.05164097076614 - type: nauc_ndcg_at_1_diff1 value: 57.228262350455125 - type: nauc_ndcg_at_1_max value: 35.645285703387366 - type: nauc_ndcg_at_1_std value: -29.893553821348718 - type: nauc_ndcg_at_20_diff1 value: 44.959903633039865 - type: nauc_ndcg_at_20_max value: 35.493022926282755 - type: nauc_ndcg_at_20_std value: -31.54989291850644 - type: nauc_ndcg_at_3_diff1 value: 46.65266185996905 - type: nauc_ndcg_at_3_max value: 33.74458119579594 - type: nauc_ndcg_at_3_std value: -31.493683304534176 - 
type: nauc_ndcg_at_5_diff1 value: 46.08707037187612 - type: nauc_ndcg_at_5_max value: 34.7401426055243 - type: nauc_ndcg_at_5_std value: -32.44390676345172 - type: nauc_precision_at_1000_diff1 value: -12.11355300492561 - type: nauc_precision_at_1000_max value: 14.490738062121233 - type: nauc_precision_at_1000_std value: 14.448811005059097 - type: nauc_precision_at_100_diff1 value: -9.742085657181239 - type: nauc_precision_at_100_max value: 18.030305489251223 - type: nauc_precision_at_100_std value: 8.213089709529765 - type: nauc_precision_at_10_diff1 value: 5.153466672774969 - type: nauc_precision_at_10_max value: 27.29412644661678 - type: nauc_precision_at_10_std value: -15.505053884112355 - type: nauc_precision_at_1_diff1 value: 57.228262350455125 - type: nauc_precision_at_1_max value: 35.645285703387366 - type: nauc_precision_at_1_std value: -29.893553821348718 - type: nauc_precision_at_20_diff1 value: -0.6812430761066635 - type: nauc_precision_at_20_max value: 25.81911286466295 - type: nauc_precision_at_20_std value: -8.388506222482595 - type: nauc_precision_at_3_diff1 value: 18.263873866510576 - type: nauc_precision_at_3_max value: 30.879576105862345 - type: nauc_precision_at_3_std value: -24.0342929870108 - type: nauc_precision_at_5_diff1 value: 10.9905804265327 - type: nauc_precision_at_5_max value: 30.88468087429045 - type: nauc_precision_at_5_std value: -20.458684056213507 - type: nauc_recall_at_1000_diff1 value: -64.887668417171 - type: nauc_recall_at_1000_max value: 52.25501730358092 - type: nauc_recall_at_1000_std value: 85.13647916200132 - type: nauc_recall_at_100_diff1 value: 18.956777346127655 - type: nauc_recall_at_100_max value: 36.10473493564588 - type: nauc_recall_at_100_std value: -10.007474558899949 - type: nauc_recall_at_10_diff1 value: 33.810344497568046 - type: nauc_recall_at_10_max value: 31.395430183214245 - type: nauc_recall_at_10_std value: -33.12920524433795 - type: nauc_recall_at_1_diff1 value: 57.94711780881773 - type: nauc_recall_at_1_max value: 21.60278071836319 - type: nauc_recall_at_1_std value: -23.273741268035923 - type: nauc_recall_at_20_diff1 value: 31.449657437065397 - type: nauc_recall_at_20_max value: 34.519574934321945 - type: nauc_recall_at_20_std value: -33.43406862055647 - type: nauc_recall_at_3_diff1 value: 42.07841848382365 - type: nauc_recall_at_3_max value: 28.7648772833266 - type: nauc_recall_at_3_std value: -31.56367736320086 - type: nauc_recall_at_5_diff1 value: 39.21392858246301 - type: nauc_recall_at_5_max value: 34.28338202081927 - type: nauc_recall_at_5_std value: -33.725680523721906 - type: ndcg_at_1 value: 46.879 - type: ndcg_at_10 value: 53.70399999999999 - type: ndcg_at_100 value: 60.532 - type: ndcg_at_1000 value: 61.997 - type: ndcg_at_20 value: 56.818999999999996 - type: ndcg_at_3 value: 47.441 - type: ndcg_at_5 value: 49.936 - type: precision_at_1 value: 46.879 - type: precision_at_10 value: 13.376 - type: precision_at_100 value: 1.8980000000000001 - type: precision_at_1000 value: 0.208 - type: precision_at_20 value: 7.771 - type: precision_at_3 value: 30.658 - type: precision_at_5 value: 22.828 - type: recall_at_1 value: 27.739000000000004 - type: recall_at_10 value: 64.197 - type: recall_at_100 value: 90.54100000000001 - type: recall_at_1000 value: 99.90400000000001 - type: recall_at_20 value: 74.178 - type: recall_at_3 value: 46.312 - type: recall_at_5 value: 54.581999999999994 - task: type: Retrieval dataset: name: MTEB XPQARetrieval (cmn-eng) type: jinaai/xpqa config: cmn-eng split: test revision: 
c99d599f0a6ab9b85b065da6f9d94f9cf731679f metrics: - type: main_score value: 64.64 - type: map_at_1 value: 35.858000000000004 - type: map_at_10 value: 58.547000000000004 - type: map_at_100 value: 60.108 - type: map_at_1000 value: 60.153999999999996 - type: map_at_20 value: 59.528000000000006 - type: map_at_3 value: 51.578 - type: map_at_5 value: 56.206999999999994 - type: mrr_at_1 value: 56.95121951219512 - type: mrr_at_10 value: 64.93975029036001 - type: mrr_at_100 value: 65.63357055718294 - type: mrr_at_1000 value: 65.64844109026834 - type: mrr_at_20 value: 65.41280668715439 - type: mrr_at_3 value: 62.68292682926826 - type: mrr_at_5 value: 64.1585365853658 - type: nauc_map_at_1000_diff1 value: 45.82740870907091 - type: nauc_map_at_1000_max value: 21.9696540066807 - type: nauc_map_at_1000_std value: -32.028262356639495 - type: nauc_map_at_100_diff1 value: 45.802053117616396 - type: nauc_map_at_100_max value: 21.946002070290966 - type: nauc_map_at_100_std value: -32.06190418866229 - type: nauc_map_at_10_diff1 value: 46.017774155748945 - type: nauc_map_at_10_max value: 21.876909086095544 - type: nauc_map_at_10_std value: -32.13913568843985 - type: nauc_map_at_1_diff1 value: 56.34671160956164 - type: nauc_map_at_1_max value: 17.6796949796236 - type: nauc_map_at_1_std value: -13.741140688066045 - type: nauc_map_at_20_diff1 value: 46.027469176858716 - type: nauc_map_at_20_max value: 21.80738432042703 - type: nauc_map_at_20_std value: -32.430379634015395 - type: nauc_map_at_3_diff1 value: 48.40096725254027 - type: nauc_map_at_3_max value: 21.15442803574233 - type: nauc_map_at_3_std value: -26.205850292181417 - type: nauc_map_at_5_diff1 value: 45.77800041356389 - type: nauc_map_at_5_max value: 22.11718771798752 - type: nauc_map_at_5_std value: -30.32876338031471 - type: nauc_mrr_at_1000_diff1 value: 49.748274798877944 - type: nauc_mrr_at_1000_max value: 24.547774167219906 - type: nauc_mrr_at_1000_std value: -32.728447209433504 - type: nauc_mrr_at_100_diff1 value: 49.734549290377856 - type: nauc_mrr_at_100_max value: 24.536933315055222 - type: nauc_mrr_at_100_std value: -32.74076335880697 - type: nauc_mrr_at_10_diff1 value: 49.82827711456392 - type: nauc_mrr_at_10_max value: 24.536773657485075 - type: nauc_mrr_at_10_std value: -33.05707547166962 - type: nauc_mrr_at_1_diff1 value: 51.954289992321044 - type: nauc_mrr_at_1_max value: 26.336255074856886 - type: nauc_mrr_at_1_std value: -29.042962019692446 - type: nauc_mrr_at_20_diff1 value: 49.70938465628863 - type: nauc_mrr_at_20_max value: 24.433219849576947 - type: nauc_mrr_at_20_std value: -32.94123791846049 - type: nauc_mrr_at_3_diff1 value: 50.289486880347134 - type: nauc_mrr_at_3_max value: 24.978796972860142 - type: nauc_mrr_at_3_std value: -32.11305594784892 - type: nauc_mrr_at_5_diff1 value: 49.95013396316144 - type: nauc_mrr_at_5_max value: 24.514452761198303 - type: nauc_mrr_at_5_std value: -32.865859962984146 - type: nauc_ndcg_at_1000_diff1 value: 45.73806489233998 - type: nauc_ndcg_at_1000_max value: 22.404941391043867 - type: nauc_ndcg_at_1000_std value: -33.063445720849685 - type: nauc_ndcg_at_100_diff1 value: 45.1046206923062 - type: nauc_ndcg_at_100_max value: 22.081133719684658 - type: nauc_ndcg_at_100_std value: -33.299291459450146 - type: nauc_ndcg_at_10_diff1 value: 46.140608688357496 - type: nauc_ndcg_at_10_max value: 21.442489279388916 - type: nauc_ndcg_at_10_std value: -35.115870342856006 - type: nauc_ndcg_at_1_diff1 value: 51.954289992321044 - type: nauc_ndcg_at_1_max value: 26.336255074856886 - type: nauc_ndcg_at_1_std 
value: -29.042962019692446 - type: nauc_ndcg_at_20_diff1 value: 45.966784725457046 - type: nauc_ndcg_at_20_max value: 21.166632858613145 - type: nauc_ndcg_at_20_std value: -35.65112890375392 - type: nauc_ndcg_at_3_diff1 value: 46.7404863978999 - type: nauc_ndcg_at_3_max value: 22.701743709129456 - type: nauc_ndcg_at_3_std value: -30.907633466983192 - type: nauc_ndcg_at_5_diff1 value: 45.86487199083486 - type: nauc_ndcg_at_5_max value: 22.088804840002513 - type: nauc_ndcg_at_5_std value: -32.3853481632832 - type: nauc_precision_at_1000_diff1 value: -25.69710612774455 - type: nauc_precision_at_1000_max value: 1.3964400247388091 - type: nauc_precision_at_1000_std value: -8.873947511634814 - type: nauc_precision_at_100_diff1 value: -24.013497191077978 - type: nauc_precision_at_100_max value: 2.0197725715909343 - type: nauc_precision_at_100_std value: -11.387423148770633 - type: nauc_precision_at_10_diff1 value: -6.47728645242781 - type: nauc_precision_at_10_max value: 6.815261443768304 - type: nauc_precision_at_10_std value: -26.825062292855943 - type: nauc_precision_at_1_diff1 value: 51.954289992321044 - type: nauc_precision_at_1_max value: 26.336255074856886 - type: nauc_precision_at_1_std value: -29.042962019692446 - type: nauc_precision_at_20_diff1 value: -12.355232044747511 - type: nauc_precision_at_20_max value: 4.022126850949725 - type: nauc_precision_at_20_std value: -23.688935769326772 - type: nauc_precision_at_3_diff1 value: 7.662671665835864 - type: nauc_precision_at_3_max value: 14.372394760986248 - type: nauc_precision_at_3_std value: -28.635125665532453 - type: nauc_precision_at_5_diff1 value: -1.4592476425511611 - type: nauc_precision_at_5_max value: 11.124310161474174 - type: nauc_precision_at_5_std value: -27.89526669318053 - type: nauc_recall_at_1000_diff1 value: -19.58450046684932 - type: nauc_recall_at_1000_max value: 70.71661998133165 - type: nauc_recall_at_1000_std value: 93.05555555556315 - type: nauc_recall_at_100_diff1 value: 15.06356457571853 - type: nauc_recall_at_100_max value: 14.051414749344806 - type: nauc_recall_at_100_std value: -29.461874235153008 - type: nauc_recall_at_10_diff1 value: 41.29842726117901 - type: nauc_recall_at_10_max value: 15.768699673830898 - type: nauc_recall_at_10_std value: -42.11585661287712 - type: nauc_recall_at_1_diff1 value: 56.34671160956164 - type: nauc_recall_at_1_max value: 17.6796949796236 - type: nauc_recall_at_1_std value: -13.741140688066045 - type: nauc_recall_at_20_diff1 value: 38.8078283585263 - type: nauc_recall_at_20_max value: 12.06816084005326 - type: nauc_recall_at_20_std value: -48.20956170056591 - type: nauc_recall_at_3_diff1 value: 44.71028758038993 - type: nauc_recall_at_3_max value: 19.1059093689162 - type: nauc_recall_at_3_std value: -26.795164453784253 - type: nauc_recall_at_5_diff1 value: 41.06320797773054 - type: nauc_recall_at_5_max value: 19.117028272530998 - type: nauc_recall_at_5_std value: -33.985747504612156 - type: ndcg_at_1 value: 56.95099999999999 - type: ndcg_at_10 value: 64.64 - type: ndcg_at_100 value: 70.017 - type: ndcg_at_1000 value: 70.662 - type: ndcg_at_20 value: 67.256 - type: ndcg_at_3 value: 58.269000000000005 - type: ndcg_at_5 value: 60.94199999999999 - type: precision_at_1 value: 56.95099999999999 - type: precision_at_10 value: 15.671 - type: precision_at_100 value: 2.002 - type: precision_at_1000 value: 0.208 - type: precision_at_20 value: 8.689 - type: precision_at_3 value: 36.341 - type: precision_at_5 value: 26.854 - type: recall_at_1 value: 35.858000000000004 - type: recall_at_10 
value: 75.02 - type: recall_at_100 value: 95.76 - type: recall_at_1000 value: 99.837 - type: recall_at_20 value: 83.732 - type: recall_at_3 value: 57.093 - type: recall_at_5 value: 66.193 - task: type: Retrieval dataset: name: MTEB XPQARetrieval (cmn-cmn) type: jinaai/xpqa config: cmn-cmn split: test revision: c99d599f0a6ab9b85b065da6f9d94f9cf731679f metrics: - type: main_score value: 69.446 - type: map_at_1 value: 39.995999999999995 - type: map_at_10 value: 64.033 - type: map_at_100 value: 65.51599999999999 - type: map_at_1000 value: 65.545 - type: map_at_20 value: 64.958 - type: map_at_3 value: 57.767 - type: map_at_5 value: 61.998 - type: mrr_at_1 value: 63.3495145631068 - type: mrr_at_10 value: 70.21146363075978 - type: mrr_at_100 value: 70.82810974202124 - type: mrr_at_1000 value: 70.83816803303915 - type: mrr_at_20 value: 70.60140248428802 - type: mrr_at_3 value: 68.66909385113267 - type: mrr_at_5 value: 69.56108414239482 - type: nauc_map_at_1000_diff1 value: 51.649897072831465 - type: nauc_map_at_1000_max value: 38.25222728655331 - type: nauc_map_at_1000_std value: -39.10327919949334 - type: nauc_map_at_100_diff1 value: 51.644205886401465 - type: nauc_map_at_100_max value: 38.23611154355255 - type: nauc_map_at_100_std value: -39.1677073977285 - type: nauc_map_at_10_diff1 value: 51.81444145636039 - type: nauc_map_at_10_max value: 38.03382104326485 - type: nauc_map_at_10_std value: -38.999395639812015 - type: nauc_map_at_1_diff1 value: 59.785298201044704 - type: nauc_map_at_1_max value: 23.273537759937785 - type: nauc_map_at_1_std value: -17.838712689290194 - type: nauc_map_at_20_diff1 value: 51.680208795601004 - type: nauc_map_at_20_max value: 38.23334583518634 - type: nauc_map_at_20_std value: -39.24344495939061 - type: nauc_map_at_3_diff1 value: 52.180913298194056 - type: nauc_map_at_3_max value: 33.45482478000481 - type: nauc_map_at_3_std value: -31.682911030586297 - type: nauc_map_at_5_diff1 value: 50.804900676175436 - type: nauc_map_at_5_max value: 37.68924816012326 - type: nauc_map_at_5_std value: -36.85016896616712 - type: nauc_mrr_at_1000_diff1 value: 56.371477471577535 - type: nauc_mrr_at_1000_max value: 42.773877962050086 - type: nauc_mrr_at_1000_std value: -40.41765081873682 - type: nauc_mrr_at_100_diff1 value: 56.3619751528192 - type: nauc_mrr_at_100_max value: 42.76298794859916 - type: nauc_mrr_at_100_std value: -40.44070582448831 - type: nauc_mrr_at_10_diff1 value: 56.33810523477712 - type: nauc_mrr_at_10_max value: 42.76591937795783 - type: nauc_mrr_at_10_std value: -40.69339583030244 - type: nauc_mrr_at_1_diff1 value: 58.90399906884378 - type: nauc_mrr_at_1_max value: 43.38806571165292 - type: nauc_mrr_at_1_std value: -38.224015285584 - type: nauc_mrr_at_20_diff1 value: 56.32629070537032 - type: nauc_mrr_at_20_max value: 42.79615263472604 - type: nauc_mrr_at_20_std value: -40.496777397603076 - type: nauc_mrr_at_3_diff1 value: 55.96989454480743 - type: nauc_mrr_at_3_max value: 42.49832220744744 - type: nauc_mrr_at_3_std value: -39.883799467132384 - type: nauc_mrr_at_5_diff1 value: 56.003080766475755 - type: nauc_mrr_at_5_max value: 42.73308051011805 - type: nauc_mrr_at_5_std value: -39.87179511166683 - type: nauc_ndcg_at_1000_diff1 value: 52.49054229225255 - type: nauc_ndcg_at_1000_max value: 39.61644750719859 - type: nauc_ndcg_at_1000_std value: -40.89845763194674 - type: nauc_ndcg_at_100_diff1 value: 52.33511250864434 - type: nauc_ndcg_at_100_max value: 39.25530146124452 - type: nauc_ndcg_at_100_std value: -41.92444498004374 - type: nauc_ndcg_at_10_diff1 value: 
52.62031505931842 - type: nauc_ndcg_at_10_max value: 38.667195545396766 - type: nauc_ndcg_at_10_std value: -42.59503924641507 - type: nauc_ndcg_at_1_diff1 value: 58.90399906884378 - type: nauc_ndcg_at_1_max value: 43.38806571165292 - type: nauc_ndcg_at_1_std value: -38.224015285584 - type: nauc_ndcg_at_20_diff1 value: 52.15061629809436 - type: nauc_ndcg_at_20_max value: 39.09332400054708 - type: nauc_ndcg_at_20_std value: -42.80018671618001 - type: nauc_ndcg_at_3_diff1 value: 51.04210728138207 - type: nauc_ndcg_at_3_max value: 38.19034802567046 - type: nauc_ndcg_at_3_std value: -38.179821090765216 - type: nauc_ndcg_at_5_diff1 value: 51.04399574045204 - type: nauc_ndcg_at_5_max value: 38.42492210204548 - type: nauc_ndcg_at_5_std value: -38.868073241617715 - type: nauc_precision_at_1000_diff1 value: -25.151369907213734 - type: nauc_precision_at_1000_max value: 9.012549147054989 - type: nauc_precision_at_1000_std value: -9.319786589947698 - type: nauc_precision_at_100_diff1 value: -23.20945211843088 - type: nauc_precision_at_100_max value: 9.860701593969862 - type: nauc_precision_at_100_std value: -13.073877818347231 - type: nauc_precision_at_10_diff1 value: -6.970781124246847 - type: nauc_precision_at_10_max value: 19.392675322254487 - type: nauc_precision_at_10_std value: -26.74943490717657 - type: nauc_precision_at_1_diff1 value: 58.90399906884378 - type: nauc_precision_at_1_max value: 43.38806571165292 - type: nauc_precision_at_1_std value: -38.224015285584 - type: nauc_precision_at_20_diff1 value: -13.046456108081102 - type: nauc_precision_at_20_max value: 15.69439950383875 - type: nauc_precision_at_20_std value: -23.836004512018093 - type: nauc_precision_at_3_diff1 value: 3.5444232965528846 - type: nauc_precision_at_3_max value: 27.08858445453865 - type: nauc_precision_at_3_std value: -29.12757283665593 - type: nauc_precision_at_5_diff1 value: -3.6853986353320267 - type: nauc_precision_at_5_max value: 24.32059689571271 - type: nauc_precision_at_5_std value: -27.46188072134163 - type: nauc_recall_at_1000_diff1 value: 86.93515141907919 - type: nauc_recall_at_1000_max value: 100.0 - type: nauc_recall_at_1000_std value: 100.0 - type: nauc_recall_at_100_diff1 value: 39.7052887613879 - type: nauc_recall_at_100_max value: 18.40943977796887 - type: nauc_recall_at_100_std value: -88.74014854144974 - type: nauc_recall_at_10_diff1 value: 48.85342500870892 - type: nauc_recall_at_10_max value: 32.69617204234419 - type: nauc_recall_at_10_std value: -51.9937231860804 - type: nauc_recall_at_1_diff1 value: 59.785298201044704 - type: nauc_recall_at_1_max value: 23.273537759937785 - type: nauc_recall_at_1_std value: -17.838712689290194 - type: nauc_recall_at_20_diff1 value: 45.40839773314378 - type: nauc_recall_at_20_max value: 33.02458321493215 - type: nauc_recall_at_20_std value: -55.97800739448166 - type: nauc_recall_at_3_diff1 value: 47.05565693416531 - type: nauc_recall_at_3_max value: 28.743850400344297 - type: nauc_recall_at_3_std value: -32.436470486397475 - type: nauc_recall_at_5_diff1 value: 45.30223758669577 - type: nauc_recall_at_5_max value: 33.6567274747059 - type: nauc_recall_at_5_std value: -39.946712017948514 - type: ndcg_at_1 value: 63.349999999999994 - type: ndcg_at_10 value: 69.446 - type: ndcg_at_100 value: 74.439 - type: ndcg_at_1000 value: 74.834 - type: ndcg_at_20 value: 71.763 - type: ndcg_at_3 value: 64.752 - type: ndcg_at_5 value: 66.316 - type: precision_at_1 value: 63.349999999999994 - type: precision_at_10 value: 16.286 - type: precision_at_100 value: 2.024 - type: 
precision_at_1000 value: 0.207 - type: precision_at_20 value: 8.908000000000001 - type: precision_at_3 value: 40.655 - type: precision_at_5 value: 28.859 - type: recall_at_1 value: 39.995999999999995 - type: recall_at_10 value: 78.107 - type: recall_at_100 value: 97.538 - type: recall_at_1000 value: 99.96000000000001 - type: recall_at_20 value: 85.72 - type: recall_at_3 value: 63.291 - type: recall_at_5 value: 70.625 - task: type: Retrieval dataset: name: MTEB XPQARetrieval (spa-eng) type: jinaai/xpqa config: spa-eng split: test revision: c99d599f0a6ab9b85b065da6f9d94f9cf731679f metrics: - type: main_score value: 68.258 - type: map_at_1 value: 33.06 - type: map_at_10 value: 61.590999999999994 - type: map_at_100 value: 63.341 - type: map_at_1000 value: 63.385999999999996 - type: map_at_20 value: 62.77700000000001 - type: map_at_3 value: 52.547999999999995 - type: map_at_5 value: 58.824 - type: mrr_at_1 value: 63.80832282471627 - type: mrr_at_10 value: 70.76848015372607 - type: mrr_at_100 value: 71.33996704518061 - type: mrr_at_1000 value: 71.35368444388072 - type: mrr_at_20 value: 71.18191741103522 - type: mrr_at_3 value: 68.83144178226142 - type: mrr_at_5 value: 69.88440521227405 - type: nauc_map_at_1000_diff1 value: 41.59255746310511 - type: nauc_map_at_1000_max value: 42.064075373358065 - type: nauc_map_at_1000_std value: -25.130730194381723 - type: nauc_map_at_100_diff1 value: 41.56447648820406 - type: nauc_map_at_100_max value: 42.06711634651607 - type: nauc_map_at_100_std value: -25.14871585556968 - type: nauc_map_at_10_diff1 value: 41.28968387107058 - type: nauc_map_at_10_max value: 41.511538272139774 - type: nauc_map_at_10_std value: -25.99906440164276 - type: nauc_map_at_1_diff1 value: 51.09859596320021 - type: nauc_map_at_1_max value: 12.406789321338222 - type: nauc_map_at_1_std value: -18.227486548655076 - type: nauc_map_at_20_diff1 value: 41.39469672947315 - type: nauc_map_at_20_max value: 41.98309315808902 - type: nauc_map_at_20_std value: -25.44704720985219 - type: nauc_map_at_3_diff1 value: 43.16164995512842 - type: nauc_map_at_3_max value: 30.935400935562818 - type: nauc_map_at_3_std value: -23.53095555148866 - type: nauc_map_at_5_diff1 value: 41.23474352142375 - type: nauc_map_at_5_max value: 39.03088859147947 - type: nauc_map_at_5_std value: -26.046526443708366 - type: nauc_mrr_at_1000_diff1 value: 51.79649678213789 - type: nauc_mrr_at_1000_max value: 50.50340748045259 - type: nauc_mrr_at_1000_std value: -24.777183703493407 - type: nauc_mrr_at_100_diff1 value: 51.78609028166551 - type: nauc_mrr_at_100_max value: 50.51732896833555 - type: nauc_mrr_at_100_std value: -24.760054686874717 - type: nauc_mrr_at_10_diff1 value: 51.705268395036995 - type: nauc_mrr_at_10_max value: 50.35818415293149 - type: nauc_mrr_at_10_std value: -25.170367120250404 - type: nauc_mrr_at_1_diff1 value: 53.91475115581825 - type: nauc_mrr_at_1_max value: 49.122529616282016 - type: nauc_mrr_at_1_std value: -22.377647552937155 - type: nauc_mrr_at_20_diff1 value: 51.778984221197774 - type: nauc_mrr_at_20_max value: 50.5070957827813 - type: nauc_mrr_at_20_std value: -24.908935023607285 - type: nauc_mrr_at_3_diff1 value: 51.82683773090423 - type: nauc_mrr_at_3_max value: 50.77993196421369 - type: nauc_mrr_at_3_std value: -24.3925832021831 - type: nauc_mrr_at_5_diff1 value: 51.722232683543034 - type: nauc_mrr_at_5_max value: 50.334865493961864 - type: nauc_mrr_at_5_std value: -25.513593495703297 - type: nauc_ndcg_at_1000_diff1 value: 44.21851582991263 - type: nauc_ndcg_at_1000_max value: 45.73539068637836 - 
type: nauc_ndcg_at_1000_std value: -24.716522467580397 - type: nauc_ndcg_at_100_diff1 value: 43.8002401615357 - type: nauc_ndcg_at_100_max value: 45.801409410061915 - type: nauc_ndcg_at_100_std value: -24.73171742499903 - type: nauc_ndcg_at_10_diff1 value: 42.540922778755885 - type: nauc_ndcg_at_10_max value: 44.348836943874595 - type: nauc_ndcg_at_10_std value: -28.05403666494785 - type: nauc_ndcg_at_1_diff1 value: 53.91475115581825 - type: nauc_ndcg_at_1_max value: 49.122529616282016 - type: nauc_ndcg_at_1_std value: -22.377647552937155 - type: nauc_ndcg_at_20_diff1 value: 43.10347921163421 - type: nauc_ndcg_at_20_max value: 45.53253270265022 - type: nauc_ndcg_at_20_std value: -26.63902791862846 - type: nauc_ndcg_at_3_diff1 value: 42.41720274782384 - type: nauc_ndcg_at_3_max value: 42.91778219334943 - type: nauc_ndcg_at_3_std value: -24.793252033594076 - type: nauc_ndcg_at_5_diff1 value: 42.51515034945093 - type: nauc_ndcg_at_5_max value: 41.62080576508792 - type: nauc_ndcg_at_5_std value: -28.209669314955065 - type: nauc_precision_at_1000_diff1 value: -14.89794075433148 - type: nauc_precision_at_1000_max value: 27.85387929356412 - type: nauc_precision_at_1000_std value: 10.728618597190849 - type: nauc_precision_at_100_diff1 value: -13.075270046295856 - type: nauc_precision_at_100_max value: 29.77208946756632 - type: nauc_precision_at_100_std value: 8.491662697326039 - type: nauc_precision_at_10_diff1 value: -4.0826025188781205 - type: nauc_precision_at_10_max value: 39.04278085180075 - type: nauc_precision_at_10_std value: -5.925408651372333 - type: nauc_precision_at_1_diff1 value: 53.91475115581825 - type: nauc_precision_at_1_max value: 49.122529616282016 - type: nauc_precision_at_1_std value: -22.377647552937155 - type: nauc_precision_at_20_diff1 value: -7.93186440645135 - type: nauc_precision_at_20_max value: 35.81281308891365 - type: nauc_precision_at_20_std value: 0.1241277857515697 - type: nauc_precision_at_3_diff1 value: 7.563562511484409 - type: nauc_precision_at_3_max value: 43.43738862378524 - type: nauc_precision_at_3_std value: -11.958059731912615 - type: nauc_precision_at_5_diff1 value: -0.1801152449011624 - type: nauc_precision_at_5_max value: 41.32486715619513 - type: nauc_precision_at_5_std value: -10.088699021919552 - type: nauc_recall_at_1000_diff1 value: 86.93359696819986 - type: nauc_recall_at_1000_max value: 100.0 - type: nauc_recall_at_1000_std value: 72.21843645604022 - type: nauc_recall_at_100_diff1 value: 29.86050842714198 - type: nauc_recall_at_100_max value: 48.106658251136245 - type: nauc_recall_at_100_std value: -14.981886214880035 - type: nauc_recall_at_10_diff1 value: 33.67119240737528 - type: nauc_recall_at_10_max value: 39.271984859561414 - type: nauc_recall_at_10_std value: -35.6434883839217 - type: nauc_recall_at_1_diff1 value: 51.09859596320021 - type: nauc_recall_at_1_max value: 12.406789321338222 - type: nauc_recall_at_1_std value: -18.227486548655076 - type: nauc_recall_at_20_diff1 value: 33.211979983240724 - type: nauc_recall_at_20_max value: 43.47676074743184 - type: nauc_recall_at_20_std value: -33.88107138395349 - type: nauc_recall_at_3_diff1 value: 39.22513750146998 - type: nauc_recall_at_3_max value: 27.066674083840166 - type: nauc_recall_at_3_std value: -26.963282529629893 - type: nauc_recall_at_5_diff1 value: 36.53718917129459 - type: nauc_recall_at_5_max value: 35.40550013169686 - type: nauc_recall_at_5_std value: -34.209159379410806 - type: ndcg_at_1 value: 63.808 - type: ndcg_at_10 value: 68.258 - type: ndcg_at_100 value: 
73.38799999999999 - type: ndcg_at_1000 value: 74.03 - type: ndcg_at_20 value: 70.968 - type: ndcg_at_3 value: 62.33 - type: ndcg_at_5 value: 64.096 - type: precision_at_1 value: 63.808 - type: precision_at_10 value: 19.243 - type: precision_at_100 value: 2.367 - type: precision_at_1000 value: 0.245 - type: precision_at_20 value: 10.599 - type: precision_at_3 value: 44.515 - type: precision_at_5 value: 33.467999999999996 - type: recall_at_1 value: 33.06 - type: recall_at_10 value: 77.423 - type: recall_at_100 value: 95.923 - type: recall_at_1000 value: 99.874 - type: recall_at_20 value: 85.782 - type: recall_at_3 value: 57.098000000000006 - type: recall_at_5 value: 67.472 - task: type: Retrieval dataset: name: MTEB XPQARetrieval (spa-spa) type: jinaai/xpqa config: spa-spa split: test revision: c99d599f0a6ab9b85b065da6f9d94f9cf731679f metrics: - type: main_score value: 72.004 - type: map_at_1 value: 36.248000000000005 - type: map_at_10 value: 65.679 - type: map_at_100 value: 67.22399999999999 - type: map_at_1000 value: 67.264 - type: map_at_20 value: 66.705 - type: map_at_3 value: 56.455 - type: map_at_5 value: 62.997 - type: mrr_at_1 value: 67.71752837326608 - type: mrr_at_10 value: 74.59782021257429 - type: mrr_at_100 value: 75.0640960767943 - type: mrr_at_1000 value: 75.07324799466076 - type: mrr_at_20 value: 74.9323963386884 - type: mrr_at_3 value: 72.95081967213115 - type: mrr_at_5 value: 73.82723833543506 - type: nauc_map_at_1000_diff1 value: 43.111810717567714 - type: nauc_map_at_1000_max value: 44.835247208972476 - type: nauc_map_at_1000_std value: -32.798405973931985 - type: nauc_map_at_100_diff1 value: 43.090223482932764 - type: nauc_map_at_100_max value: 44.83392441557943 - type: nauc_map_at_100_std value: -32.81149166676563 - type: nauc_map_at_10_diff1 value: 42.87841934951979 - type: nauc_map_at_10_max value: 43.9838653389494 - type: nauc_map_at_10_std value: -33.588084643627084 - type: nauc_map_at_1_diff1 value: 54.509245848379095 - type: nauc_map_at_1_max value: 10.05921648322742 - type: nauc_map_at_1_std value: -24.652326014826762 - type: nauc_map_at_20_diff1 value: 43.07468612984794 - type: nauc_map_at_20_max value: 44.75663122615032 - type: nauc_map_at_20_std value: -33.11788887878321 - type: nauc_map_at_3_diff1 value: 44.63272828938906 - type: nauc_map_at_3_max value: 32.1584369869227 - type: nauc_map_at_3_std value: -30.761662210142944 - type: nauc_map_at_5_diff1 value: 42.77296997803048 - type: nauc_map_at_5_max value: 41.78894616737652 - type: nauc_map_at_5_std value: -33.56459774477362 - type: nauc_mrr_at_1000_diff1 value: 53.097544131833494 - type: nauc_mrr_at_1000_max value: 50.61134979184588 - type: nauc_mrr_at_1000_std value: -35.6221191487669 - type: nauc_mrr_at_100_diff1 value: 53.096609856182106 - type: nauc_mrr_at_100_max value: 50.61951585642645 - type: nauc_mrr_at_100_std value: -35.62396157508327 - type: nauc_mrr_at_10_diff1 value: 52.771534471912304 - type: nauc_mrr_at_10_max value: 50.430863224435726 - type: nauc_mrr_at_10_std value: -36.027992076620365 - type: nauc_mrr_at_1_diff1 value: 55.05316238884337 - type: nauc_mrr_at_1_max value: 49.461858515275196 - type: nauc_mrr_at_1_std value: -31.87492636319712 - type: nauc_mrr_at_20_diff1 value: 53.083253469629746 - type: nauc_mrr_at_20_max value: 50.62156424256193 - type: nauc_mrr_at_20_std value: -35.879153692447154 - type: nauc_mrr_at_3_diff1 value: 52.98283109188415 - type: nauc_mrr_at_3_max value: 50.83561260429378 - type: nauc_mrr_at_3_std value: -35.30839538038797 - type: nauc_mrr_at_5_diff1 value: 
52.93270510879709 - type: nauc_mrr_at_5_max value: 50.54595596761199 - type: nauc_mrr_at_5_std value: -35.84059376434395 - type: nauc_ndcg_at_1000_diff1 value: 45.343685089209416 - type: nauc_ndcg_at_1000_max value: 47.801141576669465 - type: nauc_ndcg_at_1000_std value: -33.512958862879195 - type: nauc_ndcg_at_100_diff1 value: 45.255590461515894 - type: nauc_ndcg_at_100_max value: 47.99240031881967 - type: nauc_ndcg_at_100_std value: -33.614465006695205 - type: nauc_ndcg_at_10_diff1 value: 43.93472511731019 - type: nauc_ndcg_at_10_max value: 45.92599752897053 - type: nauc_ndcg_at_10_std value: -36.43629114491574 - type: nauc_ndcg_at_1_diff1 value: 55.05316238884337 - type: nauc_ndcg_at_1_max value: 49.461858515275196 - type: nauc_ndcg_at_1_std value: -31.87492636319712 - type: nauc_ndcg_at_20_diff1 value: 44.93534591273201 - type: nauc_ndcg_at_20_max value: 47.55153940713458 - type: nauc_ndcg_at_20_std value: -35.56392448745206 - type: nauc_ndcg_at_3_diff1 value: 43.17916122133396 - type: nauc_ndcg_at_3_max value: 45.603634205103276 - type: nauc_ndcg_at_3_std value: -32.473227507181214 - type: nauc_ndcg_at_5_diff1 value: 44.10242961669216 - type: nauc_ndcg_at_5_max value: 43.61666669031808 - type: nauc_ndcg_at_5_std value: -35.98808321497782 - type: nauc_precision_at_1000_diff1 value: -23.264714449991146 - type: nauc_precision_at_1000_max value: 28.505729576735465 - type: nauc_precision_at_1000_std value: 11.987379232920926 - type: nauc_precision_at_100_diff1 value: -21.156119174614627 - type: nauc_precision_at_100_max value: 30.711646221646255 - type: nauc_precision_at_100_std value: 9.650486536340322 - type: nauc_precision_at_10_diff1 value: -10.98001328477502 - type: nauc_precision_at_10_max value: 39.25638073760597 - type: nauc_precision_at_10_std value: -4.3456859257488 - type: nauc_precision_at_1_diff1 value: 55.05316238884337 - type: nauc_precision_at_1_max value: 49.461858515275196 - type: nauc_precision_at_1_std value: -31.87492636319712 - type: nauc_precision_at_20_diff1 value: -14.97565390664424 - type: nauc_precision_at_20_max value: 36.383835295942355 - type: nauc_precision_at_20_std value: 1.525158880381114 - type: nauc_precision_at_3_diff1 value: 1.0448345623903483 - type: nauc_precision_at_3_max value: 45.69772060667404 - type: nauc_precision_at_3_std value: -13.002685018948293 - type: nauc_precision_at_5_diff1 value: -5.434185597628904 - type: nauc_precision_at_5_max value: 42.99162431099203 - type: nauc_precision_at_5_std value: -9.789308817624534 - type: nauc_recall_at_1000_diff1 value: 12.309303236094845 - type: nauc_recall_at_1000_max value: 100.0 - type: nauc_recall_at_1000_std value: 86.93359696819986 - type: nauc_recall_at_100_diff1 value: 39.093544920901415 - type: nauc_recall_at_100_max value: 55.62814395062938 - type: nauc_recall_at_100_std value: -22.6919033301514 - type: nauc_recall_at_10_diff1 value: 35.50100141633622 - type: nauc_recall_at_10_max value: 39.25750019586647 - type: nauc_recall_at_10_std value: -43.01273078031791 - type: nauc_recall_at_1_diff1 value: 54.509245848379095 - type: nauc_recall_at_1_max value: 10.05921648322742 - type: nauc_recall_at_1_std value: -24.652326014826762 - type: nauc_recall_at_20_diff1 value: 38.1281707132327 - type: nauc_recall_at_20_max value: 43.97950642900301 - type: nauc_recall_at_20_std value: -44.049952771307574 - type: nauc_recall_at_3_diff1 value: 40.01986938242728 - type: nauc_recall_at_3_max value: 27.517114421061173 - type: nauc_recall_at_3_std value: -32.99056780232045 - type: nauc_recall_at_5_diff1 value: 
38.52035606499483 - type: nauc_recall_at_5_max value: 37.05834604678859 - type: nauc_recall_at_5_std value: -39.86196378897912 - type: ndcg_at_1 value: 67.718 - type: ndcg_at_10 value: 72.004 - type: ndcg_at_100 value: 76.554 - type: ndcg_at_1000 value: 77.07300000000001 - type: ndcg_at_20 value: 74.37899999999999 - type: ndcg_at_3 value: 66.379 - type: ndcg_at_5 value: 68.082 - type: precision_at_1 value: 67.718 - type: precision_at_10 value: 19.849 - type: precision_at_100 value: 2.3800000000000003 - type: precision_at_1000 value: 0.245 - type: precision_at_20 value: 10.813 - type: precision_at_3 value: 46.574 - type: precision_at_5 value: 34.83 - type: recall_at_1 value: 36.248000000000005 - type: recall_at_10 value: 80.252 - type: recall_at_100 value: 96.73 - type: recall_at_1000 value: 99.874 - type: recall_at_20 value: 87.703 - type: recall_at_3 value: 60.815 - type: recall_at_5 value: 71.16 - task: type: Retrieval dataset: name: MTEB XPQARetrieval (fra-eng) type: jinaai/xpqa config: fra-eng split: test revision: c99d599f0a6ab9b85b065da6f9d94f9cf731679f metrics: - type: main_score value: 73.729 - type: map_at_1 value: 43.964999999999996 - type: map_at_10 value: 67.803 - type: map_at_100 value: 69.188 - type: map_at_1000 value: 69.21000000000001 - type: map_at_20 value: 68.747 - type: map_at_3 value: 60.972 - type: map_at_5 value: 65.39399999999999 - type: mrr_at_1 value: 68.4913217623498 - type: mrr_at_10 value: 75.2600822260368 - type: mrr_at_100 value: 75.6599169808848 - type: mrr_at_1000 value: 75.66720883727534 - type: mrr_at_20 value: 75.52375865860405 - type: mrr_at_3 value: 73.54250111259452 - type: mrr_at_5 value: 74.51713395638626 - type: nauc_map_at_1000_diff1 value: 46.81533703002097 - type: nauc_map_at_1000_max value: 46.30794757084772 - type: nauc_map_at_1000_std value: -14.953470500312335 - type: nauc_map_at_100_diff1 value: 46.82464740277745 - type: nauc_map_at_100_max value: 46.32852879948254 - type: nauc_map_at_100_std value: -14.950035098066172 - type: nauc_map_at_10_diff1 value: 46.31406143369831 - type: nauc_map_at_10_max value: 45.337593270786634 - type: nauc_map_at_10_std value: -16.011789445907876 - type: nauc_map_at_1_diff1 value: 57.097134715065835 - type: nauc_map_at_1_max value: 21.93931500350721 - type: nauc_map_at_1_std value: -15.134457251301637 - type: nauc_map_at_20_diff1 value: 46.47030891134173 - type: nauc_map_at_20_max value: 46.29169960276292 - type: nauc_map_at_20_std value: -15.14241106541829 - type: nauc_map_at_3_diff1 value: 50.27064228648596 - type: nauc_map_at_3_max value: 39.43058773971639 - type: nauc_map_at_3_std value: -16.16545993089126 - type: nauc_map_at_5_diff1 value: 46.974867679747426 - type: nauc_map_at_5_max value: 44.31091104855002 - type: nauc_map_at_5_std value: -16.50175337658926 - type: nauc_mrr_at_1000_diff1 value: 55.20294005110399 - type: nauc_mrr_at_1000_max value: 51.947725719119966 - type: nauc_mrr_at_1000_std value: -14.586112939597232 - type: nauc_mrr_at_100_diff1 value: 55.20426251109304 - type: nauc_mrr_at_100_max value: 51.95648725402534 - type: nauc_mrr_at_100_std value: -14.579769236539143 - type: nauc_mrr_at_10_diff1 value: 54.93870506205835 - type: nauc_mrr_at_10_max value: 51.89312772900638 - type: nauc_mrr_at_10_std value: -14.692635010092939 - type: nauc_mrr_at_1_diff1 value: 56.54945935175171 - type: nauc_mrr_at_1_max value: 51.28134504197991 - type: nauc_mrr_at_1_std value: -12.909042186563061 - type: nauc_mrr_at_20_diff1 value: 55.10667018041461 - type: nauc_mrr_at_20_max value: 51.98236870783707 - type: 
nauc_mrr_at_20_std value: -14.599377575198025 - type: nauc_mrr_at_3_diff1 value: 55.67124311746892 - type: nauc_mrr_at_3_max value: 51.77903236246767 - type: nauc_mrr_at_3_std value: -14.94452633860763 - type: nauc_mrr_at_5_diff1 value: 55.42849172366371 - type: nauc_mrr_at_5_max value: 51.76902965753959 - type: nauc_mrr_at_5_std value: -15.357993534727072 - type: nauc_ndcg_at_1000_diff1 value: 48.736844959280326 - type: nauc_ndcg_at_1000_max value: 48.92891159935398 - type: nauc_ndcg_at_1000_std value: -13.983968675611056 - type: nauc_ndcg_at_100_diff1 value: 48.73859328503975 - type: nauc_ndcg_at_100_max value: 49.31867149556439 - type: nauc_ndcg_at_100_std value: -13.72387564912742 - type: nauc_ndcg_at_10_diff1 value: 46.50313862975287 - type: nauc_ndcg_at_10_max value: 47.13599793554596 - type: nauc_ndcg_at_10_std value: -16.317919977400113 - type: nauc_ndcg_at_1_diff1 value: 56.54945935175171 - type: nauc_ndcg_at_1_max value: 51.28134504197991 - type: nauc_ndcg_at_1_std value: -12.909042186563061 - type: nauc_ndcg_at_20_diff1 value: 47.01727117133912 - type: nauc_ndcg_at_20_max value: 49.121366036709105 - type: nauc_ndcg_at_20_std value: -14.411078677638775 - type: nauc_ndcg_at_3_diff1 value: 49.229581145458276 - type: nauc_ndcg_at_3_max value: 47.427609717032 - type: nauc_ndcg_at_3_std value: -16.52066627289908 - type: nauc_ndcg_at_5_diff1 value: 48.0152514127505 - type: nauc_ndcg_at_5_max value: 46.12152407850816 - type: nauc_ndcg_at_5_std value: -17.613295491954656 - type: nauc_precision_at_1000_diff1 value: -25.959006032642463 - type: nauc_precision_at_1000_max value: 12.81002362947137 - type: nauc_precision_at_1000_std value: 12.575312826061513 - type: nauc_precision_at_100_diff1 value: -24.35413527283394 - type: nauc_precision_at_100_max value: 14.878359236477303 - type: nauc_precision_at_100_std value: 12.384426050018428 - type: nauc_precision_at_10_diff1 value: -17.93220761770618 - type: nauc_precision_at_10_max value: 23.523485811847294 - type: nauc_precision_at_10_std value: 4.424456968716939 - type: nauc_precision_at_1_diff1 value: 56.54945935175171 - type: nauc_precision_at_1_max value: 51.28134504197991 - type: nauc_precision_at_1_std value: -12.909042186563061 - type: nauc_precision_at_20_diff1 value: -21.776871398686936 - type: nauc_precision_at_20_max value: 21.18436338264366 - type: nauc_precision_at_20_std value: 9.937274986573321 - type: nauc_precision_at_3_diff1 value: -1.2411845580934435 - type: nauc_precision_at_3_max value: 34.962281941875 - type: nauc_precision_at_3_std value: -2.447892908501237 - type: nauc_precision_at_5_diff1 value: -11.134164534114085 - type: nauc_precision_at_5_max value: 30.22079740070525 - type: nauc_precision_at_5_std value: -0.24232594421765946 - type: nauc_recall_at_1000_diff1 value: .nan - type: nauc_recall_at_1000_max value: .nan - type: nauc_recall_at_1000_std value: .nan - type: nauc_recall_at_100_diff1 value: 43.3647412452869 - type: nauc_recall_at_100_max value: 63.50094950500327 - type: nauc_recall_at_100_std value: 2.3911909633714044 - type: nauc_recall_at_10_diff1 value: 33.993445071666855 - type: nauc_recall_at_10_max value: 41.38694129134144 - type: nauc_recall_at_10_std value: -19.308698266099096 - type: nauc_recall_at_1_diff1 value: 57.097134715065835 - type: nauc_recall_at_1_max value: 21.93931500350721 - type: nauc_recall_at_1_std value: -15.134457251301637 - type: nauc_recall_at_20_diff1 value: 32.03888531880772 - type: nauc_recall_at_20_max value: 49.660787482562085 - type: nauc_recall_at_20_std value: 
-12.641456758778382 - type: nauc_recall_at_3_diff1 value: 47.94527082900579 - type: nauc_recall_at_3_max value: 36.51733131437679 - type: nauc_recall_at_3_std value: -18.65511713247495 - type: nauc_recall_at_5_diff1 value: 42.04545772092305 - type: nauc_recall_at_5_max value: 41.21440912972303 - type: nauc_recall_at_5_std value: -21.47386527081128 - type: ndcg_at_1 value: 68.491 - type: ndcg_at_10 value: 73.729 - type: ndcg_at_100 value: 77.684 - type: ndcg_at_1000 value: 78.084 - type: ndcg_at_20 value: 75.795 - type: ndcg_at_3 value: 68.568 - type: ndcg_at_5 value: 70.128 - type: precision_at_1 value: 68.491 - type: precision_at_10 value: 16.996 - type: precision_at_100 value: 2.023 - type: precision_at_1000 value: 0.207 - type: precision_at_20 value: 9.246 - type: precision_at_3 value: 41.923 - type: precision_at_5 value: 29.826000000000004 - type: recall_at_1 value: 43.964999999999996 - type: recall_at_10 value: 82.777 - type: recall_at_100 value: 97.287 - type: recall_at_1000 value: 100.0 - type: recall_at_20 value: 89.183 - type: recall_at_3 value: 65.803 - type: recall_at_5 value: 74.119 - task: type: Retrieval dataset: name: MTEB XPQARetrieval (fr) type: jinaai/xpqa config: fra-fra split: test revision: c99d599f0a6ab9b85b065da6f9d94f9cf731679f metrics: - type: main_score value: 77.581 - type: map_at_1 value: 46.444 - type: map_at_10 value: 72.084 - type: map_at_100 value: 73.175 - type: map_at_1000 value: 73.193 - type: map_at_20 value: 72.77799999999999 - type: map_at_3 value: 65.242 - type: map_at_5 value: 69.926 - type: mrr_at_1 value: 71.82910547396529 - type: mrr_at_10 value: 78.66594612923046 - type: mrr_at_100 value: 78.97334934049613 - type: mrr_at_1000 value: 78.97687021803557 - type: mrr_at_20 value: 78.85701141744282 - type: mrr_at_3 value: 76.96929238985311 - type: mrr_at_5 value: 77.99732977303067 - type: nauc_map_at_1000_diff1 value: 49.090956807097804 - type: nauc_map_at_1000_max value: 52.01095354889508 - type: nauc_map_at_1000_std value: -12.182870421711026 - type: nauc_map_at_100_diff1 value: 49.091664766684566 - type: nauc_map_at_100_max value: 52.017499797253755 - type: nauc_map_at_100_std value: -12.188342487271528 - type: nauc_map_at_10_diff1 value: 48.6619338205362 - type: nauc_map_at_10_max value: 50.93591260329888 - type: nauc_map_at_10_std value: -12.899399261673365 - type: nauc_map_at_1_diff1 value: 61.89699552471587 - type: nauc_map_at_1_max value: 22.387748207421946 - type: nauc_map_at_1_std value: -17.139518194308437 - type: nauc_map_at_20_diff1 value: 48.72828404686453 - type: nauc_map_at_20_max value: 51.781074586075434 - type: nauc_map_at_20_std value: -12.174270605093136 - type: nauc_map_at_3_diff1 value: 53.11509580126934 - type: nauc_map_at_3_max value: 42.1768380145106 - type: nauc_map_at_3_std value: -14.98340833032363 - type: nauc_map_at_5_diff1 value: 49.60521390803235 - type: nauc_map_at_5_max value: 49.80360562029127 - type: nauc_map_at_5_std value: -13.900652140457618 - type: nauc_mrr_at_1000_diff1 value: 58.10782478654255 - type: nauc_mrr_at_1000_max value: 61.31083013535486 - type: nauc_mrr_at_1000_std value: -9.624904298545921 - type: nauc_mrr_at_100_diff1 value: 58.11041683306092 - type: nauc_mrr_at_100_max value: 61.31590199755797 - type: nauc_mrr_at_100_std value: -9.625991053580865 - type: nauc_mrr_at_10_diff1 value: 57.883701815695375 - type: nauc_mrr_at_10_max value: 61.36276126424689 - type: nauc_mrr_at_10_std value: -9.495072468420386 - type: nauc_mrr_at_1_diff1 value: 60.18176977079093 - type: nauc_mrr_at_1_max value: 
59.697615236642555 - type: nauc_mrr_at_1_std value: -9.396133077966779 - type: nauc_mrr_at_20_diff1 value: 57.964817434006754 - type: nauc_mrr_at_20_max value: 61.34073539502932 - type: nauc_mrr_at_20_std value: -9.602378876645131 - type: nauc_mrr_at_3_diff1 value: 58.44338049427257 - type: nauc_mrr_at_3_max value: 60.92272989411293 - type: nauc_mrr_at_3_std value: -9.928970439416162 - type: nauc_mrr_at_5_diff1 value: 58.01513016866578 - type: nauc_mrr_at_5_max value: 61.46805302986586 - type: nauc_mrr_at_5_std value: -9.842227002440984 - type: nauc_ndcg_at_1000_diff1 value: 50.99293152828167 - type: nauc_ndcg_at_1000_max value: 56.14232784664811 - type: nauc_ndcg_at_1000_std value: -10.529213072410288 - type: nauc_ndcg_at_100_diff1 value: 50.99385944312529 - type: nauc_ndcg_at_100_max value: 56.34825518954588 - type: nauc_ndcg_at_100_std value: -10.398943874846047 - type: nauc_ndcg_at_10_diff1 value: 48.51273364357823 - type: nauc_ndcg_at_10_max value: 53.77871849486298 - type: nauc_ndcg_at_10_std value: -11.82105972112472 - type: nauc_ndcg_at_1_diff1 value: 60.18176977079093 - type: nauc_ndcg_at_1_max value: 59.697615236642555 - type: nauc_ndcg_at_1_std value: -9.396133077966779 - type: nauc_ndcg_at_20_diff1 value: 49.04268319033412 - type: nauc_ndcg_at_20_max value: 55.47011381097071 - type: nauc_ndcg_at_20_std value: -10.486452945493042 - type: nauc_ndcg_at_3_diff1 value: 50.95112745400584 - type: nauc_ndcg_at_3_max value: 53.45473828705577 - type: nauc_ndcg_at_3_std value: -13.420699384045728 - type: nauc_ndcg_at_5_diff1 value: 50.313156212000074 - type: nauc_ndcg_at_5_max value: 52.78539129309866 - type: nauc_ndcg_at_5_std value: -13.586274096509122 - type: nauc_precision_at_1000_diff1 value: -31.13772049254778 - type: nauc_precision_at_1000_max value: 17.2847598361294 - type: nauc_precision_at_1000_std value: 15.497531773816887 - type: nauc_precision_at_100_diff1 value: -29.98812263553739 - type: nauc_precision_at_100_max value: 19.048620003227654 - type: nauc_precision_at_100_std value: 15.38499952171958 - type: nauc_precision_at_10_diff1 value: -25.33028097412579 - type: nauc_precision_at_10_max value: 26.077919168306853 - type: nauc_precision_at_10_std value: 11.35352933466097 - type: nauc_precision_at_1_diff1 value: 60.18176977079093 - type: nauc_precision_at_1_max value: 59.697615236642555 - type: nauc_precision_at_1_std value: -9.396133077966779 - type: nauc_precision_at_20_diff1 value: -28.417606311068905 - type: nauc_precision_at_20_max value: 23.958679828637692 - type: nauc_precision_at_20_std value: 14.442021499194205 - type: nauc_precision_at_3_diff1 value: -8.127396049790482 - type: nauc_precision_at_3_max value: 37.348067982957076 - type: nauc_precision_at_3_std value: 4.747913619596849 - type: nauc_precision_at_5_diff1 value: -16.902418446058395 - type: nauc_precision_at_5_max value: 32.73583852552014 - type: nauc_precision_at_5_std value: 7.031446423850052 - type: nauc_recall_at_1000_diff1 value: -14.485978369112514 - type: nauc_recall_at_1000_max value: 78.59123887333172 - type: nauc_recall_at_1000_std value: 90.7384575424963 - type: nauc_recall_at_100_diff1 value: 41.47842281590715 - type: nauc_recall_at_100_max value: 67.47271545727422 - type: nauc_recall_at_100_std value: 14.555561992253999 - type: nauc_recall_at_10_diff1 value: 33.05308907973924 - type: nauc_recall_at_10_max value: 45.49878918493155 - type: nauc_recall_at_10_std value: -11.560069806810926 - type: nauc_recall_at_1_diff1 value: 61.89699552471587 - type: nauc_recall_at_1_max value: 
22.387748207421946 - type: nauc_recall_at_1_std value: -17.139518194308437 - type: nauc_recall_at_20_diff1 value: 31.305721376453754 - type: nauc_recall_at_20_max value: 51.24817763724019 - type: nauc_recall_at_20_std value: -5.0809908162023145 - type: nauc_recall_at_3_diff1 value: 49.27109038342917 - type: nauc_recall_at_3_max value: 37.69188317998447 - type: nauc_recall_at_3_std value: -17.119900758664336 - type: nauc_recall_at_5_diff1 value: 42.74501803377967 - type: nauc_recall_at_5_max value: 46.877008503354844 - type: nauc_recall_at_5_std value: -15.704892082115975 - type: ndcg_at_1 value: 71.829 - type: ndcg_at_10 value: 77.581 - type: ndcg_at_100 value: 80.75 - type: ndcg_at_1000 value: 81.026 - type: ndcg_at_20 value: 79.092 - type: ndcg_at_3 value: 72.81 - type: ndcg_at_5 value: 74.22999999999999 - type: precision_at_1 value: 71.829 - type: precision_at_10 value: 17.717 - type: precision_at_100 value: 2.031 - type: precision_at_1000 value: 0.207 - type: precision_at_20 value: 9.399000000000001 - type: precision_at_3 value: 44.458999999999996 - type: precision_at_5 value: 31.535000000000004 - type: recall_at_1 value: 46.444 - type: recall_at_10 value: 86.275 - type: recall_at_100 value: 98.017 - type: recall_at_1000 value: 99.8 - type: recall_at_20 value: 90.935 - type: recall_at_3 value: 70.167 - type: recall_at_5 value: 78.2 --- <br><br> <p align="center"> <img src="https://huggingface.co/datasets/jinaai/documentation-images/resolve/main/logo.webp" alt="Jina AI: Your Search Foundation, Supercharged!" width="150px"> </p> <p align="center"> <b>The embedding model trained by <a href="https://jina.ai/"><b>Jina AI</b></a>.</b> </p> <p align="center"> <b>jina-embeddings-v3: Multilingual Embeddings With Task LoRA</b> </p> ## Quick Start [Blog](https://jina.ai/news/jina-embeddings-v3-a-frontier-multilingual-embedding-model/#parameter-dimensions) | [Azure](https://azuremarketplace.microsoft.com/en-us/marketplace/apps/jinaai.jina-embeddings-v3-vm) | [AWS SageMaker](https://aws.amazon.com/marketplace/pp/prodview-kdi3xkt62lo32) | [API](https://jina.ai/embeddings) ## Intended Usage & Model Info `jina-embeddings-v3` is a **multilingual multi-task text embedding model** designed for a variety of NLP applications. Based on the [Jina-XLM-RoBERTa architecture](https://huggingface.co/jinaai/xlm-roberta-flash-implementation), this model supports Rotary Position Embeddings to handle long input sequences up to **8192 tokens**. Additionally, it features 5 LoRA adapters to generate task-specific embeddings efficiently. ### Key Features: - **Extended Sequence Length:** Supports up to 8192 tokens with RoPE. - **Task-Specific Embedding:** Customize embeddings through the `task` argument with the following options: - `retrieval.query`: Used for query embeddings in asymmetric retrieval tasks - `retrieval.passage`: Used for passage embeddings in asymmetric retrieval tasks - `separation`: Used for embeddings in clustering and re-ranking applications - `classification`: Used for embeddings in classification tasks - `text-matching`: Used for embeddings in tasks that quantify similarity between two texts, such as STS or symmetric retrieval tasks - **Matryoshka Embeddings**: Supports flexible embedding sizes (`32, 64, 128, 256, 512, 768, 1024`), allowing for truncating embeddings to fit your application. 
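As a minimal illustration of the task-specific adapters described above (a sketch, not taken from this card), the two retrieval options are meant to be used as a pair: `retrieval.query` for the question side and `retrieval.passage` for the document side. The example texts below are placeholders, and the snippet assumes, as the card's own similarity examples imply, that `encode` returns normalized vectors.

```python
# Sketch of asymmetric retrieval with the task-specific adapters (illustrative texts).
from transformers import AutoModel

model = AutoModel.from_pretrained("jinaai/jina-embeddings-v3", trust_remote_code=True)

query = "What is the capital of France?"
passages = [
    "Paris is the capital and most populous city of France.",
    "Berlin is the capital of Germany.",
]

# Queries and passages use different LoRA adapters
query_emb = model.encode([query], task="retrieval.query")
passage_emb = model.encode(passages, task="retrieval.passage")

# With normalized embeddings, dot products act as cosine similarities
print(query_emb @ passage_emb.T)  # the Paris passage should score highest
```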
### Supported Languages:
While the foundation model supports 100 languages, we've focused our tuning efforts on the following 30 languages:
**Arabic, Bengali, Chinese, Danish, Dutch, English, Finnish, French, Georgian, German, Greek, Hindi, Indonesian, Italian, Japanese, Korean, Latvian, Norwegian, Polish, Portuguese, Romanian, Russian, Slovak, Spanish, Swedish, Thai, Turkish, Ukrainian, Urdu,** and **Vietnamese.**

> **⚠️ Important Notice:**
> We fixed a bug in the `encode` function [#60](https://huggingface.co/jinaai/jina-embeddings-v3/discussions/60) where **Matryoshka embedding truncation** occurred *after normalization*, leading to non-normalized truncated embeddings. This issue has been resolved in the latest code revision.
>
> If you have encoded data using the previous version and wish to maintain consistency, please use the specific code revision when loading the model: `AutoModel.from_pretrained('jinaai/jina-embeddings-v3', code_revision='da863dd04a4e5dce6814c6625adfba87b83838aa', ...)`

## Usage

**<details><summary>Apply mean pooling when integrating the model.</summary>**
<p>

### Why Use Mean Pooling?

Mean pooling takes all token embeddings from the model's output and averages them at the sentence or paragraph level. This approach has been shown to produce high-quality sentence embeddings.

We provide an `encode` function that handles this for you automatically. However, if you're working with the model directly, outside of the `encode` function, you'll need to apply mean pooling manually. Here's how you can do it:

```python
import torch
import torch.nn.functional as F
from transformers import AutoTokenizer, AutoModel

def mean_pooling(model_output, attention_mask):
    # Average the token embeddings, ignoring padding positions via the attention mask
    token_embeddings = model_output[0]
    input_mask_expanded = (
        attention_mask.unsqueeze(-1).expand(token_embeddings.size()).float()
    )
    return torch.sum(token_embeddings * input_mask_expanded, 1) / torch.clamp(
        input_mask_expanded.sum(1), min=1e-9
    )

sentences = ["How is the weather today?", "What is the current weather like today?"]

tokenizer = AutoTokenizer.from_pretrained("jinaai/jina-embeddings-v3")
model = AutoModel.from_pretrained("jinaai/jina-embeddings-v3", trust_remote_code=True)

encoded_input = tokenizer(sentences, padding=True, truncation=True, return_tensors="pt")

# Select the LoRA adapter for the chosen task
task = 'retrieval.query'
task_id = model._adaptation_map[task]
adapter_mask = torch.full((len(sentences),), task_id, dtype=torch.int32)

with torch.no_grad():
    model_output = model(**encoded_input, adapter_mask=adapter_mask)

embeddings = mean_pooling(model_output, encoded_input["attention_mask"])
embeddings = F.normalize(embeddings, p=2, dim=1)
```

</p>
</details>

The easiest way to start using `jina-embeddings-v3` is with the [Jina Embedding API](https://jina.ai/embeddings/).

Alternatively, you can use `jina-embeddings-v3` directly via the Transformers package:

```bash
!pip install transformers torch einops
!pip install 'numpy<2'
```

If you run it on a GPU that supports [FlashAttention-2](https://github.com/Dao-AILab/flash-attention), you can additionally install `flash-attn` for faster inference.
As of 2024-09-12, FlashAttention-2 supports Ampere, Ada, and Hopper GPUs (e.g., A100, RTX 3090, RTX 4090, H100):

```bash
!pip install flash-attn --no-build-isolation
```

```python
from transformers import AutoModel

# Initialize the model
model = AutoModel.from_pretrained("jinaai/jina-embeddings-v3", trust_remote_code=True)

texts = [
    "Follow the white rabbit.",  # English
    "Sigue al conejo blanco.",  # Spanish
    "Suis le lapin blanc.",  # French
    "跟着白兔走。",  # Chinese
    "اتبع الأرنب الأبيض.",  # Arabic
    "Folge dem weißen Kaninchen.",  # German
]

# When calling the `encode` function, you can choose a `task` based on the use case:
# 'retrieval.query', 'retrieval.passage', 'separation', 'classification', 'text-matching'
# Alternatively, you can choose not to pass a `task`, and no specific LoRA adapter will be used.
embeddings = model.encode(texts, task="text-matching")

# Compute similarities
print(embeddings[0] @ embeddings[1].T)
```

By default, the model supports a maximum sequence length of 8192 tokens. However, if you want to truncate your input texts to a shorter length, you can pass the `max_length` parameter to the `encode` function:

```python
embeddings = model.encode(["Very long ... document"], max_length=2048)
```

To use **Matryoshka embeddings** at a different dimension, pass the `truncate_dim` parameter to the `encode` function:

```python
embeddings = model.encode(['Sample text'], truncate_dim=256)
```

The latest version (3.1.0) of [SentenceTransformers](https://github.com/UKPLab/sentence-transformers) also supports `jina-embeddings-v3`:

```bash
!pip install -U sentence-transformers
```

```python
from sentence_transformers import SentenceTransformer

model = SentenceTransformer("jinaai/jina-embeddings-v3", trust_remote_code=True)
task = "retrieval.query"
embeddings = model.encode(
    ["What is the weather like in Berlin today?"],
    task=task,
    prompt_name=task,
)
```

You can fine-tune `jina-embeddings-v3` using [SentenceTransformerTrainer](https://sbert.net/docs/package_reference/sentence_transformer/trainer.html). To fine-tune for a specific task, you should set the task before passing the model to the ST Trainer, either during initialization:

```python
model = SentenceTransformer("jinaai/jina-embeddings-v3", trust_remote_code=True, model_kwargs={'default_task': 'classification'})
```

Or afterwards:

```python
model = SentenceTransformer("jinaai/jina-embeddings-v3", trust_remote_code=True)
model[0].default_task = 'classification'
```

This way you can fine-tune the LoRA adapter for the chosen task. However, if you want to fine-tune the entire model, make sure the main parameters are set as trainable when loading the model:

```python
model = SentenceTransformer("jinaai/jina-embeddings-v3", trust_remote_code=True, model_kwargs={'lora_main_params_trainable': True})
```

This will allow fine-tuning the whole model instead of just the LoRA adapters.
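For readers who have not used the ST Trainer before, the following is a minimal, illustrative fine-tuning sketch rather than part of the original card: the toy dataset, the `anchor`/`positive` column names, the loss, and the hyperparameters are placeholder assumptions you would replace with your own.

```python
# Minimal fine-tuning sketch (illustrative; dataset and hyperparameters are placeholders).
from datasets import Dataset
from sentence_transformers import (
    SentenceTransformer,
    SentenceTransformerTrainer,
    SentenceTransformerTrainingArguments,
    losses,
)

# Load the model with the LoRA adapter you want to fine-tune selected as the default task
model = SentenceTransformer(
    "jinaai/jina-embeddings-v3",
    trust_remote_code=True,
    model_kwargs={"default_task": "classification"},
)

# Toy (anchor, positive) pairs -- replace with your own training data
train_dataset = Dataset.from_dict({
    "anchor": [
        "What is the weather like in Berlin today?",
        "How do I reset my password?",
    ],
    "positive": [
        "Berlin weather forecast for today",
        "Steps to reset a forgotten account password",
    ],
})

args = SentenceTransformerTrainingArguments(
    output_dir="jina-embeddings-v3-finetuned",
    num_train_epochs=1,
    per_device_train_batch_size=2,
)

trainer = SentenceTransformerTrainer(
    model=model,
    args=args,
    train_dataset=train_dataset,
    loss=losses.MultipleNegativesRankingLoss(model),  # in-batch negatives over (anchor, positive) pairs
)
trainer.train()
```

With `lora_main_params_trainable=True` in `model_kwargs`, the same loop would update the full model rather than only the selected adapter.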
**<details><summary>ONNX Inference.</summary>**
<p>

You can use ONNX for efficient inference with `jina-embeddings-v3`:

```python
import onnxruntime
import numpy as np
from transformers import AutoTokenizer, PretrainedConfig

# Mean pool function
def mean_pooling(model_output: np.ndarray, attention_mask: np.ndarray):
    token_embeddings = model_output
    input_mask_expanded = np.expand_dims(attention_mask, axis=-1)
    input_mask_expanded = np.broadcast_to(input_mask_expanded, token_embeddings.shape)
    sum_embeddings = np.sum(token_embeddings * input_mask_expanded, axis=1)
    sum_mask = np.clip(np.sum(input_mask_expanded, axis=1), a_min=1e-9, a_max=None)
    return sum_embeddings / sum_mask

# Load tokenizer and model config
tokenizer = AutoTokenizer.from_pretrained('jinaai/jina-embeddings-v3')
config = PretrainedConfig.from_pretrained('jinaai/jina-embeddings-v3')

# Tokenize input
input_text = tokenizer('sample text', return_tensors='np')

# ONNX session
model_path = 'jina-embeddings-v3/onnx/model.onnx'
session = onnxruntime.InferenceSession(model_path)

# Prepare inputs for ONNX model
task_type = 'text-matching'
task_id = np.array(config.lora_adaptations.index(task_type), dtype=np.int64)
inputs = {
    'input_ids': input_text['input_ids'],
    'attention_mask': input_text['attention_mask'],
    'task_id': task_id
}

# Run model
outputs = session.run(None, inputs)[0]

# Apply mean pooling and normalization to the model outputs
embeddings = mean_pooling(outputs, input_text["attention_mask"])
embeddings = embeddings / np.linalg.norm(embeddings, ord=2, axis=1, keepdims=True)
```

</p>
</details>

## Contact

Join our [Discord community](https://discord.jina.ai) and chat with other community members about ideas.

## License

`jina-embeddings-v3` is listed on AWS & Azure. If you need to use it beyond those platforms or on-premises within your company, note that the model is licensed under CC BY-NC 4.0. For commercial usage inquiries, feel free to [contact us](https://jina.ai/contact-sales/).

## Citation

If you find `jina-embeddings-v3` useful in your research, please cite the following paper:

```bibtex
@misc{sturua2024jinaembeddingsv3multilingualembeddingstask,
      title={jina-embeddings-v3: Multilingual Embeddings With Task LoRA},
      author={Saba Sturua and Isabelle Mohr and Mohammad Kalim Akram and Michael Günther and Bo Wang and Markus Krimmel and Feng Wang and Georgios Mastrapas and Andreas Koukounas and Nan Wang and Han Xiao},
      year={2024},
      eprint={2409.10173},
      archivePrefix={arXiv},
      primaryClass={cs.CL},
      url={https://arxiv.org/abs/2409.10173},
}
```
[ "BIOSSES", "SCIFACT" ]
jinaai/jina-clip-v1
jinaai
feature-extraction
[ "transformers", "pytorch", "onnx", "safetensors", "jina_clip", "feature-extraction", "sentence-similarity", "mteb", "clip", "vision", "transformers.js", "custom_code", "en", "arxiv:2405.20204", "license:apache-2.0", "region:eu" ]
"2024-05-21T13:52:49Z"
2025-01-06T16:22:30+00:00
1,729,560
239
---
language: en
library_name: transformers
license: apache-2.0
tags:
- feature-extraction
- sentence-similarity
- mteb
- clip
- vision
- transformers.js
inference: false
---

<br><br>

<p align="center">
<img src="https://huggingface.co/datasets/jinaai/documentation-images/resolve/main/logo.webp" alt="Jina AI: Your Search Foundation, Supercharged!" width="150px">
</p>

<p align="center">
<b>The embedding set trained by <a href="https://jina.ai/"><b>Jina AI</b></a>.</b>
</p>

<p align="center">
<b>Jina CLIP: your CLIP model is also your text retriever!</b>
</p>

## Intended Usage & Model Info

`jina-clip-v1` is a state-of-the-art English **multimodal (text-image) embedding model**.

Traditional text embedding models, such as [jina-embeddings-v2-base-en](https://huggingface.co/jinaai/jina-embeddings-v2-base-en), excel in text-to-text retrieval but are incapable of cross-modal tasks. Models like [openai/clip-vit-base-patch32](https://huggingface.co/openai/clip-vit-base-patch32) effectively align image and text embeddings but are not optimized for text-to-text retrieval due to their training methodologies and context limitations.

`jina-clip-v1` bridges this gap by offering robust performance in both domains. Its text component matches the retrieval efficiency of `jina-embeddings-v2-base-en`, while its overall architecture sets a new benchmark for cross-modal retrieval. This dual capability makes it an excellent tool for multimodal retrieval-augmented generation (MuRAG) applications, enabling seamless text-to-text and text-to-image searches within a single model.

## Data & Parameters

[Check out our paper](https://arxiv.org/abs/2405.20204)

## Usage

1. The easiest way to start using `jina-clip-v1` is with Jina AI's [Embeddings API](https://jina.ai/embeddings/).
2. Alternatively, you can use Jina CLIP directly via the transformers/sentence-transformers packages.

```python
# !pip install transformers einops timm pillow
from transformers import AutoModel

# Initialize the model
model = AutoModel.from_pretrained('jinaai/jina-clip-v1', trust_remote_code=True)

# New meaningful sentences
sentences = ['A blue cat', 'A red cat']

# Public image URLs
image_urls = [
    'https://i.pinimg.com/600x315/21/48/7e/21487e8e0970dd366dafaed6ab25d8d8.jpg',
    'https://i.pinimg.com/736x/c9/f2/3e/c9f23e212529f13f19bad5602d84b78b.jpg'
]

# Encode text and images
text_embeddings = model.encode_text(sentences)
image_embeddings = model.encode_image(image_urls)  # also accepts PIL.Image, local filenames, dataURI

# Compute similarities
print(text_embeddings[0] @ text_embeddings[1].T)   # text embedding similarity
print(text_embeddings[0] @ image_embeddings[0].T)  # text-image cross-modal similarity
print(text_embeddings[0] @ image_embeddings[1].T)  # text-image cross-modal similarity
print(text_embeddings[1] @ image_embeddings[0].T)  # text-image cross-modal similarity
print(text_embeddings[1] @ image_embeddings[1].T)  # text-image cross-modal similarity
```

or sentence-transformers:

```python
# !pip install -U sentence-transformers
from sentence_transformers import SentenceTransformer

# Initialize the model
model = SentenceTransformer('jinaai/jina-clip-v1', trust_remote_code=True)

# New meaningful sentences
sentences = ['A blue cat', 'A red cat']

# Public image URLs
image_urls = [
    'https://i.pinimg.com/600x315/21/48/7e/21487e8e0970dd366dafaed6ab25d8d8.jpg',
    'https://i.pinimg.com/736x/c9/f2/3e/c9f23e212529f13f19bad5602d84b78b.jpg'
]

text_embeddings = model.encode(sentences)
image_embeddings = model.encode(image_urls)
```
3. JavaScript developers can use Jina CLIP via the [Transformers.js](https://huggingface.co/docs/transformers.js) library. Note that to use this model, you need to install Transformers.js [v3](https://github.com/xenova/transformers.js/tree/v3) from source using `npm install xenova/transformers.js#v3`.

```js
import { AutoTokenizer, CLIPTextModelWithProjection, AutoProcessor, CLIPVisionModelWithProjection, RawImage, cos_sim } from '@xenova/transformers';

// Load tokenizer and text model
const tokenizer = await AutoTokenizer.from_pretrained('jinaai/jina-clip-v1');
const text_model = await CLIPTextModelWithProjection.from_pretrained('jinaai/jina-clip-v1');

// Load processor and vision model
const processor = await AutoProcessor.from_pretrained('Xenova/clip-vit-base-patch32');
const vision_model = await CLIPVisionModelWithProjection.from_pretrained('jinaai/jina-clip-v1');

// Run tokenization
const texts = ['A blue cat', 'A red cat'];
const text_inputs = tokenizer(texts, { padding: true, truncation: true });

// Compute text embeddings
const { text_embeds } = await text_model(text_inputs);

// Read images and run processor
const urls = [
    'https://i.pinimg.com/600x315/21/48/7e/21487e8e0970dd366dafaed6ab25d8d8.jpg',
    'https://i.pinimg.com/736x/c9/f2/3e/c9f23e212529f13f19bad5602d84b78b.jpg'
];
const image = await Promise.all(urls.map(url => RawImage.read(url)));
const image_inputs = await processor(image);

// Compute vision embeddings
const { image_embeds } = await vision_model(image_inputs);

// Compute similarities
console.log(cos_sim(text_embeds[0].data, text_embeds[1].data))  // text embedding similarity
console.log(cos_sim(text_embeds[0].data, image_embeds[0].data)) // text-image cross-modal similarity
console.log(cos_sim(text_embeds[0].data, image_embeds[1].data)) // text-image cross-modal similarity
console.log(cos_sim(text_embeds[1].data, image_embeds[0].data)) // text-image cross-modal similarity
console.log(cos_sim(text_embeds[1].data, image_embeds[1].data)) // text-image cross-modal similarity
```

## Performance

### Text-Image Retrieval

| Name | Flickr Image Retr. R@1 | Flickr Image Retr. R@5 | Flickr Text Retr. R@1 | Flickr Text Retr. R@5 |
|------------------|-------------------------|-------------------------|-----------------------|-----------------------|
| ViT-B-32 | 0.597 | 0.8398 | 0.781 | 0.938 |
| ViT-B-16 | 0.6216 | 0.8572 | 0.822 | 0.966 |
| jina-clip | 0.6748 | 0.8902 | 0.811 | 0.965 |

| Name | MSCOCO Image Retr. R@1 | MSCOCO Image Retr. R@5 | MSCOCO Text Retr. R@1 | MSCOCO Text Retr. R@5 |
|------------------|-------------------------|-------------------------|-----------------------|-----------------------|
| ViT-B-32 | 0.342 | 0.6001 | 0.5234 | 0.7634 |
| ViT-B-16 | 0.3309 | 0.5842 | 0.5242 | 0.767 |
| jina-clip | 0.4111 | 0.6644 | 0.5544 | 0.7904 |

### Text-Text Retrieval

| Name | STS12 | STS15 | STS17 | STS13 | STS14 | STS16 | STS22 | STSBenchmark | SummEval |
|-----------------------|--------|--------|--------|--------|--------|--------|--------|--------------|----------|
| jina-embeddings-v2 | 0.7427 | 0.8755 | 0.8888 | 0.833 | 0.7917 | 0.836 | 0.6346 | 0.8404 | 0.3056 |
| jina-clip | 0.7352 | 0.8746 | 0.8976 | 0.8323 | 0.7868 | 0.8377 | 0.6583 | 0.8493 | 0.3048 |

| Name | ArguAna | FiQA2018 | NFCorpus | Quora | SCIDOCS | SciFact | TRECCOVID |
|--------------------|---------|----------|----------|-------|---------|---------|-----------|
| jina-embeddings-v2 | 0.4418 | 0.4158 | 0.3245 | 0.882 | 0.1986 | 0.6668 | 0.6591 |
| jina-clip | 0.4933 | 0.3827 | 0.3352 | 0.8789 | 0.2024 | 0.6734 | 0.7161 |

## Contact

Join our [Discord community](https://discord.jina.ai) and chat with other community members about ideas.

## Citation

If you find `jina-clip-v1` useful in your research, please cite the following paper:

```bibtex
@misc{2405.20204,
    Author = {Andreas Koukounas and Georgios Mastrapas and Michael Günther and Bo Wang and Scott Martens and Isabelle Mohr and Saba Sturua and Mohammad Kalim Akram and Joan Fontanals Martínez and Saahil Ognawala and Susana Guzman and Maximilian Werk and Nan Wang and Han Xiao},
    Title = {Jina CLIP: Your CLIP Model Is Also Your Text Retriever},
    Year = {2024},
    Eprint = {arXiv:2405.20204},
}
```

## FAQ

### I encounter this problem, what should I do?

```
ValueError: The model class you are passing has a `config_class` attribute that is not consistent with the config class you passed (model has <class 'transformers_modules.jinaai.jina-clip-implementation.7f069e2d54d609ef1ad2eb578c7bf07b5a51de41.configuration_clip.JinaCLIPConfig'> and you passed <class 'transformers_modules.jinaai.jina-clip-implementation.7f069e2d54d609ef1ad2eb578c7bf07b5a51de41.configuration_cli.JinaCLIPConfig'>. Fix one of those so they match!
```

There was a bug in the Transformers library between versions 4.40.x and 4.41.1. You can upgrade transformers to >4.41.2 or downgrade it to <=4.40.0.

### Given one query, how can I merge its text-text and text-image cosine similarity?

Our empirical study shows that text-text cosine similarity is normally larger than text-image cosine similarity! If you want to merge the two scores, we recommend two approaches:

1. weighted average of text-text sim and text-image sim:

```python
combined_scores = sim(text, text) + lambda * sim(text, image)  # optimal lambda depends on your dataset, but in general lambda=2 can be a good choice.
```

2. apply z-score normalization before merging scores:

```python
# pseudo code
query_document_mean = np.mean(cos_sim_query_documents)
query_document_std = np.std(cos_sim_query_documents)
text_image_mean = np.mean(cos_sim_text_images)
text_image_std = np.std(cos_sim_text_images)

query_document_sim_normalized = (cos_sim_query_documents - query_document_mean) / query_document_std
text_image_sim_normalized = (cos_sim_text_images - text_image_mean) / text_image_std
```
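To make the second option concrete, here is a short, illustrative sketch (not from the original card) that carries the z-score normalization through to a merged ranking; the example score values and the equal weighting at the end are assumptions, not recommendations from the authors.

```python
import numpy as np

# Illustrative similarity scores for one query against three candidates;
# in practice these come from the cosine similarities computed above.
cos_sim_query_documents = np.array([0.71, 0.58, 0.64])  # text-text similarities
cos_sim_text_images = np.array([0.32, 0.41, 0.28])      # text-image similarities

# z-score normalization, as in the pseudo code above
query_document_sim_normalized = (
    cos_sim_query_documents - np.mean(cos_sim_query_documents)
) / np.std(cos_sim_query_documents)
text_image_sim_normalized = (
    cos_sim_text_images - np.mean(cos_sim_text_images)
) / np.std(cos_sim_text_images)

# Merge the normalized scores; equal weights are an assumption, tune them per dataset
combined_scores = 0.5 * query_document_sim_normalized + 0.5 * text_image_sim_normalized
ranking = np.argsort(-combined_scores)  # candidate indices, best first
print(combined_scores, ranking)
```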
[ "SCIFACT" ]
BAAI/bge-base-en-v1.5
BAAI
feature-extraction
[ "sentence-transformers", "pytorch", "onnx", "safetensors", "bert", "feature-extraction", "sentence-similarity", "transformers", "mteb", "en", "arxiv:2401.03462", "arxiv:2312.15503", "arxiv:2311.13534", "arxiv:2310.07554", "arxiv:2309.07597", "license:mit", "model-index", "autotrain_compatible", "text-embeddings-inference", "endpoints_compatible", "region:us" ]
"2023-09-11T15:04:22Z"
2024-02-21T03:00:19+00:00
1,663,008
285
--- language: - en license: mit tags: - sentence-transformers - feature-extraction - sentence-similarity - transformers - mteb model-index: - name: bge-base-en-v1.5 results: - task: type: Classification dataset: name: MTEB AmazonCounterfactualClassification (en) type: mteb/amazon_counterfactual config: en split: test revision: e8379541af4e31359cca9fbcf4b00f2671dba205 metrics: - type: accuracy value: 76.14925373134328 - type: ap value: 39.32336517995478 - type: f1 value: 70.16902252611425 - task: type: Classification dataset: name: MTEB AmazonPolarityClassification type: mteb/amazon_polarity config: default split: test revision: e2d317d38cd51312af73b3d32a06d1a08b442046 metrics: - type: accuracy value: 93.386825 - type: ap value: 90.21276917991995 - type: f1 value: 93.37741030006174 - task: type: Classification dataset: name: MTEB AmazonReviewsClassification (en) type: mteb/amazon_reviews_multi config: en split: test revision: 1399c76144fd37290681b995c656ef9b2e06e26d metrics: - type: accuracy value: 48.846000000000004 - type: f1 value: 48.14646269778261 - task: type: Retrieval dataset: name: MTEB ArguAna type: arguana config: default split: test revision: None metrics: - type: map_at_1 value: 40.754000000000005 - type: map_at_10 value: 55.761 - type: map_at_100 value: 56.330999999999996 - type: map_at_1000 value: 56.333999999999996 - type: map_at_3 value: 51.92 - type: map_at_5 value: 54.010999999999996 - type: mrr_at_1 value: 41.181 - type: mrr_at_10 value: 55.967999999999996 - type: mrr_at_100 value: 56.538 - type: mrr_at_1000 value: 56.542 - type: mrr_at_3 value: 51.980000000000004 - type: mrr_at_5 value: 54.208999999999996 - type: ndcg_at_1 value: 40.754000000000005 - type: ndcg_at_10 value: 63.605000000000004 - type: ndcg_at_100 value: 66.05199999999999 - type: ndcg_at_1000 value: 66.12 - type: ndcg_at_3 value: 55.708 - type: ndcg_at_5 value: 59.452000000000005 - type: precision_at_1 value: 40.754000000000005 - type: precision_at_10 value: 8.841000000000001 - type: precision_at_100 value: 0.991 - type: precision_at_1000 value: 0.1 - type: precision_at_3 value: 22.238 - type: precision_at_5 value: 15.149000000000001 - type: recall_at_1 value: 40.754000000000005 - type: recall_at_10 value: 88.407 - type: recall_at_100 value: 99.14699999999999 - type: recall_at_1000 value: 99.644 - type: recall_at_3 value: 66.714 - type: recall_at_5 value: 75.747 - task: type: Clustering dataset: name: MTEB ArxivClusteringP2P type: mteb/arxiv-clustering-p2p config: default split: test revision: a122ad7f3f0291bf49cc6f4d32aa80929df69d5d metrics: - type: v_measure value: 48.74884539679369 - task: type: Clustering dataset: name: MTEB ArxivClusteringS2S type: mteb/arxiv-clustering-s2s config: default split: test revision: f910caf1a6075f7329cdf8c1a6135696f37dbd53 metrics: - type: v_measure value: 42.8075893810716 - task: type: Reranking dataset: name: MTEB AskUbuntuDupQuestions type: mteb/askubuntudupquestions-reranking config: default split: test revision: 2000358ca161889fa9c082cb41daa8dcfb161a54 metrics: - type: map value: 62.128470519187736 - type: mrr value: 74.28065778481289 - task: type: STS dataset: name: MTEB BIOSSES type: mteb/biosses-sts config: default split: test revision: d3fb88f8f02e40887cd149695127462bbcf29b4a metrics: - type: cos_sim_pearson value: 89.24629081484655 - type: cos_sim_spearman value: 86.93752309911496 - type: euclidean_pearson value: 87.58589628573816 - type: euclidean_spearman value: 88.05622328825284 - type: manhattan_pearson value: 87.5594959805773 - type: manhattan_spearman 
value: 88.19658793233961 - task: type: Classification dataset: name: MTEB Banking77Classification type: mteb/banking77 config: default split: test revision: 0fd18e25b25c072e09e0d92ab615fda904d66300 metrics: - type: accuracy value: 86.9512987012987 - type: f1 value: 86.92515357973708 - task: type: Clustering dataset: name: MTEB BiorxivClusteringP2P type: mteb/biorxiv-clustering-p2p config: default split: test revision: 65b79d1d13f80053f67aca9498d9402c2d9f1f40 metrics: - type: v_measure value: 39.10263762928872 - task: type: Clustering dataset: name: MTEB BiorxivClusteringS2S type: mteb/biorxiv-clustering-s2s config: default split: test revision: 258694dd0231531bc1fd9de6ceb52a0853c6d908 metrics: - type: v_measure value: 36.69711517426737 - task: type: Retrieval dataset: name: MTEB CQADupstackAndroidRetrieval type: BeIR/cqadupstack config: default split: test revision: None metrics: - type: map_at_1 value: 32.327 - type: map_at_10 value: 44.099 - type: map_at_100 value: 45.525 - type: map_at_1000 value: 45.641999999999996 - type: map_at_3 value: 40.47 - type: map_at_5 value: 42.36 - type: mrr_at_1 value: 39.199 - type: mrr_at_10 value: 49.651 - type: mrr_at_100 value: 50.29 - type: mrr_at_1000 value: 50.329 - type: mrr_at_3 value: 46.924 - type: mrr_at_5 value: 48.548 - type: ndcg_at_1 value: 39.199 - type: ndcg_at_10 value: 50.773 - type: ndcg_at_100 value: 55.67999999999999 - type: ndcg_at_1000 value: 57.495 - type: ndcg_at_3 value: 45.513999999999996 - type: ndcg_at_5 value: 47.703 - type: precision_at_1 value: 39.199 - type: precision_at_10 value: 9.914000000000001 - type: precision_at_100 value: 1.5310000000000001 - type: precision_at_1000 value: 0.198 - type: precision_at_3 value: 21.984 - type: precision_at_5 value: 15.737000000000002 - type: recall_at_1 value: 32.327 - type: recall_at_10 value: 63.743 - type: recall_at_100 value: 84.538 - type: recall_at_1000 value: 96.089 - type: recall_at_3 value: 48.065000000000005 - type: recall_at_5 value: 54.519 - type: map_at_1 value: 32.671 - type: map_at_10 value: 42.954 - type: map_at_100 value: 44.151 - type: map_at_1000 value: 44.287 - type: map_at_3 value: 39.912 - type: map_at_5 value: 41.798 - type: mrr_at_1 value: 41.465 - type: mrr_at_10 value: 49.351 - type: mrr_at_100 value: 49.980000000000004 - type: mrr_at_1000 value: 50.016000000000005 - type: mrr_at_3 value: 47.144000000000005 - type: mrr_at_5 value: 48.592999999999996 - type: ndcg_at_1 value: 41.465 - type: ndcg_at_10 value: 48.565999999999995 - type: ndcg_at_100 value: 52.76499999999999 - type: ndcg_at_1000 value: 54.749 - type: ndcg_at_3 value: 44.57 - type: ndcg_at_5 value: 46.759 - type: precision_at_1 value: 41.465 - type: precision_at_10 value: 9.107999999999999 - type: precision_at_100 value: 1.433 - type: precision_at_1000 value: 0.191 - type: precision_at_3 value: 21.423000000000002 - type: precision_at_5 value: 15.414 - type: recall_at_1 value: 32.671 - type: recall_at_10 value: 57.738 - type: recall_at_100 value: 75.86500000000001 - type: recall_at_1000 value: 88.36 - type: recall_at_3 value: 45.626 - type: recall_at_5 value: 51.812000000000005 - type: map_at_1 value: 41.185 - type: map_at_10 value: 53.929 - type: map_at_100 value: 54.92 - type: map_at_1000 value: 54.967999999999996 - type: map_at_3 value: 50.70400000000001 - type: map_at_5 value: 52.673 - type: mrr_at_1 value: 47.398 - type: mrr_at_10 value: 57.303000000000004 - type: mrr_at_100 value: 57.959 - type: mrr_at_1000 value: 57.985 - type: mrr_at_3 value: 54.932 - type: mrr_at_5 value: 56.464999999999996 
- type: ndcg_at_1 value: 47.398 - type: ndcg_at_10 value: 59.653 - type: ndcg_at_100 value: 63.627 - type: ndcg_at_1000 value: 64.596 - type: ndcg_at_3 value: 54.455 - type: ndcg_at_5 value: 57.245000000000005 - type: precision_at_1 value: 47.398 - type: precision_at_10 value: 9.524000000000001 - type: precision_at_100 value: 1.243 - type: precision_at_1000 value: 0.13699999999999998 - type: precision_at_3 value: 24.389 - type: precision_at_5 value: 16.752 - type: recall_at_1 value: 41.185 - type: recall_at_10 value: 73.193 - type: recall_at_100 value: 90.357 - type: recall_at_1000 value: 97.253 - type: recall_at_3 value: 59.199999999999996 - type: recall_at_5 value: 66.118 - type: map_at_1 value: 27.27 - type: map_at_10 value: 36.223 - type: map_at_100 value: 37.218 - type: map_at_1000 value: 37.293 - type: map_at_3 value: 33.503 - type: map_at_5 value: 35.097 - type: mrr_at_1 value: 29.492 - type: mrr_at_10 value: 38.352000000000004 - type: mrr_at_100 value: 39.188 - type: mrr_at_1000 value: 39.247 - type: mrr_at_3 value: 35.876000000000005 - type: mrr_at_5 value: 37.401 - type: ndcg_at_1 value: 29.492 - type: ndcg_at_10 value: 41.239 - type: ndcg_at_100 value: 46.066 - type: ndcg_at_1000 value: 47.992000000000004 - type: ndcg_at_3 value: 36.11 - type: ndcg_at_5 value: 38.772 - type: precision_at_1 value: 29.492 - type: precision_at_10 value: 6.260000000000001 - type: precision_at_100 value: 0.914 - type: precision_at_1000 value: 0.11100000000000002 - type: precision_at_3 value: 15.104000000000001 - type: precision_at_5 value: 10.644 - type: recall_at_1 value: 27.27 - type: recall_at_10 value: 54.589 - type: recall_at_100 value: 76.70700000000001 - type: recall_at_1000 value: 91.158 - type: recall_at_3 value: 40.974 - type: recall_at_5 value: 47.327000000000005 - type: map_at_1 value: 17.848 - type: map_at_10 value: 26.207 - type: map_at_100 value: 27.478 - type: map_at_1000 value: 27.602 - type: map_at_3 value: 23.405 - type: map_at_5 value: 24.98 - type: mrr_at_1 value: 21.891 - type: mrr_at_10 value: 31.041999999999998 - type: mrr_at_100 value: 32.092 - type: mrr_at_1000 value: 32.151999999999994 - type: mrr_at_3 value: 28.358 - type: mrr_at_5 value: 29.969 - type: ndcg_at_1 value: 21.891 - type: ndcg_at_10 value: 31.585 - type: ndcg_at_100 value: 37.531 - type: ndcg_at_1000 value: 40.256 - type: ndcg_at_3 value: 26.508 - type: ndcg_at_5 value: 28.894 - type: precision_at_1 value: 21.891 - type: precision_at_10 value: 5.795999999999999 - type: precision_at_100 value: 0.9990000000000001 - type: precision_at_1000 value: 0.13799999999999998 - type: precision_at_3 value: 12.769 - type: precision_at_5 value: 9.279 - type: recall_at_1 value: 17.848 - type: recall_at_10 value: 43.452 - type: recall_at_100 value: 69.216 - type: recall_at_1000 value: 88.102 - type: recall_at_3 value: 29.18 - type: recall_at_5 value: 35.347 - type: map_at_1 value: 30.94 - type: map_at_10 value: 41.248000000000005 - type: map_at_100 value: 42.495 - type: map_at_1000 value: 42.602000000000004 - type: map_at_3 value: 37.939 - type: map_at_5 value: 39.924 - type: mrr_at_1 value: 37.824999999999996 - type: mrr_at_10 value: 47.041 - type: mrr_at_100 value: 47.83 - type: mrr_at_1000 value: 47.878 - type: mrr_at_3 value: 44.466 - type: mrr_at_5 value: 46.111999999999995 - type: ndcg_at_1 value: 37.824999999999996 - type: ndcg_at_10 value: 47.223 - type: ndcg_at_100 value: 52.394 - type: ndcg_at_1000 value: 54.432 - type: ndcg_at_3 value: 42.032000000000004 - type: ndcg_at_5 value: 44.772 - type: precision_at_1 value: 
37.824999999999996 - type: precision_at_10 value: 8.393 - type: precision_at_100 value: 1.2890000000000001 - type: precision_at_1000 value: 0.164 - type: precision_at_3 value: 19.698 - type: precision_at_5 value: 14.013 - type: recall_at_1 value: 30.94 - type: recall_at_10 value: 59.316 - type: recall_at_100 value: 80.783 - type: recall_at_1000 value: 94.15400000000001 - type: recall_at_3 value: 44.712 - type: recall_at_5 value: 51.932 - type: map_at_1 value: 27.104 - type: map_at_10 value: 36.675999999999995 - type: map_at_100 value: 38.076 - type: map_at_1000 value: 38.189 - type: map_at_3 value: 33.733999999999995 - type: map_at_5 value: 35.287 - type: mrr_at_1 value: 33.904 - type: mrr_at_10 value: 42.55 - type: mrr_at_100 value: 43.434 - type: mrr_at_1000 value: 43.494 - type: mrr_at_3 value: 40.126 - type: mrr_at_5 value: 41.473 - type: ndcg_at_1 value: 33.904 - type: ndcg_at_10 value: 42.414 - type: ndcg_at_100 value: 48.203 - type: ndcg_at_1000 value: 50.437 - type: ndcg_at_3 value: 37.633 - type: ndcg_at_5 value: 39.67 - type: precision_at_1 value: 33.904 - type: precision_at_10 value: 7.82 - type: precision_at_100 value: 1.2409999999999999 - type: precision_at_1000 value: 0.159 - type: precision_at_3 value: 17.884 - type: precision_at_5 value: 12.648000000000001 - type: recall_at_1 value: 27.104 - type: recall_at_10 value: 53.563 - type: recall_at_100 value: 78.557 - type: recall_at_1000 value: 93.533 - type: recall_at_3 value: 39.92 - type: recall_at_5 value: 45.457 - type: map_at_1 value: 27.707749999999997 - type: map_at_10 value: 36.961 - type: map_at_100 value: 38.158833333333334 - type: map_at_1000 value: 38.270333333333326 - type: map_at_3 value: 34.07183333333334 - type: map_at_5 value: 35.69533333333334 - type: mrr_at_1 value: 32.81875 - type: mrr_at_10 value: 41.293 - type: mrr_at_100 value: 42.116499999999995 - type: mrr_at_1000 value: 42.170249999999996 - type: mrr_at_3 value: 38.83983333333333 - type: mrr_at_5 value: 40.29775 - type: ndcg_at_1 value: 32.81875 - type: ndcg_at_10 value: 42.355 - type: ndcg_at_100 value: 47.41374999999999 - type: ndcg_at_1000 value: 49.5805 - type: ndcg_at_3 value: 37.52825 - type: ndcg_at_5 value: 39.83266666666667 - type: precision_at_1 value: 32.81875 - type: precision_at_10 value: 7.382416666666666 - type: precision_at_100 value: 1.1640833333333334 - type: precision_at_1000 value: 0.15383333333333335 - type: precision_at_3 value: 17.134166666666665 - type: precision_at_5 value: 12.174833333333336 - type: recall_at_1 value: 27.707749999999997 - type: recall_at_10 value: 53.945 - type: recall_at_100 value: 76.191 - type: recall_at_1000 value: 91.101 - type: recall_at_3 value: 40.39083333333334 - type: recall_at_5 value: 46.40083333333333 - type: map_at_1 value: 26.482 - type: map_at_10 value: 33.201 - type: map_at_100 value: 34.107 - type: map_at_1000 value: 34.197 - type: map_at_3 value: 31.174000000000003 - type: map_at_5 value: 32.279 - type: mrr_at_1 value: 29.908 - type: mrr_at_10 value: 36.235 - type: mrr_at_100 value: 37.04 - type: mrr_at_1000 value: 37.105 - type: mrr_at_3 value: 34.355999999999995 - type: mrr_at_5 value: 35.382999999999996 - type: ndcg_at_1 value: 29.908 - type: ndcg_at_10 value: 37.325 - type: ndcg_at_100 value: 41.795 - type: ndcg_at_1000 value: 44.105 - type: ndcg_at_3 value: 33.555 - type: ndcg_at_5 value: 35.266999999999996 - type: precision_at_1 value: 29.908 - type: precision_at_10 value: 5.721 - type: precision_at_100 value: 0.8630000000000001 - type: precision_at_1000 value: 0.11299999999999999 - 
type: precision_at_3 value: 14.008000000000001 - type: precision_at_5 value: 9.754999999999999 - type: recall_at_1 value: 26.482 - type: recall_at_10 value: 47.072 - type: recall_at_100 value: 67.27 - type: recall_at_1000 value: 84.371 - type: recall_at_3 value: 36.65 - type: recall_at_5 value: 40.774 - type: map_at_1 value: 18.815 - type: map_at_10 value: 26.369999999999997 - type: map_at_100 value: 27.458 - type: map_at_1000 value: 27.588 - type: map_at_3 value: 23.990000000000002 - type: map_at_5 value: 25.345000000000002 - type: mrr_at_1 value: 22.953000000000003 - type: mrr_at_10 value: 30.342999999999996 - type: mrr_at_100 value: 31.241000000000003 - type: mrr_at_1000 value: 31.319000000000003 - type: mrr_at_3 value: 28.16 - type: mrr_at_5 value: 29.406 - type: ndcg_at_1 value: 22.953000000000003 - type: ndcg_at_10 value: 31.151 - type: ndcg_at_100 value: 36.309000000000005 - type: ndcg_at_1000 value: 39.227000000000004 - type: ndcg_at_3 value: 26.921 - type: ndcg_at_5 value: 28.938000000000002 - type: precision_at_1 value: 22.953000000000003 - type: precision_at_10 value: 5.602 - type: precision_at_100 value: 0.9530000000000001 - type: precision_at_1000 value: 0.13899999999999998 - type: precision_at_3 value: 12.606 - type: precision_at_5 value: 9.119 - type: recall_at_1 value: 18.815 - type: recall_at_10 value: 41.574 - type: recall_at_100 value: 64.84400000000001 - type: recall_at_1000 value: 85.406 - type: recall_at_3 value: 29.694 - type: recall_at_5 value: 34.935 - type: map_at_1 value: 27.840999999999998 - type: map_at_10 value: 36.797999999999995 - type: map_at_100 value: 37.993 - type: map_at_1000 value: 38.086999999999996 - type: map_at_3 value: 34.050999999999995 - type: map_at_5 value: 35.379 - type: mrr_at_1 value: 32.649 - type: mrr_at_10 value: 41.025 - type: mrr_at_100 value: 41.878 - type: mrr_at_1000 value: 41.929 - type: mrr_at_3 value: 38.573 - type: mrr_at_5 value: 39.715 - type: ndcg_at_1 value: 32.649 - type: ndcg_at_10 value: 42.142 - type: ndcg_at_100 value: 47.558 - type: ndcg_at_1000 value: 49.643 - type: ndcg_at_3 value: 37.12 - type: ndcg_at_5 value: 38.983000000000004 - type: precision_at_1 value: 32.649 - type: precision_at_10 value: 7.08 - type: precision_at_100 value: 1.1039999999999999 - type: precision_at_1000 value: 0.13899999999999998 - type: precision_at_3 value: 16.698 - type: precision_at_5 value: 11.511000000000001 - type: recall_at_1 value: 27.840999999999998 - type: recall_at_10 value: 54.245 - type: recall_at_100 value: 77.947 - type: recall_at_1000 value: 92.36999999999999 - type: recall_at_3 value: 40.146 - type: recall_at_5 value: 44.951 - type: map_at_1 value: 26.529000000000003 - type: map_at_10 value: 35.010000000000005 - type: map_at_100 value: 36.647 - type: map_at_1000 value: 36.857 - type: map_at_3 value: 31.968000000000004 - type: map_at_5 value: 33.554 - type: mrr_at_1 value: 31.818 - type: mrr_at_10 value: 39.550999999999995 - type: mrr_at_100 value: 40.54 - type: mrr_at_1000 value: 40.596 - type: mrr_at_3 value: 36.726 - type: mrr_at_5 value: 38.416 - type: ndcg_at_1 value: 31.818 - type: ndcg_at_10 value: 40.675 - type: ndcg_at_100 value: 46.548 - type: ndcg_at_1000 value: 49.126 - type: ndcg_at_3 value: 35.829 - type: ndcg_at_5 value: 38.0 - type: precision_at_1 value: 31.818 - type: precision_at_10 value: 7.826 - type: precision_at_100 value: 1.538 - type: precision_at_1000 value: 0.24 - type: precision_at_3 value: 16.601 - type: precision_at_5 value: 12.095 - type: recall_at_1 value: 26.529000000000003 - type: recall_at_10 
value: 51.03 - type: recall_at_100 value: 77.556 - type: recall_at_1000 value: 93.804 - type: recall_at_3 value: 36.986000000000004 - type: recall_at_5 value: 43.096000000000004 - type: map_at_1 value: 23.480999999999998 - type: map_at_10 value: 30.817 - type: map_at_100 value: 31.838 - type: map_at_1000 value: 31.932 - type: map_at_3 value: 28.011999999999997 - type: map_at_5 value: 29.668 - type: mrr_at_1 value: 25.323 - type: mrr_at_10 value: 33.072 - type: mrr_at_100 value: 33.926 - type: mrr_at_1000 value: 33.993 - type: mrr_at_3 value: 30.436999999999998 - type: mrr_at_5 value: 32.092 - type: ndcg_at_1 value: 25.323 - type: ndcg_at_10 value: 35.514 - type: ndcg_at_100 value: 40.489000000000004 - type: ndcg_at_1000 value: 42.908 - type: ndcg_at_3 value: 30.092000000000002 - type: ndcg_at_5 value: 32.989000000000004 - type: precision_at_1 value: 25.323 - type: precision_at_10 value: 5.545 - type: precision_at_100 value: 0.861 - type: precision_at_1000 value: 0.117 - type: precision_at_3 value: 12.446 - type: precision_at_5 value: 9.131 - type: recall_at_1 value: 23.480999999999998 - type: recall_at_10 value: 47.825 - type: recall_at_100 value: 70.652 - type: recall_at_1000 value: 88.612 - type: recall_at_3 value: 33.537 - type: recall_at_5 value: 40.542 - task: type: Retrieval dataset: name: MTEB ClimateFEVER type: climate-fever config: default split: test revision: None metrics: - type: map_at_1 value: 13.333999999999998 - type: map_at_10 value: 22.524 - type: map_at_100 value: 24.506 - type: map_at_1000 value: 24.715 - type: map_at_3 value: 19.022 - type: map_at_5 value: 20.693 - type: mrr_at_1 value: 29.186 - type: mrr_at_10 value: 41.22 - type: mrr_at_100 value: 42.16 - type: mrr_at_1000 value: 42.192 - type: mrr_at_3 value: 38.013000000000005 - type: mrr_at_5 value: 39.704 - type: ndcg_at_1 value: 29.186 - type: ndcg_at_10 value: 31.167 - type: ndcg_at_100 value: 38.879000000000005 - type: ndcg_at_1000 value: 42.376000000000005 - type: ndcg_at_3 value: 25.817 - type: ndcg_at_5 value: 27.377000000000002 - type: precision_at_1 value: 29.186 - type: precision_at_10 value: 9.693999999999999 - type: precision_at_100 value: 1.8030000000000002 - type: precision_at_1000 value: 0.246 - type: precision_at_3 value: 19.11 - type: precision_at_5 value: 14.344999999999999 - type: recall_at_1 value: 13.333999999999998 - type: recall_at_10 value: 37.092000000000006 - type: recall_at_100 value: 63.651 - type: recall_at_1000 value: 83.05 - type: recall_at_3 value: 23.74 - type: recall_at_5 value: 28.655 - task: type: Retrieval dataset: name: MTEB DBPedia type: dbpedia-entity config: default split: test revision: None metrics: - type: map_at_1 value: 9.151 - type: map_at_10 value: 19.653000000000002 - type: map_at_100 value: 28.053 - type: map_at_1000 value: 29.709000000000003 - type: map_at_3 value: 14.191 - type: map_at_5 value: 16.456 - type: mrr_at_1 value: 66.25 - type: mrr_at_10 value: 74.4 - type: mrr_at_100 value: 74.715 - type: mrr_at_1000 value: 74.726 - type: mrr_at_3 value: 72.417 - type: mrr_at_5 value: 73.667 - type: ndcg_at_1 value: 54.25 - type: ndcg_at_10 value: 40.77 - type: ndcg_at_100 value: 46.359 - type: ndcg_at_1000 value: 54.193000000000005 - type: ndcg_at_3 value: 44.832 - type: ndcg_at_5 value: 42.63 - type: precision_at_1 value: 66.25 - type: precision_at_10 value: 32.175 - type: precision_at_100 value: 10.668 - type: precision_at_1000 value: 2.067 - type: precision_at_3 value: 47.667 - type: precision_at_5 value: 41.3 - type: recall_at_1 value: 9.151 - type: recall_at_10 
value: 25.003999999999998 - type: recall_at_100 value: 52.976 - type: recall_at_1000 value: 78.315 - type: recall_at_3 value: 15.487 - type: recall_at_5 value: 18.999 - task: type: Classification dataset: name: MTEB EmotionClassification type: mteb/emotion config: default split: test revision: 4f58c6b202a23cf9a4da393831edf4f9183cad37 metrics: - type: accuracy value: 51.89999999999999 - type: f1 value: 46.47777925067403 - task: type: Retrieval dataset: name: MTEB FEVER type: fever config: default split: test revision: None metrics: - type: map_at_1 value: 73.706 - type: map_at_10 value: 82.423 - type: map_at_100 value: 82.67999999999999 - type: map_at_1000 value: 82.694 - type: map_at_3 value: 81.328 - type: map_at_5 value: 82.001 - type: mrr_at_1 value: 79.613 - type: mrr_at_10 value: 87.07000000000001 - type: mrr_at_100 value: 87.169 - type: mrr_at_1000 value: 87.17 - type: mrr_at_3 value: 86.404 - type: mrr_at_5 value: 86.856 - type: ndcg_at_1 value: 79.613 - type: ndcg_at_10 value: 86.289 - type: ndcg_at_100 value: 87.201 - type: ndcg_at_1000 value: 87.428 - type: ndcg_at_3 value: 84.625 - type: ndcg_at_5 value: 85.53699999999999 - type: precision_at_1 value: 79.613 - type: precision_at_10 value: 10.399 - type: precision_at_100 value: 1.1079999999999999 - type: precision_at_1000 value: 0.11499999999999999 - type: precision_at_3 value: 32.473 - type: precision_at_5 value: 20.132 - type: recall_at_1 value: 73.706 - type: recall_at_10 value: 93.559 - type: recall_at_100 value: 97.188 - type: recall_at_1000 value: 98.555 - type: recall_at_3 value: 88.98700000000001 - type: recall_at_5 value: 91.373 - task: type: Retrieval dataset: name: MTEB FiQA2018 type: fiqa config: default split: test revision: None metrics: - type: map_at_1 value: 19.841 - type: map_at_10 value: 32.643 - type: map_at_100 value: 34.575 - type: map_at_1000 value: 34.736 - type: map_at_3 value: 28.317999999999998 - type: map_at_5 value: 30.964000000000002 - type: mrr_at_1 value: 39.660000000000004 - type: mrr_at_10 value: 48.620000000000005 - type: mrr_at_100 value: 49.384 - type: mrr_at_1000 value: 49.415 - type: mrr_at_3 value: 45.988 - type: mrr_at_5 value: 47.361 - type: ndcg_at_1 value: 39.660000000000004 - type: ndcg_at_10 value: 40.646 - type: ndcg_at_100 value: 47.657 - type: ndcg_at_1000 value: 50.428 - type: ndcg_at_3 value: 36.689 - type: ndcg_at_5 value: 38.211 - type: precision_at_1 value: 39.660000000000004 - type: precision_at_10 value: 11.235000000000001 - type: precision_at_100 value: 1.8530000000000002 - type: precision_at_1000 value: 0.23600000000000002 - type: precision_at_3 value: 24.587999999999997 - type: precision_at_5 value: 18.395 - type: recall_at_1 value: 19.841 - type: recall_at_10 value: 48.135 - type: recall_at_100 value: 74.224 - type: recall_at_1000 value: 90.826 - type: recall_at_3 value: 33.536 - type: recall_at_5 value: 40.311 - task: type: Retrieval dataset: name: MTEB HotpotQA type: hotpotqa config: default split: test revision: None metrics: - type: map_at_1 value: 40.358 - type: map_at_10 value: 64.497 - type: map_at_100 value: 65.362 - type: map_at_1000 value: 65.41900000000001 - type: map_at_3 value: 61.06700000000001 - type: map_at_5 value: 63.317 - type: mrr_at_1 value: 80.716 - type: mrr_at_10 value: 86.10799999999999 - type: mrr_at_100 value: 86.265 - type: mrr_at_1000 value: 86.27 - type: mrr_at_3 value: 85.271 - type: mrr_at_5 value: 85.82499999999999 - type: ndcg_at_1 value: 80.716 - type: ndcg_at_10 value: 72.597 - type: ndcg_at_100 value: 75.549 - type: ndcg_at_1000 
value: 76.61 - type: ndcg_at_3 value: 67.874 - type: ndcg_at_5 value: 70.655 - type: precision_at_1 value: 80.716 - type: precision_at_10 value: 15.148 - type: precision_at_100 value: 1.745 - type: precision_at_1000 value: 0.188 - type: precision_at_3 value: 43.597 - type: precision_at_5 value: 28.351 - type: recall_at_1 value: 40.358 - type: recall_at_10 value: 75.739 - type: recall_at_100 value: 87.259 - type: recall_at_1000 value: 94.234 - type: recall_at_3 value: 65.39500000000001 - type: recall_at_5 value: 70.878 - task: type: Classification dataset: name: MTEB ImdbClassification type: mteb/imdb config: default split: test revision: 3d86128a09e091d6018b6d26cad27f2739fc2db7 metrics: - type: accuracy value: 90.80799999999998 - type: ap value: 86.81350378180757 - type: f1 value: 90.79901248314215 - task: type: Retrieval dataset: name: MTEB MSMARCO type: msmarco config: default split: dev revision: None metrics: - type: map_at_1 value: 22.096 - type: map_at_10 value: 34.384 - type: map_at_100 value: 35.541 - type: map_at_1000 value: 35.589999999999996 - type: map_at_3 value: 30.496000000000002 - type: map_at_5 value: 32.718 - type: mrr_at_1 value: 22.750999999999998 - type: mrr_at_10 value: 35.024 - type: mrr_at_100 value: 36.125 - type: mrr_at_1000 value: 36.168 - type: mrr_at_3 value: 31.225 - type: mrr_at_5 value: 33.416000000000004 - type: ndcg_at_1 value: 22.750999999999998 - type: ndcg_at_10 value: 41.351 - type: ndcg_at_100 value: 46.92 - type: ndcg_at_1000 value: 48.111 - type: ndcg_at_3 value: 33.439 - type: ndcg_at_5 value: 37.407000000000004 - type: precision_at_1 value: 22.750999999999998 - type: precision_at_10 value: 6.564 - type: precision_at_100 value: 0.935 - type: precision_at_1000 value: 0.104 - type: precision_at_3 value: 14.288 - type: precision_at_5 value: 10.581999999999999 - type: recall_at_1 value: 22.096 - type: recall_at_10 value: 62.771 - type: recall_at_100 value: 88.529 - type: recall_at_1000 value: 97.55 - type: recall_at_3 value: 41.245 - type: recall_at_5 value: 50.788 - task: type: Classification dataset: name: MTEB MTOPDomainClassification (en) type: mteb/mtop_domain config: en split: test revision: d80d48c1eb48d3562165c59d59d0034df9fff0bf metrics: - type: accuracy value: 94.16780665754673 - type: f1 value: 93.96331194859894 - task: type: Classification dataset: name: MTEB MTOPIntentClassification (en) type: mteb/mtop_intent config: en split: test revision: ae001d0e6b1228650b7bd1c2c65fb50ad11a8aba metrics: - type: accuracy value: 76.90606475148198 - type: f1 value: 58.58344986604187 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (en) type: mteb/amazon_massive_intent config: en split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 76.14660390047075 - type: f1 value: 74.31533923533614 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (en) type: mteb/amazon_massive_scenario config: en split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 80.16139878950908 - type: f1 value: 80.18532656824924 - task: type: Clustering dataset: name: MTEB MedrxivClusteringP2P type: mteb/medrxiv-clustering-p2p config: default split: test revision: e7a26af6f3ae46b30dde8737f02c07b1505bcc73 metrics: - type: v_measure value: 32.949880906135085 - task: type: Clustering dataset: name: MTEB MedrxivClusteringS2S type: mteb/medrxiv-clustering-s2s config: default split: test revision: 35191c8c0dca72d8ff3efcd72aa802307d469663 metrics: - type: 
v_measure value: 31.56300351524862 - task: type: Reranking dataset: name: MTEB MindSmallReranking type: mteb/mind_small config: default split: test revision: 3bdac13927fdc888b903db93b2ffdbd90b295a69 metrics: - type: map value: 31.196521894371315 - type: mrr value: 32.22644231694389 - task: type: Retrieval dataset: name: MTEB NFCorpus type: nfcorpus config: default split: test revision: None metrics: - type: map_at_1 value: 6.783 - type: map_at_10 value: 14.549000000000001 - type: map_at_100 value: 18.433 - type: map_at_1000 value: 19.949 - type: map_at_3 value: 10.936 - type: map_at_5 value: 12.514 - type: mrr_at_1 value: 47.368 - type: mrr_at_10 value: 56.42 - type: mrr_at_100 value: 56.908 - type: mrr_at_1000 value: 56.95 - type: mrr_at_3 value: 54.283 - type: mrr_at_5 value: 55.568 - type: ndcg_at_1 value: 45.666000000000004 - type: ndcg_at_10 value: 37.389 - type: ndcg_at_100 value: 34.253 - type: ndcg_at_1000 value: 43.059999999999995 - type: ndcg_at_3 value: 42.725 - type: ndcg_at_5 value: 40.193 - type: precision_at_1 value: 47.368 - type: precision_at_10 value: 27.988000000000003 - type: precision_at_100 value: 8.672 - type: precision_at_1000 value: 2.164 - type: precision_at_3 value: 40.248 - type: precision_at_5 value: 34.737 - type: recall_at_1 value: 6.783 - type: recall_at_10 value: 17.838 - type: recall_at_100 value: 33.672000000000004 - type: recall_at_1000 value: 66.166 - type: recall_at_3 value: 11.849 - type: recall_at_5 value: 14.205000000000002 - task: type: Retrieval dataset: name: MTEB NQ type: nq config: default split: test revision: None metrics: - type: map_at_1 value: 31.698999999999998 - type: map_at_10 value: 46.556 - type: map_at_100 value: 47.652 - type: map_at_1000 value: 47.68 - type: map_at_3 value: 42.492000000000004 - type: map_at_5 value: 44.763999999999996 - type: mrr_at_1 value: 35.747 - type: mrr_at_10 value: 49.242999999999995 - type: mrr_at_100 value: 50.052 - type: mrr_at_1000 value: 50.068 - type: mrr_at_3 value: 45.867000000000004 - type: mrr_at_5 value: 47.778999999999996 - type: ndcg_at_1 value: 35.717999999999996 - type: ndcg_at_10 value: 54.14600000000001 - type: ndcg_at_100 value: 58.672999999999995 - type: ndcg_at_1000 value: 59.279 - type: ndcg_at_3 value: 46.407 - type: ndcg_at_5 value: 50.181 - type: precision_at_1 value: 35.717999999999996 - type: precision_at_10 value: 8.844000000000001 - type: precision_at_100 value: 1.139 - type: precision_at_1000 value: 0.12 - type: precision_at_3 value: 20.993000000000002 - type: precision_at_5 value: 14.791000000000002 - type: recall_at_1 value: 31.698999999999998 - type: recall_at_10 value: 74.693 - type: recall_at_100 value: 94.15299999999999 - type: recall_at_1000 value: 98.585 - type: recall_at_3 value: 54.388999999999996 - type: recall_at_5 value: 63.08200000000001 - task: type: Retrieval dataset: name: MTEB QuoraRetrieval type: quora config: default split: test revision: None metrics: - type: map_at_1 value: 71.283 - type: map_at_10 value: 85.24000000000001 - type: map_at_100 value: 85.882 - type: map_at_1000 value: 85.897 - type: map_at_3 value: 82.326 - type: map_at_5 value: 84.177 - type: mrr_at_1 value: 82.21000000000001 - type: mrr_at_10 value: 88.228 - type: mrr_at_100 value: 88.32 - type: mrr_at_1000 value: 88.32 - type: mrr_at_3 value: 87.323 - type: mrr_at_5 value: 87.94800000000001 - type: ndcg_at_1 value: 82.17999999999999 - type: ndcg_at_10 value: 88.9 - type: ndcg_at_100 value: 90.079 - type: ndcg_at_1000 value: 90.158 - type: ndcg_at_3 value: 86.18299999999999 - type: ndcg_at_5 
value: 87.71799999999999 - type: precision_at_1 value: 82.17999999999999 - type: precision_at_10 value: 13.464 - type: precision_at_100 value: 1.533 - type: precision_at_1000 value: 0.157 - type: precision_at_3 value: 37.693 - type: precision_at_5 value: 24.792 - type: recall_at_1 value: 71.283 - type: recall_at_10 value: 95.742 - type: recall_at_100 value: 99.67200000000001 - type: recall_at_1000 value: 99.981 - type: recall_at_3 value: 87.888 - type: recall_at_5 value: 92.24 - task: type: Clustering dataset: name: MTEB RedditClustering type: mteb/reddit-clustering config: default split: test revision: 24640382cdbf8abc73003fb0fa6d111a705499eb metrics: - type: v_measure value: 56.24267063669042 - task: type: Clustering dataset: name: MTEB RedditClusteringP2P type: mteb/reddit-clustering-p2p config: default split: test revision: 282350215ef01743dc01b456c7f5241fa8937f16 metrics: - type: v_measure value: 62.88056988932578 - task: type: Retrieval dataset: name: MTEB SCIDOCS type: scidocs config: default split: test revision: None metrics: - type: map_at_1 value: 4.903 - type: map_at_10 value: 13.202 - type: map_at_100 value: 15.5 - type: map_at_1000 value: 15.870999999999999 - type: map_at_3 value: 9.407 - type: map_at_5 value: 11.238 - type: mrr_at_1 value: 24.2 - type: mrr_at_10 value: 35.867 - type: mrr_at_100 value: 37.001 - type: mrr_at_1000 value: 37.043 - type: mrr_at_3 value: 32.5 - type: mrr_at_5 value: 34.35 - type: ndcg_at_1 value: 24.2 - type: ndcg_at_10 value: 21.731 - type: ndcg_at_100 value: 30.7 - type: ndcg_at_1000 value: 36.618 - type: ndcg_at_3 value: 20.72 - type: ndcg_at_5 value: 17.954 - type: precision_at_1 value: 24.2 - type: precision_at_10 value: 11.33 - type: precision_at_100 value: 2.4410000000000003 - type: precision_at_1000 value: 0.386 - type: precision_at_3 value: 19.667 - type: precision_at_5 value: 15.86 - type: recall_at_1 value: 4.903 - type: recall_at_10 value: 22.962 - type: recall_at_100 value: 49.563 - type: recall_at_1000 value: 78.238 - type: recall_at_3 value: 11.953 - type: recall_at_5 value: 16.067999999999998 - task: type: STS dataset: name: MTEB SICK-R type: mteb/sickr-sts config: default split: test revision: a6ea5a8cab320b040a23452cc28066d9beae2cee metrics: - type: cos_sim_pearson value: 84.12694254604078 - type: cos_sim_spearman value: 80.30141815181918 - type: euclidean_pearson value: 81.34015449877128 - type: euclidean_spearman value: 80.13984197010849 - type: manhattan_pearson value: 81.31767068124086 - type: manhattan_spearman value: 80.11720513114103 - task: type: STS dataset: name: MTEB STS12 type: mteb/sts12-sts config: default split: test revision: a0d554a64d88156834ff5ae9920b964011b16384 metrics: - type: cos_sim_pearson value: 86.13112984010417 - type: cos_sim_spearman value: 78.03063573402875 - type: euclidean_pearson value: 83.51928418844804 - type: euclidean_spearman value: 78.4045235411144 - type: manhattan_pearson value: 83.49981637388689 - type: manhattan_spearman value: 78.4042575139372 - task: type: STS dataset: name: MTEB STS13 type: mteb/sts13-sts config: default split: test revision: 7e90230a92c190f1bf69ae9002b8cea547a64cca metrics: - type: cos_sim_pearson value: 82.50327987379504 - type: cos_sim_spearman value: 84.18556767756205 - type: euclidean_pearson value: 82.69684424327679 - type: euclidean_spearman value: 83.5368106038335 - type: manhattan_pearson value: 82.57967581007374 - type: manhattan_spearman value: 83.43009053133697 - task: type: STS dataset: name: MTEB STS14 type: mteb/sts14-sts config: default split: test 
revision: 6031580fec1f6af667f0bd2da0a551cf4f0b2375 metrics: - type: cos_sim_pearson value: 82.50756863007814 - type: cos_sim_spearman value: 82.27204331279108 - type: euclidean_pearson value: 81.39535251429741 - type: euclidean_spearman value: 81.84386626336239 - type: manhattan_pearson value: 81.34281737280695 - type: manhattan_spearman value: 81.81149375673166 - task: type: STS dataset: name: MTEB STS15 type: mteb/sts15-sts config: default split: test revision: ae752c7c21bf194d8b67fd573edf7ae58183cbe3 metrics: - type: cos_sim_pearson value: 86.8727714856726 - type: cos_sim_spearman value: 87.95738287792312 - type: euclidean_pearson value: 86.62920602795887 - type: euclidean_spearman value: 87.05207355381243 - type: manhattan_pearson value: 86.53587918472225 - type: manhattan_spearman value: 86.95382961029586 - task: type: STS dataset: name: MTEB STS16 type: mteb/sts16-sts config: default split: test revision: 4d8694f8f0e0100860b497b999b3dbed754a0513 metrics: - type: cos_sim_pearson value: 83.52240359769479 - type: cos_sim_spearman value: 85.47685776238286 - type: euclidean_pearson value: 84.25815333483058 - type: euclidean_spearman value: 85.27415639683198 - type: manhattan_pearson value: 84.29127757025637 - type: manhattan_spearman value: 85.30226224917351 - task: type: STS dataset: name: MTEB STS17 (en-en) type: mteb/sts17-crosslingual-sts config: en-en split: test revision: af5e6fb845001ecf41f4c1e033ce921939a2a68d metrics: - type: cos_sim_pearson value: 86.42501708915708 - type: cos_sim_spearman value: 86.42276182795041 - type: euclidean_pearson value: 86.5408207354761 - type: euclidean_spearman value: 85.46096321750838 - type: manhattan_pearson value: 86.54177303026881 - type: manhattan_spearman value: 85.50313151916117 - task: type: STS dataset: name: MTEB STS22 (en) type: mteb/sts22-crosslingual-sts config: en split: test revision: 6d1ba47164174a496b7fa5d3569dae26a6813b80 metrics: - type: cos_sim_pearson value: 64.86521089250766 - type: cos_sim_spearman value: 65.94868540323003 - type: euclidean_pearson value: 67.16569626533084 - type: euclidean_spearman value: 66.37667004134917 - type: manhattan_pearson value: 67.1482365102333 - type: manhattan_spearman value: 66.53240122580029 - task: type: STS dataset: name: MTEB STSBenchmark type: mteb/stsbenchmark-sts config: default split: test revision: b0fddb56ed78048fa8b90373c8a3cfc37b684831 metrics: - type: cos_sim_pearson value: 84.64746265365318 - type: cos_sim_spearman value: 86.41888825906786 - type: euclidean_pearson value: 85.27453642725811 - type: euclidean_spearman value: 85.94095796602544 - type: manhattan_pearson value: 85.28643660505334 - type: manhattan_spearman value: 85.95028003260744 - task: type: Reranking dataset: name: MTEB SciDocsRR type: mteb/scidocs-reranking config: default split: test revision: d3c5e1fc0b855ab6097bf1cda04dd73947d7caab metrics: - type: map value: 87.48903153618527 - type: mrr value: 96.41081503826601 - task: type: Retrieval dataset: name: MTEB SciFact type: scifact config: default split: test revision: None metrics: - type: map_at_1 value: 58.594 - type: map_at_10 value: 69.296 - type: map_at_100 value: 69.782 - type: map_at_1000 value: 69.795 - type: map_at_3 value: 66.23 - type: map_at_5 value: 68.293 - type: mrr_at_1 value: 61.667 - type: mrr_at_10 value: 70.339 - type: mrr_at_100 value: 70.708 - type: mrr_at_1000 value: 70.722 - type: mrr_at_3 value: 68.0 - type: mrr_at_5 value: 69.56700000000001 - type: ndcg_at_1 value: 61.667 - type: ndcg_at_10 value: 74.039 - type: ndcg_at_100 value: 76.103 - 
type: ndcg_at_1000 value: 76.47800000000001 - type: ndcg_at_3 value: 68.967 - type: ndcg_at_5 value: 71.96900000000001 - type: precision_at_1 value: 61.667 - type: precision_at_10 value: 9.866999999999999 - type: precision_at_100 value: 1.097 - type: precision_at_1000 value: 0.11299999999999999 - type: precision_at_3 value: 27.111 - type: precision_at_5 value: 18.2 - type: recall_at_1 value: 58.594 - type: recall_at_10 value: 87.422 - type: recall_at_100 value: 96.667 - type: recall_at_1000 value: 99.667 - type: recall_at_3 value: 74.217 - type: recall_at_5 value: 81.539 - task: type: PairClassification dataset: name: MTEB SprintDuplicateQuestions type: mteb/sprintduplicatequestions-pairclassification config: default split: test revision: d66bd1f72af766a5cc4b0ca5e00c162f89e8cc46 metrics: - type: cos_sim_accuracy value: 99.85049504950496 - type: cos_sim_ap value: 96.33111544137081 - type: cos_sim_f1 value: 92.35443037974684 - type: cos_sim_precision value: 93.53846153846153 - type: cos_sim_recall value: 91.2 - type: dot_accuracy value: 99.82376237623762 - type: dot_ap value: 95.38082527310888 - type: dot_f1 value: 90.90909090909092 - type: dot_precision value: 92.90187891440502 - type: dot_recall value: 89.0 - type: euclidean_accuracy value: 99.84851485148515 - type: euclidean_ap value: 96.32316003996347 - type: euclidean_f1 value: 92.2071392659628 - type: euclidean_precision value: 92.71991911021233 - type: euclidean_recall value: 91.7 - type: manhattan_accuracy value: 99.84851485148515 - type: manhattan_ap value: 96.3655668249217 - type: manhattan_f1 value: 92.18356026222895 - type: manhattan_precision value: 92.98067141403867 - type: manhattan_recall value: 91.4 - type: max_accuracy value: 99.85049504950496 - type: max_ap value: 96.3655668249217 - type: max_f1 value: 92.35443037974684 - task: type: Clustering dataset: name: MTEB StackExchangeClustering type: mteb/stackexchange-clustering config: default split: test revision: 6cbc1f7b2bc0622f2e39d2c77fa502909748c259 metrics: - type: v_measure value: 65.94861371629051 - task: type: Clustering dataset: name: MTEB StackExchangeClusteringP2P type: mteb/stackexchange-clustering-p2p config: default split: test revision: 815ca46b2622cec33ccafc3735d572c266efdb44 metrics: - type: v_measure value: 35.009430451385 - task: type: Reranking dataset: name: MTEB StackOverflowDupQuestions type: mteb/stackoverflowdupquestions-reranking config: default split: test revision: e185fbe320c72810689fc5848eb6114e1ef5ec69 metrics: - type: map value: 54.61164066427969 - type: mrr value: 55.49710603938544 - task: type: Summarization dataset: name: MTEB SummEval type: mteb/summeval config: default split: test revision: cda12ad7615edc362dbf25a00fdd61d3b1eaf93c metrics: - type: cos_sim_pearson value: 30.622620124907662 - type: cos_sim_spearman value: 31.0678351356163 - type: dot_pearson value: 30.863727693306814 - type: dot_spearman value: 31.230306567021255 - task: type: Retrieval dataset: name: MTEB TRECCOVID type: trec-covid config: default split: test revision: None metrics: - type: map_at_1 value: 0.22 - type: map_at_10 value: 2.011 - type: map_at_100 value: 10.974 - type: map_at_1000 value: 25.819 - type: map_at_3 value: 0.6649999999999999 - type: map_at_5 value: 1.076 - type: mrr_at_1 value: 86.0 - type: mrr_at_10 value: 91.8 - type: mrr_at_100 value: 91.8 - type: mrr_at_1000 value: 91.8 - type: mrr_at_3 value: 91.0 - type: mrr_at_5 value: 91.8 - type: ndcg_at_1 value: 82.0 - type: ndcg_at_10 value: 78.07300000000001 - type: ndcg_at_100 value: 58.231 - type: 
ndcg_at_1000 value: 51.153000000000006 - type: ndcg_at_3 value: 81.123 - type: ndcg_at_5 value: 81.059 - type: precision_at_1 value: 86.0 - type: precision_at_10 value: 83.0 - type: precision_at_100 value: 59.38 - type: precision_at_1000 value: 22.55 - type: precision_at_3 value: 87.333 - type: precision_at_5 value: 86.8 - type: recall_at_1 value: 0.22 - type: recall_at_10 value: 2.2079999999999997 - type: recall_at_100 value: 14.069 - type: recall_at_1000 value: 47.678 - type: recall_at_3 value: 0.7040000000000001 - type: recall_at_5 value: 1.161 - task: type: Retrieval dataset: name: MTEB Touche2020 type: webis-touche2020 config: default split: test revision: None metrics: - type: map_at_1 value: 2.809 - type: map_at_10 value: 10.394 - type: map_at_100 value: 16.598 - type: map_at_1000 value: 18.142 - type: map_at_3 value: 5.572 - type: map_at_5 value: 7.1370000000000005 - type: mrr_at_1 value: 32.653 - type: mrr_at_10 value: 46.564 - type: mrr_at_100 value: 47.469 - type: mrr_at_1000 value: 47.469 - type: mrr_at_3 value: 42.177 - type: mrr_at_5 value: 44.524 - type: ndcg_at_1 value: 30.612000000000002 - type: ndcg_at_10 value: 25.701 - type: ndcg_at_100 value: 37.532 - type: ndcg_at_1000 value: 48.757 - type: ndcg_at_3 value: 28.199999999999996 - type: ndcg_at_5 value: 25.987 - type: precision_at_1 value: 32.653 - type: precision_at_10 value: 23.469 - type: precision_at_100 value: 7.9799999999999995 - type: precision_at_1000 value: 1.5350000000000001 - type: precision_at_3 value: 29.932 - type: precision_at_5 value: 26.122 - type: recall_at_1 value: 2.809 - type: recall_at_10 value: 16.887 - type: recall_at_100 value: 48.67 - type: recall_at_1000 value: 82.89699999999999 - type: recall_at_3 value: 6.521000000000001 - type: recall_at_5 value: 9.609 - task: type: Classification dataset: name: MTEB ToxicConversationsClassification type: mteb/toxic_conversations_50k config: default split: test revision: d7c0de2777da35d6aae2200a62c6e0e5af397c4c metrics: - type: accuracy value: 71.57860000000001 - type: ap value: 13.82629211536393 - type: f1 value: 54.59860966183956 - task: type: Classification dataset: name: MTEB TweetSentimentExtractionClassification type: mteb/tweet_sentiment_extraction config: default split: test revision: d604517c81ca91fe16a244d1248fc021f9ecee7a metrics: - type: accuracy value: 59.38030560271647 - type: f1 value: 59.69685552567865 - task: type: Clustering dataset: name: MTEB TwentyNewsgroupsClustering type: mteb/twentynewsgroups-clustering config: default split: test revision: 6125ec4e24fa026cec8a478383ee943acfbd5449 metrics: - type: v_measure value: 51.4736717043405 - task: type: PairClassification dataset: name: MTEB TwitterSemEval2015 type: mteb/twittersemeval2015-pairclassification config: default split: test revision: 70970daeab8776df92f5ea462b6173c0b46fd2d1 metrics: - type: cos_sim_accuracy value: 86.92853311080646 - type: cos_sim_ap value: 77.67872502591382 - type: cos_sim_f1 value: 70.33941236068895 - type: cos_sim_precision value: 67.63273258645884 - type: cos_sim_recall value: 73.27176781002639 - type: dot_accuracy value: 85.79603027954938 - type: dot_ap value: 73.73786190233379 - type: dot_f1 value: 67.3437901774235 - type: dot_precision value: 65.67201604814443 - type: dot_recall value: 69.10290237467018 - type: euclidean_accuracy value: 86.94045419324074 - type: euclidean_ap value: 77.6687791535167 - type: euclidean_f1 value: 70.47209214023542 - type: euclidean_precision value: 67.7207492094381 - type: euclidean_recall value: 73.45646437994723 - type: 
manhattan_accuracy value: 86.87488823985218 - type: manhattan_ap value: 77.63373392430728 - type: manhattan_f1 value: 70.40920716112532 - type: manhattan_precision value: 68.31265508684864 - type: manhattan_recall value: 72.63852242744063 - type: max_accuracy value: 86.94045419324074 - type: max_ap value: 77.67872502591382 - type: max_f1 value: 70.47209214023542 - task: type: PairClassification dataset: name: MTEB TwitterURLCorpus type: mteb/twitterurlcorpus-pairclassification config: default split: test revision: 8b6510b0b1fa4e4c4f879467980e9be563ec1cdf metrics: - type: cos_sim_accuracy value: 88.67155664221679 - type: cos_sim_ap value: 85.64591703003417 - type: cos_sim_f1 value: 77.59531005352656 - type: cos_sim_precision value: 73.60967184801382 - type: cos_sim_recall value: 82.03726516784724 - type: dot_accuracy value: 88.41541506578181 - type: dot_ap value: 84.6482788957769 - type: dot_f1 value: 77.04748541466657 - type: dot_precision value: 74.02440754931176 - type: dot_recall value: 80.3279950723745 - type: euclidean_accuracy value: 88.63080684596576 - type: euclidean_ap value: 85.44570045321562 - type: euclidean_f1 value: 77.28769403336106 - type: euclidean_precision value: 72.90600040958427 - type: euclidean_recall value: 82.22975053895904 - type: manhattan_accuracy value: 88.59393798269105 - type: manhattan_ap value: 85.40271361038187 - type: manhattan_f1 value: 77.17606419344392 - type: manhattan_precision value: 72.4447747078295 - type: manhattan_recall value: 82.5685247921158 - type: max_accuracy value: 88.67155664221679 - type: max_ap value: 85.64591703003417 - type: max_f1 value: 77.59531005352656 --- <h1 align="center">FlagEmbedding</h1> <h4 align="center"> <p> <a href=#model-list>Model List</a> | <a href=#frequently-asked-questions>FAQ</a> | <a href=#usage>Usage</a> | <a href="#evaluation">Evaluation</a> | <a href="#train">Train</a> | <a href="#contact">Contact</a> | <a href="#citation">Citation</a> | <a href="#license">License</a> <p> </h4> For more details please refer to our Github: [FlagEmbedding](https://github.com/FlagOpen/FlagEmbedding). If you are looking for a model that supports more languages, longer texts, and other retrieval methods, you can try using [bge-m3](https://huggingface.co/BAAI/bge-m3). [English](README.md) | [中文](https://github.com/FlagOpen/FlagEmbedding/blob/master/README_zh.md) FlagEmbedding focuses on retrieval-augmented LLMs, consisting of the following projects currently: - **Long-Context LLM**: [Activation Beacon](https://github.com/FlagOpen/FlagEmbedding/tree/master/Long_LLM/activation_beacon) - **Fine-tuning of LM** : [LM-Cocktail](https://github.com/FlagOpen/FlagEmbedding/tree/master/LM_Cocktail) - **Dense Retrieval**: [BGE-M3](https://github.com/FlagOpen/FlagEmbedding/tree/master/FlagEmbedding/BGE_M3), [LLM Embedder](https://github.com/FlagOpen/FlagEmbedding/tree/master/FlagEmbedding/llm_embedder), [BGE Embedding](https://github.com/FlagOpen/FlagEmbedding/tree/master/FlagEmbedding/baai_general_embedding) - **Reranker Model**: [BGE Reranker](https://github.com/FlagOpen/FlagEmbedding/tree/master/FlagEmbedding/reranker) - **Benchmark**: [C-MTEB](https://github.com/FlagOpen/FlagEmbedding/tree/master/C_MTEB) ## News - 1/30/2024: Release **BGE-M3**, a new member to BGE model series! M3 stands for **M**ulti-linguality (100+ languages), **M**ulti-granularities (input length up to 8192), **M**ulti-Functionality (unification of dense, lexical, multi-vec/colbert retrieval). 
It is the first embedding model that supports all three retrieval methods, achieving new SOTA on multi-lingual (MIRACL) and cross-lingual (MKQA) benchmarks. [Technical Report](https://github.com/FlagOpen/FlagEmbedding/blob/master/FlagEmbedding/BGE_M3/BGE_M3.pdf) and [Code](https://github.com/FlagOpen/FlagEmbedding/tree/master/FlagEmbedding/BGE_M3). :fire:
- 1/9/2024: Release [Activation-Beacon](https://github.com/FlagOpen/FlagEmbedding/tree/master/Long_LLM/activation_beacon), an effective, efficient, compatible, and low-cost (training) method to extend the context length of LLMs. [Technical Report](https://arxiv.org/abs/2401.03462) :fire:
- 12/24/2023: Release **LLaRA**, a LLaMA-7B based dense retriever, leading to state-of-the-art performance on MS MARCO and BEIR. Model and code will be open-sourced. Please stay tuned. [Technical Report](https://arxiv.org/abs/2312.15503) :fire:
- 11/23/2023: Release [LM-Cocktail](https://github.com/FlagOpen/FlagEmbedding/tree/master/LM_Cocktail), a method to maintain general capabilities during fine-tuning by merging multiple language models. [Technical Report](https://arxiv.org/abs/2311.13534) :fire:
- 10/12/2023: Release [LLM-Embedder](https://github.com/FlagOpen/FlagEmbedding/tree/master/FlagEmbedding/llm_embedder), a unified embedding model to support diverse retrieval augmentation needs for LLMs. [Technical Report](https://arxiv.org/pdf/2310.07554.pdf)
- 09/15/2023: The [technical report](https://arxiv.org/pdf/2309.07597.pdf) and [massive training data](https://data.baai.ac.cn/details/BAAI-MTP) of BGE have been released.
- 09/12/2023: New models:
    - **New reranker model**: release the cross-encoder models `BAAI/bge-reranker-base` and `BAAI/bge-reranker-large`, which are more powerful than the embedding models. We recommend using/fine-tuning them to re-rank the top-k documents returned by embedding models.
    - **Updated embedding model**: release the `bge-*-v1.5` embedding models to alleviate the issue of the similarity distribution and enhance retrieval ability without an instruction.

<details>
  <summary>More</summary>
<!-- ### More -->
- 09/07/2023: Update [fine-tune code](https://github.com/FlagOpen/FlagEmbedding/blob/master/FlagEmbedding/baai_general_embedding/README.md): add a script to mine hard negatives and support adding an instruction during fine-tuning.
- 08/09/2023: BGE models are integrated into **Langchain**; you can use them like [this](#using-langchain). The C-MTEB **leaderboard** is [available](https://huggingface.co/spaces/mteb/leaderboard).
- 08/05/2023: Release base-scale and small-scale models, **best performance among models of the same size 🤗**
- 08/02/2023: Release `bge-large-*` (short for BAAI General Embedding) models, **rank 1st on the MTEB and C-MTEB benchmarks!** :tada: :tada:
- 08/01/2023: We release the [Chinese Massive Text Embedding Benchmark](https://github.com/FlagOpen/FlagEmbedding/blob/master/C_MTEB) (**C-MTEB**), consisting of 31 test datasets.
</details>


## Model List

`bge` is short for `BAAI general embedding`.
| Model | Language | | Description | query instruction for retrieval [1] | |:-------------------------------|:--------:| :--------:| :--------:|:--------:| | [BAAI/bge-m3](https://huggingface.co/BAAI/bge-m3) | Multilingual | [Inference](https://github.com/FlagOpen/FlagEmbedding/tree/master/FlagEmbedding/BGE_M3#usage) [Fine-tune](https://github.com/FlagOpen/FlagEmbedding/tree/master/FlagEmbedding/BGE_M3) | Multi-Functionality(dense retrieval, sparse retrieval, multi-vector(colbert)), Multi-Linguality, and Multi-Granularity(8192 tokens) | | | [BAAI/llm-embedder](https://huggingface.co/BAAI/llm-embedder) | English | [Inference](./FlagEmbedding/llm_embedder/README.md) [Fine-tune](./FlagEmbedding/llm_embedder/README.md) | a unified embedding model to support diverse retrieval augmentation needs for LLMs | See [README](./FlagEmbedding/llm_embedder/README.md) | | [BAAI/bge-reranker-large](https://huggingface.co/BAAI/bge-reranker-large) | Chinese and English | [Inference](#usage-for-reranker) [Fine-tune](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/reranker) | a cross-encoder model which is more accurate but less efficient [2] | | | [BAAI/bge-reranker-base](https://huggingface.co/BAAI/bge-reranker-base) | Chinese and English | [Inference](#usage-for-reranker) [Fine-tune](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/reranker) | a cross-encoder model which is more accurate but less efficient [2] | | | [BAAI/bge-large-en-v1.5](https://huggingface.co/BAAI/bge-large-en-v1.5) | English | [Inference](#usage-for-embedding-model) [Fine-tune](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/finetune) | version 1.5 with more reasonable similarity distribution | `Represent this sentence for searching relevant passages: ` | | [BAAI/bge-base-en-v1.5](https://huggingface.co/BAAI/bge-base-en-v1.5) | English | [Inference](#usage-for-embedding-model) [Fine-tune](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/finetune) | version 1.5 with more reasonable similarity distribution | `Represent this sentence for searching relevant passages: ` | | [BAAI/bge-small-en-v1.5](https://huggingface.co/BAAI/bge-small-en-v1.5) | English | [Inference](#usage-for-embedding-model) [Fine-tune](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/finetune) | version 1.5 with more reasonable similarity distribution | `Represent this sentence for searching relevant passages: ` | | [BAAI/bge-large-zh-v1.5](https://huggingface.co/BAAI/bge-large-zh-v1.5) | Chinese | [Inference](#usage-for-embedding-model) [Fine-tune](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/finetune) | version 1.5 with more reasonable similarity distribution | `为这个句子生成表示以用于检索相关文章:` | | [BAAI/bge-base-zh-v1.5](https://huggingface.co/BAAI/bge-base-zh-v1.5) | Chinese | [Inference](#usage-for-embedding-model) [Fine-tune](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/finetune) | version 1.5 with more reasonable similarity distribution | `为这个句子生成表示以用于检索相关文章:` | | [BAAI/bge-small-zh-v1.5](https://huggingface.co/BAAI/bge-small-zh-v1.5) | Chinese | [Inference](#usage-for-embedding-model) [Fine-tune](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/finetune) | version 1.5 with more reasonable similarity distribution | `为这个句子生成表示以用于检索相关文章:` | | [BAAI/bge-large-en](https://huggingface.co/BAAI/bge-large-en) | English | [Inference](#usage-for-embedding-model) [Fine-tune](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/finetune) | :trophy: rank 
**1st** in [MTEB](https://huggingface.co/spaces/mteb/leaderboard) leaderboard | `Represent this sentence for searching relevant passages: ` | | [BAAI/bge-base-en](https://huggingface.co/BAAI/bge-base-en) | English | [Inference](#usage-for-embedding-model) [Fine-tune](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/finetune) | a base-scale model but with similar ability to `bge-large-en` | `Represent this sentence for searching relevant passages: ` | | [BAAI/bge-small-en](https://huggingface.co/BAAI/bge-small-en) | English | [Inference](#usage-for-embedding-model) [Fine-tune](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/finetune) |a small-scale model but with competitive performance | `Represent this sentence for searching relevant passages: ` | | [BAAI/bge-large-zh](https://huggingface.co/BAAI/bge-large-zh) | Chinese | [Inference](#usage-for-embedding-model) [Fine-tune](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/finetune) | :trophy: rank **1st** in [C-MTEB](https://github.com/FlagOpen/FlagEmbedding/tree/master/C_MTEB) benchmark | `为这个句子生成表示以用于检索相关文章:` | | [BAAI/bge-base-zh](https://huggingface.co/BAAI/bge-base-zh) | Chinese | [Inference](#usage-for-embedding-model) [Fine-tune](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/finetune) | a base-scale model but with similar ability to `bge-large-zh` | `为这个句子生成表示以用于检索相关文章:` | | [BAAI/bge-small-zh](https://huggingface.co/BAAI/bge-small-zh) | Chinese | [Inference](#usage-for-embedding-model) [Fine-tune](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/finetune) | a small-scale model but with competitive performance | `为这个句子生成表示以用于检索相关文章:` | [1\]: If you need to search the relevant passages to a query, we suggest to add the instruction to the query; in other cases, no instruction is needed, just use the original query directly. In all cases, **no instruction** needs to be added to passages. [2\]: Different from embedding model, reranker uses question and document as input and directly output similarity instead of embedding. To balance the accuracy and time cost, cross-encoder is widely used to re-rank top-k documents retrieved by other simple models. For examples, use bge embedding model to retrieve top 100 relevant documents, and then use bge reranker to re-rank the top 100 document to get the final top-3 results. All models have been uploaded to Huggingface Hub, and you can see them at https://huggingface.co/BAAI. If you cannot open the Huggingface Hub, you also can download the models at https://model.baai.ac.cn/models . ## Frequently asked questions <details> <summary>1. How to fine-tune bge embedding model?</summary> <!-- ### How to fine-tune bge embedding model? --> Following this [example](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/finetune) to prepare data and fine-tune your model. Some suggestions: - Mine hard negatives following this [example](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/finetune#hard-negatives), which can improve the retrieval performance. - If you pre-train bge on your data, the pre-trained model cannot be directly used to calculate similarity, and it must be fine-tuned with contrastive learning before computing similarity. - If the accuracy of the fine-tuned model is still not high, it is recommended to use/fine-tune the cross-encoder model (bge-reranker) to re-rank top-k results. Hard negatives also are needed to fine-tune reranker. </details> <details> <summary>2. 
The similarity score between two dissimilar sentences is higher than 0.5</summary>

<!-- ### The similarity score between two dissimilar sentences is higher than 0.5 -->

**We suggest using bge v1.5, which alleviates the issue of the similarity distribution.**

Since we fine-tune the models by contrastive learning with a temperature of 0.01, the similarity distribution of the current BGE model lies roughly in the interval \[0.6, 1\]. So a similarity score greater than 0.5 does not indicate that the two sentences are similar.

For downstream tasks, such as passage retrieval or semantic similarity, **what matters is the relative order of the scores, not the absolute value.** If you need to filter similar sentences based on a similarity threshold, please select an appropriate threshold based on the similarity distribution on your data (such as 0.8, 0.85, or even 0.9).

</details>

<details>
  <summary>3. When does the query instruction need to be used</summary>

<!-- ### When does the query instruction need to be used -->

For `bge-*-v1.5`, we improve its retrieval ability when not using an instruction. Omitting the instruction causes only a slight degradation in retrieval performance compared with using it, so for convenience you can generate embeddings without an instruction in all cases.

For a retrieval task that uses short queries to find long related documents, it is recommended to add instructions to these short queries. **The best way to decide whether to add instructions to queries is to choose the setting that achieves better performance on your task.** In all cases, the documents/passages do not need the instruction.

</details>


## Usage

### Usage for Embedding Model

Here are some examples for using `bge` models with
[FlagEmbedding](#using-flagembedding), [Sentence-Transformers](#using-sentence-transformers), [Langchain](#using-langchain), or [Huggingface Transformers](#using-huggingface-transformers).

#### Using FlagEmbedding
```
pip install -U FlagEmbedding
```
If it doesn't work for you, you can see [FlagEmbedding](https://github.com/FlagOpen/FlagEmbedding/blob/master/FlagEmbedding/baai_general_embedding/README.md) for more methods to install FlagEmbedding.

```python
from FlagEmbedding import FlagModel
sentences_1 = ["样例数据-1", "样例数据-2"]
sentences_2 = ["样例数据-3", "样例数据-4"]
model = FlagModel('BAAI/bge-large-zh-v1.5',
                  query_instruction_for_retrieval="为这个句子生成表示以用于检索相关文章:",
                  use_fp16=True) # Setting use_fp16 to True speeds up computation with a slight performance degradation
embeddings_1 = model.encode(sentences_1)
embeddings_2 = model.encode(sentences_2)
similarity = embeddings_1 @ embeddings_2.T
print(similarity)

# For an s2p (short query to long passage) retrieval task, we suggest using encode_queries(), which automatically adds the instruction to each query.
# The corpus in a retrieval task can still use encode() or encode_corpus(), since passages do not need the instruction.
queries = ['query_1', 'query_2']
passages = ["样例文档-1", "样例文档-2"]
q_embeddings = model.encode_queries(queries)
p_embeddings = model.encode(passages)
scores = q_embeddings @ p_embeddings.T
```
For the value of the argument `query_instruction_for_retrieval`, see [Model List](https://github.com/FlagOpen/FlagEmbedding/tree/master#model-list).

By default, FlagModel will use all available GPUs when encoding. Please set `os.environ["CUDA_VISIBLE_DEVICES"]` to select specific GPUs.
You can also set `os.environ["CUDA_VISIBLE_DEVICES"]=""` to make all GPUs unavailable.
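Note that `CUDA_VISIBLE_DEVICES` generally needs to be set before CUDA is initialized, so in practice it should be set at the top of the script, before the model is constructed. Below is a minimal sketch (our own illustration, assuming a machine with more than one GPU; the device indices are placeholders):

```python
import os

# Restrict encoding to GPUs 0 and 1 (placeholder indices); an empty string would hide all GPUs and force CPU.
os.environ["CUDA_VISIBLE_DEVICES"] = "0,1"
# os.environ["CUDA_VISIBLE_DEVICES"] = ""  # uncomment to make all GPUs unavailable

from FlagEmbedding import FlagModel  # import after setting the variable so the restriction takes effect

model = FlagModel('BAAI/bge-large-zh-v1.5',
                  query_instruction_for_retrieval="为这个句子生成表示以用于检索相关文章:",
                  use_fp16=True)
embeddings = model.encode(["样例数据-1", "样例数据-2"])
print(embeddings.shape)
```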
#### Using Sentence-Transformers You can also use the `bge` models with [sentence-transformers](https://www.SBERT.net): ``` pip install -U sentence-transformers ``` ```python from sentence_transformers import SentenceTransformer sentences_1 = ["样例数据-1", "样例数据-2"] sentences_2 = ["样例数据-3", "样例数据-4"] model = SentenceTransformer('BAAI/bge-large-zh-v1.5') embeddings_1 = model.encode(sentences_1, normalize_embeddings=True) embeddings_2 = model.encode(sentences_2, normalize_embeddings=True) similarity = embeddings_1 @ embeddings_2.T print(similarity) ``` For s2p(short query to long passage) retrieval task, each short query should start with an instruction (instructions see [Model List](https://github.com/FlagOpen/FlagEmbedding/tree/master#model-list)). But the instruction is not needed for passages. ```python from sentence_transformers import SentenceTransformer queries = ['query_1', 'query_2'] passages = ["样例文档-1", "样例文档-2"] instruction = "为这个句子生成表示以用于检索相关文章:" model = SentenceTransformer('BAAI/bge-large-zh-v1.5') q_embeddings = model.encode([instruction+q for q in queries], normalize_embeddings=True) p_embeddings = model.encode(passages, normalize_embeddings=True) scores = q_embeddings @ p_embeddings.T ``` #### Using Langchain You can use `bge` in langchain like this: ```python from langchain.embeddings import HuggingFaceBgeEmbeddings model_name = "BAAI/bge-large-en-v1.5" model_kwargs = {'device': 'cuda'} encode_kwargs = {'normalize_embeddings': True} # set True to compute cosine similarity model = HuggingFaceBgeEmbeddings( model_name=model_name, model_kwargs=model_kwargs, encode_kwargs=encode_kwargs, query_instruction="为这个句子生成表示以用于检索相关文章:" ) model.query_instruction = "为这个句子生成表示以用于检索相关文章:" ``` #### Using HuggingFace Transformers With the transformers package, you can use the model like this: First, you pass your input through the transformer model, then you select the last hidden state of the first token (i.e., [CLS]) as the sentence embedding. ```python from transformers import AutoTokenizer, AutoModel import torch # Sentences we want sentence embeddings for sentences = ["样例数据-1", "样例数据-2"] # Load model from HuggingFace Hub tokenizer = AutoTokenizer.from_pretrained('BAAI/bge-large-zh-v1.5') model = AutoModel.from_pretrained('BAAI/bge-large-zh-v1.5') model.eval() # Tokenize sentences encoded_input = tokenizer(sentences, padding=True, truncation=True, return_tensors='pt') # for s2p(short query to long passage) retrieval task, add an instruction to query (not add instruction for passages) # encoded_input = tokenizer([instruction + q for q in queries], padding=True, truncation=True, return_tensors='pt') # Compute token embeddings with torch.no_grad(): model_output = model(**encoded_input) # Perform pooling. In this case, cls pooling. 
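# model_output[0] is the last_hidden_state with shape (batch_size, seq_len, hidden_size);
# taking index 0 along the sequence axis keeps the [CLS] token embedding for each sentence.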
sentence_embeddings = model_output[0][:, 0]
# normalize embeddings
sentence_embeddings = torch.nn.functional.normalize(sentence_embeddings, p=2, dim=1)
print("Sentence embeddings:", sentence_embeddings)
```

#### Usage of the ONNX files

```python
from optimum.onnxruntime import ORTModelForFeatureExtraction  # type: ignore
import torch
from transformers import AutoModel, AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained('BAAI/bge-large-en-v1.5')
model = AutoModel.from_pretrained('BAAI/bge-large-en-v1.5', revision="refs/pr/13")
model_ort = ORTModelForFeatureExtraction.from_pretrained('BAAI/bge-large-en-v1.5', revision="refs/pr/13", file_name="onnx/model.onnx")

# Sentences we want sentence embeddings for
sentences = ["样例数据-1", "样例数据-2"]

# Tokenize sentences
encoded_input = tokenizer(sentences, padding=True, truncation=True, return_tensors='pt')
# For an s2p (short query to long passage) retrieval task, add an instruction to each query (do not add an instruction to passages):
# encoded_input = tokenizer([instruction + q for q in queries], padding=True, truncation=True, return_tensors='pt')

model_output_ort = model_ort(**encoded_input)
# Compute token embeddings
with torch.no_grad():
    model_output = model(**encoded_input)

# model_output and model_output_ort are identical
```

#### Usage via infinity
It is also possible to deploy the ONNX files with the [infinity_emb](https://github.com/michaelfeil/infinity) pip package.
```python
import asyncio
from infinity_emb import AsyncEmbeddingEngine, EngineArgs

sentences = ["Embed this sentence via Infinity.", "Paris is in France."]
engine = AsyncEmbeddingEngine.from_args(
    EngineArgs(model_name_or_path="BAAI/bge-large-en-v1.5",
               device="cpu", engine="optimum"  # or engine="torch"
))

async def main():
    async with engine:
        embeddings, usage = await engine.embed(sentences=sentences)
asyncio.run(main())
```

### Usage for Reranker

Different from the embedding model, the reranker takes a question and a document as input and directly outputs a similarity score instead of an embedding.
You can get a relevance score by feeding a query and a passage to the reranker.
The reranker is optimized with a cross-entropy loss, so the relevance score is not bounded to a specific range.
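If a bounded score is more convenient for thresholding or display, one simple option (our own addition, not part of the official reranker API) is to pass the raw logit through a sigmoid, which leaves the ranking order unchanged. Below is a minimal sketch that reuses the Hugging Face cross-encoder loading shown further down; `rerank` is a hypothetical helper name:

```python
import torch
from transformers import AutoModelForSequenceClassification, AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained('BAAI/bge-reranker-base')
model = AutoModelForSequenceClassification.from_pretrained('BAAI/bge-reranker-base')
model.eval()

def rerank(query, passages):
    # Score each (query, passage) pair with the cross-encoder, then squash the raw logits into (0, 1).
    pairs = [[query, p] for p in passages]
    with torch.no_grad():
        inputs = tokenizer(pairs, padding=True, truncation=True, return_tensors='pt', max_length=512)
        logits = model(**inputs, return_dict=True).logits.view(-1).float()
    return torch.sigmoid(logits).tolist()  # bounded scores, same ordering as the raw logits

print(rerank('what is panda?', ['hi', 'The giant panda is a bear species endemic to China.']))
```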
#### Using FlagEmbedding ``` pip install -U FlagEmbedding ``` Get relevance scores (higher scores indicate more relevance): ```python from FlagEmbedding import FlagReranker reranker = FlagReranker('BAAI/bge-reranker-large', use_fp16=True) # Setting use_fp16 to True speeds up computation with a slight performance degradation score = reranker.compute_score(['query', 'passage']) print(score) scores = reranker.compute_score([['what is panda?', 'hi'], ['what is panda?', 'The giant panda (Ailuropoda melanoleuca), sometimes called a panda bear or simply panda, is a bear species endemic to China.']]) print(scores) ``` #### Using Huggingface transformers ```python import torch from transformers import AutoModelForSequenceClassification, AutoTokenizer tokenizer = AutoTokenizer.from_pretrained('BAAI/bge-reranker-large') model = AutoModelForSequenceClassification.from_pretrained('BAAI/bge-reranker-large') model.eval() pairs = [['what is panda?', 'hi'], ['what is panda?', 'The giant panda (Ailuropoda melanoleuca), sometimes called a panda bear or simply panda, is a bear species endemic to China.']] with torch.no_grad(): inputs = tokenizer(pairs, padding=True, truncation=True, return_tensors='pt', max_length=512) scores = model(**inputs, return_dict=True).logits.view(-1, ).float() print(scores) ``` ## Evaluation `baai-general-embedding` models achieve **state-of-the-art performance on both MTEB and C-MTEB leaderboard!** For more details and evaluation tools see our [scripts](https://github.com/FlagOpen/FlagEmbedding/blob/master/C_MTEB/README.md). - **MTEB**: | Model Name | Dimension | Sequence Length | Average (56) | Retrieval (15) |Clustering (11) | Pair Classification (3) | Reranking (4) | STS (10) | Summarization (1) | Classification (12) | |:----:|:---:|:---:|:---:|:---:|:---:|:---:|:---:|:---:|:---:|:---:| | [BAAI/bge-large-en-v1.5](https://huggingface.co/BAAI/bge-large-en-v1.5) | 1024 | 512 | **64.23** | **54.29** | 46.08 | 87.12 | 60.03 | 83.11 | 31.61 | 75.97 | | [BAAI/bge-base-en-v1.5](https://huggingface.co/BAAI/bge-base-en-v1.5) | 768 | 512 | 63.55 | 53.25 | 45.77 | 86.55 | 58.86 | 82.4 | 31.07 | 75.53 | | [BAAI/bge-small-en-v1.5](https://huggingface.co/BAAI/bge-small-en-v1.5) | 384 | 512 | 62.17 |51.68 | 43.82 | 84.92 | 58.36 | 81.59 | 30.12 | 74.14 | | [bge-large-en](https://huggingface.co/BAAI/bge-large-en) | 1024 | 512 | 63.98 | 53.9 | 46.98 | 85.8 | 59.48 | 81.56 | 32.06 | 76.21 | | [bge-base-en](https://huggingface.co/BAAI/bge-base-en) | 768 | 512 | 63.36 | 53.0 | 46.32 | 85.86 | 58.7 | 81.84 | 29.27 | 75.27 | | [gte-large](https://huggingface.co/thenlper/gte-large) | 1024 | 512 | 63.13 | 52.22 | 46.84 | 85.00 | 59.13 | 83.35 | 31.66 | 73.33 | | [gte-base](https://huggingface.co/thenlper/gte-base) | 768 | 512 | 62.39 | 51.14 | 46.2 | 84.57 | 58.61 | 82.3 | 31.17 | 73.01 | | [e5-large-v2](https://huggingface.co/intfloat/e5-large-v2) | 1024| 512 | 62.25 | 50.56 | 44.49 | 86.03 | 56.61 | 82.05 | 30.19 | 75.24 | | [bge-small-en](https://huggingface.co/BAAI/bge-small-en) | 384 | 512 | 62.11 | 51.82 | 44.31 | 83.78 | 57.97 | 80.72 | 30.53 | 74.37 | | [instructor-xl](https://huggingface.co/hkunlp/instructor-xl) | 768 | 512 | 61.79 | 49.26 | 44.74 | 86.62 | 57.29 | 83.06 | 32.32 | 61.79 | | [e5-base-v2](https://huggingface.co/intfloat/e5-base-v2) | 768 | 512 | 61.5 | 50.29 | 43.80 | 85.73 | 55.91 | 81.05 | 30.28 | 73.84 | | [gte-small](https://huggingface.co/thenlper/gte-small) | 384 | 512 | 61.36 | 49.46 | 44.89 | 83.54 | 57.7 | 82.07 | 30.42 | 72.31 | | 
[text-embedding-ada-002](https://platform.openai.com/docs/guides/embeddings) | 1536 | 8192 | 60.99 | 49.25 | 45.9 | 84.89 | 56.32 | 80.97 | 30.8 | 70.93 | | [e5-small-v2](https://huggingface.co/intfloat/e5-base-v2) | 384 | 512 | 59.93 | 49.04 | 39.92 | 84.67 | 54.32 | 80.39 | 31.16 | 72.94 | | [sentence-t5-xxl](https://huggingface.co/sentence-transformers/sentence-t5-xxl) | 768 | 512 | 59.51 | 42.24 | 43.72 | 85.06 | 56.42 | 82.63 | 30.08 | 73.42 | | [all-mpnet-base-v2](https://huggingface.co/sentence-transformers/all-mpnet-base-v2) | 768 | 514 | 57.78 | 43.81 | 43.69 | 83.04 | 59.36 | 80.28 | 27.49 | 65.07 | | [sgpt-bloom-7b1-msmarco](https://huggingface.co/bigscience/sgpt-bloom-7b1-msmarco) | 4096 | 2048 | 57.59 | 48.22 | 38.93 | 81.9 | 55.65 | 77.74 | 33.6 | 66.19 | - **C-MTEB**: We create the benchmark C-MTEB for Chinese text embedding which consists of 31 datasets from 6 tasks. Please refer to [C_MTEB](https://github.com/FlagOpen/FlagEmbedding/blob/master/C_MTEB/README.md) for a detailed introduction. | Model | Embedding dimension | Avg | Retrieval | STS | PairClassification | Classification | Reranking | Clustering | |:-------------------------------|:--------:|:--------:|:--------:|:--------:|:--------:|:--------:|:--------:|:--------:| | [**BAAI/bge-large-zh-v1.5**](https://huggingface.co/BAAI/bge-large-zh-v1.5) | 1024 | **64.53** | 70.46 | 56.25 | 81.6 | 69.13 | 65.84 | 48.99 | | [BAAI/bge-base-zh-v1.5](https://huggingface.co/BAAI/bge-base-zh-v1.5) | 768 | 63.13 | 69.49 | 53.72 | 79.75 | 68.07 | 65.39 | 47.53 | | [BAAI/bge-small-zh-v1.5](https://huggingface.co/BAAI/bge-small-zh-v1.5) | 512 | 57.82 | 61.77 | 49.11 | 70.41 | 63.96 | 60.92 | 44.18 | | [BAAI/bge-large-zh](https://huggingface.co/BAAI/bge-large-zh) | 1024 | 64.20 | 71.53 | 54.98 | 78.94 | 68.32 | 65.11 | 48.39 | | [bge-large-zh-noinstruct](https://huggingface.co/BAAI/bge-large-zh-noinstruct) | 1024 | 63.53 | 70.55 | 53 | 76.77 | 68.58 | 64.91 | 50.01 | | [BAAI/bge-base-zh](https://huggingface.co/BAAI/bge-base-zh) | 768 | 62.96 | 69.53 | 54.12 | 77.5 | 67.07 | 64.91 | 47.63 | | [multilingual-e5-large](https://huggingface.co/intfloat/multilingual-e5-large) | 1024 | 58.79 | 63.66 | 48.44 | 69.89 | 67.34 | 56.00 | 48.23 | | [BAAI/bge-small-zh](https://huggingface.co/BAAI/bge-small-zh) | 512 | 58.27 | 63.07 | 49.45 | 70.35 | 63.64 | 61.48 | 45.09 | | [m3e-base](https://huggingface.co/moka-ai/m3e-base) | 768 | 57.10 | 56.91 | 50.47 | 63.99 | 67.52 | 59.34 | 47.68 | | [m3e-large](https://huggingface.co/moka-ai/m3e-large) | 1024 | 57.05 | 54.75 | 50.42 | 64.3 | 68.2 | 59.66 | 48.88 | | [multilingual-e5-base](https://huggingface.co/intfloat/multilingual-e5-base) | 768 | 55.48 | 61.63 | 46.49 | 67.07 | 65.35 | 54.35 | 40.68 | | [multilingual-e5-small](https://huggingface.co/intfloat/multilingual-e5-small) | 384 | 55.38 | 59.95 | 45.27 | 66.45 | 65.85 | 53.86 | 45.26 | | [text-embedding-ada-002(OpenAI)](https://platform.openai.com/docs/guides/embeddings/what-are-embeddings) | 1536 | 53.02 | 52.0 | 43.35 | 69.56 | 64.31 | 54.28 | 45.68 | | [luotuo](https://huggingface.co/silk-road/luotuo-bert-medium) | 1024 | 49.37 | 44.4 | 42.78 | 66.62 | 61 | 49.25 | 44.39 | | [text2vec-base](https://huggingface.co/shibing624/text2vec-base-chinese) | 768 | 47.63 | 38.79 | 43.41 | 67.41 | 62.19 | 49.45 | 37.66 | | [text2vec-large](https://huggingface.co/GanymedeNil/text2vec-large-chinese) | 1024 | 47.36 | 41.94 | 44.97 | 70.86 | 60.66 | 49.16 | 30.02 | - **Reranking**: See [C_MTEB](https://github.com/FlagOpen/FlagEmbedding/blob/master/C_MTEB/) for 
evaluation script. | Model | T2Reranking | T2RerankingZh2En\* | T2RerankingEn2Zh\* | MMarcoReranking | CMedQAv1 | CMedQAv2 | Avg | |:-------------------------------|:--------:|:--------:|:--------:|:--------:|:--------:|:--------:|:--------:| | text2vec-base-multilingual | 64.66 | 62.94 | 62.51 | 14.37 | 48.46 | 48.6 | 50.26 | | multilingual-e5-small | 65.62 | 60.94 | 56.41 | 29.91 | 67.26 | 66.54 | 57.78 | | multilingual-e5-large | 64.55 | 61.61 | 54.28 | 28.6 | 67.42 | 67.92 | 57.4 | | multilingual-e5-base | 64.21 | 62.13 | 54.68 | 29.5 | 66.23 | 66.98 | 57.29 | | m3e-base | 66.03 | 62.74 | 56.07 | 17.51 | 77.05 | 76.76 | 59.36 | | m3e-large | 66.13 | 62.72 | 56.1 | 16.46 | 77.76 | 78.27 | 59.57 | | bge-base-zh-v1.5 | 66.49 | 63.25 | 57.02 | 29.74 | 80.47 | 84.88 | 63.64 | | bge-large-zh-v1.5 | 65.74 | 63.39 | 57.03 | 28.74 | 83.45 | 85.44 | 63.97 | | [BAAI/bge-reranker-base](https://huggingface.co/BAAI/bge-reranker-base) | 67.28 | 63.95 | 60.45 | 35.46 | 81.26 | 84.1 | 65.42 | | [BAAI/bge-reranker-large](https://huggingface.co/BAAI/bge-reranker-large) | 67.6 | 64.03 | 61.44 | 37.16 | 82.15 | 84.18 | 66.09 | \* : T2RerankingZh2En and T2RerankingEn2Zh are cross-language retrieval tasks ## Train ### BAAI Embedding We pre-train the models using [retromae](https://github.com/staoxiao/RetroMAE) and train them on large-scale pairs data using contrastive learning. **You can fine-tune the embedding model on your data following our [examples](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/finetune).** We also provide a [pre-train example](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/pretrain). Note that the goal of pre-training is to reconstruct the text, and the pre-trained model cannot be used for similarity calculation directly, it needs to be fine-tuned. More training details for bge see [baai_general_embedding](https://github.com/FlagOpen/FlagEmbedding/blob/master/FlagEmbedding/baai_general_embedding/README.md). ### BGE Reranker Cross-encoder will perform full-attention over the input pair, which is more accurate than embedding model (i.e., bi-encoder) but more time-consuming than embedding model. Therefore, it can be used to re-rank the top-k documents returned by embedding model. We train the cross-encoder on a multilingual pair data, The data format is the same as embedding model, so you can fine-tune it easily following our [example](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/reranker). More details please refer to [./FlagEmbedding/reranker/README.md](https://github.com/FlagOpen/FlagEmbedding/tree/master/FlagEmbedding/reranker) ## Contact If you have any question or suggestion related to this project, feel free to open an issue or pull request. You also can email Shitao Xiao([email protected]) and Zheng Liu([email protected]). ## Citation If you find this repository useful, please consider giving a star :star: and citation ``` @misc{bge_embedding, title={C-Pack: Packaged Resources To Advance General Chinese Embedding}, author={Shitao Xiao and Zheng Liu and Peitian Zhang and Niklas Muennighoff}, year={2023}, eprint={2309.07597}, archivePrefix={arXiv}, primaryClass={cs.CL} } ``` ## License FlagEmbedding is licensed under the [MIT License](https://github.com/FlagOpen/FlagEmbedding/blob/master/LICENSE). The released models can be used for commercial purposes free of charge.
[ "BEAR", "BIOSSES", "SCIFACT" ]
apple/OpenELM-1_1B-Instruct
apple
text-generation
[ "transformers", "safetensors", "openelm", "text-generation", "custom_code", "arxiv:2404.14619", "license:apple-amlr", "autotrain_compatible", "region:us" ]
"2024-04-12T21:52:12Z"
2025-02-28T18:31:24+00:00
1,520,427
60
--- license: apple-amlr license_name: apple-sample-code-license license_link: LICENSE --- # OpenELM *Sachin Mehta, Mohammad Hossein Sekhavat, Qingqing Cao, Maxwell Horton, Yanzi Jin, Chenfan Sun, Iman Mirzadeh, Mahyar Najibi, Dmitry Belenko, Peter Zatloukal, Mohammad Rastegari* We introduce **OpenELM**, a family of **Open** **E**fficient **L**anguage **M**odels. OpenELM uses a layer-wise scaling strategy to efficiently allocate parameters within each layer of the transformer model, leading to enhanced accuracy. We pretrained OpenELM models using the [CoreNet](https://github.com/apple/corenet) library. We release both pretrained and instruction tuned models with 270M, 450M, 1.1B and 3B parameters. We release the complete framework, encompassing data preparation, training, fine-tuning, and evaluation procedures, alongside multiple pre-trained checkpoints and training logs, to facilitate open research. Our pre-training dataset contains RefinedWeb, deduplicated PILE, a subset of RedPajama, and a subset of Dolma v1.6, totaling approximately 1.8 trillion tokens. Please check license agreements and terms of these datasets before using them. ## Usage We have provided an example function to generate output from OpenELM models loaded via [HuggingFace Hub](https://huggingface.co/docs/hub/) in `generate_openelm.py`. You can try the model by running the following command: ``` python generate_openelm.py --model apple/OpenELM-1_1B-Instruct --hf_access_token [HF_ACCESS_TOKEN] --prompt 'Once upon a time there was' --generate_kwargs repetition_penalty=1.2 ``` Please refer to [this link](https://huggingface.co/docs/hub/security-tokens) to obtain your hugging face access token. Additional arguments to the hugging face generate function can be passed via `generate_kwargs`. 
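If you would rather call the model directly from Python instead of the provided script, a minimal sketch is shown below. Assumptions: `trust_remote_code=True` is required because OpenELM ships custom modeling code, the LLaMA-2 tokenizer is paired with the model as in the evaluation setup later in this card, and access to that tokenizer may require your Hugging Face access token.

```python
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

# OpenELM does not ship its own tokenizer; the evaluation setup below pairs it with the LLaMA-2 tokenizer.
tokenizer = AutoTokenizer.from_pretrained("meta-llama/Llama-2-7b-hf")
model = AutoModelForCausalLM.from_pretrained(
    "apple/OpenELM-1_1B-Instruct",
    trust_remote_code=True,  # OpenELM uses custom modeling code hosted on the Hub
)
model.eval()

prompt = "Once upon a time there was"
inputs = tokenizer(prompt, return_tensors="pt")
with torch.no_grad():
    output_ids = model.generate(**inputs, max_new_tokens=64, repetition_penalty=1.2)
print(tokenizer.decode(output_ids[0], skip_special_tokens=True))
```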
As an example, to speedup the inference, you can try [lookup token speculative generation](https://huggingface.co/docs/transformers/generation_strategies) by passing the `prompt_lookup_num_tokens` argument as follows: ``` python generate_openelm.py --model apple/OpenELM-1_1B-Instruct --hf_access_token [HF_ACCESS_TOKEN] --prompt 'Once upon a time there was' --generate_kwargs repetition_penalty=1.2 prompt_lookup_num_tokens=10 ``` Alternatively, try model-wise speculative generation with an [assistive model](https://huggingface.co/blog/assisted-generation) by passing a smaller model through the `assistant_model` argument, for example: ``` python generate_openelm.py --model apple/OpenELM-1_1B-Instruct --hf_access_token [HF_ACCESS_TOKEN] --prompt 'Once upon a time there was' --generate_kwargs repetition_penalty=1.2 --assistant_model [SMALLER_MODEL] ``` ## Main Results ### Zero-Shot | **Model Size** | **ARC-c** | **ARC-e** | **BoolQ** | **HellaSwag** | **PIQA** | **SciQ** | **WinoGrande** | **Average** | |-----------------------------------------------------------------------------|-----------|-----------|-----------|---------------|-----------|-----------|----------------|-------------| | [OpenELM-270M](https://huggingface.co/apple/OpenELM-270M) | 26.45 | 45.08 | **53.98** | 46.71 | 69.75 | **84.70** | **53.91** | 54.37 | | [OpenELM-270M-Instruct](https://huggingface.co/apple/OpenELM-270M-Instruct) | **30.55** | **46.68** | 48.56 | **52.07** | **70.78** | 84.40 | 52.72 | **55.11** | | [OpenELM-450M](https://huggingface.co/apple/OpenELM-450M) | 27.56 | 48.06 | 55.78 | 53.97 | 72.31 | 87.20 | 58.01 | 57.56 | | [OpenELM-450M-Instruct](https://huggingface.co/apple/OpenELM-450M-Instruct) | **30.38** | **50.00** | **60.37** | **59.34** | **72.63** | **88.00** | **58.96** | **59.95** | | [OpenELM-1_1B](https://huggingface.co/apple/OpenELM-1_1B) | 32.34 | **55.43** | 63.58 | 64.81 | **75.57** | **90.60** | 61.72 | 63.44 | | [OpenELM-1_1B-Instruct](https://huggingface.co/apple/OpenELM-1_1B-Instruct) | **37.97** | 52.23 | **70.00** | **71.20** | 75.03 | 89.30 | **62.75** | **65.50** | | [OpenELM-3B](https://huggingface.co/apple/OpenELM-3B) | 35.58 | 59.89 | 67.40 | 72.44 | 78.24 | **92.70** | 65.51 | 67.39 | | [OpenELM-3B-Instruct](https://huggingface.co/apple/OpenELM-3B-Instruct) | **39.42** | **61.74** | **68.17** | **76.36** | **79.00** | 92.50 | **66.85** | **69.15** | ### LLM360 | **Model Size** | **ARC-c** | **HellaSwag** | **MMLU** | **TruthfulQA** | **WinoGrande** | **Average** | |-----------------------------------------------------------------------------|-----------|---------------|-----------|----------------|----------------|-------------| | [OpenELM-270M](https://huggingface.co/apple/OpenELM-270M) | 27.65 | 47.15 | 25.72 | **39.24** | **53.83** | 38.72 | | [OpenELM-270M-Instruct](https://huggingface.co/apple/OpenELM-270M-Instruct) | **32.51** | **51.58** | **26.70** | 38.72 | 53.20 | **40.54** | | [OpenELM-450M](https://huggingface.co/apple/OpenELM-450M) | 30.20 | 53.86 | **26.01** | 40.18 | 57.22 | 41.50 | | [OpenELM-450M-Instruct](https://huggingface.co/apple/OpenELM-450M-Instruct) | **33.53** | **59.31** | 25.41 | **40.48** | **58.33** | **43.41** | | [OpenELM-1_1B](https://huggingface.co/apple/OpenELM-1_1B) | 36.69 | 65.71 | **27.05** | 36.98 | 63.22 | 45.93 | | [OpenELM-1_1B-Instruct](https://huggingface.co/apple/OpenELM-1_1B-Instruct) | **41.55** | **71.83** | 25.65 | **45.95** | **64.72** | **49.94** | | [OpenELM-3B](https://huggingface.co/apple/OpenELM-3B) | 42.24 | 73.28 | 
**26.76** | 34.98 | 67.25 | 48.90 | | [OpenELM-3B-Instruct](https://huggingface.co/apple/OpenELM-3B-Instruct) | **47.70** | **76.87** | 24.80 | **38.76** | **67.96** | **51.22** | ### OpenLLM Leaderboard | **Model Size** | **ARC-c** | **CrowS-Pairs** | **HellaSwag** | **MMLU** | **PIQA** | **RACE** | **TruthfulQA** | **WinoGrande** | **Average** | |-----------------------------------------------------------------------------|-----------|-----------------|---------------|-----------|-----------|-----------|----------------|----------------|-------------| | [OpenELM-270M](https://huggingface.co/apple/OpenELM-270M) | 27.65 | **66.79** | 47.15 | 25.72 | 69.75 | 30.91 | **39.24** | **53.83** | 45.13 | | [OpenELM-270M-Instruct](https://huggingface.co/apple/OpenELM-270M-Instruct) | **32.51** | 66.01 | **51.58** | **26.70** | **70.78** | 33.78 | 38.72 | 53.20 | **46.66** | | [OpenELM-450M](https://huggingface.co/apple/OpenELM-450M) | 30.20 | **68.63** | 53.86 | **26.01** | 72.31 | 33.11 | 40.18 | 57.22 | 47.69 | | [OpenELM-450M-Instruct](https://huggingface.co/apple/OpenELM-450M-Instruct) | **33.53** | 67.44 | **59.31** | 25.41 | **72.63** | **36.84** | **40.48** | **58.33** | **49.25** | | [OpenELM-1_1B](https://huggingface.co/apple/OpenELM-1_1B) | 36.69 | **71.74** | 65.71 | **27.05** | **75.57** | 36.46 | 36.98 | 63.22 | 51.68 | | [OpenELM-1_1B-Instruct](https://huggingface.co/apple/OpenELM-1_1B-Instruct) | **41.55** | 71.02 | **71.83** | 25.65 | 75.03 | **39.43** | **45.95** | **64.72** | **54.40** | | [OpenELM-3B](https://huggingface.co/apple/OpenELM-3B) | 42.24 | **73.29** | 73.28 | **26.76** | 78.24 | **38.76** | 34.98 | 67.25 | 54.35 | | [OpenELM-3B-Instruct](https://huggingface.co/apple/OpenELM-3B-Instruct) | **47.70** | 72.33 | **76.87** | 24.80 | **79.00** | 38.47 | **38.76** | **67.96** | **55.73** | See the technical report for more results and comparison. ## Evaluation ### Setup Install the following dependencies: ```bash # install public lm-eval-harness harness_repo="public-lm-eval-harness" git clone https://github.com/EleutherAI/lm-evaluation-harness ${harness_repo} cd ${harness_repo} # use main branch on 03-15-2024, SHA is dc90fec git checkout dc90fec pip install -e . cd .. 
### Evaluate OpenELM

```bash
# OpenELM-1_1B-Instruct
hf_model=apple/OpenELM-1_1B-Instruct
# this flag is needed because lm-eval-harness sets add_bos_token to False by default,
# but OpenELM uses the LLaMA tokenizer, which requires add_bos_token to be True
tokenizer=meta-llama/Llama-2-7b-hf
add_bos_token=True
batch_size=1

mkdir lm_eval_output

shot=0
task=arc_challenge,arc_easy,boolq,hellaswag,piqa,race,winogrande,sciq,truthfulqa_mc2
lm_eval --model hf \
        --model_args pretrained=${hf_model},trust_remote_code=True,add_bos_token=${add_bos_token},tokenizer=${tokenizer} \
        --tasks ${task} \
        --device cuda:0 \
        --num_fewshot ${shot} \
        --output_path ./lm_eval_output/${hf_model//\//_}_${task//,/_}-${shot}shot \
        --batch_size ${batch_size} 2>&1 | tee ./lm_eval_output/eval-${hf_model//\//_}_${task//,/_}-${shot}shot.log

shot=5
task=mmlu,winogrande
lm_eval --model hf \
        --model_args pretrained=${hf_model},trust_remote_code=True,add_bos_token=${add_bos_token},tokenizer=${tokenizer} \
        --tasks ${task} \
        --device cuda:0 \
        --num_fewshot ${shot} \
        --output_path ./lm_eval_output/${hf_model//\//_}_${task//,/_}-${shot}shot \
        --batch_size ${batch_size} 2>&1 | tee ./lm_eval_output/eval-${hf_model//\//_}_${task//,/_}-${shot}shot.log

shot=25
task=arc_challenge,crows_pairs_english
lm_eval --model hf \
        --model_args pretrained=${hf_model},trust_remote_code=True,add_bos_token=${add_bos_token},tokenizer=${tokenizer} \
        --tasks ${task} \
        --device cuda:0 \
        --num_fewshot ${shot} \
        --output_path ./lm_eval_output/${hf_model//\//_}_${task//,/_}-${shot}shot \
        --batch_size ${batch_size} 2>&1 | tee ./lm_eval_output/eval-${hf_model//\//_}_${task//,/_}-${shot}shot.log

shot=10
task=hellaswag
lm_eval --model hf \
        --model_args pretrained=${hf_model},trust_remote_code=True,add_bos_token=${add_bos_token},tokenizer=${tokenizer} \
        --tasks ${task} \
        --device cuda:0 \
        --num_fewshot ${shot} \
        --output_path ./lm_eval_output/${hf_model//\//_}_${task//,/_}-${shot}shot \
        --batch_size ${batch_size} 2>&1 | tee ./lm_eval_output/eval-${hf_model//\//_}_${task//,/_}-${shot}shot.log
```

## Bias, Risks, and Limitations

The release of OpenELM models aims to empower and enrich the open research community by providing access to state-of-the-art language models. Trained on publicly available datasets, these models are made available without any safety guarantees. Consequently, they may produce outputs that are inaccurate, harmful, biased, or objectionable in response to user prompts. Users and developers must therefore perform thorough safety testing and implement filtering mechanisms tailored to their specific requirements.
## Citation

If you find our work useful, please cite:

```BibTex
@article{mehtaOpenELMEfficientLanguage2024,
    title = {{OpenELM}: {An} {Efficient} {Language} {Model} {Family} with {Open} {Training} and {Inference} {Framework}},
    shorttitle = {{OpenELM}},
    url = {https://arxiv.org/abs/2404.14619v1},
    language = {en},
    urldate = {2024-04-24},
    journal = {arXiv.org},
    author = {Mehta, Sachin and Sekhavat, Mohammad Hossein and Cao, Qingqing and Horton, Maxwell and Jin, Yanzi and Sun, Chenfan and Mirzadeh, Iman and Najibi, Mahyar and Belenko, Dmitry and Zatloukal, Peter and Rastegari, Mohammad},
    month = apr,
    year = {2024},
}

@inproceedings{mehta2022cvnets,
    author = {Mehta, Sachin and Abdolhosseini, Farzad and Rastegari, Mohammad},
    title = {CVNets: High Performance Library for Computer Vision},
    year = {2022},
    booktitle = {Proceedings of the 30th ACM International Conference on Multimedia},
    series = {MM '22}
}
```
[ "SCIQ" ]
Alibaba-NLP/gte-multilingual-base
Alibaba-NLP
sentence-similarity
[ "sentence-transformers", "safetensors", "new", "feature-extraction", "mteb", "transformers", "multilingual", "sentence-similarity", "custom_code", "af", "ar", "az", "be", "bg", "bn", "ca", "ceb", "cs", "cy", "da", "de", "el", "en", "es", "et", "eu", "fa", "fi", "fr", "gl", "gu", "he", "hi", "hr", "ht", "hu", "hy", "id", "is", "it", "ja", "jv", "ka", "kk", "km", "kn", "ko", "ky", "lo", "lt", "lv", "mk", "ml", "mn", "mr", "ms", "my", "ne", "nl", "no", "pa", "pl", "pt", "qu", "ro", "ru", "si", "sk", "sl", "so", "sq", "sr", "sv", "sw", "ta", "te", "th", "tl", "tr", "uk", "ur", "vi", "yo", "zh", "arxiv:2407.19669", "arxiv:2210.09984", "arxiv:2402.03216", "arxiv:2007.15207", "arxiv:2104.08663", "arxiv:2402.07440", "license:apache-2.0", "model-index", "autotrain_compatible", "text-embeddings-inference", "endpoints_compatible", "region:us" ]
"2024-07-20T08:37:28Z"
2025-03-17T05:40:01+00:00
1,376,850
212
--- language: - af - ar - az - be - bg - bn - ca - ceb - cs - cy - da - de - el - en - es - et - eu - fa - fi - fr - gl - gu - he - hi - hr - ht - hu - hy - id - is - it - ja - jv - ka - kk - km - kn - ko - ky - lo - lt - lv - mk - ml - mn - mr - ms - my - ne - nl - 'no' - pa - pl - pt - qu - ro - ru - si - sk - sl - so - sq - sr - sv - sw - ta - te - th - tl - tr - uk - ur - vi - yo - zh license: apache-2.0 tags: - mteb - sentence-transformers - transformers - multilingual - sentence-similarity model-index: - name: gte-multilingual-base (dense) results: - task: type: Clustering dataset: name: MTEB 8TagsClustering type: PL-MTEB/8tags-clustering config: default split: test revision: None metrics: - type: v_measure value: 33.66681726329994 - task: type: STS dataset: name: MTEB AFQMC type: C-MTEB/AFQMC config: default split: validation revision: b44c3b011063adb25877c13823db83bb193913c4 metrics: - type: cos_sim_spearman value: 43.54760696384009 - task: type: STS dataset: name: MTEB ATEC type: C-MTEB/ATEC config: default split: test revision: 0f319b1142f28d00e055a6770f3f726ae9b7d865 metrics: - type: cos_sim_spearman value: 48.91186363417501 - task: type: Classification dataset: name: MTEB AllegroReviews type: PL-MTEB/allegro-reviews config: default split: test revision: None metrics: - type: accuracy value: 41.689860834990064 - task: type: Clustering dataset: name: MTEB AlloProfClusteringP2P type: lyon-nlp/alloprof config: default split: test revision: 392ba3f5bcc8c51f578786c1fc3dae648662cb9b metrics: - type: v_measure value: 54.20241337977897 - type: v_measure value: 44.34083695608643 - task: type: Reranking dataset: name: MTEB AlloprofReranking type: lyon-nlp/mteb-fr-reranking-alloprof-s2p config: default split: test revision: 666fdacebe0291776e86f29345663dfaf80a0db9 metrics: - type: map value: 64.91495250072002 - task: type: Retrieval dataset: name: MTEB AlloprofRetrieval type: lyon-nlp/alloprof config: default split: test revision: 392ba3f5bcc8c51f578786c1fc3dae648662cb9b metrics: - type: ndcg_at_10 value: 53.638 - task: type: Classification dataset: name: MTEB AmazonCounterfactualClassification (en) type: mteb/amazon_counterfactual config: en split: test revision: e8379541af4e31359cca9fbcf4b00f2671dba205 metrics: - type: accuracy value: 75.95522388059702 - task: type: Classification dataset: name: MTEB AmazonPolarityClassification type: mteb/amazon_polarity config: default split: test revision: e2d317d38cd51312af73b3d32a06d1a08b442046 metrics: - type: accuracy value: 80.717625 - task: type: Classification dataset: name: MTEB AmazonReviewsClassification (en) type: mteb/amazon_reviews_multi config: en split: test revision: 1399c76144fd37290681b995c656ef9b2e06e26d metrics: - type: accuracy value: 43.64199999999999 - task: type: Classification dataset: name: MTEB AmazonReviewsClassification (de) type: mteb/amazon_reviews_multi config: de split: test revision: 1399c76144fd37290681b995c656ef9b2e06e26d metrics: - type: accuracy value: 40.108 - task: type: Classification dataset: name: MTEB AmazonReviewsClassification (es) type: mteb/amazon_reviews_multi config: es split: test revision: 1399c76144fd37290681b995c656ef9b2e06e26d metrics: - type: accuracy value: 40.169999999999995 - task: type: Classification dataset: name: MTEB AmazonReviewsClassification (fr) type: mteb/amazon_reviews_multi config: fr split: test revision: 1399c76144fd37290681b995c656ef9b2e06e26d metrics: - type: accuracy value: 39.56799999999999 - task: type: Classification dataset: name: MTEB AmazonReviewsClassification (ja) 
type: mteb/amazon_reviews_multi config: ja split: test revision: 1399c76144fd37290681b995c656ef9b2e06e26d metrics: - type: accuracy value: 35.75000000000001 - task: type: Classification dataset: name: MTEB AmazonReviewsClassification (zh) type: mteb/amazon_reviews_multi config: zh split: test revision: 1399c76144fd37290681b995c656ef9b2e06e26d metrics: - type: accuracy value: 33.342000000000006 - task: type: Retrieval dataset: name: MTEB ArguAna type: mteb/arguana config: default split: test revision: c22ab2a51041ffd869aaddef7af8d8215647e41a metrics: - type: ndcg_at_10 value: 58.231 - task: type: Retrieval dataset: name: MTEB ArguAna-PL type: clarin-knext/arguana-pl config: default split: test revision: 63fc86750af76253e8c760fc9e534bbf24d260a2 metrics: - type: ndcg_at_10 value: 53.166000000000004 - task: type: Clustering dataset: name: MTEB ArxivClusteringP2P type: mteb/arxiv-clustering-p2p config: default split: test revision: a122ad7f3f0291bf49cc6f4d32aa80929df69d5d metrics: - type: v_measure value: 46.01900557959478 - task: type: Clustering dataset: name: MTEB ArxivClusteringS2S type: mteb/arxiv-clustering-s2s config: default split: test revision: f910caf1a6075f7329cdf8c1a6135696f37dbd53 metrics: - type: v_measure value: 41.06626465345723 - task: type: Reranking dataset: name: MTEB AskUbuntuDupQuestions type: mteb/askubuntudupquestions-reranking config: default split: test revision: 2000358ca161889fa9c082cb41daa8dcfb161a54 metrics: - type: map value: 61.87514497610431 - task: type: STS dataset: name: MTEB BIOSSES type: mteb/biosses-sts config: default split: test revision: d3fb88f8f02e40887cd149695127462bbcf29b4a metrics: - type: cos_sim_spearman value: 81.21450112991194 - task: type: STS dataset: name: MTEB BQ type: C-MTEB/BQ config: default split: test revision: e3dda5e115e487b39ec7e618c0c6a29137052a55 metrics: - type: cos_sim_spearman value: 51.71589543397271 - task: type: Retrieval dataset: name: MTEB BSARDRetrieval type: maastrichtlawtech/bsard config: default split: test revision: 5effa1b9b5fa3b0f9e12523e6e43e5f86a6e6d59 metrics: - type: ndcg_at_10 value: 26.115 - task: type: BitextMining dataset: name: MTEB BUCC (de-en) type: mteb/bucc-bitext-mining config: de-en split: test revision: d51519689f32196a32af33b075a01d0e7c51e252 metrics: - type: f1 value: 98.6169102296451 - task: type: BitextMining dataset: name: MTEB BUCC (fr-en) type: mteb/bucc-bitext-mining config: fr-en split: test revision: d51519689f32196a32af33b075a01d0e7c51e252 metrics: - type: f1 value: 97.89603052314916 - task: type: BitextMining dataset: name: MTEB BUCC (ru-en) type: mteb/bucc-bitext-mining config: ru-en split: test revision: d51519689f32196a32af33b075a01d0e7c51e252 metrics: - type: f1 value: 97.12388869645537 - task: type: BitextMining dataset: name: MTEB BUCC (zh-en) type: mteb/bucc-bitext-mining config: zh-en split: test revision: d51519689f32196a32af33b075a01d0e7c51e252 metrics: - type: f1 value: 98.15692469720906 - task: type: Classification dataset: name: MTEB Banking77Classification type: mteb/banking77 config: default split: test revision: 0fd18e25b25c072e09e0d92ab615fda904d66300 metrics: - type: accuracy value: 85.36038961038962 - task: type: Clustering dataset: name: MTEB BiorxivClusteringP2P type: mteb/biorxiv-clustering-p2p config: default split: test revision: 65b79d1d13f80053f67aca9498d9402c2d9f1f40 metrics: - type: v_measure value: 37.5903826674123 - task: type: Clustering dataset: name: MTEB BiorxivClusteringS2S type: mteb/biorxiv-clustering-s2s config: default split: test revision: 
258694dd0231531bc1fd9de6ceb52a0853c6d908 metrics: - type: v_measure value: 34.21474277151329 - task: type: Classification dataset: name: MTEB CBD type: PL-MTEB/cbd config: default split: test revision: None metrics: - type: accuracy value: 62.519999999999996 - task: type: PairClassification dataset: name: MTEB CDSC-E type: PL-MTEB/cdsce-pairclassification config: default split: test revision: None metrics: - type: cos_sim_ap value: 74.90132799162956 - task: type: STS dataset: name: MTEB CDSC-R type: PL-MTEB/cdscr-sts config: default split: test revision: None metrics: - type: cos_sim_spearman value: 90.30727955142524 - task: type: Clustering dataset: name: MTEB CLSClusteringP2P type: C-MTEB/CLSClusteringP2P config: default split: test revision: 4b6227591c6c1a73bc76b1055f3b7f3588e72476 metrics: - type: v_measure value: 37.94850105022274 - task: type: Clustering dataset: name: MTEB CLSClusteringS2S type: C-MTEB/CLSClusteringS2S config: default split: test revision: e458b3f5414b62b7f9f83499ac1f5497ae2e869f metrics: - type: v_measure value: 38.11958675421534 - task: type: Reranking dataset: name: MTEB CMedQAv1 type: C-MTEB/CMedQAv1-reranking config: default split: test revision: 8d7f1e942507dac42dc58017c1a001c3717da7df metrics: - type: map value: 86.10950950485399 - task: type: Reranking dataset: name: MTEB CMedQAv2 type: C-MTEB/CMedQAv2-reranking config: default split: test revision: 23d186750531a14a0357ca22cd92d712fd512ea0 metrics: - type: map value: 87.28038294231966 - task: type: Retrieval dataset: name: MTEB CQADupstackAndroidRetrieval type: mteb/cqadupstack-android config: default split: test revision: f46a197baaae43b4f621051089b82a364682dfeb metrics: - type: ndcg_at_10 value: 47.099000000000004 - task: type: Retrieval dataset: name: MTEB CQADupstackEnglishRetrieval type: mteb/cqadupstack-english config: default split: test revision: ad9991cb51e31e31e430383c75ffb2885547b5f0 metrics: - type: ndcg_at_10 value: 45.973000000000006 - task: type: Retrieval dataset: name: MTEB CQADupstackGamingRetrieval type: mteb/cqadupstack-gaming config: default split: test revision: 4885aa143210c98657558c04aaf3dc47cfb54340 metrics: - type: ndcg_at_10 value: 55.606 - task: type: Retrieval dataset: name: MTEB CQADupstackGisRetrieval type: mteb/cqadupstack-gis config: default split: test revision: 5003b3064772da1887988e05400cf3806fe491f2 metrics: - type: ndcg_at_10 value: 36.638 - task: type: Retrieval dataset: name: MTEB CQADupstackMathematicaRetrieval type: mteb/cqadupstack-mathematica config: default split: test revision: 90fceea13679c63fe563ded68f3b6f06e50061de metrics: - type: ndcg_at_10 value: 30.711 - task: type: Retrieval dataset: name: MTEB CQADupstackPhysicsRetrieval type: mteb/cqadupstack-physics config: default split: test revision: 79531abbd1fb92d06c6d6315a0cbbbf5bb247ea4 metrics: - type: ndcg_at_10 value: 44.523 - task: type: Retrieval dataset: name: MTEB CQADupstackProgrammersRetrieval type: mteb/cqadupstack-programmers config: default split: test revision: 6184bc1440d2dbc7612be22b50686b8826d22b32 metrics: - type: ndcg_at_10 value: 37.940000000000005 - task: type: Retrieval dataset: name: MTEB CQADupstackRetrieval type: mteb/cqadupstack config: default split: test revision: 4ffe81d471b1924886b33c7567bfb200e9eec5c4 metrics: - type: ndcg_at_10 value: 38.12183333333333 - task: type: Retrieval dataset: name: MTEB CQADupstackStatsRetrieval type: mteb/cqadupstack-stats config: default split: test revision: 65ac3a16b8e91f9cee4c9828cc7c335575432a2a metrics: - type: ndcg_at_10 value: 32.684000000000005 - 
task: type: Retrieval dataset: name: MTEB CQADupstackTexRetrieval type: mteb/cqadupstack-tex config: default split: test revision: 46989137a86843e03a6195de44b09deda022eec7 metrics: - type: ndcg_at_10 value: 26.735 - task: type: Retrieval dataset: name: MTEB CQADupstackUnixRetrieval type: mteb/cqadupstack-unix config: default split: test revision: 6c6430d3a6d36f8d2a829195bc5dc94d7e063e53 metrics: - type: ndcg_at_10 value: 36.933 - task: type: Retrieval dataset: name: MTEB CQADupstackWebmastersRetrieval type: mteb/cqadupstack-webmasters config: default split: test revision: 160c094312a0e1facb97e55eeddb698c0abe3571 metrics: - type: ndcg_at_10 value: 33.747 - task: type: Retrieval dataset: name: MTEB CQADupstackWordpressRetrieval type: mteb/cqadupstack-wordpress config: default split: test revision: 4ffe81d471b1924886b33c7567bfb200e9eec5c4 metrics: - type: ndcg_at_10 value: 28.872999999999998 - task: type: Retrieval dataset: name: MTEB ClimateFEVER type: mteb/climate-fever config: default split: test revision: 47f2ac6acb640fc46020b02a5b59fdda04d39380 metrics: - type: ndcg_at_10 value: 34.833 - task: type: Retrieval dataset: name: MTEB CmedqaRetrieval type: C-MTEB/CmedqaRetrieval config: default split: dev revision: cd540c506dae1cf9e9a59c3e06f42030d54e7301 metrics: - type: ndcg_at_10 value: 43.78 - task: type: PairClassification dataset: name: MTEB Cmnli type: C-MTEB/CMNLI config: default split: validation revision: 41bc36f332156f7adc9e38f53777c959b2ae9766 metrics: - type: cos_sim_ap value: 84.00640599186677 - task: type: Retrieval dataset: name: MTEB CovidRetrieval type: C-MTEB/CovidRetrieval config: default split: dev revision: 1271c7809071a13532e05f25fb53511ffce77117 metrics: - type: ndcg_at_10 value: 80.60000000000001 - task: type: Retrieval dataset: name: MTEB DBPedia type: mteb/dbpedia config: default split: test revision: c0f706b76e590d620bd6618b3ca8efdd34e2d659 metrics: - type: ndcg_at_10 value: 40.116 - task: type: Retrieval dataset: name: MTEB DBPedia-PL type: clarin-knext/dbpedia-pl config: default split: test revision: 76afe41d9af165cc40999fcaa92312b8b012064a metrics: - type: ndcg_at_10 value: 32.498 - task: type: Retrieval dataset: name: MTEB DuRetrieval type: C-MTEB/DuRetrieval config: default split: dev revision: a1a333e290fe30b10f3f56498e3a0d911a693ced metrics: - type: ndcg_at_10 value: 87.547 - task: type: Retrieval dataset: name: MTEB EcomRetrieval type: C-MTEB/EcomRetrieval config: default split: dev revision: 687de13dc7294d6fd9be10c6945f9e8fec8166b9 metrics: - type: ndcg_at_10 value: 64.85 - task: type: Classification dataset: name: MTEB EmotionClassification type: mteb/emotion config: default split: test revision: 4f58c6b202a23cf9a4da393831edf4f9183cad37 metrics: - type: accuracy value: 47.949999999999996 - task: type: Retrieval dataset: name: MTEB FEVER type: mteb/fever config: default split: test revision: bea83ef9e8fb933d90a2f1d5515737465d613e12 metrics: - type: ndcg_at_10 value: 92.111 - task: type: Retrieval dataset: name: MTEB FiQA-PL type: clarin-knext/fiqa-pl config: default split: test revision: 2e535829717f8bf9dc829b7f911cc5bbd4e6608e metrics: - type: ndcg_at_10 value: 28.962 - task: type: Retrieval dataset: name: MTEB FiQA2018 type: mteb/fiqa config: default split: test revision: 27a168819829fe9bcd655c2df245fb19452e8e06 metrics: - type: ndcg_at_10 value: 45.005 - task: type: Clustering dataset: name: MTEB HALClusteringS2S type: lyon-nlp/clustering-hal-s2s config: default split: test revision: e06ebbbb123f8144bef1a5d18796f3dec9ae2915 metrics: - type: v_measure 
value: 25.133776435657595 - task: type: Retrieval dataset: name: MTEB HotpotQA type: mteb/hotpotqa config: default split: test revision: ab518f4d6fcca38d87c25209f94beba119d02014 metrics: - type: ndcg_at_10 value: 63.036 - task: type: Retrieval dataset: name: MTEB HotpotQA-PL type: clarin-knext/hotpotqa-pl config: default split: test revision: a0bd479ac97b4ccb5bd6ce320c415d0bb4beb907 metrics: - type: ndcg_at_10 value: 56.904999999999994 - task: type: Classification dataset: name: MTEB IFlyTek type: C-MTEB/IFlyTek-classification config: default split: validation revision: 421605374b29664c5fc098418fe20ada9bd55f8a metrics: - type: accuracy value: 44.59407464409388 - task: type: Classification dataset: name: MTEB ImdbClassification type: mteb/imdb config: default split: test revision: 3d86128a09e091d6018b6d26cad27f2739fc2db7 metrics: - type: accuracy value: 74.912 - task: type: Classification dataset: name: MTEB JDReview type: C-MTEB/JDReview-classification config: default split: test revision: b7c64bd89eb87f8ded463478346f76731f07bf8b metrics: - type: accuracy value: 79.26829268292683 - task: type: STS dataset: name: MTEB LCQMC type: C-MTEB/LCQMC config: default split: test revision: 17f9b096f80380fce5ed12a9be8be7784b337daf metrics: - type: cos_sim_spearman value: 74.8601229809791 - task: type: Clustering dataset: name: MTEB MLSUMClusteringP2P type: mlsum config: default split: test revision: b5d54f8f3b61ae17845046286940f03c6bc79bc7 metrics: - type: v_measure value: 42.331902754246556 - type: v_measure value: 40.92029335502153 - task: type: Reranking dataset: name: MTEB MMarcoReranking type: C-MTEB/Mmarco-reranking config: default split: dev revision: 8e0c766dbe9e16e1d221116a3f36795fbade07f6 metrics: - type: map value: 32.19266316591337 - task: type: Retrieval dataset: name: MTEB MMarcoRetrieval type: C-MTEB/MMarcoRetrieval config: default split: dev revision: 539bbde593d947e2a124ba72651aafc09eb33fc2 metrics: - type: ndcg_at_10 value: 79.346 - task: type: Retrieval dataset: name: MTEB MSMARCO type: mteb/msmarco config: default split: dev revision: c5a29a104738b98a9e76336939199e264163d4a0 metrics: - type: ndcg_at_10 value: 39.922999999999995 - task: type: Retrieval dataset: name: MTEB MSMARCO-PL type: clarin-knext/msmarco-pl config: default split: test revision: 8634c07806d5cce3a6138e260e59b81760a0a640 metrics: - type: ndcg_at_10 value: 55.620999999999995 - task: type: Classification dataset: name: MTEB MTOPDomainClassification (en) type: mteb/mtop_domain config: en split: test revision: d80d48c1eb48d3562165c59d59d0034df9fff0bf metrics: - type: accuracy value: 92.53989968080255 - task: type: Classification dataset: name: MTEB MTOPDomainClassification (de) type: mteb/mtop_domain config: de split: test revision: d80d48c1eb48d3562165c59d59d0034df9fff0bf metrics: - type: accuracy value: 88.26993519301212 - task: type: Classification dataset: name: MTEB MTOPDomainClassification (es) type: mteb/mtop_domain config: es split: test revision: d80d48c1eb48d3562165c59d59d0034df9fff0bf metrics: - type: accuracy value: 90.87725150100067 - task: type: Classification dataset: name: MTEB MTOPDomainClassification (fr) type: mteb/mtop_domain config: fr split: test revision: d80d48c1eb48d3562165c59d59d0034df9fff0bf metrics: - type: accuracy value: 87.48512370811149 - task: type: Classification dataset: name: MTEB MTOPDomainClassification (hi) type: mteb/mtop_domain config: hi split: test revision: d80d48c1eb48d3562165c59d59d0034df9fff0bf metrics: - type: accuracy value: 89.45141627823591 - task: type: 
Classification dataset: name: MTEB MTOPDomainClassification (th) type: mteb/mtop_domain config: th split: test revision: d80d48c1eb48d3562165c59d59d0034df9fff0bf metrics: - type: accuracy value: 83.45750452079565 - task: type: Classification dataset: name: MTEB MTOPIntentClassification (en) type: mteb/mtop_intent config: en split: test revision: ae001d0e6b1228650b7bd1c2c65fb50ad11a8aba metrics: - type: accuracy value: 72.57637938896488 - task: type: Classification dataset: name: MTEB MTOPIntentClassification (de) type: mteb/mtop_intent config: de split: test revision: ae001d0e6b1228650b7bd1c2c65fb50ad11a8aba metrics: - type: accuracy value: 63.50803043110736 - task: type: Classification dataset: name: MTEB MTOPIntentClassification (es) type: mteb/mtop_intent config: es split: test revision: ae001d0e6b1228650b7bd1c2c65fb50ad11a8aba metrics: - type: accuracy value: 71.6577718478986 - task: type: Classification dataset: name: MTEB MTOPIntentClassification (fr) type: mteb/mtop_intent config: fr split: test revision: ae001d0e6b1228650b7bd1c2c65fb50ad11a8aba metrics: - type: accuracy value: 64.05887879736925 - task: type: Classification dataset: name: MTEB MTOPIntentClassification (hi) type: mteb/mtop_intent config: hi split: test revision: ae001d0e6b1228650b7bd1c2c65fb50ad11a8aba metrics: - type: accuracy value: 65.27070634636071 - task: type: Classification dataset: name: MTEB MTOPIntentClassification (th) type: mteb/mtop_intent config: th split: test revision: ae001d0e6b1228650b7bd1c2c65fb50ad11a8aba metrics: - type: accuracy value: 63.04520795660037 - task: type: Classification dataset: name: MTEB MasakhaNEWSClassification (fra) type: masakhane/masakhanews config: fra split: test revision: 8ccc72e69e65f40c70e117d8b3c08306bb788b60 metrics: - type: accuracy value: 80.66350710900474 - task: type: Clustering dataset: name: MTEB MasakhaNEWSClusteringP2P (fra) type: masakhane/masakhanews config: fra split: test revision: 8ccc72e69e65f40c70e117d8b3c08306bb788b60 metrics: - type: v_measure value: 44.016506455899425 - type: v_measure value: 40.67730129573544 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (af) type: mteb/amazon_massive_intent config: af split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 57.94552790854068 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (am) type: mteb/amazon_massive_intent config: am split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 49.273705447209146 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (ar) type: mteb/amazon_massive_intent config: ar split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 55.490921318090116 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (az) type: mteb/amazon_massive_intent config: az split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 60.97511768661733 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (bn) type: mteb/amazon_massive_intent config: bn split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 57.5689307330195 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (cy) type: mteb/amazon_massive_intent config: cy split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 48.34902488231337 - task: type: Classification 
dataset: name: MTEB MassiveIntentClassification (da) type: mteb/amazon_massive_intent config: da split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 63.6684599865501 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (de) type: mteb/amazon_massive_intent config: de split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 62.54539340954942 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (el) type: mteb/amazon_massive_intent config: el split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 63.08675184936112 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (en) type: mteb/amazon_massive_intent config: en split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 72.12508406186953 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (es) type: mteb/amazon_massive_intent config: es split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 67.41425689307331 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (fa) type: mteb/amazon_massive_intent config: fa split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 65.59515803631474 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (fi) type: mteb/amazon_massive_intent config: fi split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 62.90517821116342 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (fr) type: mteb/amazon_massive_intent config: fr split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 67.91526563550774 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (he) type: mteb/amazon_massive_intent config: he split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 55.198386012104905 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (hi) type: mteb/amazon_massive_intent config: hi split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 65.04371217215869 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (hu) type: mteb/amazon_massive_intent config: hu split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 63.31203765971756 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (hy) type: mteb/amazon_massive_intent config: hy split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 55.521183591123055 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (id) type: mteb/amazon_massive_intent config: id split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 66.06254203093476 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (is) type: mteb/amazon_massive_intent config: is split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 56.01546738399461 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (it) type: mteb/amazon_massive_intent config: it split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 
67.27975790181574 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (ja) type: mteb/amazon_massive_intent config: ja split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 66.79556153328849 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (jv) type: mteb/amazon_massive_intent config: jv split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 50.18493611297915 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (ka) type: mteb/amazon_massive_intent config: ka split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 47.888365837256224 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (km) type: mteb/amazon_massive_intent config: km split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 50.79690652320108 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (kn) type: mteb/amazon_massive_intent config: kn split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 57.225958305312716 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (ko) type: mteb/amazon_massive_intent config: ko split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 64.58641560188299 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (lv) type: mteb/amazon_massive_intent config: lv split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 59.08204438466711 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (ml) type: mteb/amazon_massive_intent config: ml split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 59.54606590450572 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (mn) type: mteb/amazon_massive_intent config: mn split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 53.443174176193665 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (ms) type: mteb/amazon_massive_intent config: ms split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 61.65097511768661 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (my) type: mteb/amazon_massive_intent config: my split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 53.45662407531944 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (nb) type: mteb/amazon_massive_intent config: nb split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 63.739071956960316 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (nl) type: mteb/amazon_massive_intent config: nl split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 66.36180228648286 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (pl) type: mteb/amazon_massive_intent config: pl split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 66.3920645595158 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (pt) type: mteb/amazon_massive_intent config: pt split: test revision: 
31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 68.06993947545395 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (ro) type: mteb/amazon_massive_intent config: ro split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 63.123739071956955 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (ru) type: mteb/amazon_massive_intent config: ru split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 67.46133154001346 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (sl) type: mteb/amazon_massive_intent config: sl split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 60.54472091459314 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (sq) type: mteb/amazon_massive_intent config: sq split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 58.204438466711494 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (sv) type: mteb/amazon_massive_intent config: sv split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 65.69603227975792 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (sw) type: mteb/amazon_massive_intent config: sw split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 51.684599865501 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (ta) type: mteb/amazon_massive_intent config: ta split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 58.523873570948226 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (te) type: mteb/amazon_massive_intent config: te split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 58.53396099529253 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (th) type: mteb/amazon_massive_intent config: th split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 61.88298587760591 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (tl) type: mteb/amazon_massive_intent config: tl split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 56.65097511768662 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (tr) type: mteb/amazon_massive_intent config: tr split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 64.8453261600538 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (ur) type: mteb/amazon_massive_intent config: ur split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 58.6247478143914 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (vi) type: mteb/amazon_massive_intent config: vi split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 64.16274377942166 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (zh-CN) type: mteb/amazon_massive_intent config: zh-CN split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 69.61667787491594 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (zh-TW) type: 
mteb/amazon_massive_intent config: zh-TW split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 64.17283120376598 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (af) type: mteb/amazon_massive_scenario config: af split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 64.89912575655683 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (am) type: mteb/amazon_massive_scenario config: am split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 57.27975790181573 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (ar) type: mteb/amazon_massive_scenario config: ar split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 62.269670477471415 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (az) type: mteb/amazon_massive_scenario config: az split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 65.10423671822461 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (bn) type: mteb/amazon_massive_scenario config: bn split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 62.40753194351043 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (cy) type: mteb/amazon_massive_scenario config: cy split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 55.369872225958304 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (da) type: mteb/amazon_massive_scenario config: da split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 71.60726294552792 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (de) type: mteb/amazon_massive_scenario config: de split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 70.30262273032952 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (el) type: mteb/amazon_massive_scenario config: el split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 69.52925353059851 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (en) type: mteb/amazon_massive_scenario config: en split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 76.28446536650976 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (es) type: mteb/amazon_massive_scenario config: es split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 72.45460659045058 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (fa) type: mteb/amazon_massive_scenario config: fa split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 70.26563550773368 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (fi) type: mteb/amazon_massive_scenario config: fi split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 67.20578345662408 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (fr) type: mteb/amazon_massive_scenario config: fr split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 
72.64963012777405 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (he) type: mteb/amazon_massive_scenario config: he split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 61.698049764626774 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (hi) type: mteb/amazon_massive_scenario config: hi split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 70.14458641560188 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (hu) type: mteb/amazon_massive_scenario config: hu split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 70.51445864156018 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (hy) type: mteb/amazon_massive_scenario config: hy split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 60.13786146603901 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (id) type: mteb/amazon_massive_scenario config: id split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 70.61533288500337 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (is) type: mteb/amazon_massive_scenario config: is split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 61.526563550773375 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (it) type: mteb/amazon_massive_scenario config: it split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 71.99731002017484 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (ja) type: mteb/amazon_massive_scenario config: ja split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 71.59381304640216 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (jv) type: mteb/amazon_massive_scenario config: jv split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 57.010759919300604 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (ka) type: mteb/amazon_massive_scenario config: ka split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 53.26160053799597 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (km) type: mteb/amazon_massive_scenario config: km split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 57.800941492938804 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (kn) type: mteb/amazon_massive_scenario config: kn split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 62.387357094821795 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (ko) type: mteb/amazon_massive_scenario config: ko split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 69.5359784801614 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (lv) type: mteb/amazon_massive_scenario config: lv split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 63.36919973100203 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (ml) type: 
mteb/amazon_massive_scenario config: ml split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 64.81506388702084 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (mn) type: mteb/amazon_massive_scenario config: mn split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 59.35104236718225 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (ms) type: mteb/amazon_massive_scenario config: ms split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 66.67787491593813 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (my) type: mteb/amazon_massive_scenario config: my split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 59.4250168123739 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (nb) type: mteb/amazon_massive_scenario config: nb split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 71.49630127774043 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (nl) type: mteb/amazon_massive_scenario config: nl split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 71.95696032279758 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (pl) type: mteb/amazon_massive_scenario config: pl split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 70.11768661735036 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (pt) type: mteb/amazon_massive_scenario config: pt split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 71.86953597848016 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (ro) type: mteb/amazon_massive_scenario config: ro split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 68.51042367182247 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (ru) type: mteb/amazon_massive_scenario config: ru split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 71.65097511768661 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (sl) type: mteb/amazon_massive_scenario config: sl split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 66.81573638197713 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (sq) type: mteb/amazon_massive_scenario config: sq split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 65.26227303295225 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (sv) type: mteb/amazon_massive_scenario config: sv split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 72.51513113651646 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (sw) type: mteb/amazon_massive_scenario config: sw split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 58.29858776059179 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (ta) type: mteb/amazon_massive_scenario config: ta split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 
62.72696704774714 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (te) type: mteb/amazon_massive_scenario config: te split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 66.57700067249496 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (th) type: mteb/amazon_massive_scenario config: th split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 68.22797579018157 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (tl) type: mteb/amazon_massive_scenario config: tl split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 61.97041022192333 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (tr) type: mteb/amazon_massive_scenario config: tr split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 70.72629455279085 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (ur) type: mteb/amazon_massive_scenario config: ur split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 63.16072629455278 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (vi) type: mteb/amazon_massive_scenario config: vi split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 67.92199058507062 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (zh-CN) type: mteb/amazon_massive_scenario config: zh-CN split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 74.40484196368527 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (zh-TW) type: mteb/amazon_massive_scenario config: zh-TW split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 71.61398789509079 - task: type: Retrieval dataset: name: MTEB MedicalRetrieval type: C-MTEB/MedicalRetrieval config: default split: dev revision: 2039188fb5800a9803ba5048df7b76e6fb151fc6 metrics: - type: ndcg_at_10 value: 61.934999999999995 - task: type: Clustering dataset: name: MTEB MedrxivClusteringP2P type: mteb/medrxiv-clustering-p2p config: default split: test revision: e7a26af6f3ae46b30dde8737f02c07b1505bcc73 metrics: - type: v_measure value: 33.052031054565205 - task: type: Clustering dataset: name: MTEB MedrxivClusteringS2S type: mteb/medrxiv-clustering-s2s config: default split: test revision: 35191c8c0dca72d8ff3efcd72aa802307d469663 metrics: - type: v_measure value: 31.969909524076794 - task: type: Reranking dataset: name: MTEB MindSmallReranking type: mteb/mind_small config: default split: test revision: 3bdac13927fdc888b903db93b2ffdbd90b295a69 metrics: - type: map value: 31.7530992892652 - task: type: Retrieval dataset: name: MTEB MintakaRetrieval (fr) type: jinaai/mintakaqa config: fr split: test revision: efa78cc2f74bbcd21eff2261f9e13aebe40b814e metrics: - type: ndcg_at_10 value: 34.705999999999996 - task: type: Retrieval dataset: name: MTEB MultiLongDocRetrieval (ar) type: Shitao/MLDR config: ar split: test revision: None metrics: - type: ndcg_at_10 value: 55.166000000000004 - task: type: Retrieval dataset: name: MTEB MultiLongDocRetrieval (de) type: Shitao/MLDR config: de split: test revision: None metrics: - type: ndcg_at_10 value: 55.155 - task: type: Retrieval dataset: name: MTEB MultiLongDocRetrieval (en) type: Shitao/MLDR config: en split: 
test revision: None metrics: - type: ndcg_at_10 value: 50.993 - task: type: Retrieval dataset: name: MTEB MultiLongDocRetrieval (es) type: Shitao/MLDR config: es split: test revision: None metrics: - type: ndcg_at_10 value: 81.228 - task: type: Retrieval dataset: name: MTEB MultiLongDocRetrieval (fr) type: Shitao/MLDR config: fr split: test revision: None metrics: - type: ndcg_at_10 value: 76.19 - task: type: Retrieval dataset: name: MTEB MultiLongDocRetrieval (hi) type: Shitao/MLDR config: hi split: test revision: None metrics: - type: ndcg_at_10 value: 45.206 - task: type: Retrieval dataset: name: MTEB MultiLongDocRetrieval (it) type: Shitao/MLDR config: it split: test revision: None metrics: - type: ndcg_at_10 value: 66.741 - task: type: Retrieval dataset: name: MTEB MultiLongDocRetrieval (ja) type: Shitao/MLDR config: ja split: test revision: None metrics: - type: ndcg_at_10 value: 52.111 - task: type: Retrieval dataset: name: MTEB MultiLongDocRetrieval (ko) type: Shitao/MLDR config: ko split: test revision: None metrics: - type: ndcg_at_10 value: 46.733000000000004 - task: type: Retrieval dataset: name: MTEB MultiLongDocRetrieval (pt) type: Shitao/MLDR config: pt split: test revision: None metrics: - type: ndcg_at_10 value: 79.105 - task: type: Retrieval dataset: name: MTEB MultiLongDocRetrieval (ru) type: Shitao/MLDR config: ru split: test revision: None metrics: - type: ndcg_at_10 value: 64.21 - task: type: Retrieval dataset: name: MTEB MultiLongDocRetrieval (th) type: Shitao/MLDR config: th split: test revision: None metrics: - type: ndcg_at_10 value: 35.467 - task: type: Retrieval dataset: name: MTEB MultiLongDocRetrieval (zh) type: Shitao/MLDR config: zh split: test revision: None metrics: - type: ndcg_at_10 value: 27.419 - task: type: Classification dataset: name: MTEB MultilingualSentiment type: C-MTEB/MultilingualSentiment-classification config: default split: validation revision: 46958b007a63fdbf239b7672c25d0bea67b5ea1a metrics: - type: accuracy value: 61.02000000000001 - task: type: Retrieval dataset: name: MTEB NFCorpus type: mteb/nfcorpus config: default split: test revision: ec0fa4fe99da2ff19ca1214b7966684033a58814 metrics: - type: ndcg_at_10 value: 36.65 - task: type: Retrieval dataset: name: MTEB NFCorpus-PL type: clarin-knext/nfcorpus-pl config: default split: test revision: 9a6f9567fda928260afed2de480d79c98bf0bec0 metrics: - type: ndcg_at_10 value: 26.831 - task: type: Retrieval dataset: name: MTEB NQ type: mteb/nq config: default split: test revision: b774495ed302d8c44a3a7ea25c90dbce03968f31 metrics: - type: ndcg_at_10 value: 58.111000000000004 - task: type: Retrieval dataset: name: MTEB NQ-PL type: clarin-knext/nq-pl config: default split: test revision: f171245712cf85dd4700b06bef18001578d0ca8d metrics: - type: ndcg_at_10 value: 43.126999999999995 - task: type: PairClassification dataset: name: MTEB Ocnli type: C-MTEB/OCNLI config: default split: validation revision: 66e76a618a34d6d565d5538088562851e6daa7ec metrics: - type: cos_sim_ap value: 72.67630697316041 - task: type: Classification dataset: name: MTEB OnlineShopping type: C-MTEB/OnlineShopping-classification config: default split: test revision: e610f2ebd179a8fda30ae534c3878750a96db120 metrics: - type: accuracy value: 84.85000000000001 - task: type: PairClassification dataset: name: MTEB OpusparcusPC (fr) type: GEM/opusparcus config: fr split: test revision: 9e9b1f8ef51616073f47f306f7f47dd91663f86a metrics: - type: cos_sim_ap value: 100 - task: type: Classification dataset: name: MTEB PAC type: 
laugustyniak/abusive-clauses-pl config: default split: test revision: None metrics: - type: accuracy value: 65.99189110918043 - task: type: STS dataset: name: MTEB PAWSX type: C-MTEB/PAWSX config: default split: test revision: 9c6a90e430ac22b5779fb019a23e820b11a8b5e1 metrics: - type: cos_sim_spearman value: 16.124364530596228 - task: type: PairClassification dataset: name: MTEB PPC type: PL-MTEB/ppc-pairclassification config: default split: test revision: None metrics: - type: cos_sim_ap value: 92.43431057460192 - task: type: PairClassification dataset: name: MTEB PSC type: PL-MTEB/psc-pairclassification config: default split: test revision: None metrics: - type: cos_sim_ap value: 99.06090138049724 - task: type: PairClassification dataset: name: MTEB PawsX (fr) type: paws-x config: fr split: test revision: 8a04d940a42cd40658986fdd8e3da561533a3646 metrics: - type: cos_sim_ap value: 58.9314954874314 - task: type: Classification dataset: name: MTEB PolEmo2.0-IN type: PL-MTEB/polemo2_in config: default split: test revision: None metrics: - type: accuracy value: 69.59833795013851 - task: type: Classification dataset: name: MTEB PolEmo2.0-OUT type: PL-MTEB/polemo2_out config: default split: test revision: None metrics: - type: accuracy value: 44.73684210526315 - task: type: STS dataset: name: MTEB QBQTC type: C-MTEB/QBQTC config: default split: test revision: 790b0510dc52b1553e8c49f3d2afb48c0e5c48b7 metrics: - type: cos_sim_spearman value: 39.36450754137984 - task: type: Retrieval dataset: name: MTEB Quora-PL type: clarin-knext/quora-pl config: default split: test revision: 0be27e93455051e531182b85e85e425aba12e9d4 metrics: - type: ndcg_at_10 value: 80.76299999999999 - task: type: Retrieval dataset: name: MTEB QuoraRetrieval type: mteb/quora config: default split: test revision: None metrics: - type: ndcg_at_10 value: 88.022 - task: type: Clustering dataset: name: MTEB RedditClustering type: mteb/reddit-clustering config: default split: test revision: 24640382cdbf8abc73003fb0fa6d111a705499eb metrics: - type: v_measure value: 55.719165988934385 - task: type: Clustering dataset: name: MTEB RedditClusteringP2P type: mteb/reddit-clustering-p2p config: default split: test revision: 282350215ef01743dc01b456c7f5241fa8937f16 metrics: - type: v_measure value: 62.25390069273025 - task: type: Retrieval dataset: name: MTEB SCIDOCS type: mteb/scidocs config: default split: test revision: None metrics: - type: ndcg_at_10 value: 18.243000000000002 - task: type: Retrieval dataset: name: MTEB SCIDOCS-PL type: clarin-knext/scidocs-pl config: default split: test revision: 45452b03f05560207ef19149545f168e596c9337 metrics: - type: ndcg_at_10 value: 14.219000000000001 - task: type: PairClassification dataset: name: MTEB SICK-E-PL type: PL-MTEB/sicke-pl-pairclassification config: default split: test revision: None metrics: - type: cos_sim_ap value: 75.4022630307816 - task: type: STS dataset: name: MTEB SICK-R type: mteb/sickr-sts config: default split: test revision: a6ea5a8cab320b040a23452cc28066d9beae2cee metrics: - type: cos_sim_spearman value: 79.34269390198548 - task: type: STS dataset: name: MTEB SICK-R-PL type: PL-MTEB/sickr-pl-sts config: default split: test revision: None metrics: - type: cos_sim_spearman value: 74.0651660446132 - task: type: STS dataset: name: MTEB SICKFr type: Lajavaness/SICK-fr config: default split: test revision: e077ab4cf4774a1e36d86d593b150422fafd8e8a metrics: - type: cos_sim_spearman value: 78.62693119733123 - task: type: STS dataset: name: MTEB STS12 type: mteb/sts12-sts config: 
default split: test revision: a0d554a64d88156834ff5ae9920b964011b16384 metrics: - type: cos_sim_spearman value: 77.50660544631359 - task: type: STS dataset: name: MTEB STS13 type: mteb/sts13-sts config: default split: test revision: 7e90230a92c190f1bf69ae9002b8cea547a64cca metrics: - type: cos_sim_spearman value: 85.55415077723738 - task: type: STS dataset: name: MTEB STS14 type: mteb/sts14-sts config: default split: test revision: 6031580fec1f6af667f0bd2da0a551cf4f0b2375 metrics: - type: cos_sim_spearman value: 81.67550814479077 - task: type: STS dataset: name: MTEB STS15 type: mteb/sts15-sts config: default split: test revision: ae752c7c21bf194d8b67fd573edf7ae58183cbe3 metrics: - type: cos_sim_spearman value: 88.94601412322764 - task: type: STS dataset: name: MTEB STS16 type: mteb/sts16-sts config: default split: test revision: 4d8694f8f0e0100860b497b999b3dbed754a0513 metrics: - type: cos_sim_spearman value: 84.33844259337481 - task: type: STS dataset: name: MTEB STS17 (ko-ko) type: mteb/sts17-crosslingual-sts config: ko-ko split: test revision: af5e6fb845001ecf41f4c1e033ce921939a2a68d metrics: - type: cos_sim_spearman value: 81.58650681159105 - task: type: STS dataset: name: MTEB STS17 (ar-ar) type: mteb/sts17-crosslingual-sts config: ar-ar split: test revision: af5e6fb845001ecf41f4c1e033ce921939a2a68d metrics: - type: cos_sim_spearman value: 78.82472265884256 - task: type: STS dataset: name: MTEB STS17 (en-ar) type: mteb/sts17-crosslingual-sts config: en-ar split: test revision: af5e6fb845001ecf41f4c1e033ce921939a2a68d metrics: - type: cos_sim_spearman value: 76.43637938260397 - task: type: STS dataset: name: MTEB STS17 (en-de) type: mteb/sts17-crosslingual-sts config: en-de split: test revision: af5e6fb845001ecf41f4c1e033ce921939a2a68d metrics: - type: cos_sim_spearman value: 84.71008299464059 - task: type: STS dataset: name: MTEB STS17 (en-en) type: mteb/sts17-crosslingual-sts config: en-en split: test revision: af5e6fb845001ecf41f4c1e033ce921939a2a68d metrics: - type: cos_sim_spearman value: 88.88074713413747 - task: type: STS dataset: name: MTEB STS17 (en-tr) type: mteb/sts17-crosslingual-sts config: en-tr split: test revision: af5e6fb845001ecf41f4c1e033ce921939a2a68d metrics: - type: cos_sim_spearman value: 76.36405640457285 - task: type: STS dataset: name: MTEB STS17 (es-en) type: mteb/sts17-crosslingual-sts config: es-en split: test revision: af5e6fb845001ecf41f4c1e033ce921939a2a68d metrics: - type: cos_sim_spearman value: 83.84737910084762 - task: type: STS dataset: name: MTEB STS17 (es-es) type: mteb/sts17-crosslingual-sts config: es-es split: test revision: af5e6fb845001ecf41f4c1e033ce921939a2a68d metrics: - type: cos_sim_spearman value: 87.03931621433031 - task: type: STS dataset: name: MTEB STS17 (fr-en) type: mteb/sts17-crosslingual-sts config: fr-en split: test revision: af5e6fb845001ecf41f4c1e033ce921939a2a68d metrics: - type: cos_sim_spearman value: 84.43335591752246 - task: type: STS dataset: name: MTEB STS17 (it-en) type: mteb/sts17-crosslingual-sts config: it-en split: test revision: af5e6fb845001ecf41f4c1e033ce921939a2a68d metrics: - type: cos_sim_spearman value: 83.85268648747021 - task: type: STS dataset: name: MTEB STS17 (nl-en) type: mteb/sts17-crosslingual-sts config: nl-en split: test revision: af5e6fb845001ecf41f4c1e033ce921939a2a68d metrics: - type: cos_sim_spearman value: 82.45786516224341 - task: type: STS dataset: name: MTEB STS22 (en) type: mteb/sts22-crosslingual-sts config: en split: test revision: eea2b4fe26a775864c896887d910b76a8098ad3f metrics: - 
type: cos_sim_spearman value: 67.20227303970304 - task: type: STS dataset: name: MTEB STS22 (de) type: mteb/sts22-crosslingual-sts config: de split: test revision: eea2b4fe26a775864c896887d910b76a8098ad3f metrics: - type: cos_sim_spearman value: 60.892838305537126 - task: type: STS dataset: name: MTEB STS22 (es) type: mteb/sts22-crosslingual-sts config: es split: test revision: eea2b4fe26a775864c896887d910b76a8098ad3f metrics: - type: cos_sim_spearman value: 72.01876318464508 - task: type: STS dataset: name: MTEB STS22 (pl) type: mteb/sts22-crosslingual-sts config: pl split: test revision: eea2b4fe26a775864c896887d910b76a8098ad3f metrics: - type: cos_sim_spearman value: 42.3879320510127 - task: type: STS dataset: name: MTEB STS22 (tr) type: mteb/sts22-crosslingual-sts config: tr split: test revision: eea2b4fe26a775864c896887d910b76a8098ad3f metrics: - type: cos_sim_spearman value: 65.54048784845729 - task: type: STS dataset: name: MTEB STS22 (ar) type: mteb/sts22-crosslingual-sts config: ar split: test revision: eea2b4fe26a775864c896887d910b76a8098ad3f metrics: - type: cos_sim_spearman value: 58.55244068334867 - task: type: STS dataset: name: MTEB STS22 (ru) type: mteb/sts22-crosslingual-sts config: ru split: test revision: eea2b4fe26a775864c896887d910b76a8098ad3f metrics: - type: cos_sim_spearman value: 66.48710288440624 - task: type: STS dataset: name: MTEB STS22 (zh) type: mteb/sts22-crosslingual-sts config: zh split: test revision: eea2b4fe26a775864c896887d910b76a8098ad3f metrics: - type: cos_sim_spearman value: 66.585754901838 - task: type: STS dataset: name: MTEB STS22 (fr) type: mteb/sts22-crosslingual-sts config: fr split: test revision: eea2b4fe26a775864c896887d910b76a8098ad3f metrics: - type: cos_sim_spearman value: 81.03001290557805 - task: type: STS dataset: name: MTEB STS22 (de-en) type: mteb/sts22-crosslingual-sts config: de-en split: test revision: eea2b4fe26a775864c896887d910b76a8098ad3f metrics: - type: cos_sim_spearman value: 62.28001859884359 - task: type: STS dataset: name: MTEB STS22 (es-en) type: mteb/sts22-crosslingual-sts config: es-en split: test revision: eea2b4fe26a775864c896887d910b76a8098ad3f metrics: - type: cos_sim_spearman value: 79.64106342105019 - task: type: STS dataset: name: MTEB STS22 (it) type: mteb/sts22-crosslingual-sts config: it split: test revision: eea2b4fe26a775864c896887d910b76a8098ad3f metrics: - type: cos_sim_spearman value: 78.27915339361124 - task: type: STS dataset: name: MTEB STS22 (pl-en) type: mteb/sts22-crosslingual-sts config: pl-en split: test revision: eea2b4fe26a775864c896887d910b76a8098ad3f metrics: - type: cos_sim_spearman value: 78.28574268257462 - task: type: STS dataset: name: MTEB STS22 (zh-en) type: mteb/sts22-crosslingual-sts config: zh-en split: test revision: eea2b4fe26a775864c896887d910b76a8098ad3f metrics: - type: cos_sim_spearman value: 72.92658860751482 - task: type: STS dataset: name: MTEB STS22 (es-it) type: mteb/sts22-crosslingual-sts config: es-it split: test revision: eea2b4fe26a775864c896887d910b76a8098ad3f metrics: - type: cos_sim_spearman value: 74.83418886368217 - task: type: STS dataset: name: MTEB STS22 (de-fr) type: mteb/sts22-crosslingual-sts config: de-fr split: test revision: eea2b4fe26a775864c896887d910b76a8098ad3f metrics: - type: cos_sim_spearman value: 56.01064022625769 - task: type: STS dataset: name: MTEB STS22 (de-pl) type: mteb/sts22-crosslingual-sts config: de-pl split: test revision: eea2b4fe26a775864c896887d910b76a8098ad3f metrics: - type: cos_sim_spearman value: 53.64332829635126 - task: 
type: STS dataset: name: MTEB STS22 (fr-pl) type: mteb/sts22-crosslingual-sts config: fr-pl split: test revision: eea2b4fe26a775864c896887d910b76a8098ad3f metrics: - type: cos_sim_spearman value: 73.24670207647144 - task: type: STS dataset: name: MTEB STSB type: C-MTEB/STSB config: default split: test revision: 0cde68302b3541bb8b3c340dc0644b0b745b3dc0 metrics: - type: cos_sim_spearman value: 80.7157790971544 - task: type: STS dataset: name: MTEB STSBenchmark type: mteb/stsbenchmark-sts config: default split: test revision: b0fddb56ed78048fa8b90373c8a3cfc37b684831 metrics: - type: cos_sim_spearman value: 86.45763616928973 - task: type: STS dataset: name: MTEB STSBenchmarkMultilingualSTS (fr) type: stsb_multi_mt config: fr split: test revision: 93d57ef91790589e3ce9c365164337a8a78b7632 metrics: - type: cos_sim_spearman value: 84.4335500335282 - task: type: Reranking dataset: name: MTEB SciDocsRR type: mteb/scidocs-reranking config: default split: test revision: d3c5e1fc0b855ab6097bf1cda04dd73947d7caab metrics: - type: map value: 84.15276484499303 - task: type: Retrieval dataset: name: MTEB SciFact type: mteb/scifact config: default split: test revision: 0228b52cf27578f30900b9e5271d331663a030d7 metrics: - type: ndcg_at_10 value: 73.433 - task: type: Retrieval dataset: name: MTEB SciFact-PL type: clarin-knext/scifact-pl config: default split: test revision: 47932a35f045ef8ed01ba82bf9ff67f6e109207e metrics: - type: ndcg_at_10 value: 58.919999999999995 - task: type: PairClassification dataset: name: MTEB SprintDuplicateQuestions type: mteb/sprintduplicatequestions-pairclassification config: default split: test revision: d66bd1f72af766a5cc4b0ca5e00c162f89e8cc46 metrics: - type: cos_sim_ap value: 95.40564890916419 - task: type: Clustering dataset: name: MTEB StackExchangeClustering type: mteb/stackexchange-clustering config: default split: test revision: 6cbc1f7b2bc0622f2e39d2c77fa502909748c259 metrics: - type: v_measure value: 63.41856697730145 - task: type: Clustering dataset: name: MTEB StackExchangeClusteringP2P type: mteb/stackexchange-clustering-p2p config: default split: test revision: 815ca46b2622cec33ccafc3735d572c266efdb44 metrics: - type: v_measure value: 31.709285904909112 - task: type: Reranking dataset: name: MTEB StackOverflowDupQuestions type: mteb/stackoverflowdupquestions-reranking config: default split: test revision: e185fbe320c72810689fc5848eb6114e1ef5ec69 metrics: - type: map value: 52.09341030060322 - task: type: Summarization dataset: name: MTEB SummEval type: mteb/summeval config: default split: test revision: cda12ad7615edc362dbf25a00fdd61d3b1eaf93c metrics: - type: cos_sim_spearman value: 30.58262517835034 - task: type: Summarization dataset: name: MTEB SummEvalFr type: lyon-nlp/summarization-summeval-fr-p2p config: default split: test revision: b385812de6a9577b6f4d0f88c6a6e35395a94054 metrics: - type: cos_sim_spearman value: 29.744542072951358 - task: type: Reranking dataset: name: MTEB SyntecReranking type: lyon-nlp/mteb-fr-reranking-syntec-s2p config: default split: test revision: b205c5084a0934ce8af14338bf03feb19499c84d metrics: - type: map value: 88.03333333333333 - task: type: Retrieval dataset: name: MTEB SyntecRetrieval type: lyon-nlp/mteb-fr-retrieval-syntec-s2p config: default split: test revision: 77f7e271bf4a92b24fce5119f3486b583ca016ff metrics: - type: ndcg_at_10 value: 83.043 - task: type: Reranking dataset: name: MTEB T2Reranking type: C-MTEB/T2Reranking config: default split: dev revision: 76631901a18387f85eaa53e5450019b87ad58ef9 metrics: - type: map 
value: 67.08577894804324 - task: type: Retrieval dataset: name: MTEB T2Retrieval type: C-MTEB/T2Retrieval config: default split: dev revision: 8731a845f1bf500a4f111cf1070785c793d10e64 metrics: - type: ndcg_at_10 value: 84.718 - task: type: Classification dataset: name: MTEB TNews type: C-MTEB/TNews-classification config: default split: validation revision: 317f262bf1e6126357bbe89e875451e4b0938fe4 metrics: - type: accuracy value: 48.726 - task: type: Retrieval dataset: name: MTEB TRECCOVID type: mteb/trec-covid config: default split: test revision: None metrics: - type: ndcg_at_10 value: 57.56 - task: type: Retrieval dataset: name: MTEB TRECCOVID-PL type: clarin-knext/trec-covid-pl config: default split: test revision: 81bcb408f33366c2a20ac54adafad1ae7e877fdd metrics: - type: ndcg_at_10 value: 59.355999999999995 - task: type: BitextMining dataset: name: MTEB Tatoeba (sqi-eng) type: mteb/tatoeba-bitext-mining config: sqi-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: f1 value: 82.765 - task: type: BitextMining dataset: name: MTEB Tatoeba (fry-eng) type: mteb/tatoeba-bitext-mining config: fry-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: f1 value: 73.69942196531792 - task: type: BitextMining dataset: name: MTEB Tatoeba (kur-eng) type: mteb/tatoeba-bitext-mining config: kur-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: f1 value: 32.86585365853657 - task: type: BitextMining dataset: name: MTEB Tatoeba (tur-eng) type: mteb/tatoeba-bitext-mining config: tur-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: f1 value: 95.81666666666666 - task: type: BitextMining dataset: name: MTEB Tatoeba (deu-eng) type: mteb/tatoeba-bitext-mining config: deu-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: f1 value: 97.75 - task: type: BitextMining dataset: name: MTEB Tatoeba (nld-eng) type: mteb/tatoeba-bitext-mining config: nld-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: f1 value: 93.78333333333335 - task: type: BitextMining dataset: name: MTEB Tatoeba (ron-eng) type: mteb/tatoeba-bitext-mining config: ron-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: f1 value: 90.72333333333333 - task: type: BitextMining dataset: name: MTEB Tatoeba (ang-eng) type: mteb/tatoeba-bitext-mining config: ang-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: f1 value: 42.45202558635395 - task: type: BitextMining dataset: name: MTEB Tatoeba (ido-eng) type: mteb/tatoeba-bitext-mining config: ido-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: f1 value: 77.59238095238095 - task: type: BitextMining dataset: name: MTEB Tatoeba (jav-eng) type: mteb/tatoeba-bitext-mining config: jav-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: f1 value: 35.69686411149825 - task: type: BitextMining dataset: name: MTEB Tatoeba (isl-eng) type: mteb/tatoeba-bitext-mining config: isl-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: f1 value: 82.59333333333333 - task: type: BitextMining dataset: name: MTEB Tatoeba (slv-eng) type: mteb/tatoeba-bitext-mining config: slv-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: f1 value: 84.1456922987907 - task: type: BitextMining dataset: name: MTEB Tatoeba (cym-eng) type: 
mteb/tatoeba-bitext-mining config: cym-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: f1 value: 52.47462133594857 - task: type: BitextMining dataset: name: MTEB Tatoeba (kaz-eng) type: mteb/tatoeba-bitext-mining config: kaz-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: f1 value: 67.62965440356746 - task: type: BitextMining dataset: name: MTEB Tatoeba (est-eng) type: mteb/tatoeba-bitext-mining config: est-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: f1 value: 79.48412698412699 - task: type: BitextMining dataset: name: MTEB Tatoeba (heb-eng) type: mteb/tatoeba-bitext-mining config: heb-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: f1 value: 75.85 - task: type: BitextMining dataset: name: MTEB Tatoeba (gla-eng) type: mteb/tatoeba-bitext-mining config: gla-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: f1 value: 27.32600866497127 - task: type: BitextMining dataset: name: MTEB Tatoeba (mar-eng) type: mteb/tatoeba-bitext-mining config: mar-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: f1 value: 84.38 - task: type: BitextMining dataset: name: MTEB Tatoeba (lat-eng) type: mteb/tatoeba-bitext-mining config: lat-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: f1 value: 42.98888712165028 - task: type: BitextMining dataset: name: MTEB Tatoeba (bel-eng) type: mteb/tatoeba-bitext-mining config: bel-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: f1 value: 85.55690476190476 - task: type: BitextMining dataset: name: MTEB Tatoeba (pms-eng) type: mteb/tatoeba-bitext-mining config: pms-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: f1 value: 46.68466031323174 - task: type: BitextMining dataset: name: MTEB Tatoeba (gle-eng) type: mteb/tatoeba-bitext-mining config: gle-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: f1 value: 32.73071428571428 - task: type: BitextMining dataset: name: MTEB Tatoeba (pes-eng) type: mteb/tatoeba-bitext-mining config: pes-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: f1 value: 88.26333333333334 - task: type: BitextMining dataset: name: MTEB Tatoeba (nob-eng) type: mteb/tatoeba-bitext-mining config: nob-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: f1 value: 96.61666666666666 - task: type: BitextMining dataset: name: MTEB Tatoeba (bul-eng) type: mteb/tatoeba-bitext-mining config: bul-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: f1 value: 91.30666666666666 - task: type: BitextMining dataset: name: MTEB Tatoeba (cbk-eng) type: mteb/tatoeba-bitext-mining config: cbk-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: f1 value: 70.03714285714285 - task: type: BitextMining dataset: name: MTEB Tatoeba (hun-eng) type: mteb/tatoeba-bitext-mining config: hun-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: f1 value: 89.09 - task: type: BitextMining dataset: name: MTEB Tatoeba (uig-eng) type: mteb/tatoeba-bitext-mining config: uig-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: f1 value: 59.570476190476185 - task: type: BitextMining dataset: name: MTEB Tatoeba (rus-eng) type: mteb/tatoeba-bitext-mining config: 
rus-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: f1 value: 92.9 - task: type: BitextMining dataset: name: MTEB Tatoeba (spa-eng) type: mteb/tatoeba-bitext-mining config: spa-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: f1 value: 97.68333333333334 - task: type: BitextMining dataset: name: MTEB Tatoeba (hye-eng) type: mteb/tatoeba-bitext-mining config: hye-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: f1 value: 80.40880503144653 - task: type: BitextMining dataset: name: MTEB Tatoeba (tel-eng) type: mteb/tatoeba-bitext-mining config: tel-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: f1 value: 89.7008547008547 - task: type: BitextMining dataset: name: MTEB Tatoeba (afr-eng) type: mteb/tatoeba-bitext-mining config: afr-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: f1 value: 81.84833333333333 - task: type: BitextMining dataset: name: MTEB Tatoeba (mon-eng) type: mteb/tatoeba-bitext-mining config: mon-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: f1 value: 71.69696969696969 - task: type: BitextMining dataset: name: MTEB Tatoeba (arz-eng) type: mteb/tatoeba-bitext-mining config: arz-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: f1 value: 55.76985790822269 - task: type: BitextMining dataset: name: MTEB Tatoeba (hrv-eng) type: mteb/tatoeba-bitext-mining config: hrv-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: f1 value: 91.66666666666666 - task: type: BitextMining dataset: name: MTEB Tatoeba (nov-eng) type: mteb/tatoeba-bitext-mining config: nov-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: f1 value: 68.36668519547896 - task: type: BitextMining dataset: name: MTEB Tatoeba (gsw-eng) type: mteb/tatoeba-bitext-mining config: gsw-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: f1 value: 36.73992673992674 - task: type: BitextMining dataset: name: MTEB Tatoeba (nds-eng) type: mteb/tatoeba-bitext-mining config: nds-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: f1 value: 63.420952380952365 - task: type: BitextMining dataset: name: MTEB Tatoeba (ukr-eng) type: mteb/tatoeba-bitext-mining config: ukr-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: f1 value: 91.28999999999999 - task: type: BitextMining dataset: name: MTEB Tatoeba (uzb-eng) type: mteb/tatoeba-bitext-mining config: uzb-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: f1 value: 40.95392490046146 - task: type: BitextMining dataset: name: MTEB Tatoeba (lit-eng) type: mteb/tatoeba-bitext-mining config: lit-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: f1 value: 77.58936507936508 - task: type: BitextMining dataset: name: MTEB Tatoeba (ina-eng) type: mteb/tatoeba-bitext-mining config: ina-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: f1 value: 91.28999999999999 - task: type: BitextMining dataset: name: MTEB Tatoeba (lfn-eng) type: mteb/tatoeba-bitext-mining config: lfn-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: f1 value: 63.563650793650794 - task: type: BitextMining dataset: name: MTEB Tatoeba (zsm-eng) type: mteb/tatoeba-bitext-mining config: zsm-eng split: 
test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: f1 value: 94.35 - task: type: BitextMining dataset: name: MTEB Tatoeba (ita-eng) type: mteb/tatoeba-bitext-mining config: ita-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: f1 value: 91.43 - task: type: BitextMining dataset: name: MTEB Tatoeba (cmn-eng) type: mteb/tatoeba-bitext-mining config: cmn-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: f1 value: 95.73333333333332 - task: type: BitextMining dataset: name: MTEB Tatoeba (lvs-eng) type: mteb/tatoeba-bitext-mining config: lvs-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: f1 value: 79.38666666666667 - task: type: BitextMining dataset: name: MTEB Tatoeba (glg-eng) type: mteb/tatoeba-bitext-mining config: glg-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: f1 value: 89.64 - task: type: BitextMining dataset: name: MTEB Tatoeba (ceb-eng) type: mteb/tatoeba-bitext-mining config: ceb-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: f1 value: 21.257184628237262 - task: type: BitextMining dataset: name: MTEB Tatoeba (bre-eng) type: mteb/tatoeba-bitext-mining config: bre-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: f1 value: 13.592316017316017 - task: type: BitextMining dataset: name: MTEB Tatoeba (ben-eng) type: mteb/tatoeba-bitext-mining config: ben-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: f1 value: 73.22666666666666 - task: type: BitextMining dataset: name: MTEB Tatoeba (swg-eng) type: mteb/tatoeba-bitext-mining config: swg-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: f1 value: 51.711309523809526 - task: type: BitextMining dataset: name: MTEB Tatoeba (arq-eng) type: mteb/tatoeba-bitext-mining config: arq-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: f1 value: 24.98790634904795 - task: type: BitextMining dataset: name: MTEB Tatoeba (kab-eng) type: mteb/tatoeba-bitext-mining config: kab-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: f1 value: 17.19218192918193 - task: type: BitextMining dataset: name: MTEB Tatoeba (fra-eng) type: mteb/tatoeba-bitext-mining config: fra-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: f1 value: 93.26666666666667 - task: type: BitextMining dataset: name: MTEB Tatoeba (por-eng) type: mteb/tatoeba-bitext-mining config: por-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: f1 value: 94.57333333333334 - task: type: BitextMining dataset: name: MTEB Tatoeba (tat-eng) type: mteb/tatoeba-bitext-mining config: tat-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: f1 value: 42.35127206127206 - task: type: BitextMining dataset: name: MTEB Tatoeba (oci-eng) type: mteb/tatoeba-bitext-mining config: oci-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: f1 value: 51.12318903318903 - task: type: BitextMining dataset: name: MTEB Tatoeba (pol-eng) type: mteb/tatoeba-bitext-mining config: pol-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: f1 value: 94.89999999999999 - task: type: BitextMining dataset: name: MTEB Tatoeba (war-eng) type: mteb/tatoeba-bitext-mining config: war-eng split: test revision: 
9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: f1 value: 23.856320290390055 - task: type: BitextMining dataset: name: MTEB Tatoeba (aze-eng) type: mteb/tatoeba-bitext-mining config: aze-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: f1 value: 79.52833333333334 - task: type: BitextMining dataset: name: MTEB Tatoeba (vie-eng) type: mteb/tatoeba-bitext-mining config: vie-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: f1 value: 95.93333333333334 - task: type: BitextMining dataset: name: MTEB Tatoeba (nno-eng) type: mteb/tatoeba-bitext-mining config: nno-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: f1 value: 90.75333333333333 - task: type: BitextMining dataset: name: MTEB Tatoeba (cha-eng) type: mteb/tatoeba-bitext-mining config: cha-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: f1 value: 30.802919708029197 - task: type: BitextMining dataset: name: MTEB Tatoeba (mhr-eng) type: mteb/tatoeba-bitext-mining config: mhr-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: f1 value: 15.984076294076294 - task: type: BitextMining dataset: name: MTEB Tatoeba (dan-eng) type: mteb/tatoeba-bitext-mining config: dan-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: f1 value: 91.82666666666667 - task: type: BitextMining dataset: name: MTEB Tatoeba (ell-eng) type: mteb/tatoeba-bitext-mining config: ell-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: f1 value: 91.9 - task: type: BitextMining dataset: name: MTEB Tatoeba (amh-eng) type: mteb/tatoeba-bitext-mining config: amh-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: f1 value: 76.36054421768706 - task: type: BitextMining dataset: name: MTEB Tatoeba (pam-eng) type: mteb/tatoeba-bitext-mining config: pam-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: f1 value: 9.232711399711398 - task: type: BitextMining dataset: name: MTEB Tatoeba (hsb-eng) type: mteb/tatoeba-bitext-mining config: hsb-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: f1 value: 45.640803181175855 - task: type: BitextMining dataset: name: MTEB Tatoeba (srp-eng) type: mteb/tatoeba-bitext-mining config: srp-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: f1 value: 86.29 - task: type: BitextMining dataset: name: MTEB Tatoeba (epo-eng) type: mteb/tatoeba-bitext-mining config: epo-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: f1 value: 88.90833333333332 - task: type: BitextMining dataset: name: MTEB Tatoeba (kzj-eng) type: mteb/tatoeba-bitext-mining config: kzj-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: f1 value: 11.11880248978075 - task: type: BitextMining dataset: name: MTEB Tatoeba (awa-eng) type: mteb/tatoeba-bitext-mining config: awa-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: f1 value: 48.45839345839346 - task: type: BitextMining dataset: name: MTEB Tatoeba (fao-eng) type: mteb/tatoeba-bitext-mining config: fao-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: f1 value: 65.68157033805888 - task: type: BitextMining dataset: name: MTEB Tatoeba (mal-eng) type: mteb/tatoeba-bitext-mining config: mal-eng split: test revision: 
9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: f1 value: 94.63852498786997 - task: type: BitextMining dataset: name: MTEB Tatoeba (ile-eng) type: mteb/tatoeba-bitext-mining config: ile-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: f1 value: 81.67904761904761 - task: type: BitextMining dataset: name: MTEB Tatoeba (bos-eng) type: mteb/tatoeba-bitext-mining config: bos-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: f1 value: 89.35969868173258 - task: type: BitextMining dataset: name: MTEB Tatoeba (cor-eng) type: mteb/tatoeba-bitext-mining config: cor-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: f1 value: 5.957229437229437 - task: type: BitextMining dataset: name: MTEB Tatoeba (cat-eng) type: mteb/tatoeba-bitext-mining config: cat-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: f1 value: 91.50333333333333 - task: type: BitextMining dataset: name: MTEB Tatoeba (eus-eng) type: mteb/tatoeba-bitext-mining config: eus-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: f1 value: 63.75498778998778 - task: type: BitextMining dataset: name: MTEB Tatoeba (yue-eng) type: mteb/tatoeba-bitext-mining config: yue-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: f1 value: 82.99190476190476 - task: type: BitextMining dataset: name: MTEB Tatoeba (swe-eng) type: mteb/tatoeba-bitext-mining config: swe-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: f1 value: 92.95 - task: type: BitextMining dataset: name: MTEB Tatoeba (dtp-eng) type: mteb/tatoeba-bitext-mining config: dtp-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: f1 value: 9.054042624042623 - task: type: BitextMining dataset: name: MTEB Tatoeba (kat-eng) type: mteb/tatoeba-bitext-mining config: kat-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: f1 value: 72.77064981488574 - task: type: BitextMining dataset: name: MTEB Tatoeba (jpn-eng) type: mteb/tatoeba-bitext-mining config: jpn-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: f1 value: 93.14 - task: type: BitextMining dataset: name: MTEB Tatoeba (csb-eng) type: mteb/tatoeba-bitext-mining config: csb-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: f1 value: 29.976786498525627 - task: type: BitextMining dataset: name: MTEB Tatoeba (xho-eng) type: mteb/tatoeba-bitext-mining config: xho-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: f1 value: 67.6525821596244 - task: type: BitextMining dataset: name: MTEB Tatoeba (orv-eng) type: mteb/tatoeba-bitext-mining config: orv-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: f1 value: 33.12964812964813 - task: type: BitextMining dataset: name: MTEB Tatoeba (ind-eng) type: mteb/tatoeba-bitext-mining config: ind-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: f1 value: 92.30666666666666 - task: type: BitextMining dataset: name: MTEB Tatoeba (tuk-eng) type: mteb/tatoeba-bitext-mining config: tuk-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: f1 value: 34.36077879427633 - task: type: BitextMining dataset: name: MTEB Tatoeba (max-eng) type: mteb/tatoeba-bitext-mining config: max-eng split: test revision: 
9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: f1 value: 52.571845212690285 - task: type: BitextMining dataset: name: MTEB Tatoeba (swh-eng) type: mteb/tatoeba-bitext-mining config: swh-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: f1 value: 58.13107263107262 - task: type: BitextMining dataset: name: MTEB Tatoeba (hin-eng) type: mteb/tatoeba-bitext-mining config: hin-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: f1 value: 93.33333333333333 - task: type: BitextMining dataset: name: MTEB Tatoeba (dsb-eng) type: mteb/tatoeba-bitext-mining config: dsb-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: f1 value: 42.87370133925458 - task: type: BitextMining dataset: name: MTEB Tatoeba (ber-eng) type: mteb/tatoeba-bitext-mining config: ber-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: f1 value: 20.394327616827614 - task: type: BitextMining dataset: name: MTEB Tatoeba (tam-eng) type: mteb/tatoeba-bitext-mining config: tam-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: f1 value: 84.29967426710098 - task: type: BitextMining dataset: name: MTEB Tatoeba (slk-eng) type: mteb/tatoeba-bitext-mining config: slk-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: f1 value: 88.80666666666667 - task: type: BitextMining dataset: name: MTEB Tatoeba (tgl-eng) type: mteb/tatoeba-bitext-mining config: tgl-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: f1 value: 67.23062271062273 - task: type: BitextMining dataset: name: MTEB Tatoeba (ast-eng) type: mteb/tatoeba-bitext-mining config: ast-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: f1 value: 78.08398950131233 - task: type: BitextMining dataset: name: MTEB Tatoeba (mkd-eng) type: mteb/tatoeba-bitext-mining config: mkd-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: f1 value: 77.85166666666666 - task: type: BitextMining dataset: name: MTEB Tatoeba (khm-eng) type: mteb/tatoeba-bitext-mining config: khm-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: f1 value: 67.63004001231148 - task: type: BitextMining dataset: name: MTEB Tatoeba (ces-eng) type: mteb/tatoeba-bitext-mining config: ces-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: f1 value: 89.77000000000001 - task: type: BitextMining dataset: name: MTEB Tatoeba (tzl-eng) type: mteb/tatoeba-bitext-mining config: tzl-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: f1 value: 40.2654503616042 - task: type: BitextMining dataset: name: MTEB Tatoeba (urd-eng) type: mteb/tatoeba-bitext-mining config: urd-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: f1 value: 83.90333333333334 - task: type: BitextMining dataset: name: MTEB Tatoeba (ara-eng) type: mteb/tatoeba-bitext-mining config: ara-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: f1 value: 77.80666666666666 - task: type: BitextMining dataset: name: MTEB Tatoeba (kor-eng) type: mteb/tatoeba-bitext-mining config: kor-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: f1 value: 84.08 - task: type: BitextMining dataset: name: MTEB Tatoeba (yid-eng) type: mteb/tatoeba-bitext-mining config: yid-eng split: test revision: 
9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: f1 value: 60.43098607367475 - task: type: BitextMining dataset: name: MTEB Tatoeba (fin-eng) type: mteb/tatoeba-bitext-mining config: fin-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: f1 value: 88.19333333333333 - task: type: BitextMining dataset: name: MTEB Tatoeba (tha-eng) type: mteb/tatoeba-bitext-mining config: tha-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: f1 value: 90.55352798053529 - task: type: BitextMining dataset: name: MTEB Tatoeba (wuu-eng) type: mteb/tatoeba-bitext-mining config: wuu-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: f1 value: 88.44999999999999 - task: type: Clustering dataset: name: MTEB ThuNewsClusteringP2P type: C-MTEB/ThuNewsClusteringP2P config: default split: test revision: 5798586b105c0434e4f0fe5e767abe619442cf93 metrics: - type: v_measure value: 57.25416429643288 - task: type: Clustering dataset: name: MTEB ThuNewsClusteringS2S type: C-MTEB/ThuNewsClusteringS2S config: default split: test revision: 8a8b2caeda43f39e13c4bc5bea0f8a667896e10d metrics: - type: v_measure value: 56.616646560243524 - task: type: Retrieval dataset: name: MTEB Touche2020 type: mteb/touche2020 config: default split: test revision: a34f9a33db75fa0cbb21bb5cfc3dae8dc8bec93f metrics: - type: ndcg_at_10 value: 22.819 - task: type: Classification dataset: name: MTEB ToxicConversationsClassification type: mteb/toxic_conversations_50k config: default split: test revision: d7c0de2777da35d6aae2200a62c6e0e5af397c4c metrics: - type: accuracy value: 71.02579999999999 - task: type: Classification dataset: name: MTEB TweetSentimentExtractionClassification type: mteb/tweet_sentiment_extraction config: default split: test revision: d604517c81ca91fe16a244d1248fc021f9ecee7a metrics: - type: accuracy value: 57.60045274476514 - task: type: Clustering dataset: name: MTEB TwentyNewsgroupsClustering type: mteb/twentynewsgroups-clustering config: default split: test revision: 6125ec4e24fa026cec8a478383ee943acfbd5449 metrics: - type: v_measure value: 50.346666699466205 - task: type: PairClassification dataset: name: MTEB TwitterSemEval2015 type: mteb/twittersemeval2015-pairclassification config: default split: test revision: 70970daeab8776df92f5ea462b6173c0b46fd2d1 metrics: - type: cos_sim_ap value: 71.88199004440489 - task: type: PairClassification dataset: name: MTEB TwitterURLCorpus type: mteb/twitterurlcorpus-pairclassification config: default split: test revision: 8b6510b0b1fa4e4c4f879467980e9be563ec1cdf metrics: - type: cos_sim_ap value: 85.41587779677383 - task: type: Retrieval dataset: name: MTEB VideoRetrieval type: C-MTEB/VideoRetrieval config: default split: dev revision: 58c2597a5943a2ba48f4668c3b90d796283c5639 metrics: - type: ndcg_at_10 value: 72.792 - task: type: Classification dataset: name: MTEB Waimai type: C-MTEB/waimai-classification config: default split: test revision: 339287def212450dcaa9df8c22bf93e9980c7023 metrics: - type: accuracy value: 82.58000000000001 - task: type: Retrieval dataset: name: MTEB XPQARetrieval (fr) type: jinaai/xpqa config: fr split: test revision: c99d599f0a6ab9b85b065da6f9d94f9cf731679f metrics: - type: ndcg_at_10 value: 67.327 --- ## gte-multilingual-base The **gte-multilingual-base** model is the latest in the [GTE](https://huggingface.co/collections/Alibaba-NLP/gte-models-6680f0b13f885cb431e6d469) (General Text Embedding) family of models, featuring several key attributes: - **High 
Performance**: Achieves state-of-the-art (SOTA) results in multilingual retrieval tasks and multi-task representation model evaluations when compared to models of similar size.
- **Training Architecture**: Trained using an encoder-only transformers architecture, resulting in a smaller model size. Unlike previous models based on decoder-only LLM architectures (e.g., gte-qwen2-1.5b-instruct), this model has lower hardware requirements for inference, offering a 10x increase in inference speed.
- **Long Context**: Supports text lengths up to **8192** tokens.
- **Multilingual Capability**: Supports over **70** languages.
- **Elastic Dense Embedding**: Supports elastic dense output representations while maintaining the effectiveness of downstream tasks, which significantly reduces storage costs and improves execution efficiency (see the truncation example in the Usage section below).
- **Sparse Vectors**: In addition to dense representations, it can also generate sparse vectors.

**Paper**: [mGTE: Generalized Long-Context Text Representation and Reranking Models for Multilingual Text Retrieval](https://arxiv.org/pdf/2407.19669)

## Model Information
- Model Size: 305M
- Embedding Dimension: 768
- Max Input Tokens: 8192

## Usage

- **It is recommended to install xformers and enable unpadding for acceleration; refer to [enable-unpadding-and-xformers](https://huggingface.co/Alibaba-NLP/new-impl#recommendation-enable-unpadding-and-acceleration-with-xformers).**
- **How to use it offline: [new-impl/discussions/2](https://huggingface.co/Alibaba-NLP/new-impl/discussions/2#662b08d04d8c3d0a09c88fa3)**
- **How to use with [TEI](https://github.com/huggingface/text-embeddings-inference): [refs/pr/7](https://huggingface.co/Alibaba-NLP/gte-multilingual-base/discussions/7#66bfb82ea03b764ca92a2221)**

### Get Dense Embeddings with Transformers
```python
# Requires transformers>=4.36.0
import torch.nn.functional as F
from transformers import AutoModel, AutoTokenizer

input_texts = [
    "what is the capital of China?",
    "how to implement quick sort in python?",
    "北京",
    "快排算法介绍"
]

model_name_or_path = 'Alibaba-NLP/gte-multilingual-base'
tokenizer = AutoTokenizer.from_pretrained(model_name_or_path)
model = AutoModel.from_pretrained(model_name_or_path, trust_remote_code=True)

# Tokenize the input texts
batch_dict = tokenizer(input_texts, max_length=8192, padding=True, truncation=True, return_tensors='pt')

outputs = model(**batch_dict)

dimension = 768  # The output embedding dimension; should be in [128, 768]
# CLS pooling, then truncate each embedding to the chosen dimension
embeddings = outputs.last_hidden_state[:, 0][:, :dimension]

embeddings = F.normalize(embeddings, p=2, dim=1)
scores = (embeddings[:1] @ embeddings[1:].T) * 100
print(scores.tolist())
# [[0.3016996383666992, 0.7503870129585266, 0.3203084468841553]]
```

### Use with sentence-transformers
```python
# Requires sentence-transformers>=3.0.0
from sentence_transformers import SentenceTransformer

input_texts = [
    "what is the capital of China?",
    "how to implement quick sort in python?",
    "北京",
    "快排算法介绍"
]

model_name_or_path = "Alibaba-NLP/gte-multilingual-base"
model = SentenceTransformer(model_name_or_path, trust_remote_code=True)
embeddings = model.encode(input_texts, normalize_embeddings=True)  # embeddings.shape (4, 768)

# similarity scores
scores = model.similarity(embeddings[:1], embeddings[1:])
print(scores.tolist())
# [[0.301699697971344, 0.7503870129585266, 0.32030850648880005]]
```
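The elastic dense embeddings described above can also be obtained through sentence-transformers by truncating the output dimension. This is a minimal sketch, assuming sentence-transformers>=2.7.0 (which introduced the `truncate_dim` argument); the target dimension of 256 is an arbitrary example value within the supported [128, 768] range:

```python
# Sketch: elastic (truncated) dense embeddings via sentence-transformers.
# Assumes sentence-transformers>=2.7.0; 256 is an example value in [128, 768].
from sentence_transformers import SentenceTransformer

model = SentenceTransformer(
    "Alibaba-NLP/gte-multilingual-base",
    trust_remote_code=True,
    truncate_dim=256,  # keep only the first 256 dimensions of each embedding
)

embeddings = model.encode(
    ["what is the capital of China?", "北京"],
    normalize_embeddings=True,
)
print(embeddings.shape)  # expected: (2, 256)
```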
### Use with infinity

Usage via docker and [infinity](https://github.com/michaelfeil/infinity), MIT Licensed.

```
docker run --gpus all -v $PWD/data:/app/.cache -p "7997":"7997" \
  michaelf34/infinity:0.0.69 \
  v2 --model-id Alibaba-NLP/gte-multilingual-base --revision "main" --dtype float16 --batch-size 32 --device cuda --engine torch --port 7997
```
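Once the container is running, embeddings can be requested over HTTP. A minimal sketch, assuming the server started with the command above and that infinity exposes its OpenAI-compatible `/embeddings` route on the mapped port 7997:

```python
# Sketch: query a running infinity server over its OpenAI-compatible /embeddings route.
# Assumes the docker command above is running locally on port 7997.
import requests

resp = requests.post(
    "http://localhost:7997/embeddings",
    json={
        "model": "Alibaba-NLP/gte-multilingual-base",
        "input": ["what is the capital of China?", "北京"],
    },
    timeout=30,
)
resp.raise_for_status()
vectors = [item["embedding"] for item in resp.json()["data"]]
print(len(vectors), len(vectors[0]))  # expected: 2 768
```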
### Use with custom code to get dense embeddings and sparse token weights
```python
# You can find the script gte_embedding.py in https://huggingface.co/Alibaba-NLP/gte-multilingual-base/blob/main/scripts/gte_embedding.py
# The class name below follows the spelling used in that script.
from gte_embedding import GTEEmbeddidng

model_name_or_path = 'Alibaba-NLP/gte-multilingual-base'
model = GTEEmbeddidng(model_name_or_path)

query = "中国的首都在哪儿"
docs = [
    "what is the capital of China?",
    "how to implement quick sort in python?",
    "北京",
    "快排算法介绍"
]

# Encode documents into dense embeddings and sparse token weights
embs = model.encode(docs, return_dense=True, return_sparse=True)
print('dense_embeddings vecs', embs['dense_embeddings'])
print('token_weights', embs['token_weights'])

# Score query-document pairs with dense-only, sparse-only, and hybrid weighting
pairs = [(query, doc) for doc in docs]
dense_scores = model.compute_scores(pairs, dense_weight=1.0, sparse_weight=0.0)
sparse_scores = model.compute_scores(pairs, dense_weight=0.0, sparse_weight=1.0)
hybrid_scores = model.compute_scores(pairs, dense_weight=1.0, sparse_weight=0.3)

print('dense_scores', dense_scores)
print('sparse_scores', sparse_scores)
print('hybrid_scores', hybrid_scores)

# dense_scores [0.85302734375, 0.257568359375, 0.76953125, 0.325439453125]
# sparse_scores [0.0, 0.0, 4.600879669189453, 1.570279598236084]
# hybrid_scores [0.85302734375, 0.257568359375, 2.1497951507568356, 0.7965233325958252]
```

## Evaluation

We validated the performance of the **gte-multilingual-base** model on multiple downstream tasks, including multilingual retrieval, cross-lingual retrieval, long-text retrieval, and general text representation evaluation on the [MTEB Leaderboard](https://huggingface.co/spaces/mteb/leaderboard), among others.

### Retrieval Task

Retrieval results on [MIRACL](https://arxiv.org/abs/2210.09984) and [MLDR](https://arxiv.org/abs/2402.03216) (multilingual), [MKQA](https://arxiv.org/abs/2007.15207) (cross-lingual), [BEIR](https://arxiv.org/abs/2104.08663) and [LoCo](https://arxiv.org/abs/2402.07440) (English).

![image](./images/mgte-retrieval.png)

- Detailed results on [MLDR](https://arxiv.org/abs/2402.03216)

![image](./images/mgte-retrieval.png)

- Detailed results on [LoCo](https://arxiv.org/abs/2402.07440)

### MTEB

Results on MTEB English, Chinese, French, Polish

![image](./images/mgte-mteb.png)

**More detailed experimental results can be found in the [paper](https://arxiv.org/pdf/2407.19669)**.

## Cloud API Services

In addition to the open-source [GTE](https://huggingface.co/collections/Alibaba-NLP/gte-models-6680f0b13f885cb431e6d469) series models, the GTE models are also available as commercial API services on Alibaba Cloud.

- [Embedding Models](https://help.aliyun.com/zh/model-studio/developer-reference/general-text-embedding/): Three versions of the text embedding models are available: text-embedding-v1/v2/v3, with v3 being the latest API service.
- [ReRank Models](https://help.aliyun.com/zh/model-studio/developer-reference/general-text-sorting-model/): The gte-rerank model service is available.

Note that the models behind the commercial APIs are not entirely identical to the open-source models.

## Citation

If you find our paper or models helpful, please consider citing:

```
@inproceedings{zhang2024mgte,
  title={mGTE: Generalized Long-Context Text Representation and Reranking Models for Multilingual Text Retrieval},
  author={Zhang, Xin and Zhang, Yanzhao and Long, Dingkun and Xie, Wen and Dai, Ziqi and Tang, Jialong and Lin, Huan and Yang, Baosong and Xie, Pengjun and Huang, Fei and others},
  booktitle={Proceedings of the 2024 Conference on Empirical Methods in Natural Language Processing: Industry Track},
  pages={1393--1412},
  year={2024}
}
```
[ "BIOSSES", "SCIFACT" ]
nomic-ai/nomic-embed-text-v1
nomic-ai
sentence-similarity
[ "sentence-transformers", "pytorch", "onnx", "safetensors", "nomic_bert", "feature-extraction", "sentence-similarity", "mteb", "transformers", "transformers.js", "custom_code", "en", "arxiv:2402.01613", "license:apache-2.0", "model-index", "autotrain_compatible", "text-embeddings-inference", "endpoints_compatible", "region:us" ]
"2024-01-31T20:26:50Z"
2024-09-26T14:42:37+00:00
1,037,781
498
--- language: - en library_name: sentence-transformers license: apache-2.0 pipeline_tag: sentence-similarity tags: - feature-extraction - sentence-similarity - mteb - transformers - transformers.js new_version: nomic-ai/nomic-embed-text-v1.5 model-index: - name: epoch_0_model results: - task: type: Classification dataset: name: MTEB AmazonCounterfactualClassification (en) type: mteb/amazon_counterfactual config: en split: test revision: e8379541af4e31359cca9fbcf4b00f2671dba205 metrics: - type: accuracy value: 76.8507462686567 - type: ap value: 40.592189159090495 - type: f1 value: 71.01634655512476 - task: type: Classification dataset: name: MTEB AmazonPolarityClassification type: mteb/amazon_polarity config: default split: test revision: e2d317d38cd51312af73b3d32a06d1a08b442046 metrics: - type: accuracy value: 91.51892500000001 - type: ap value: 88.50346762975335 - type: f1 value: 91.50342077459624 - task: type: Classification dataset: name: MTEB AmazonReviewsClassification (en) type: mteb/amazon_reviews_multi config: en split: test revision: 1399c76144fd37290681b995c656ef9b2e06e26d metrics: - type: accuracy value: 47.364 - type: f1 value: 46.72708080922794 - task: type: Retrieval dataset: name: MTEB ArguAna type: arguana config: default split: test revision: None metrics: - type: map_at_1 value: 25.178 - type: map_at_10 value: 40.244 - type: map_at_100 value: 41.321999999999996 - type: map_at_1000 value: 41.331 - type: map_at_3 value: 35.016999999999996 - type: map_at_5 value: 37.99 - type: mrr_at_1 value: 25.605 - type: mrr_at_10 value: 40.422000000000004 - type: mrr_at_100 value: 41.507 - type: mrr_at_1000 value: 41.516 - type: mrr_at_3 value: 35.23 - type: mrr_at_5 value: 38.15 - type: ndcg_at_1 value: 25.178 - type: ndcg_at_10 value: 49.258 - type: ndcg_at_100 value: 53.776 - type: ndcg_at_1000 value: 53.995000000000005 - type: ndcg_at_3 value: 38.429 - type: ndcg_at_5 value: 43.803 - type: precision_at_1 value: 25.178 - type: precision_at_10 value: 7.831 - type: precision_at_100 value: 0.979 - type: precision_at_1000 value: 0.1 - type: precision_at_3 value: 16.121 - type: precision_at_5 value: 12.29 - type: recall_at_1 value: 25.178 - type: recall_at_10 value: 78.307 - type: recall_at_100 value: 97.866 - type: recall_at_1000 value: 99.57300000000001 - type: recall_at_3 value: 48.364000000000004 - type: recall_at_5 value: 61.451 - task: type: Clustering dataset: name: MTEB ArxivClusteringP2P type: mteb/arxiv-clustering-p2p config: default split: test revision: a122ad7f3f0291bf49cc6f4d32aa80929df69d5d metrics: - type: v_measure value: 45.93034494751465 - task: type: Clustering dataset: name: MTEB ArxivClusteringS2S type: mteb/arxiv-clustering-s2s config: default split: test revision: f910caf1a6075f7329cdf8c1a6135696f37dbd53 metrics: - type: v_measure value: 36.64579480054327 - task: type: Reranking dataset: name: MTEB AskUbuntuDupQuestions type: mteb/askubuntudupquestions-reranking config: default split: test revision: 2000358ca161889fa9c082cb41daa8dcfb161a54 metrics: - type: map value: 60.601310529222054 - type: mrr value: 75.04484896451656 - task: type: STS dataset: name: MTEB BIOSSES type: mteb/biosses-sts config: default split: test revision: d3fb88f8f02e40887cd149695127462bbcf29b4a metrics: - type: cos_sim_pearson value: 88.57797718095814 - type: cos_sim_spearman value: 86.47064499110101 - type: euclidean_pearson value: 87.4559602783142 - type: euclidean_spearman value: 86.47064499110101 - type: manhattan_pearson value: 87.7232764230245 - type: manhattan_spearman value: 
86.91222131777742 - task: type: Classification dataset: name: MTEB Banking77Classification type: mteb/banking77 config: default split: test revision: 0fd18e25b25c072e09e0d92ab615fda904d66300 metrics: - type: accuracy value: 84.5422077922078 - type: f1 value: 84.47657456950589 - task: type: Clustering dataset: name: MTEB BiorxivClusteringP2P type: mteb/biorxiv-clustering-p2p config: default split: test revision: 65b79d1d13f80053f67aca9498d9402c2d9f1f40 metrics: - type: v_measure value: 38.48953561974464 - task: type: Clustering dataset: name: MTEB BiorxivClusteringS2S type: mteb/biorxiv-clustering-s2s config: default split: test revision: 258694dd0231531bc1fd9de6ceb52a0853c6d908 metrics: - type: v_measure value: 32.75995857510105 - task: type: Retrieval dataset: name: MTEB CQADupstackAndroidRetrieval type: BeIR/cqadupstack config: default split: test revision: None metrics: - type: map_at_1 value: 30.008000000000003 - type: map_at_10 value: 39.51 - type: map_at_100 value: 40.841 - type: map_at_1000 value: 40.973 - type: map_at_3 value: 36.248999999999995 - type: map_at_5 value: 38.096999999999994 - type: mrr_at_1 value: 36.481 - type: mrr_at_10 value: 44.818000000000005 - type: mrr_at_100 value: 45.64 - type: mrr_at_1000 value: 45.687 - type: mrr_at_3 value: 42.036 - type: mrr_at_5 value: 43.782 - type: ndcg_at_1 value: 36.481 - type: ndcg_at_10 value: 45.152 - type: ndcg_at_100 value: 50.449 - type: ndcg_at_1000 value: 52.76499999999999 - type: ndcg_at_3 value: 40.161 - type: ndcg_at_5 value: 42.577999999999996 - type: precision_at_1 value: 36.481 - type: precision_at_10 value: 8.369 - type: precision_at_100 value: 1.373 - type: precision_at_1000 value: 0.186 - type: precision_at_3 value: 18.693 - type: precision_at_5 value: 13.533999999999999 - type: recall_at_1 value: 30.008000000000003 - type: recall_at_10 value: 56.108999999999995 - type: recall_at_100 value: 78.55499999999999 - type: recall_at_1000 value: 93.659 - type: recall_at_3 value: 41.754999999999995 - type: recall_at_5 value: 48.296 - type: map_at_1 value: 30.262 - type: map_at_10 value: 40.139 - type: map_at_100 value: 41.394 - type: map_at_1000 value: 41.526 - type: map_at_3 value: 37.155 - type: map_at_5 value: 38.785 - type: mrr_at_1 value: 38.153 - type: mrr_at_10 value: 46.369 - type: mrr_at_100 value: 47.072 - type: mrr_at_1000 value: 47.111999999999995 - type: mrr_at_3 value: 44.268 - type: mrr_at_5 value: 45.389 - type: ndcg_at_1 value: 38.153 - type: ndcg_at_10 value: 45.925 - type: ndcg_at_100 value: 50.394000000000005 - type: ndcg_at_1000 value: 52.37500000000001 - type: ndcg_at_3 value: 41.754000000000005 - type: ndcg_at_5 value: 43.574 - type: precision_at_1 value: 38.153 - type: precision_at_10 value: 8.796 - type: precision_at_100 value: 1.432 - type: precision_at_1000 value: 0.189 - type: precision_at_3 value: 20.318 - type: precision_at_5 value: 14.395 - type: recall_at_1 value: 30.262 - type: recall_at_10 value: 55.72200000000001 - type: recall_at_100 value: 74.97500000000001 - type: recall_at_1000 value: 87.342 - type: recall_at_3 value: 43.129 - type: recall_at_5 value: 48.336 - type: map_at_1 value: 39.951 - type: map_at_10 value: 51.248000000000005 - type: map_at_100 value: 52.188 - type: map_at_1000 value: 52.247 - type: map_at_3 value: 48.211 - type: map_at_5 value: 49.797000000000004 - type: mrr_at_1 value: 45.329 - type: mrr_at_10 value: 54.749 - type: mrr_at_100 value: 55.367999999999995 - type: mrr_at_1000 value: 55.400000000000006 - type: mrr_at_3 value: 52.382 - type: mrr_at_5 value: 53.649 - 
type: ndcg_at_1 value: 45.329 - type: ndcg_at_10 value: 56.847 - type: ndcg_at_100 value: 60.738 - type: ndcg_at_1000 value: 61.976 - type: ndcg_at_3 value: 51.59 - type: ndcg_at_5 value: 53.915 - type: precision_at_1 value: 45.329 - type: precision_at_10 value: 8.959 - type: precision_at_100 value: 1.187 - type: precision_at_1000 value: 0.134 - type: precision_at_3 value: 22.612 - type: precision_at_5 value: 15.273 - type: recall_at_1 value: 39.951 - type: recall_at_10 value: 70.053 - type: recall_at_100 value: 86.996 - type: recall_at_1000 value: 95.707 - type: recall_at_3 value: 56.032000000000004 - type: recall_at_5 value: 61.629999999999995 - type: map_at_1 value: 25.566 - type: map_at_10 value: 33.207 - type: map_at_100 value: 34.166000000000004 - type: map_at_1000 value: 34.245 - type: map_at_3 value: 30.94 - type: map_at_5 value: 32.01 - type: mrr_at_1 value: 27.345000000000002 - type: mrr_at_10 value: 35.193000000000005 - type: mrr_at_100 value: 35.965 - type: mrr_at_1000 value: 36.028999999999996 - type: mrr_at_3 value: 32.806000000000004 - type: mrr_at_5 value: 34.021 - type: ndcg_at_1 value: 27.345000000000002 - type: ndcg_at_10 value: 37.891999999999996 - type: ndcg_at_100 value: 42.664 - type: ndcg_at_1000 value: 44.757000000000005 - type: ndcg_at_3 value: 33.123000000000005 - type: ndcg_at_5 value: 35.035 - type: precision_at_1 value: 27.345000000000002 - type: precision_at_10 value: 5.763 - type: precision_at_100 value: 0.859 - type: precision_at_1000 value: 0.108 - type: precision_at_3 value: 13.71 - type: precision_at_5 value: 9.401 - type: recall_at_1 value: 25.566 - type: recall_at_10 value: 50.563 - type: recall_at_100 value: 72.86399999999999 - type: recall_at_1000 value: 88.68599999999999 - type: recall_at_3 value: 37.43 - type: recall_at_5 value: 41.894999999999996 - type: map_at_1 value: 16.663 - type: map_at_10 value: 23.552 - type: map_at_100 value: 24.538 - type: map_at_1000 value: 24.661 - type: map_at_3 value: 21.085 - type: map_at_5 value: 22.391 - type: mrr_at_1 value: 20.025000000000002 - type: mrr_at_10 value: 27.643 - type: mrr_at_100 value: 28.499999999999996 - type: mrr_at_1000 value: 28.582 - type: mrr_at_3 value: 25.083 - type: mrr_at_5 value: 26.544 - type: ndcg_at_1 value: 20.025000000000002 - type: ndcg_at_10 value: 28.272000000000002 - type: ndcg_at_100 value: 33.353 - type: ndcg_at_1000 value: 36.454 - type: ndcg_at_3 value: 23.579 - type: ndcg_at_5 value: 25.685000000000002 - type: precision_at_1 value: 20.025000000000002 - type: precision_at_10 value: 5.187 - type: precision_at_100 value: 0.897 - type: precision_at_1000 value: 0.13 - type: precision_at_3 value: 10.987 - type: precision_at_5 value: 8.06 - type: recall_at_1 value: 16.663 - type: recall_at_10 value: 38.808 - type: recall_at_100 value: 61.305 - type: recall_at_1000 value: 83.571 - type: recall_at_3 value: 25.907999999999998 - type: recall_at_5 value: 31.214 - type: map_at_1 value: 27.695999999999998 - type: map_at_10 value: 37.018 - type: map_at_100 value: 38.263000000000005 - type: map_at_1000 value: 38.371 - type: map_at_3 value: 34.226 - type: map_at_5 value: 35.809999999999995 - type: mrr_at_1 value: 32.916000000000004 - type: mrr_at_10 value: 42.067 - type: mrr_at_100 value: 42.925000000000004 - type: mrr_at_1000 value: 42.978 - type: mrr_at_3 value: 39.637 - type: mrr_at_5 value: 41.134 - type: ndcg_at_1 value: 32.916000000000004 - type: ndcg_at_10 value: 42.539 - type: ndcg_at_100 value: 47.873 - type: ndcg_at_1000 value: 50.08200000000001 - type: ndcg_at_3 value: 
37.852999999999994 - type: ndcg_at_5 value: 40.201 - type: precision_at_1 value: 32.916000000000004 - type: precision_at_10 value: 7.5840000000000005 - type: precision_at_100 value: 1.199 - type: precision_at_1000 value: 0.155 - type: precision_at_3 value: 17.485 - type: precision_at_5 value: 12.512 - type: recall_at_1 value: 27.695999999999998 - type: recall_at_10 value: 53.638 - type: recall_at_100 value: 76.116 - type: recall_at_1000 value: 91.069 - type: recall_at_3 value: 41.13 - type: recall_at_5 value: 46.872 - type: map_at_1 value: 24.108 - type: map_at_10 value: 33.372 - type: map_at_100 value: 34.656 - type: map_at_1000 value: 34.768 - type: map_at_3 value: 30.830999999999996 - type: map_at_5 value: 32.204 - type: mrr_at_1 value: 29.110000000000003 - type: mrr_at_10 value: 37.979 - type: mrr_at_100 value: 38.933 - type: mrr_at_1000 value: 38.988 - type: mrr_at_3 value: 35.731 - type: mrr_at_5 value: 36.963 - type: ndcg_at_1 value: 29.110000000000003 - type: ndcg_at_10 value: 38.635000000000005 - type: ndcg_at_100 value: 44.324999999999996 - type: ndcg_at_1000 value: 46.747 - type: ndcg_at_3 value: 34.37 - type: ndcg_at_5 value: 36.228 - type: precision_at_1 value: 29.110000000000003 - type: precision_at_10 value: 6.963 - type: precision_at_100 value: 1.146 - type: precision_at_1000 value: 0.152 - type: precision_at_3 value: 16.400000000000002 - type: precision_at_5 value: 11.552999999999999 - type: recall_at_1 value: 24.108 - type: recall_at_10 value: 49.597 - type: recall_at_100 value: 73.88900000000001 - type: recall_at_1000 value: 90.62400000000001 - type: recall_at_3 value: 37.662 - type: recall_at_5 value: 42.565 - type: map_at_1 value: 25.00791666666667 - type: map_at_10 value: 33.287749999999996 - type: map_at_100 value: 34.41141666666667 - type: map_at_1000 value: 34.52583333333333 - type: map_at_3 value: 30.734416666666668 - type: map_at_5 value: 32.137166666666666 - type: mrr_at_1 value: 29.305666666666664 - type: mrr_at_10 value: 37.22966666666666 - type: mrr_at_100 value: 38.066583333333334 - type: mrr_at_1000 value: 38.12616666666667 - type: mrr_at_3 value: 34.92275 - type: mrr_at_5 value: 36.23333333333334 - type: ndcg_at_1 value: 29.305666666666664 - type: ndcg_at_10 value: 38.25533333333333 - type: ndcg_at_100 value: 43.25266666666666 - type: ndcg_at_1000 value: 45.63583333333334 - type: ndcg_at_3 value: 33.777166666666666 - type: ndcg_at_5 value: 35.85 - type: precision_at_1 value: 29.305666666666664 - type: precision_at_10 value: 6.596416666666667 - type: precision_at_100 value: 1.0784166666666668 - type: precision_at_1000 value: 0.14666666666666664 - type: precision_at_3 value: 15.31075 - type: precision_at_5 value: 10.830916666666667 - type: recall_at_1 value: 25.00791666666667 - type: recall_at_10 value: 49.10933333333333 - type: recall_at_100 value: 71.09216666666667 - type: recall_at_1000 value: 87.77725000000001 - type: recall_at_3 value: 36.660916666666665 - type: recall_at_5 value: 41.94149999999999 - type: map_at_1 value: 23.521 - type: map_at_10 value: 30.043 - type: map_at_100 value: 30.936000000000003 - type: map_at_1000 value: 31.022 - type: map_at_3 value: 27.926000000000002 - type: map_at_5 value: 29.076999999999998 - type: mrr_at_1 value: 26.227 - type: mrr_at_10 value: 32.822 - type: mrr_at_100 value: 33.61 - type: mrr_at_1000 value: 33.672000000000004 - type: mrr_at_3 value: 30.776999999999997 - type: mrr_at_5 value: 31.866 - type: ndcg_at_1 value: 26.227 - type: ndcg_at_10 value: 34.041 - type: ndcg_at_100 value: 38.394 - type: ndcg_at_1000 
value: 40.732 - type: ndcg_at_3 value: 30.037999999999997 - type: ndcg_at_5 value: 31.845000000000002 - type: precision_at_1 value: 26.227 - type: precision_at_10 value: 5.244999999999999 - type: precision_at_100 value: 0.808 - type: precision_at_1000 value: 0.107 - type: precision_at_3 value: 12.679000000000002 - type: precision_at_5 value: 8.773 - type: recall_at_1 value: 23.521 - type: recall_at_10 value: 43.633 - type: recall_at_100 value: 63.126000000000005 - type: recall_at_1000 value: 80.765 - type: recall_at_3 value: 32.614 - type: recall_at_5 value: 37.15 - type: map_at_1 value: 16.236 - type: map_at_10 value: 22.898 - type: map_at_100 value: 23.878 - type: map_at_1000 value: 24.009 - type: map_at_3 value: 20.87 - type: map_at_5 value: 22.025 - type: mrr_at_1 value: 19.339000000000002 - type: mrr_at_10 value: 26.382 - type: mrr_at_100 value: 27.245 - type: mrr_at_1000 value: 27.33 - type: mrr_at_3 value: 24.386 - type: mrr_at_5 value: 25.496000000000002 - type: ndcg_at_1 value: 19.339000000000002 - type: ndcg_at_10 value: 27.139999999999997 - type: ndcg_at_100 value: 31.944 - type: ndcg_at_1000 value: 35.077999999999996 - type: ndcg_at_3 value: 23.424 - type: ndcg_at_5 value: 25.188 - type: precision_at_1 value: 19.339000000000002 - type: precision_at_10 value: 4.8309999999999995 - type: precision_at_100 value: 0.845 - type: precision_at_1000 value: 0.128 - type: precision_at_3 value: 10.874 - type: precision_at_5 value: 7.825 - type: recall_at_1 value: 16.236 - type: recall_at_10 value: 36.513 - type: recall_at_100 value: 57.999 - type: recall_at_1000 value: 80.512 - type: recall_at_3 value: 26.179999999999996 - type: recall_at_5 value: 30.712 - type: map_at_1 value: 24.11 - type: map_at_10 value: 31.566 - type: map_at_100 value: 32.647 - type: map_at_1000 value: 32.753 - type: map_at_3 value: 29.24 - type: map_at_5 value: 30.564999999999998 - type: mrr_at_1 value: 28.265 - type: mrr_at_10 value: 35.504000000000005 - type: mrr_at_100 value: 36.436 - type: mrr_at_1000 value: 36.503 - type: mrr_at_3 value: 33.349000000000004 - type: mrr_at_5 value: 34.622 - type: ndcg_at_1 value: 28.265 - type: ndcg_at_10 value: 36.192 - type: ndcg_at_100 value: 41.388000000000005 - type: ndcg_at_1000 value: 43.948 - type: ndcg_at_3 value: 31.959 - type: ndcg_at_5 value: 33.998 - type: precision_at_1 value: 28.265 - type: precision_at_10 value: 5.989 - type: precision_at_100 value: 0.9650000000000001 - type: precision_at_1000 value: 0.13 - type: precision_at_3 value: 14.335 - type: precision_at_5 value: 10.112 - type: recall_at_1 value: 24.11 - type: recall_at_10 value: 46.418 - type: recall_at_100 value: 69.314 - type: recall_at_1000 value: 87.397 - type: recall_at_3 value: 34.724 - type: recall_at_5 value: 39.925 - type: map_at_1 value: 22.091 - type: map_at_10 value: 29.948999999999998 - type: map_at_100 value: 31.502000000000002 - type: map_at_1000 value: 31.713 - type: map_at_3 value: 27.464 - type: map_at_5 value: 28.968 - type: mrr_at_1 value: 26.482 - type: mrr_at_10 value: 34.009 - type: mrr_at_100 value: 35.081 - type: mrr_at_1000 value: 35.138000000000005 - type: mrr_at_3 value: 31.785000000000004 - type: mrr_at_5 value: 33.178999999999995 - type: ndcg_at_1 value: 26.482 - type: ndcg_at_10 value: 35.008 - type: ndcg_at_100 value: 41.272999999999996 - type: ndcg_at_1000 value: 43.972 - type: ndcg_at_3 value: 30.804 - type: ndcg_at_5 value: 33.046 - type: precision_at_1 value: 26.482 - type: precision_at_10 value: 6.462 - type: precision_at_100 value: 1.431 - type: precision_at_1000 value: 
0.22899999999999998 - type: precision_at_3 value: 14.360999999999999 - type: precision_at_5 value: 10.474 - type: recall_at_1 value: 22.091 - type: recall_at_10 value: 45.125 - type: recall_at_100 value: 72.313 - type: recall_at_1000 value: 89.503 - type: recall_at_3 value: 33.158 - type: recall_at_5 value: 39.086999999999996 - type: map_at_1 value: 19.883 - type: map_at_10 value: 26.951000000000004 - type: map_at_100 value: 27.927999999999997 - type: map_at_1000 value: 28.022000000000002 - type: map_at_3 value: 24.616 - type: map_at_5 value: 25.917 - type: mrr_at_1 value: 21.996 - type: mrr_at_10 value: 29.221000000000004 - type: mrr_at_100 value: 30.024 - type: mrr_at_1000 value: 30.095 - type: mrr_at_3 value: 26.833000000000002 - type: mrr_at_5 value: 28.155 - type: ndcg_at_1 value: 21.996 - type: ndcg_at_10 value: 31.421 - type: ndcg_at_100 value: 36.237 - type: ndcg_at_1000 value: 38.744 - type: ndcg_at_3 value: 26.671 - type: ndcg_at_5 value: 28.907 - type: precision_at_1 value: 21.996 - type: precision_at_10 value: 5.009 - type: precision_at_100 value: 0.799 - type: precision_at_1000 value: 0.11199999999999999 - type: precision_at_3 value: 11.275 - type: precision_at_5 value: 8.059 - type: recall_at_1 value: 19.883 - type: recall_at_10 value: 43.132999999999996 - type: recall_at_100 value: 65.654 - type: recall_at_1000 value: 84.492 - type: recall_at_3 value: 30.209000000000003 - type: recall_at_5 value: 35.616 - task: type: Retrieval dataset: name: MTEB ClimateFEVER type: climate-fever config: default split: test revision: None metrics: - type: map_at_1 value: 17.756 - type: map_at_10 value: 30.378 - type: map_at_100 value: 32.537 - type: map_at_1000 value: 32.717 - type: map_at_3 value: 25.599 - type: map_at_5 value: 28.372999999999998 - type: mrr_at_1 value: 41.303 - type: mrr_at_10 value: 53.483999999999995 - type: mrr_at_100 value: 54.106 - type: mrr_at_1000 value: 54.127 - type: mrr_at_3 value: 50.315 - type: mrr_at_5 value: 52.396 - type: ndcg_at_1 value: 41.303 - type: ndcg_at_10 value: 40.503 - type: ndcg_at_100 value: 47.821000000000005 - type: ndcg_at_1000 value: 50.788 - type: ndcg_at_3 value: 34.364 - type: ndcg_at_5 value: 36.818 - type: precision_at_1 value: 41.303 - type: precision_at_10 value: 12.463000000000001 - type: precision_at_100 value: 2.037 - type: precision_at_1000 value: 0.26 - type: precision_at_3 value: 25.798 - type: precision_at_5 value: 19.896 - type: recall_at_1 value: 17.756 - type: recall_at_10 value: 46.102 - type: recall_at_100 value: 70.819 - type: recall_at_1000 value: 87.21799999999999 - type: recall_at_3 value: 30.646 - type: recall_at_5 value: 38.022 - task: type: Retrieval dataset: name: MTEB DBPedia type: dbpedia-entity config: default split: test revision: None metrics: - type: map_at_1 value: 9.033 - type: map_at_10 value: 20.584 - type: map_at_100 value: 29.518 - type: map_at_1000 value: 31.186000000000003 - type: map_at_3 value: 14.468 - type: map_at_5 value: 17.177 - type: mrr_at_1 value: 69.75 - type: mrr_at_10 value: 77.025 - type: mrr_at_100 value: 77.36699999999999 - type: mrr_at_1000 value: 77.373 - type: mrr_at_3 value: 75.583 - type: mrr_at_5 value: 76.396 - type: ndcg_at_1 value: 58.5 - type: ndcg_at_10 value: 45.033 - type: ndcg_at_100 value: 49.071 - type: ndcg_at_1000 value: 56.056 - type: ndcg_at_3 value: 49.936 - type: ndcg_at_5 value: 47.471999999999994 - type: precision_at_1 value: 69.75 - type: precision_at_10 value: 35.775 - type: precision_at_100 value: 11.594999999999999 - type: precision_at_1000 value: 2.062 - 
type: precision_at_3 value: 52.5 - type: precision_at_5 value: 45.300000000000004 - type: recall_at_1 value: 9.033 - type: recall_at_10 value: 26.596999999999998 - type: recall_at_100 value: 54.607000000000006 - type: recall_at_1000 value: 76.961 - type: recall_at_3 value: 15.754999999999999 - type: recall_at_5 value: 20.033 - task: type: Classification dataset: name: MTEB EmotionClassification type: mteb/emotion config: default split: test revision: 4f58c6b202a23cf9a4da393831edf4f9183cad37 metrics: - type: accuracy value: 48.345000000000006 - type: f1 value: 43.4514918068706 - task: type: Retrieval dataset: name: MTEB FEVER type: fever config: default split: test revision: None metrics: - type: map_at_1 value: 71.29100000000001 - type: map_at_10 value: 81.059 - type: map_at_100 value: 81.341 - type: map_at_1000 value: 81.355 - type: map_at_3 value: 79.74799999999999 - type: map_at_5 value: 80.612 - type: mrr_at_1 value: 76.40299999999999 - type: mrr_at_10 value: 84.615 - type: mrr_at_100 value: 84.745 - type: mrr_at_1000 value: 84.748 - type: mrr_at_3 value: 83.776 - type: mrr_at_5 value: 84.343 - type: ndcg_at_1 value: 76.40299999999999 - type: ndcg_at_10 value: 84.981 - type: ndcg_at_100 value: 86.00999999999999 - type: ndcg_at_1000 value: 86.252 - type: ndcg_at_3 value: 82.97 - type: ndcg_at_5 value: 84.152 - type: precision_at_1 value: 76.40299999999999 - type: precision_at_10 value: 10.446 - type: precision_at_100 value: 1.1199999999999999 - type: precision_at_1000 value: 0.116 - type: precision_at_3 value: 32.147999999999996 - type: precision_at_5 value: 20.135 - type: recall_at_1 value: 71.29100000000001 - type: recall_at_10 value: 93.232 - type: recall_at_100 value: 97.363 - type: recall_at_1000 value: 98.905 - type: recall_at_3 value: 87.893 - type: recall_at_5 value: 90.804 - task: type: Retrieval dataset: name: MTEB FiQA2018 type: fiqa config: default split: test revision: None metrics: - type: map_at_1 value: 18.667 - type: map_at_10 value: 30.853 - type: map_at_100 value: 32.494 - type: map_at_1000 value: 32.677 - type: map_at_3 value: 26.91 - type: map_at_5 value: 29.099000000000004 - type: mrr_at_1 value: 37.191 - type: mrr_at_10 value: 46.171 - type: mrr_at_100 value: 47.056 - type: mrr_at_1000 value: 47.099000000000004 - type: mrr_at_3 value: 44.059 - type: mrr_at_5 value: 45.147 - type: ndcg_at_1 value: 37.191 - type: ndcg_at_10 value: 38.437 - type: ndcg_at_100 value: 44.62 - type: ndcg_at_1000 value: 47.795 - type: ndcg_at_3 value: 35.003 - type: ndcg_at_5 value: 36.006 - type: precision_at_1 value: 37.191 - type: precision_at_10 value: 10.586 - type: precision_at_100 value: 1.688 - type: precision_at_1000 value: 0.22699999999999998 - type: precision_at_3 value: 23.302 - type: precision_at_5 value: 17.006 - type: recall_at_1 value: 18.667 - type: recall_at_10 value: 45.367000000000004 - type: recall_at_100 value: 68.207 - type: recall_at_1000 value: 87.072 - type: recall_at_3 value: 32.129000000000005 - type: recall_at_5 value: 37.719 - task: type: Retrieval dataset: name: MTEB HotpotQA type: hotpotqa config: default split: test revision: None metrics: - type: map_at_1 value: 39.494 - type: map_at_10 value: 66.223 - type: map_at_100 value: 67.062 - type: map_at_1000 value: 67.11500000000001 - type: map_at_3 value: 62.867 - type: map_at_5 value: 64.994 - type: mrr_at_1 value: 78.987 - type: mrr_at_10 value: 84.585 - type: mrr_at_100 value: 84.773 - type: mrr_at_1000 value: 84.77900000000001 - type: mrr_at_3 value: 83.592 - type: mrr_at_5 value: 84.235 - type: ndcg_at_1 
value: 78.987 - type: ndcg_at_10 value: 73.64 - type: ndcg_at_100 value: 76.519 - type: ndcg_at_1000 value: 77.51 - type: ndcg_at_3 value: 68.893 - type: ndcg_at_5 value: 71.585 - type: precision_at_1 value: 78.987 - type: precision_at_10 value: 15.529000000000002 - type: precision_at_100 value: 1.7770000000000001 - type: precision_at_1000 value: 0.191 - type: precision_at_3 value: 44.808 - type: precision_at_5 value: 29.006999999999998 - type: recall_at_1 value: 39.494 - type: recall_at_10 value: 77.643 - type: recall_at_100 value: 88.825 - type: recall_at_1000 value: 95.321 - type: recall_at_3 value: 67.211 - type: recall_at_5 value: 72.519 - task: type: Classification dataset: name: MTEB ImdbClassification type: mteb/imdb config: default split: test revision: 3d86128a09e091d6018b6d26cad27f2739fc2db7 metrics: - type: accuracy value: 85.55959999999999 - type: ap value: 80.7246500384617 - type: f1 value: 85.52336485065454 - task: type: Retrieval dataset: name: MTEB MSMARCO type: msmarco config: default split: dev revision: None metrics: - type: map_at_1 value: 23.631 - type: map_at_10 value: 36.264 - type: map_at_100 value: 37.428 - type: map_at_1000 value: 37.472 - type: map_at_3 value: 32.537 - type: map_at_5 value: 34.746 - type: mrr_at_1 value: 24.312 - type: mrr_at_10 value: 36.858000000000004 - type: mrr_at_100 value: 37.966 - type: mrr_at_1000 value: 38.004 - type: mrr_at_3 value: 33.188 - type: mrr_at_5 value: 35.367 - type: ndcg_at_1 value: 24.312 - type: ndcg_at_10 value: 43.126999999999995 - type: ndcg_at_100 value: 48.642 - type: ndcg_at_1000 value: 49.741 - type: ndcg_at_3 value: 35.589 - type: ndcg_at_5 value: 39.515 - type: precision_at_1 value: 24.312 - type: precision_at_10 value: 6.699 - type: precision_at_100 value: 0.9450000000000001 - type: precision_at_1000 value: 0.104 - type: precision_at_3 value: 15.153 - type: precision_at_5 value: 11.065999999999999 - type: recall_at_1 value: 23.631 - type: recall_at_10 value: 64.145 - type: recall_at_100 value: 89.41 - type: recall_at_1000 value: 97.83500000000001 - type: recall_at_3 value: 43.769000000000005 - type: recall_at_5 value: 53.169 - task: type: Classification dataset: name: MTEB MTOPDomainClassification (en) type: mteb/mtop_domain config: en split: test revision: d80d48c1eb48d3562165c59d59d0034df9fff0bf metrics: - type: accuracy value: 93.4108527131783 - type: f1 value: 93.1415880261038 - task: type: Classification dataset: name: MTEB MTOPIntentClassification (en) type: mteb/mtop_intent config: en split: test revision: ae001d0e6b1228650b7bd1c2c65fb50ad11a8aba metrics: - type: accuracy value: 77.24806201550388 - type: f1 value: 60.531916308197175 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (en) type: mteb/amazon_massive_intent config: en split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 73.71553463349024 - type: f1 value: 71.70753174900791 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (en) type: mteb/amazon_massive_scenario config: en split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 77.79757901815736 - type: f1 value: 77.83719850433258 - task: type: Clustering dataset: name: MTEB MedrxivClusteringP2P type: mteb/medrxiv-clustering-p2p config: default split: test revision: e7a26af6f3ae46b30dde8737f02c07b1505bcc73 metrics: - type: v_measure value: 33.74193296622113 - task: type: Clustering dataset: name: MTEB MedrxivClusteringS2S type: mteb/medrxiv-clustering-s2s 
config: default split: test revision: 35191c8c0dca72d8ff3efcd72aa802307d469663 metrics: - type: v_measure value: 30.64257594108566 - task: type: Reranking dataset: name: MTEB MindSmallReranking type: mteb/mind_small config: default split: test revision: 3bdac13927fdc888b903db93b2ffdbd90b295a69 metrics: - type: map value: 30.811018518883625 - type: mrr value: 31.910376577445003 - task: type: Retrieval dataset: name: MTEB NFCorpus type: nfcorpus config: default split: test revision: None metrics: - type: map_at_1 value: 5.409 - type: map_at_10 value: 13.093 - type: map_at_100 value: 16.256999999999998 - type: map_at_1000 value: 17.617 - type: map_at_3 value: 9.555 - type: map_at_5 value: 11.428 - type: mrr_at_1 value: 45.201 - type: mrr_at_10 value: 54.179 - type: mrr_at_100 value: 54.812000000000005 - type: mrr_at_1000 value: 54.840999999999994 - type: mrr_at_3 value: 51.909000000000006 - type: mrr_at_5 value: 53.519000000000005 - type: ndcg_at_1 value: 43.189 - type: ndcg_at_10 value: 35.028 - type: ndcg_at_100 value: 31.226 - type: ndcg_at_1000 value: 39.678000000000004 - type: ndcg_at_3 value: 40.596 - type: ndcg_at_5 value: 38.75 - type: precision_at_1 value: 44.582 - type: precision_at_10 value: 25.974999999999998 - type: precision_at_100 value: 7.793 - type: precision_at_1000 value: 2.036 - type: precision_at_3 value: 38.493 - type: precision_at_5 value: 33.994 - type: recall_at_1 value: 5.409 - type: recall_at_10 value: 16.875999999999998 - type: recall_at_100 value: 30.316 - type: recall_at_1000 value: 60.891 - type: recall_at_3 value: 10.688 - type: recall_at_5 value: 13.832 - task: type: Retrieval dataset: name: MTEB NQ type: nq config: default split: test revision: None metrics: - type: map_at_1 value: 36.375 - type: map_at_10 value: 51.991 - type: map_at_100 value: 52.91400000000001 - type: map_at_1000 value: 52.93600000000001 - type: map_at_3 value: 48.014 - type: map_at_5 value: 50.381 - type: mrr_at_1 value: 40.759 - type: mrr_at_10 value: 54.617000000000004 - type: mrr_at_100 value: 55.301 - type: mrr_at_1000 value: 55.315000000000005 - type: mrr_at_3 value: 51.516 - type: mrr_at_5 value: 53.435 - type: ndcg_at_1 value: 40.759 - type: ndcg_at_10 value: 59.384 - type: ndcg_at_100 value: 63.157 - type: ndcg_at_1000 value: 63.654999999999994 - type: ndcg_at_3 value: 52.114000000000004 - type: ndcg_at_5 value: 55.986000000000004 - type: precision_at_1 value: 40.759 - type: precision_at_10 value: 9.411999999999999 - type: precision_at_100 value: 1.153 - type: precision_at_1000 value: 0.12 - type: precision_at_3 value: 23.329 - type: precision_at_5 value: 16.256999999999998 - type: recall_at_1 value: 36.375 - type: recall_at_10 value: 79.053 - type: recall_at_100 value: 95.167 - type: recall_at_1000 value: 98.82 - type: recall_at_3 value: 60.475 - type: recall_at_5 value: 69.327 - task: type: Retrieval dataset: name: MTEB QuoraRetrieval type: quora config: default split: test revision: None metrics: - type: map_at_1 value: 70.256 - type: map_at_10 value: 83.8 - type: map_at_100 value: 84.425 - type: map_at_1000 value: 84.444 - type: map_at_3 value: 80.906 - type: map_at_5 value: 82.717 - type: mrr_at_1 value: 80.97999999999999 - type: mrr_at_10 value: 87.161 - type: mrr_at_100 value: 87.262 - type: mrr_at_1000 value: 87.263 - type: mrr_at_3 value: 86.175 - type: mrr_at_5 value: 86.848 - type: ndcg_at_1 value: 80.97999999999999 - type: ndcg_at_10 value: 87.697 - type: ndcg_at_100 value: 88.959 - type: ndcg_at_1000 value: 89.09899999999999 - type: ndcg_at_3 value: 84.83800000000001 
- type: ndcg_at_5 value: 86.401 - type: precision_at_1 value: 80.97999999999999 - type: precision_at_10 value: 13.261000000000001 - type: precision_at_100 value: 1.5150000000000001 - type: precision_at_1000 value: 0.156 - type: precision_at_3 value: 37.01 - type: precision_at_5 value: 24.298000000000002 - type: recall_at_1 value: 70.256 - type: recall_at_10 value: 94.935 - type: recall_at_100 value: 99.274 - type: recall_at_1000 value: 99.928 - type: recall_at_3 value: 86.602 - type: recall_at_5 value: 91.133 - task: type: Clustering dataset: name: MTEB RedditClustering type: mteb/reddit-clustering config: default split: test revision: 24640382cdbf8abc73003fb0fa6d111a705499eb metrics: - type: v_measure value: 56.322692497613104 - task: type: Clustering dataset: name: MTEB RedditClusteringP2P type: mteb/reddit-clustering-p2p config: default split: test revision: 282350215ef01743dc01b456c7f5241fa8937f16 metrics: - type: v_measure value: 61.895813503775074 - task: type: Retrieval dataset: name: MTEB SCIDOCS type: scidocs config: default split: test revision: None metrics: - type: map_at_1 value: 4.338 - type: map_at_10 value: 10.767 - type: map_at_100 value: 12.537999999999998 - type: map_at_1000 value: 12.803999999999998 - type: map_at_3 value: 7.788 - type: map_at_5 value: 9.302000000000001 - type: mrr_at_1 value: 21.4 - type: mrr_at_10 value: 31.637999999999998 - type: mrr_at_100 value: 32.688 - type: mrr_at_1000 value: 32.756 - type: mrr_at_3 value: 28.433000000000003 - type: mrr_at_5 value: 30.178 - type: ndcg_at_1 value: 21.4 - type: ndcg_at_10 value: 18.293 - type: ndcg_at_100 value: 25.274 - type: ndcg_at_1000 value: 30.284 - type: ndcg_at_3 value: 17.391000000000002 - type: ndcg_at_5 value: 15.146999999999998 - type: precision_at_1 value: 21.4 - type: precision_at_10 value: 9.48 - type: precision_at_100 value: 1.949 - type: precision_at_1000 value: 0.316 - type: precision_at_3 value: 16.167 - type: precision_at_5 value: 13.22 - type: recall_at_1 value: 4.338 - type: recall_at_10 value: 19.213 - type: recall_at_100 value: 39.562999999999995 - type: recall_at_1000 value: 64.08 - type: recall_at_3 value: 9.828000000000001 - type: recall_at_5 value: 13.383000000000001 - task: type: STS dataset: name: MTEB SICK-R type: mteb/sickr-sts config: default split: test revision: a6ea5a8cab320b040a23452cc28066d9beae2cee metrics: - type: cos_sim_pearson value: 82.42568163642142 - type: cos_sim_spearman value: 78.5797159641342 - type: euclidean_pearson value: 80.22151260811604 - type: euclidean_spearman value: 78.5797151953878 - type: manhattan_pearson value: 80.21224215864788 - type: manhattan_spearman value: 78.55641478381344 - task: type: STS dataset: name: MTEB STS12 type: mteb/sts12-sts config: default split: test revision: a0d554a64d88156834ff5ae9920b964011b16384 metrics: - type: cos_sim_pearson value: 85.44020710812569 - type: cos_sim_spearman value: 78.91631735081286 - type: euclidean_pearson value: 81.64188964182102 - type: euclidean_spearman value: 78.91633286881678 - type: manhattan_pearson value: 81.69294748512496 - type: manhattan_spearman value: 78.93438558002656 - task: type: STS dataset: name: MTEB STS13 type: mteb/sts13-sts config: default split: test revision: 7e90230a92c190f1bf69ae9002b8cea547a64cca metrics: - type: cos_sim_pearson value: 84.27165426412311 - type: cos_sim_spearman value: 85.40429140249618 - type: euclidean_pearson value: 84.7509580724893 - type: euclidean_spearman value: 85.40429140249618 - type: manhattan_pearson value: 84.76488289321308 - type: 
manhattan_spearman value: 85.4256793698708 - task: type: STS dataset: name: MTEB STS14 type: mteb/sts14-sts config: default split: test revision: 6031580fec1f6af667f0bd2da0a551cf4f0b2375 metrics: - type: cos_sim_pearson value: 83.138851760732 - type: cos_sim_spearman value: 81.64101363896586 - type: euclidean_pearson value: 82.55165038934942 - type: euclidean_spearman value: 81.64105257080502 - type: manhattan_pearson value: 82.52802949883335 - type: manhattan_spearman value: 81.61255430718158 - task: type: STS dataset: name: MTEB STS15 type: mteb/sts15-sts config: default split: test revision: ae752c7c21bf194d8b67fd573edf7ae58183cbe3 metrics: - type: cos_sim_pearson value: 86.0654695484029 - type: cos_sim_spearman value: 87.20408521902229 - type: euclidean_pearson value: 86.8110651362115 - type: euclidean_spearman value: 87.20408521902229 - type: manhattan_pearson value: 86.77984656478691 - type: manhattan_spearman value: 87.1719947099227 - task: type: STS dataset: name: MTEB STS16 type: mteb/sts16-sts config: default split: test revision: 4d8694f8f0e0100860b497b999b3dbed754a0513 metrics: - type: cos_sim_pearson value: 83.77823915496512 - type: cos_sim_spearman value: 85.43566325729779 - type: euclidean_pearson value: 84.5396956658821 - type: euclidean_spearman value: 85.43566325729779 - type: manhattan_pearson value: 84.5665398848169 - type: manhattan_spearman value: 85.44375870303232 - task: type: STS dataset: name: MTEB STS17 (en-en) type: mteb/sts17-crosslingual-sts config: en-en split: test revision: af5e6fb845001ecf41f4c1e033ce921939a2a68d metrics: - type: cos_sim_pearson value: 87.20030208471798 - type: cos_sim_spearman value: 87.20485505076539 - type: euclidean_pearson value: 88.10588324368722 - type: euclidean_spearman value: 87.20485505076539 - type: manhattan_pearson value: 87.92324770415183 - type: manhattan_spearman value: 87.0571314561877 - task: type: STS dataset: name: MTEB STS22 (en) type: mteb/sts22-crosslingual-sts config: en split: test revision: 6d1ba47164174a496b7fa5d3569dae26a6813b80 metrics: - type: cos_sim_pearson value: 63.06093161604453 - type: cos_sim_spearman value: 64.2163140357722 - type: euclidean_pearson value: 65.27589680994006 - type: euclidean_spearman value: 64.2163140357722 - type: manhattan_pearson value: 65.45904383711101 - type: manhattan_spearman value: 64.55404716679305 - task: type: STS dataset: name: MTEB STSBenchmark type: mteb/stsbenchmark-sts config: default split: test revision: b0fddb56ed78048fa8b90373c8a3cfc37b684831 metrics: - type: cos_sim_pearson value: 84.32976164578706 - type: cos_sim_spearman value: 85.54302197678368 - type: euclidean_pearson value: 85.26307149193056 - type: euclidean_spearman value: 85.54302197678368 - type: manhattan_pearson value: 85.26647282029371 - type: manhattan_spearman value: 85.5316135265568 - task: type: Reranking dataset: name: MTEB SciDocsRR type: mteb/scidocs-reranking config: default split: test revision: d3c5e1fc0b855ab6097bf1cda04dd73947d7caab metrics: - type: map value: 81.44675968318754 - type: mrr value: 94.92741826075158 - task: type: Retrieval dataset: name: MTEB SciFact type: scifact config: default split: test revision: None metrics: - type: map_at_1 value: 56.34400000000001 - type: map_at_10 value: 65.927 - type: map_at_100 value: 66.431 - type: map_at_1000 value: 66.461 - type: map_at_3 value: 63.529 - type: map_at_5 value: 64.818 - type: mrr_at_1 value: 59.333000000000006 - type: mrr_at_10 value: 67.54599999999999 - type: mrr_at_100 value: 67.892 - type: mrr_at_1000 value: 67.917 - type: 
mrr_at_3 value: 65.778 - type: mrr_at_5 value: 66.794 - type: ndcg_at_1 value: 59.333000000000006 - type: ndcg_at_10 value: 70.5 - type: ndcg_at_100 value: 72.688 - type: ndcg_at_1000 value: 73.483 - type: ndcg_at_3 value: 66.338 - type: ndcg_at_5 value: 68.265 - type: precision_at_1 value: 59.333000000000006 - type: precision_at_10 value: 9.3 - type: precision_at_100 value: 1.053 - type: precision_at_1000 value: 0.11199999999999999 - type: precision_at_3 value: 25.889 - type: precision_at_5 value: 16.866999999999997 - type: recall_at_1 value: 56.34400000000001 - type: recall_at_10 value: 82.789 - type: recall_at_100 value: 92.767 - type: recall_at_1000 value: 99 - type: recall_at_3 value: 71.64399999999999 - type: recall_at_5 value: 76.322 - task: type: PairClassification dataset: name: MTEB SprintDuplicateQuestions type: mteb/sprintduplicatequestions-pairclassification config: default split: test revision: d66bd1f72af766a5cc4b0ca5e00c162f89e8cc46 metrics: - type: cos_sim_accuracy value: 99.75742574257426 - type: cos_sim_ap value: 93.52081548447406 - type: cos_sim_f1 value: 87.33850129198966 - type: cos_sim_precision value: 90.37433155080214 - type: cos_sim_recall value: 84.5 - type: dot_accuracy value: 99.75742574257426 - type: dot_ap value: 93.52081548447406 - type: dot_f1 value: 87.33850129198966 - type: dot_precision value: 90.37433155080214 - type: dot_recall value: 84.5 - type: euclidean_accuracy value: 99.75742574257426 - type: euclidean_ap value: 93.52081548447406 - type: euclidean_f1 value: 87.33850129198966 - type: euclidean_precision value: 90.37433155080214 - type: euclidean_recall value: 84.5 - type: manhattan_accuracy value: 99.75841584158415 - type: manhattan_ap value: 93.4975678585854 - type: manhattan_f1 value: 87.26708074534162 - type: manhattan_precision value: 90.45064377682404 - type: manhattan_recall value: 84.3 - type: max_accuracy value: 99.75841584158415 - type: max_ap value: 93.52081548447406 - type: max_f1 value: 87.33850129198966 - task: type: Clustering dataset: name: MTEB StackExchangeClustering type: mteb/stackexchange-clustering config: default split: test revision: 6cbc1f7b2bc0622f2e39d2c77fa502909748c259 metrics: - type: v_measure value: 64.31437036686651 - task: type: Clustering dataset: name: MTEB StackExchangeClusteringP2P type: mteb/stackexchange-clustering-p2p config: default split: test revision: 815ca46b2622cec33ccafc3735d572c266efdb44 metrics: - type: v_measure value: 33.25569319007206 - task: type: Reranking dataset: name: MTEB StackOverflowDupQuestions type: mteb/stackoverflowdupquestions-reranking config: default split: test revision: e185fbe320c72810689fc5848eb6114e1ef5ec69 metrics: - type: map value: 49.90474939720706 - type: mrr value: 50.568115503777264 - task: type: Summarization dataset: name: MTEB SummEval type: mteb/summeval config: default split: test revision: cda12ad7615edc362dbf25a00fdd61d3b1eaf93c metrics: - type: cos_sim_pearson value: 29.866828641244712 - type: cos_sim_spearman value: 30.077555055873866 - type: dot_pearson value: 29.866832988572266 - type: dot_spearman value: 30.077555055873866 - task: type: Retrieval dataset: name: MTEB TRECCOVID type: trec-covid config: default split: test revision: None metrics: - type: map_at_1 value: 0.232 - type: map_at_10 value: 2.094 - type: map_at_100 value: 11.971 - type: map_at_1000 value: 28.158 - type: map_at_3 value: 0.688 - type: map_at_5 value: 1.114 - type: mrr_at_1 value: 88 - type: mrr_at_10 value: 93.4 - type: mrr_at_100 value: 93.4 - type: mrr_at_1000 value: 93.4 - type: 
mrr_at_3 value: 93 - type: mrr_at_5 value: 93.4 - type: ndcg_at_1 value: 84 - type: ndcg_at_10 value: 79.923 - type: ndcg_at_100 value: 61.17 - type: ndcg_at_1000 value: 53.03 - type: ndcg_at_3 value: 84.592 - type: ndcg_at_5 value: 82.821 - type: precision_at_1 value: 88 - type: precision_at_10 value: 85 - type: precision_at_100 value: 63.019999999999996 - type: precision_at_1000 value: 23.554 - type: precision_at_3 value: 89.333 - type: precision_at_5 value: 87.2 - type: recall_at_1 value: 0.232 - type: recall_at_10 value: 2.255 - type: recall_at_100 value: 14.823 - type: recall_at_1000 value: 49.456 - type: recall_at_3 value: 0.718 - type: recall_at_5 value: 1.175 - task: type: Retrieval dataset: name: MTEB Touche2020 type: webis-touche2020 config: default split: test revision: None metrics: - type: map_at_1 value: 2.547 - type: map_at_10 value: 11.375 - type: map_at_100 value: 18.194 - type: map_at_1000 value: 19.749 - type: map_at_3 value: 5.825 - type: map_at_5 value: 8.581 - type: mrr_at_1 value: 32.653 - type: mrr_at_10 value: 51.32 - type: mrr_at_100 value: 51.747 - type: mrr_at_1000 value: 51.747 - type: mrr_at_3 value: 47.278999999999996 - type: mrr_at_5 value: 48.605 - type: ndcg_at_1 value: 29.592000000000002 - type: ndcg_at_10 value: 28.151 - type: ndcg_at_100 value: 39.438 - type: ndcg_at_1000 value: 50.769 - type: ndcg_at_3 value: 30.758999999999997 - type: ndcg_at_5 value: 30.366 - type: precision_at_1 value: 32.653 - type: precision_at_10 value: 25.714 - type: precision_at_100 value: 8.041 - type: precision_at_1000 value: 1.555 - type: precision_at_3 value: 33.333 - type: precision_at_5 value: 31.837 - type: recall_at_1 value: 2.547 - type: recall_at_10 value: 18.19 - type: recall_at_100 value: 49.538 - type: recall_at_1000 value: 83.86 - type: recall_at_3 value: 7.329 - type: recall_at_5 value: 11.532 - task: type: Classification dataset: name: MTEB ToxicConversationsClassification type: mteb/toxic_conversations_50k config: default split: test revision: d7c0de2777da35d6aae2200a62c6e0e5af397c4c metrics: - type: accuracy value: 71.4952 - type: ap value: 14.793362635531409 - type: f1 value: 55.204635551516915 - task: type: Classification dataset: name: MTEB TweetSentimentExtractionClassification type: mteb/tweet_sentiment_extraction config: default split: test revision: d604517c81ca91fe16a244d1248fc021f9ecee7a metrics: - type: accuracy value: 61.5365025466893 - type: f1 value: 61.81742556334845 - task: type: Clustering dataset: name: MTEB TwentyNewsgroupsClustering type: mteb/twentynewsgroups-clustering config: default split: test revision: 6125ec4e24fa026cec8a478383ee943acfbd5449 metrics: - type: v_measure value: 49.05531070301185 - task: type: PairClassification dataset: name: MTEB TwitterSemEval2015 type: mteb/twittersemeval2015-pairclassification config: default split: test revision: 70970daeab8776df92f5ea462b6173c0b46fd2d1 metrics: - type: cos_sim_accuracy value: 86.51725576682364 - type: cos_sim_ap value: 75.2292304265163 - type: cos_sim_f1 value: 69.54022988505749 - type: cos_sim_precision value: 63.65629110039457 - type: cos_sim_recall value: 76.62269129287598 - type: dot_accuracy value: 86.51725576682364 - type: dot_ap value: 75.22922386081054 - type: dot_f1 value: 69.54022988505749 - type: dot_precision value: 63.65629110039457 - type: dot_recall value: 76.62269129287598 - type: euclidean_accuracy value: 86.51725576682364 - type: euclidean_ap value: 75.22925730473472 - type: euclidean_f1 value: 69.54022988505749 - type: euclidean_precision value: 63.65629110039457 
- type: euclidean_recall value: 76.62269129287598 - type: manhattan_accuracy value: 86.52321630804077 - type: manhattan_ap value: 75.20608115037336 - type: manhattan_f1 value: 69.60000000000001 - type: manhattan_precision value: 64.37219730941705 - type: manhattan_recall value: 75.75197889182058 - type: max_accuracy value: 86.52321630804077 - type: max_ap value: 75.22925730473472 - type: max_f1 value: 69.60000000000001 - task: type: PairClassification dataset: name: MTEB TwitterURLCorpus type: mteb/twitterurlcorpus-pairclassification config: default split: test revision: 8b6510b0b1fa4e4c4f879467980e9be563ec1cdf metrics: - type: cos_sim_accuracy value: 89.34877944657896 - type: cos_sim_ap value: 86.71257569277373 - type: cos_sim_f1 value: 79.10386355986088 - type: cos_sim_precision value: 76.91468470434214 - type: cos_sim_recall value: 81.4213119802895 - type: dot_accuracy value: 89.34877944657896 - type: dot_ap value: 86.71257133133368 - type: dot_f1 value: 79.10386355986088 - type: dot_precision value: 76.91468470434214 - type: dot_recall value: 81.4213119802895 - type: euclidean_accuracy value: 89.34877944657896 - type: euclidean_ap value: 86.71257651501476 - type: euclidean_f1 value: 79.10386355986088 - type: euclidean_precision value: 76.91468470434214 - type: euclidean_recall value: 81.4213119802895 - type: manhattan_accuracy value: 89.35848177901967 - type: manhattan_ap value: 86.69330615469126 - type: manhattan_f1 value: 79.13867741453949 - type: manhattan_precision value: 76.78881807647741 - type: manhattan_recall value: 81.63689559593472 - type: max_accuracy value: 89.35848177901967 - type: max_ap value: 86.71257651501476 - type: max_f1 value: 79.13867741453949 --- # nomic-embed-text-v1: A Reproducible Long Context (8192) Text Embedder `nomic-embed-text-v1` is 8192 context length text encoder that surpasses OpenAI text-embedding-ada-002 and text-embedding-3-small performance on short and long context tasks. # Performance Benchmarks | Name | SeqLen | MTEB | LoCo | Jina Long Context | Open Weights | Open Training Code | Open Data | | :-------------------------------:| :----- | :-------- | :------: | :---------------: | :-----------: | :----------------: | :---------- | | nomic-embed-text-v1 | 8192 | **62.39** |**85.53** | 54.16 | ✅ | ✅ | ✅ | | jina-embeddings-v2-base-en | 8192 | 60.39 | 85.45 | 51.90 | ✅ | ❌ | ❌ | | text-embedding-3-small | 8191 | 62.26 | 82.40 | **58.20** | ❌ | ❌ | ❌ | | text-embedding-ada-002 | 8191 | 60.99 | 52.7 | 55.25 | ❌ | ❌ | ❌ | **Exciting Update!**: `nomic-embed-text-v1` is now multimodal! [nomic-embed-vision-v1](https://huggingface.co/nomic-ai/nomic-embed-vision-v1) is aligned to the embedding space of `nomic-embed-text-v1`, meaning any text embedding is multimodal! ## Usage **Important**: the text prompt *must* include a *task instruction prefix*, instructing the model which task is being performed. For example, if you are implementing a RAG application, you embed your documents as `search_document: <text here>` and embed your user queries as `search_query: <text here>`. ## Task instruction prefixes ### `search_document` #### Purpose: embed texts as documents from a dataset This prefix is used for embedding texts as documents, for example as documents for a RAG index. 
```python from sentence_transformers import SentenceTransformer model = SentenceTransformer("nomic-ai/nomic-embed-text-v1", trust_remote_code=True) sentences = ['search_document: TSNE is a dimensionality reduction algorithm created by Laurens van Der Maaten'] embeddings = model.encode(sentences) print(embeddings) ``` ### `search_query` #### Purpose: embed texts as questions to answer This prefix is used for embedding texts as questions that documents from a dataset could resolve, for example as queries to be answered by a RAG application. ```python from sentence_transformers import SentenceTransformer model = SentenceTransformer("nomic-ai/nomic-embed-text-v1", trust_remote_code=True) sentences = ['search_query: Who is Laurens van Der Maaten?'] embeddings = model.encode(sentences) print(embeddings) ``` ### `clustering` #### Purpose: embed texts to group them into clusters This prefix is used for embedding texts in order to group them into clusters, discover common topics, or remove semantic duplicates. ```python from sentence_transformers import SentenceTransformer model = SentenceTransformer("nomic-ai/nomic-embed-text-v1", trust_remote_code=True) sentences = ['clustering: the quick brown fox'] embeddings = model.encode(sentences) print(embeddings) ``` ### `classification` #### Purpose: embed texts to classify them This prefix is used for embedding texts into vectors that will be used as features for a classification model ```python from sentence_transformers import SentenceTransformer model = SentenceTransformer("nomic-ai/nomic-embed-text-v1", trust_remote_code=True) sentences = ['classification: the quick brown fox'] embeddings = model.encode(sentences) print(embeddings) ``` ### Sentence Transformers ```python from sentence_transformers import SentenceTransformer model = SentenceTransformer("nomic-ai/nomic-embed-text-v1", trust_remote_code=True) sentences = ['search_query: What is TSNE?', 'search_query: Who is Laurens van der Maaten?'] embeddings = model.encode(sentences) print(embeddings) ``` ### Transformers ```python import torch import torch.nn.functional as F from transformers import AutoTokenizer, AutoModel def mean_pooling(model_output, attention_mask): token_embeddings = model_output[0] input_mask_expanded = attention_mask.unsqueeze(-1).expand(token_embeddings.size()).float() return torch.sum(token_embeddings * input_mask_expanded, 1) / torch.clamp(input_mask_expanded.sum(1), min=1e-9) sentences = ['search_query: What is TSNE?', 'search_query: Who is Laurens van der Maaten?'] tokenizer = AutoTokenizer.from_pretrained('bert-base-uncased') model = AutoModel.from_pretrained('nomic-ai/nomic-embed-text-v1', trust_remote_code=True) model.eval() encoded_input = tokenizer(sentences, padding=True, truncation=True, return_tensors='pt') with torch.no_grad(): model_output = model(**encoded_input) embeddings = mean_pooling(model_output, encoded_input['attention_mask']) embeddings = F.normalize(embeddings, p=2, dim=1) print(embeddings) ``` The model natively supports scaling of the sequence length past 2048 tokens. 
To do so, ```diff - tokenizer = AutoTokenizer.from_pretrained('bert-base-uncased') + tokenizer = AutoTokenizer.from_pretrained('bert-base-uncased', model_max_length=8192) - model = AutoModel.from_pretrained('nomic-ai/nomic-embed-text-v1', trust_remote_code=True) + model = AutoModel.from_pretrained('nomic-ai/nomic-embed-text-v1', trust_remote_code=True, rotary_scaling_factor=2) ``` ### Transformers.js ```js import { pipeline } from '@xenova/transformers'; // Create a feature extraction pipeline const extractor = await pipeline('feature-extraction', 'nomic-ai/nomic-embed-text-v1', { quantized: false, // Comment out this line to use the quantized version }); // Compute sentence embeddings const texts = ['search_query: What is TSNE?', 'search_query: Who is Laurens van der Maaten?']; const embeddings = await extractor(texts, { pooling: 'mean', normalize: true }); console.log(embeddings); ``` ## Nomic API The easiest way to get started with Nomic Embed is through the Nomic Embedding API. Generating embeddings with the `nomic` Python client is as easy as ```python from nomic import embed output = embed.text( texts=['Nomic Embedding API', '#keepAIOpen'], model='nomic-embed-text-v1', task_type='search_document' ) print(output) ``` For more information, see the [API reference](https://docs.nomic.ai/reference/endpoints/nomic-embed-text) ## Training Click the Nomic Atlas map below to visualize a 5M sample of our contrastive pretraining data! [![image/webp](https://cdn-uploads.huggingface.co/production/uploads/607997c83a565c15675055b3/pjhJhuNyRfPagRd_c_iUz.webp)](https://atlas.nomic.ai/map/nomic-text-embed-v1-5m-sample) We train our embedder using a multi-stage training pipeline. Starting from a long-context [BERT model](https://huggingface.co/nomic-ai/nomic-bert-2048), the first unsupervised contrastive stage trains on a dataset generated from weakly related text pairs, such as question-answer pairs from forums like StackExchange and Quora, title-body pairs from Amazon reviews, and summarizations from news articles. In the second finetuning stage, higher quality labeled datasets such as search queries and answers from web searches are leveraged. Data curation and hard-example mining is crucial in this stage. For more details, see the Nomic Embed [Technical Report](https://static.nomic.ai/reports/2024_Nomic_Embed_Text_Technical_Report.pdf) and corresponding [blog post](https://blog.nomic.ai/posts/nomic-embed-text-v1). Training data to train the models is released in its entirety. For more details, see the `contrastors` [repository](https://github.com/nomic-ai/contrastors) # Join the Nomic Community - Nomic: [https://nomic.ai](https://nomic.ai) - Discord: [https://discord.gg/myY5YDR8z8](https://discord.gg/myY5YDR8z8) - Twitter: [https://twitter.com/nomic_ai](https://twitter.com/nomic_ai) # Citation If you find the model, dataset, or training code useful, please cite our work ```bibtex @misc{nussbaum2024nomic, title={Nomic Embed: Training a Reproducible Long Context Text Embedder}, author={Zach Nussbaum and John X. Morris and Brandon Duderstadt and Andriy Mulyar}, year={2024}, eprint={2402.01613}, archivePrefix={arXiv}, primaryClass={cs.CL} } ```
[ "BIOSSES", "SCIFACT" ]
BAAI/bge-reranker-v2-m3
BAAI
text-classification
[ "sentence-transformers", "safetensors", "xlm-roberta", "text-classification", "transformers", "text-embeddings-inference", "multilingual", "arxiv:2312.15503", "arxiv:2402.03216", "license:apache-2.0", "region:us" ]
"2024-03-15T13:32:18Z"
2024-06-24T14:08:45+00:00
1,007,185
564
---
language:
- multilingual
license: apache-2.0
pipeline_tag: text-classification
tags:
- transformers
- sentence-transformers
- text-embeddings-inference
---

# Reranker

**For more details, please refer to our GitHub: [FlagEmbedding](https://github.com/FlagOpen/FlagEmbedding/tree/master).**

- [Model List](#model-list)
- [Usage](#usage)
- [Fine-tuning](#fine-tune)
- [Evaluation](#evaluation)
- [Citation](#citation)

Unlike an embedding model, a reranker takes a question and a document as input and directly outputs a similarity score instead of an embedding. You can get a relevance score by feeding a query and a passage to the reranker, and the score can be mapped to a float value in [0,1] with a sigmoid function.

## Model List

| Model | Base model | Language | layerwise | feature |
|:---|:---:|:---:|:---:|:---|
| [BAAI/bge-reranker-base](https://huggingface.co/BAAI/bge-reranker-base) | [xlm-roberta-base](https://huggingface.co/xlm-roberta-base) | Chinese and English | - | Lightweight reranker model, easy to deploy, with fast inference. |
| [BAAI/bge-reranker-large](https://huggingface.co/BAAI/bge-reranker-large) | [xlm-roberta-large](https://huggingface.co/FacebookAI/xlm-roberta-large) | Chinese and English | - | Lightweight reranker model, easy to deploy, with fast inference. |
| [BAAI/bge-reranker-v2-m3](https://huggingface.co/BAAI/bge-reranker-v2-m3) | [bge-m3](https://huggingface.co/BAAI/bge-m3) | Multilingual | - | Lightweight reranker model with strong multilingual capabilities, easy to deploy, with fast inference. |
| [BAAI/bge-reranker-v2-gemma](https://huggingface.co/BAAI/bge-reranker-v2-gemma) | [gemma-2b](https://huggingface.co/google/gemma-2b) | Multilingual | - | Suitable for multilingual contexts; performs well in both English proficiency and multilingual capabilities. |
| [BAAI/bge-reranker-v2-minicpm-layerwise](https://huggingface.co/BAAI/bge-reranker-v2-minicpm-layerwise) | [MiniCPM-2B-dpo-bf16](https://huggingface.co/openbmb/MiniCPM-2B-dpo-bf16) | Multilingual | 8-40 | Suitable for multilingual contexts; performs well in both English and Chinese proficiency; allows free selection of output layers, facilitating accelerated inference. |

You can select a model according to your scenario and resources:

- For **multilingual** use, utilize [BAAI/bge-reranker-v2-m3](https://huggingface.co/BAAI/bge-reranker-v2-m3) and [BAAI/bge-reranker-v2-gemma](https://huggingface.co/BAAI/bge-reranker-v2-gemma).
- For **Chinese or English**, utilize [BAAI/bge-reranker-v2-m3](https://huggingface.co/BAAI/bge-reranker-v2-m3) and [BAAI/bge-reranker-v2-minicpm-layerwise](https://huggingface.co/BAAI/bge-reranker-v2-minicpm-layerwise).
- For **efficiency**, utilize [BAAI/bge-reranker-v2-m3](https://huggingface.co/BAAI/bge-reranker-v2-m3) and the lower layers of [BAAI/bge-reranker-v2-minicpm-layerwise](https://huggingface.co/BAAI/bge-reranker-v2-minicpm-layerwise).
- For better **performance**, we recommend [BAAI/bge-reranker-v2-minicpm-layerwise](https://huggingface.co/BAAI/bge-reranker-v2-minicpm-layerwise) and [BAAI/bge-reranker-v2-gemma](https://huggingface.co/BAAI/bge-reranker-v2-gemma).

## Usage

### Using FlagEmbedding

```
pip install -U FlagEmbedding
```

#### For normal reranker (bge-reranker-base / bge-reranker-large / bge-reranker-v2-m3)

Get relevance scores (higher scores indicate more relevance):

```python
from FlagEmbedding import FlagReranker
reranker = FlagReranker('BAAI/bge-reranker-v2-m3', use_fp16=True) # Setting use_fp16 to True speeds up computation with a slight performance degradation

score = reranker.compute_score(['query', 'passage'])
print(score) # -5.65234375

# You can map the scores into 0-1 by setting "normalize=True", which will apply a sigmoid function to the score
score = reranker.compute_score(['query', 'passage'], normalize=True)
print(score) # 0.003497010252573502

scores = reranker.compute_score([['what is panda?', 'hi'], ['what is panda?', 'The giant panda (Ailuropoda melanoleuca), sometimes called a panda bear or simply panda, is a bear species endemic to China.']])
print(scores) # [-8.1875, 5.26171875]

# You can map the scores into 0-1 by setting "normalize=True", which will apply a sigmoid function to the score
scores = reranker.compute_score([['what is panda?', 'hi'], ['what is panda?', 'The giant panda (Ailuropoda melanoleuca), sometimes called a panda bear or simply panda, is a bear species endemic to China.']], normalize=True)
print(scores) # [0.00027803096387751553, 0.9948403768236574]
```

#### For LLM-based reranker

```python
from FlagEmbedding import FlagLLMReranker
reranker = FlagLLMReranker('BAAI/bge-reranker-v2-gemma', use_fp16=True) # Setting use_fp16 to True speeds up computation with a slight performance degradation
# reranker = FlagLLMReranker('BAAI/bge-reranker-v2-gemma', use_bf16=True) # You can also set use_bf16=True to speed up computation with a slight performance degradation

score = reranker.compute_score(['query', 'passage'])
print(score)

scores = reranker.compute_score([['what is panda?', 'hi'], ['what is panda?', 'The giant panda (Ailuropoda melanoleuca), sometimes called a panda bear or simply panda, is a bear species endemic to China.']])
print(scores)
```

#### For LLM-based layerwise reranker

```python
from FlagEmbedding import LayerWiseFlagLLMReranker
reranker = LayerWiseFlagLLMReranker('BAAI/bge-reranker-v2-minicpm-layerwise', use_fp16=True) # Setting use_fp16 to True speeds up computation with a slight performance degradation
# reranker = LayerWiseFlagLLMReranker('BAAI/bge-reranker-v2-minicpm-layerwise', use_bf16=True) # You can also set use_bf16=True to speed up computation with a slight performance degradation

score = reranker.compute_score(['query', 'passage'], cutoff_layers=[28]) # Adjusting 'cutoff_layers' to pick which layers are used for computing the score.
print(score) scores = reranker.compute_score([['what is panda?', 'hi'], ['what is panda?', 'The giant panda (Ailuropoda melanoleuca), sometimes called a panda bear or simply panda, is a bear species endemic to China.']], cutoff_layers=[28]) print(scores) ``` ### Using Huggingface transformers #### For normal reranker (bge-reranker-base / bge-reranker-large / bge-reranker-v2-m3 ) Get relevance scores (higher scores indicate more relevance): ```python import torch from transformers import AutoModelForSequenceClassification, AutoTokenizer tokenizer = AutoTokenizer.from_pretrained('BAAI/bge-reranker-v2-m3') model = AutoModelForSequenceClassification.from_pretrained('BAAI/bge-reranker-v2-m3') model.eval() pairs = [['what is panda?', 'hi'], ['what is panda?', 'The giant panda (Ailuropoda melanoleuca), sometimes called a panda bear or simply panda, is a bear species endemic to China.']] with torch.no_grad(): inputs = tokenizer(pairs, padding=True, truncation=True, return_tensors='pt', max_length=512) scores = model(**inputs, return_dict=True).logits.view(-1, ).float() print(scores) ``` #### For LLM-based reranker ```python import torch from transformers import AutoModelForCausalLM, AutoTokenizer def get_inputs(pairs, tokenizer, prompt=None, max_length=1024): if prompt is None: prompt = "Given a query A and a passage B, determine whether the passage contains an answer to the query by providing a prediction of either 'Yes' or 'No'." sep = "\n" prompt_inputs = tokenizer(prompt, return_tensors=None, add_special_tokens=False)['input_ids'] sep_inputs = tokenizer(sep, return_tensors=None, add_special_tokens=False)['input_ids'] inputs = [] for query, passage in pairs: query_inputs = tokenizer(f'A: {query}', return_tensors=None, add_special_tokens=False, max_length=max_length * 3 // 4, truncation=True) passage_inputs = tokenizer(f'B: {passage}', return_tensors=None, add_special_tokens=False, max_length=max_length, truncation=True) item = tokenizer.prepare_for_model( [tokenizer.bos_token_id] + query_inputs['input_ids'], sep_inputs + passage_inputs['input_ids'], truncation='only_second', max_length=max_length, padding=False, return_attention_mask=False, return_token_type_ids=False, add_special_tokens=False ) item['input_ids'] = item['input_ids'] + sep_inputs + prompt_inputs item['attention_mask'] = [1] * len(item['input_ids']) inputs.append(item) return tokenizer.pad( inputs, padding=True, max_length=max_length + len(sep_inputs) + len(prompt_inputs), pad_to_multiple_of=8, return_tensors='pt', ) tokenizer = AutoTokenizer.from_pretrained('BAAI/bge-reranker-v2-gemma') model = AutoModelForCausalLM.from_pretrained('BAAI/bge-reranker-v2-gemma') yes_loc = tokenizer('Yes', add_special_tokens=False)['input_ids'][0] model.eval() pairs = [['what is panda?', 'hi'], ['what is panda?', 'The giant panda (Ailuropoda melanoleuca), sometimes called a panda bear or simply panda, is a bear species endemic to China.']] with torch.no_grad(): inputs = get_inputs(pairs, tokenizer) scores = model(**inputs, return_dict=True).logits[:, -1, yes_loc].view(-1, ).float() print(scores) ``` #### For LLM-based layerwise reranker ```python import torch from transformers import AutoModelForCausalLM, AutoTokenizer def get_inputs(pairs, tokenizer, prompt=None, max_length=1024): if prompt is None: prompt = "Given a query A and a passage B, determine whether the passage contains an answer to the query by providing a prediction of either 'Yes' or 'No'." 
    sep = "\n"
    prompt_inputs = tokenizer(prompt, return_tensors=None, add_special_tokens=False)['input_ids']
    sep_inputs = tokenizer(sep, return_tensors=None, add_special_tokens=False)['input_ids']
    inputs = []
    for query, passage in pairs:
        query_inputs = tokenizer(f'A: {query}', return_tensors=None, add_special_tokens=False, max_length=max_length * 3 // 4, truncation=True)
        passage_inputs = tokenizer(f'B: {passage}', return_tensors=None, add_special_tokens=False, max_length=max_length, truncation=True)
        item = tokenizer.prepare_for_model(
            [tokenizer.bos_token_id] + query_inputs['input_ids'],
            sep_inputs + passage_inputs['input_ids'],
            truncation='only_second',
            max_length=max_length,
            padding=False,
            return_attention_mask=False,
            return_token_type_ids=False,
            add_special_tokens=False
        )
        item['input_ids'] = item['input_ids'] + sep_inputs + prompt_inputs
        item['attention_mask'] = [1] * len(item['input_ids'])
        inputs.append(item)
    return tokenizer.pad(
        inputs,
        padding=True,
        max_length=max_length + len(sep_inputs) + len(prompt_inputs),
        pad_to_multiple_of=8,
        return_tensors='pt',
    )

tokenizer = AutoTokenizer.from_pretrained('BAAI/bge-reranker-v2-minicpm-layerwise', trust_remote_code=True)
model = AutoModelForCausalLM.from_pretrained('BAAI/bge-reranker-v2-minicpm-layerwise', trust_remote_code=True, torch_dtype=torch.bfloat16)
model = model.to('cuda')
model.eval()

pairs = [['what is panda?', 'hi'], ['what is panda?', 'The giant panda (Ailuropoda melanoleuca), sometimes called a panda bear or simply panda, is a bear species endemic to China.']]
with torch.no_grad():
    inputs = get_inputs(pairs, tokenizer).to(model.device)
    all_scores = model(**inputs, return_dict=True, cutoff_layers=[28])
    all_scores = [scores[:, -1].view(-1, ).float() for scores in all_scores[0]]
    print(all_scores)
```

## Fine-tune

### Data Format

Training data should be a JSON Lines file, where each line is a dict like this:

```
{"query": str, "pos": List[str], "neg": List[str], "prompt": str}
```

`query` is the query, `pos` is a list of positive texts, `neg` is a list of negative texts, and `prompt` indicates the relationship between the query and the texts. If you have no negative texts for a query, you can randomly sample some from the entire corpus as the negatives.

See [toy_finetune_data.jsonl](https://github.com/FlagOpen/FlagEmbedding/tree/master/FlagEmbedding/llm_reranker/toy_finetune_data.jsonl) for a toy data file.
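As a minimal sketch of how such a file could be produced (the example strings and output path below are illustrative stand-ins, not the contents of the official toy file):

```python
import json

# Hypothetical toy examples in the {"query", "pos", "neg", "prompt"} schema described above.
examples = [
    {
        "query": "what is panda?",
        "pos": ["The giant panda is a bear species endemic to China."],
        "neg": ["Paris is the capital of France.", "hi"],
        "prompt": "Given a query A and a passage B, determine whether the passage contains an answer to the query by providing a prediction of either 'Yes' or 'No'.",
    },
]

# Write one JSON object per line (JSON Lines format), ready to pass via --train_data.
with open("toy_finetune_data.jsonl", "w", encoding="utf-8") as f:
    for example in examples:
        f.write(json.dumps(example, ensure_ascii=False) + "\n")
```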
### Train You can fine-tune the reranker with the following code: **For llm-based reranker** ```shell torchrun --nproc_per_node {number of gpus} \ -m FlagEmbedding.llm_reranker.finetune_for_instruction.run \ --output_dir {path to save model} \ --model_name_or_path google/gemma-2b \ --train_data ./toy_finetune_data.jsonl \ --learning_rate 2e-4 \ --num_train_epochs 1 \ --per_device_train_batch_size 1 \ --gradient_accumulation_steps 16 \ --dataloader_drop_last True \ --query_max_len 512 \ --passage_max_len 512 \ --train_group_size 16 \ --logging_steps 1 \ --save_steps 2000 \ --save_total_limit 50 \ --ddp_find_unused_parameters False \ --gradient_checkpointing \ --deepspeed stage1.json \ --warmup_ratio 0.1 \ --bf16 \ --use_lora True \ --lora_rank 32 \ --lora_alpha 64 \ --use_flash_attn True \ --target_modules q_proj k_proj v_proj o_proj ``` **For llm-based layerwise reranker** ```shell torchrun --nproc_per_node {number of gpus} \ -m FlagEmbedding.llm_reranker.finetune_for_layerwise.run \ --output_dir {path to save model} \ --model_name_or_path openbmb/MiniCPM-2B-dpo-bf16 \ --train_data ./toy_finetune_data.jsonl \ --learning_rate 2e-4 \ --num_train_epochs 1 \ --per_device_train_batch_size 1 \ --gradient_accumulation_steps 16 \ --dataloader_drop_last True \ --query_max_len 512 \ --passage_max_len 512 \ --train_group_size 16 \ --logging_steps 1 \ --save_steps 2000 \ --save_total_limit 50 \ --ddp_find_unused_parameters False \ --gradient_checkpointing \ --deepspeed stage1.json \ --warmup_ratio 0.1 \ --bf16 \ --use_lora True \ --lora_rank 32 \ --lora_alpha 64 \ --use_flash_attn True \ --target_modules q_proj k_proj v_proj o_proj \ --start_layer 8 \ --head_multi True \ --head_type simple \ --lora_extra_parameters linear_head ``` Our rerankers are initialized from [google/gemma-2b](https://huggingface.co/google/gemma-2b) (for the llm-based reranker) and [openbmb/MiniCPM-2B-dpo-bf16](https://huggingface.co/openbmb/MiniCPM-2B-dpo-bf16) (for the llm-based layerwise reranker), and we train them on a mixture of multilingual datasets: - [bge-m3-data](https://huggingface.co/datasets/Shitao/bge-m3-data) - [quora train data](https://huggingface.co/datasets/quora) - [fever train data](https://fever.ai/dataset/fever.html) ## Evaluation - llama-index. ![image-20240317193909373](./assets/llama-index.png) - BEIR. Reranking the top 100 results from bge-en-v1.5 large. ![image-20240317174633333](./assets/BEIR-bge-en-v1.5.png) Reranking the top 100 results from e5 mistral 7b instruct. ![image-20240317172949713](./assets/BEIR-e5-mistral.png) - CMTEB-retrieval. It reranks the top 100 results from bge-zh-v1.5 large. ![image-20240317173026235](./assets/CMTEB-retrieval-bge-zh-v1.5.png) - miracl (multi-language). It reranks the top 100 results from bge-m3. ![image-20240317173117639](./assets/miracl-bge-m3.png) ## Citation If you find this repository useful, please consider giving it a star and a citation: ```bibtex @misc{li2023making, title={Making Large Language Models A Better Foundation For Dense Retrieval}, author={Chaofan Li and Zheng Liu and Shitao Xiao and Yingxia Shao}, year={2023}, eprint={2312.15503}, archivePrefix={arXiv}, primaryClass={cs.CL} } @misc{chen2024bge, title={BGE M3-Embedding: Multi-Lingual, Multi-Functionality, Multi-Granularity Text Embeddings Through Self-Knowledge Distillation}, author={Jianlv Chen and Shitao Xiao and Peitian Zhang and Kun Luo and Defu Lian and Zheng Liu}, year={2024}, eprint={2402.03216}, archivePrefix={arXiv}, primaryClass={cs.CL} } ```
[ "BEAR" ]
nomic-ai/nomic-embed-text-v1.5
nomic-ai
sentence-similarity
[ "sentence-transformers", "onnx", "safetensors", "nomic_bert", "feature-extraction", "sentence-similarity", "mteb", "transformers", "transformers.js", "custom_code", "en", "arxiv:2205.13147", "arxiv:2402.01613", "license:apache-2.0", "model-index", "autotrain_compatible", "text-embeddings-inference", "endpoints_compatible", "region:us" ]
"2024-02-10T06:32:35Z"
2025-01-16T22:29:18+00:00
986,761
577
--- language: - en library_name: sentence-transformers license: apache-2.0 pipeline_tag: sentence-similarity tags: - feature-extraction - sentence-similarity - mteb - transformers - transformers.js model-index: - name: epoch_0_model results: - task: type: Classification dataset: name: MTEB AmazonCounterfactualClassification (en) type: mteb/amazon_counterfactual config: en split: test revision: e8379541af4e31359cca9fbcf4b00f2671dba205 metrics: - type: accuracy value: 75.20895522388058 - type: ap value: 38.57605549557802 - type: f1 value: 69.35586565857854 - task: type: Classification dataset: name: MTEB AmazonPolarityClassification type: mteb/amazon_polarity config: default split: test revision: e2d317d38cd51312af73b3d32a06d1a08b442046 metrics: - type: accuracy value: 91.8144 - type: ap value: 88.65222882032363 - type: f1 value: 91.80426301643274 - task: type: Classification dataset: name: MTEB AmazonReviewsClassification (en) type: mteb/amazon_reviews_multi config: en split: test revision: 1399c76144fd37290681b995c656ef9b2e06e26d metrics: - type: accuracy value: 47.162000000000006 - type: f1 value: 46.59329642263158 - task: type: Retrieval dataset: name: MTEB ArguAna type: arguana config: default split: test revision: None metrics: - type: map_at_1 value: 24.253 - type: map_at_10 value: 38.962 - type: map_at_100 value: 40.081 - type: map_at_1000 value: 40.089000000000006 - type: map_at_3 value: 33.499 - type: map_at_5 value: 36.351 - type: mrr_at_1 value: 24.609 - type: mrr_at_10 value: 39.099000000000004 - type: mrr_at_100 value: 40.211000000000006 - type: mrr_at_1000 value: 40.219 - type: mrr_at_3 value: 33.677 - type: mrr_at_5 value: 36.469 - type: ndcg_at_1 value: 24.253 - type: ndcg_at_10 value: 48.010999999999996 - type: ndcg_at_100 value: 52.756 - type: ndcg_at_1000 value: 52.964999999999996 - type: ndcg_at_3 value: 36.564 - type: ndcg_at_5 value: 41.711999999999996 - type: precision_at_1 value: 24.253 - type: precision_at_10 value: 7.738 - type: precision_at_100 value: 0.98 - type: precision_at_1000 value: 0.1 - type: precision_at_3 value: 15.149000000000001 - type: precision_at_5 value: 11.593 - type: recall_at_1 value: 24.253 - type: recall_at_10 value: 77.383 - type: recall_at_100 value: 98.009 - type: recall_at_1000 value: 99.644 - type: recall_at_3 value: 45.448 - type: recall_at_5 value: 57.965999999999994 - task: type: Clustering dataset: name: MTEB ArxivClusteringP2P type: mteb/arxiv-clustering-p2p config: default split: test revision: a122ad7f3f0291bf49cc6f4d32aa80929df69d5d metrics: - type: v_measure value: 45.69069567851087 - task: type: Clustering dataset: name: MTEB ArxivClusteringS2S type: mteb/arxiv-clustering-s2s config: default split: test revision: f910caf1a6075f7329cdf8c1a6135696f37dbd53 metrics: - type: v_measure value: 36.35185490976283 - task: type: Reranking dataset: name: MTEB AskUbuntuDupQuestions type: mteb/askubuntudupquestions-reranking config: default split: test revision: 2000358ca161889fa9c082cb41daa8dcfb161a54 metrics: - type: map value: 61.71274951450321 - type: mrr value: 76.06032625423207 - task: type: STS dataset: name: MTEB BIOSSES type: mteb/biosses-sts config: default split: test revision: d3fb88f8f02e40887cd149695127462bbcf29b4a metrics: - type: cos_sim_pearson value: 86.73980520022269 - type: cos_sim_spearman value: 84.24649792685918 - type: euclidean_pearson value: 85.85197641158186 - type: euclidean_spearman value: 84.24649792685918 - type: manhattan_pearson value: 86.26809552711346 - type: manhattan_spearman value: 84.56397504030865 - 
task: type: Classification dataset: name: MTEB Banking77Classification type: mteb/banking77 config: default split: test revision: 0fd18e25b25c072e09e0d92ab615fda904d66300 metrics: - type: accuracy value: 84.25324675324674 - type: f1 value: 84.17872280892557 - task: type: Clustering dataset: name: MTEB BiorxivClusteringP2P type: mteb/biorxiv-clustering-p2p config: default split: test revision: 65b79d1d13f80053f67aca9498d9402c2d9f1f40 metrics: - type: v_measure value: 38.770253446400886 - task: type: Clustering dataset: name: MTEB BiorxivClusteringS2S type: mteb/biorxiv-clustering-s2s config: default split: test revision: 258694dd0231531bc1fd9de6ceb52a0853c6d908 metrics: - type: v_measure value: 32.94307095497281 - task: type: Retrieval dataset: name: MTEB CQADupstackAndroidRetrieval type: BeIR/cqadupstack config: default split: test revision: None metrics: - type: map_at_1 value: 32.164 - type: map_at_10 value: 42.641 - type: map_at_100 value: 43.947 - type: map_at_1000 value: 44.074999999999996 - type: map_at_3 value: 39.592 - type: map_at_5 value: 41.204 - type: mrr_at_1 value: 39.628 - type: mrr_at_10 value: 48.625 - type: mrr_at_100 value: 49.368 - type: mrr_at_1000 value: 49.413000000000004 - type: mrr_at_3 value: 46.400000000000006 - type: mrr_at_5 value: 47.68 - type: ndcg_at_1 value: 39.628 - type: ndcg_at_10 value: 48.564 - type: ndcg_at_100 value: 53.507000000000005 - type: ndcg_at_1000 value: 55.635999999999996 - type: ndcg_at_3 value: 44.471 - type: ndcg_at_5 value: 46.137 - type: precision_at_1 value: 39.628 - type: precision_at_10 value: 8.856 - type: precision_at_100 value: 1.429 - type: precision_at_1000 value: 0.191 - type: precision_at_3 value: 21.268 - type: precision_at_5 value: 14.649000000000001 - type: recall_at_1 value: 32.164 - type: recall_at_10 value: 59.609 - type: recall_at_100 value: 80.521 - type: recall_at_1000 value: 94.245 - type: recall_at_3 value: 46.521 - type: recall_at_5 value: 52.083999999999996 - type: map_at_1 value: 31.526 - type: map_at_10 value: 41.581 - type: map_at_100 value: 42.815999999999995 - type: map_at_1000 value: 42.936 - type: map_at_3 value: 38.605000000000004 - type: map_at_5 value: 40.351 - type: mrr_at_1 value: 39.489999999999995 - type: mrr_at_10 value: 47.829 - type: mrr_at_100 value: 48.512 - type: mrr_at_1000 value: 48.552 - type: mrr_at_3 value: 45.754 - type: mrr_at_5 value: 46.986 - type: ndcg_at_1 value: 39.489999999999995 - type: ndcg_at_10 value: 47.269 - type: ndcg_at_100 value: 51.564 - type: ndcg_at_1000 value: 53.53099999999999 - type: ndcg_at_3 value: 43.301 - type: ndcg_at_5 value: 45.239000000000004 - type: precision_at_1 value: 39.489999999999995 - type: precision_at_10 value: 8.93 - type: precision_at_100 value: 1.415 - type: precision_at_1000 value: 0.188 - type: precision_at_3 value: 20.892 - type: precision_at_5 value: 14.865999999999998 - type: recall_at_1 value: 31.526 - type: recall_at_10 value: 56.76 - type: recall_at_100 value: 75.029 - type: recall_at_1000 value: 87.491 - type: recall_at_3 value: 44.786 - type: recall_at_5 value: 50.254 - type: map_at_1 value: 40.987 - type: map_at_10 value: 52.827 - type: map_at_100 value: 53.751000000000005 - type: map_at_1000 value: 53.81 - type: map_at_3 value: 49.844 - type: map_at_5 value: 51.473 - type: mrr_at_1 value: 46.833999999999996 - type: mrr_at_10 value: 56.389 - type: mrr_at_100 value: 57.003 - type: mrr_at_1000 value: 57.034 - type: mrr_at_3 value: 54.17999999999999 - type: mrr_at_5 value: 55.486999999999995 - type: ndcg_at_1 value: 46.833999999999996 - 
type: ndcg_at_10 value: 58.372 - type: ndcg_at_100 value: 62.068 - type: ndcg_at_1000 value: 63.288 - type: ndcg_at_3 value: 53.400000000000006 - type: ndcg_at_5 value: 55.766000000000005 - type: precision_at_1 value: 46.833999999999996 - type: precision_at_10 value: 9.191 - type: precision_at_100 value: 1.192 - type: precision_at_1000 value: 0.134 - type: precision_at_3 value: 23.448 - type: precision_at_5 value: 15.862000000000002 - type: recall_at_1 value: 40.987 - type: recall_at_10 value: 71.146 - type: recall_at_100 value: 87.035 - type: recall_at_1000 value: 95.633 - type: recall_at_3 value: 58.025999999999996 - type: recall_at_5 value: 63.815999999999995 - type: map_at_1 value: 24.587 - type: map_at_10 value: 33.114 - type: map_at_100 value: 34.043 - type: map_at_1000 value: 34.123999999999995 - type: map_at_3 value: 30.45 - type: map_at_5 value: 31.813999999999997 - type: mrr_at_1 value: 26.554 - type: mrr_at_10 value: 35.148 - type: mrr_at_100 value: 35.926 - type: mrr_at_1000 value: 35.991 - type: mrr_at_3 value: 32.599000000000004 - type: mrr_at_5 value: 33.893 - type: ndcg_at_1 value: 26.554 - type: ndcg_at_10 value: 38.132 - type: ndcg_at_100 value: 42.78 - type: ndcg_at_1000 value: 44.919 - type: ndcg_at_3 value: 32.833 - type: ndcg_at_5 value: 35.168 - type: precision_at_1 value: 26.554 - type: precision_at_10 value: 5.921 - type: precision_at_100 value: 0.8659999999999999 - type: precision_at_1000 value: 0.109 - type: precision_at_3 value: 13.861 - type: precision_at_5 value: 9.605 - type: recall_at_1 value: 24.587 - type: recall_at_10 value: 51.690000000000005 - type: recall_at_100 value: 73.428 - type: recall_at_1000 value: 89.551 - type: recall_at_3 value: 37.336999999999996 - type: recall_at_5 value: 43.047000000000004 - type: map_at_1 value: 16.715 - type: map_at_10 value: 24.251 - type: map_at_100 value: 25.326999999999998 - type: map_at_1000 value: 25.455 - type: map_at_3 value: 21.912000000000003 - type: map_at_5 value: 23.257 - type: mrr_at_1 value: 20.274 - type: mrr_at_10 value: 28.552 - type: mrr_at_100 value: 29.42 - type: mrr_at_1000 value: 29.497 - type: mrr_at_3 value: 26.14 - type: mrr_at_5 value: 27.502 - type: ndcg_at_1 value: 20.274 - type: ndcg_at_10 value: 29.088 - type: ndcg_at_100 value: 34.293 - type: ndcg_at_1000 value: 37.271 - type: ndcg_at_3 value: 24.708 - type: ndcg_at_5 value: 26.809 - type: precision_at_1 value: 20.274 - type: precision_at_10 value: 5.361 - type: precision_at_100 value: 0.915 - type: precision_at_1000 value: 0.13 - type: precision_at_3 value: 11.733 - type: precision_at_5 value: 8.556999999999999 - type: recall_at_1 value: 16.715 - type: recall_at_10 value: 39.587 - type: recall_at_100 value: 62.336000000000006 - type: recall_at_1000 value: 83.453 - type: recall_at_3 value: 27.839999999999996 - type: recall_at_5 value: 32.952999999999996 - type: map_at_1 value: 28.793000000000003 - type: map_at_10 value: 38.582 - type: map_at_100 value: 39.881 - type: map_at_1000 value: 39.987 - type: map_at_3 value: 35.851 - type: map_at_5 value: 37.289 - type: mrr_at_1 value: 34.455999999999996 - type: mrr_at_10 value: 43.909 - type: mrr_at_100 value: 44.74 - type: mrr_at_1000 value: 44.786 - type: mrr_at_3 value: 41.659 - type: mrr_at_5 value: 43.010999999999996 - type: ndcg_at_1 value: 34.455999999999996 - type: ndcg_at_10 value: 44.266 - type: ndcg_at_100 value: 49.639 - type: ndcg_at_1000 value: 51.644 - type: ndcg_at_3 value: 39.865 - type: ndcg_at_5 value: 41.887 - type: precision_at_1 value: 34.455999999999996 - type: 
precision_at_10 value: 7.843999999999999 - type: precision_at_100 value: 1.243 - type: precision_at_1000 value: 0.158 - type: precision_at_3 value: 18.831999999999997 - type: precision_at_5 value: 13.147 - type: recall_at_1 value: 28.793000000000003 - type: recall_at_10 value: 55.68300000000001 - type: recall_at_100 value: 77.99000000000001 - type: recall_at_1000 value: 91.183 - type: recall_at_3 value: 43.293 - type: recall_at_5 value: 48.618 - type: map_at_1 value: 25.907000000000004 - type: map_at_10 value: 35.519 - type: map_at_100 value: 36.806 - type: map_at_1000 value: 36.912 - type: map_at_3 value: 32.748 - type: map_at_5 value: 34.232 - type: mrr_at_1 value: 31.621 - type: mrr_at_10 value: 40.687 - type: mrr_at_100 value: 41.583 - type: mrr_at_1000 value: 41.638999999999996 - type: mrr_at_3 value: 38.527 - type: mrr_at_5 value: 39.612 - type: ndcg_at_1 value: 31.621 - type: ndcg_at_10 value: 41.003 - type: ndcg_at_100 value: 46.617999999999995 - type: ndcg_at_1000 value: 48.82 - type: ndcg_at_3 value: 36.542 - type: ndcg_at_5 value: 38.368 - type: precision_at_1 value: 31.621 - type: precision_at_10 value: 7.396999999999999 - type: precision_at_100 value: 1.191 - type: precision_at_1000 value: 0.153 - type: precision_at_3 value: 17.39 - type: precision_at_5 value: 12.1 - type: recall_at_1 value: 25.907000000000004 - type: recall_at_10 value: 52.115 - type: recall_at_100 value: 76.238 - type: recall_at_1000 value: 91.218 - type: recall_at_3 value: 39.417 - type: recall_at_5 value: 44.435 - type: map_at_1 value: 25.732166666666668 - type: map_at_10 value: 34.51616666666667 - type: map_at_100 value: 35.67241666666666 - type: map_at_1000 value: 35.78675 - type: map_at_3 value: 31.953416666666662 - type: map_at_5 value: 33.333 - type: mrr_at_1 value: 30.300166666666673 - type: mrr_at_10 value: 38.6255 - type: mrr_at_100 value: 39.46183333333334 - type: mrr_at_1000 value: 39.519999999999996 - type: mrr_at_3 value: 36.41299999999999 - type: mrr_at_5 value: 37.6365 - type: ndcg_at_1 value: 30.300166666666673 - type: ndcg_at_10 value: 39.61466666666667 - type: ndcg_at_100 value: 44.60808333333334 - type: ndcg_at_1000 value: 46.91708333333334 - type: ndcg_at_3 value: 35.26558333333333 - type: ndcg_at_5 value: 37.220000000000006 - type: precision_at_1 value: 30.300166666666673 - type: precision_at_10 value: 6.837416666666667 - type: precision_at_100 value: 1.10425 - type: precision_at_1000 value: 0.14875 - type: precision_at_3 value: 16.13716666666667 - type: precision_at_5 value: 11.2815 - type: recall_at_1 value: 25.732166666666668 - type: recall_at_10 value: 50.578916666666665 - type: recall_at_100 value: 72.42183333333334 - type: recall_at_1000 value: 88.48766666666667 - type: recall_at_3 value: 38.41325 - type: recall_at_5 value: 43.515750000000004 - type: map_at_1 value: 23.951 - type: map_at_10 value: 30.974 - type: map_at_100 value: 31.804 - type: map_at_1000 value: 31.900000000000002 - type: map_at_3 value: 28.762 - type: map_at_5 value: 29.94 - type: mrr_at_1 value: 26.534000000000002 - type: mrr_at_10 value: 33.553 - type: mrr_at_100 value: 34.297 - type: mrr_at_1000 value: 34.36 - type: mrr_at_3 value: 31.391000000000002 - type: mrr_at_5 value: 32.525999999999996 - type: ndcg_at_1 value: 26.534000000000002 - type: ndcg_at_10 value: 35.112 - type: ndcg_at_100 value: 39.28 - type: ndcg_at_1000 value: 41.723 - type: ndcg_at_3 value: 30.902 - type: ndcg_at_5 value: 32.759 - type: precision_at_1 value: 26.534000000000002 - type: precision_at_10 value: 5.445 - type: precision_at_100 
value: 0.819 - type: precision_at_1000 value: 0.11 - type: precision_at_3 value: 12.986 - type: precision_at_5 value: 9.049 - type: recall_at_1 value: 23.951 - type: recall_at_10 value: 45.24 - type: recall_at_100 value: 64.12299999999999 - type: recall_at_1000 value: 82.28999999999999 - type: recall_at_3 value: 33.806000000000004 - type: recall_at_5 value: 38.277 - type: map_at_1 value: 16.829 - type: map_at_10 value: 23.684 - type: map_at_100 value: 24.683 - type: map_at_1000 value: 24.81 - type: map_at_3 value: 21.554000000000002 - type: map_at_5 value: 22.768 - type: mrr_at_1 value: 20.096 - type: mrr_at_10 value: 27.230999999999998 - type: mrr_at_100 value: 28.083999999999996 - type: mrr_at_1000 value: 28.166000000000004 - type: mrr_at_3 value: 25.212 - type: mrr_at_5 value: 26.32 - type: ndcg_at_1 value: 20.096 - type: ndcg_at_10 value: 27.989000000000004 - type: ndcg_at_100 value: 32.847 - type: ndcg_at_1000 value: 35.896 - type: ndcg_at_3 value: 24.116 - type: ndcg_at_5 value: 25.964 - type: precision_at_1 value: 20.096 - type: precision_at_10 value: 5 - type: precision_at_100 value: 0.8750000000000001 - type: precision_at_1000 value: 0.131 - type: precision_at_3 value: 11.207 - type: precision_at_5 value: 8.08 - type: recall_at_1 value: 16.829 - type: recall_at_10 value: 37.407000000000004 - type: recall_at_100 value: 59.101000000000006 - type: recall_at_1000 value: 81.024 - type: recall_at_3 value: 26.739 - type: recall_at_5 value: 31.524 - type: map_at_1 value: 24.138 - type: map_at_10 value: 32.275999999999996 - type: map_at_100 value: 33.416000000000004 - type: map_at_1000 value: 33.527 - type: map_at_3 value: 29.854000000000003 - type: map_at_5 value: 31.096 - type: mrr_at_1 value: 28.450999999999997 - type: mrr_at_10 value: 36.214 - type: mrr_at_100 value: 37.134 - type: mrr_at_1000 value: 37.198 - type: mrr_at_3 value: 34.001999999999995 - type: mrr_at_5 value: 35.187000000000005 - type: ndcg_at_1 value: 28.450999999999997 - type: ndcg_at_10 value: 37.166 - type: ndcg_at_100 value: 42.454 - type: ndcg_at_1000 value: 44.976 - type: ndcg_at_3 value: 32.796 - type: ndcg_at_5 value: 34.631 - type: precision_at_1 value: 28.450999999999997 - type: precision_at_10 value: 6.241 - type: precision_at_100 value: 0.9950000000000001 - type: precision_at_1000 value: 0.133 - type: precision_at_3 value: 14.801 - type: precision_at_5 value: 10.280000000000001 - type: recall_at_1 value: 24.138 - type: recall_at_10 value: 48.111 - type: recall_at_100 value: 71.245 - type: recall_at_1000 value: 88.986 - type: recall_at_3 value: 36.119 - type: recall_at_5 value: 40.846 - type: map_at_1 value: 23.244 - type: map_at_10 value: 31.227 - type: map_at_100 value: 33.007 - type: map_at_1000 value: 33.223 - type: map_at_3 value: 28.924 - type: map_at_5 value: 30.017 - type: mrr_at_1 value: 27.668 - type: mrr_at_10 value: 35.524 - type: mrr_at_100 value: 36.699 - type: mrr_at_1000 value: 36.759 - type: mrr_at_3 value: 33.366 - type: mrr_at_5 value: 34.552 - type: ndcg_at_1 value: 27.668 - type: ndcg_at_10 value: 36.381 - type: ndcg_at_100 value: 43.062 - type: ndcg_at_1000 value: 45.656 - type: ndcg_at_3 value: 32.501999999999995 - type: ndcg_at_5 value: 34.105999999999995 - type: precision_at_1 value: 27.668 - type: precision_at_10 value: 6.798 - type: precision_at_100 value: 1.492 - type: precision_at_1000 value: 0.234 - type: precision_at_3 value: 15.152 - type: precision_at_5 value: 10.791 - type: recall_at_1 value: 23.244 - type: recall_at_10 value: 45.979 - type: recall_at_100 value: 74.822 - type: 
recall_at_1000 value: 91.078 - type: recall_at_3 value: 34.925 - type: recall_at_5 value: 39.126 - type: map_at_1 value: 19.945 - type: map_at_10 value: 27.517999999999997 - type: map_at_100 value: 28.588 - type: map_at_1000 value: 28.682000000000002 - type: map_at_3 value: 25.345000000000002 - type: map_at_5 value: 26.555 - type: mrr_at_1 value: 21.996 - type: mrr_at_10 value: 29.845 - type: mrr_at_100 value: 30.775999999999996 - type: mrr_at_1000 value: 30.845 - type: mrr_at_3 value: 27.726 - type: mrr_at_5 value: 28.882 - type: ndcg_at_1 value: 21.996 - type: ndcg_at_10 value: 32.034 - type: ndcg_at_100 value: 37.185 - type: ndcg_at_1000 value: 39.645 - type: ndcg_at_3 value: 27.750999999999998 - type: ndcg_at_5 value: 29.805999999999997 - type: precision_at_1 value: 21.996 - type: precision_at_10 value: 5.065 - type: precision_at_100 value: 0.819 - type: precision_at_1000 value: 0.11399999999999999 - type: precision_at_3 value: 12.076 - type: precision_at_5 value: 8.392 - type: recall_at_1 value: 19.945 - type: recall_at_10 value: 43.62 - type: recall_at_100 value: 67.194 - type: recall_at_1000 value: 85.7 - type: recall_at_3 value: 32.15 - type: recall_at_5 value: 37.208999999999996 - task: type: Retrieval dataset: name: MTEB ClimateFEVER type: climate-fever config: default split: test revision: None metrics: - type: map_at_1 value: 18.279 - type: map_at_10 value: 31.052999999999997 - type: map_at_100 value: 33.125 - type: map_at_1000 value: 33.306000000000004 - type: map_at_3 value: 26.208 - type: map_at_5 value: 28.857 - type: mrr_at_1 value: 42.671 - type: mrr_at_10 value: 54.557 - type: mrr_at_100 value: 55.142 - type: mrr_at_1000 value: 55.169000000000004 - type: mrr_at_3 value: 51.488 - type: mrr_at_5 value: 53.439 - type: ndcg_at_1 value: 42.671 - type: ndcg_at_10 value: 41.276 - type: ndcg_at_100 value: 48.376000000000005 - type: ndcg_at_1000 value: 51.318 - type: ndcg_at_3 value: 35.068 - type: ndcg_at_5 value: 37.242 - type: precision_at_1 value: 42.671 - type: precision_at_10 value: 12.638 - type: precision_at_100 value: 2.045 - type: precision_at_1000 value: 0.26 - type: precision_at_3 value: 26.08 - type: precision_at_5 value: 19.805 - type: recall_at_1 value: 18.279 - type: recall_at_10 value: 46.946 - type: recall_at_100 value: 70.97200000000001 - type: recall_at_1000 value: 87.107 - type: recall_at_3 value: 31.147999999999996 - type: recall_at_5 value: 38.099 - task: type: Retrieval dataset: name: MTEB DBPedia type: dbpedia-entity config: default split: test revision: None metrics: - type: map_at_1 value: 8.573 - type: map_at_10 value: 19.747 - type: map_at_100 value: 28.205000000000002 - type: map_at_1000 value: 29.831000000000003 - type: map_at_3 value: 14.109 - type: map_at_5 value: 16.448999999999998 - type: mrr_at_1 value: 71 - type: mrr_at_10 value: 77.68599999999999 - type: mrr_at_100 value: 77.995 - type: mrr_at_1000 value: 78.00200000000001 - type: mrr_at_3 value: 76.292 - type: mrr_at_5 value: 77.029 - type: ndcg_at_1 value: 59.12500000000001 - type: ndcg_at_10 value: 43.9 - type: ndcg_at_100 value: 47.863 - type: ndcg_at_1000 value: 54.848 - type: ndcg_at_3 value: 49.803999999999995 - type: ndcg_at_5 value: 46.317 - type: precision_at_1 value: 71 - type: precision_at_10 value: 34.4 - type: precision_at_100 value: 11.063 - type: precision_at_1000 value: 1.989 - type: precision_at_3 value: 52.333 - type: precision_at_5 value: 43.7 - type: recall_at_1 value: 8.573 - type: recall_at_10 value: 25.615 - type: recall_at_100 value: 53.385000000000005 - type: 
recall_at_1000 value: 75.46000000000001 - type: recall_at_3 value: 15.429 - type: recall_at_5 value: 19.357 - task: type: Classification dataset: name: MTEB EmotionClassification type: mteb/emotion config: default split: test revision: 4f58c6b202a23cf9a4da393831edf4f9183cad37 metrics: - type: accuracy value: 47.989999999999995 - type: f1 value: 42.776314451497555 - task: type: Retrieval dataset: name: MTEB FEVER type: fever config: default split: test revision: None metrics: - type: map_at_1 value: 74.13499999999999 - type: map_at_10 value: 82.825 - type: map_at_100 value: 83.096 - type: map_at_1000 value: 83.111 - type: map_at_3 value: 81.748 - type: map_at_5 value: 82.446 - type: mrr_at_1 value: 79.553 - type: mrr_at_10 value: 86.654 - type: mrr_at_100 value: 86.774 - type: mrr_at_1000 value: 86.778 - type: mrr_at_3 value: 85.981 - type: mrr_at_5 value: 86.462 - type: ndcg_at_1 value: 79.553 - type: ndcg_at_10 value: 86.345 - type: ndcg_at_100 value: 87.32 - type: ndcg_at_1000 value: 87.58200000000001 - type: ndcg_at_3 value: 84.719 - type: ndcg_at_5 value: 85.677 - type: precision_at_1 value: 79.553 - type: precision_at_10 value: 10.402000000000001 - type: precision_at_100 value: 1.1119999999999999 - type: precision_at_1000 value: 0.11499999999999999 - type: precision_at_3 value: 32.413 - type: precision_at_5 value: 20.138 - type: recall_at_1 value: 74.13499999999999 - type: recall_at_10 value: 93.215 - type: recall_at_100 value: 97.083 - type: recall_at_1000 value: 98.732 - type: recall_at_3 value: 88.79 - type: recall_at_5 value: 91.259 - task: type: Retrieval dataset: name: MTEB FiQA2018 type: fiqa config: default split: test revision: None metrics: - type: map_at_1 value: 18.298000000000002 - type: map_at_10 value: 29.901 - type: map_at_100 value: 31.528 - type: map_at_1000 value: 31.713 - type: map_at_3 value: 25.740000000000002 - type: map_at_5 value: 28.227999999999998 - type: mrr_at_1 value: 36.728 - type: mrr_at_10 value: 45.401 - type: mrr_at_100 value: 46.27 - type: mrr_at_1000 value: 46.315 - type: mrr_at_3 value: 42.978 - type: mrr_at_5 value: 44.29 - type: ndcg_at_1 value: 36.728 - type: ndcg_at_10 value: 37.456 - type: ndcg_at_100 value: 43.832 - type: ndcg_at_1000 value: 47 - type: ndcg_at_3 value: 33.694 - type: ndcg_at_5 value: 35.085 - type: precision_at_1 value: 36.728 - type: precision_at_10 value: 10.386 - type: precision_at_100 value: 1.701 - type: precision_at_1000 value: 0.22599999999999998 - type: precision_at_3 value: 22.479 - type: precision_at_5 value: 16.605 - type: recall_at_1 value: 18.298000000000002 - type: recall_at_10 value: 44.369 - type: recall_at_100 value: 68.098 - type: recall_at_1000 value: 87.21900000000001 - type: recall_at_3 value: 30.215999999999998 - type: recall_at_5 value: 36.861 - task: type: Retrieval dataset: name: MTEB HotpotQA type: hotpotqa config: default split: test revision: None metrics: - type: map_at_1 value: 39.568 - type: map_at_10 value: 65.061 - type: map_at_100 value: 65.896 - type: map_at_1000 value: 65.95100000000001 - type: map_at_3 value: 61.831 - type: map_at_5 value: 63.849000000000004 - type: mrr_at_1 value: 79.136 - type: mrr_at_10 value: 84.58200000000001 - type: mrr_at_100 value: 84.765 - type: mrr_at_1000 value: 84.772 - type: mrr_at_3 value: 83.684 - type: mrr_at_5 value: 84.223 - type: ndcg_at_1 value: 79.136 - type: ndcg_at_10 value: 72.622 - type: ndcg_at_100 value: 75.539 - type: ndcg_at_1000 value: 76.613 - type: ndcg_at_3 value: 68.065 - type: ndcg_at_5 value: 70.58 - type: precision_at_1 value: 79.136 - 
type: precision_at_10 value: 15.215 - type: precision_at_100 value: 1.7500000000000002 - type: precision_at_1000 value: 0.189 - type: precision_at_3 value: 44.011 - type: precision_at_5 value: 28.388999999999996 - type: recall_at_1 value: 39.568 - type: recall_at_10 value: 76.077 - type: recall_at_100 value: 87.481 - type: recall_at_1000 value: 94.56400000000001 - type: recall_at_3 value: 66.01599999999999 - type: recall_at_5 value: 70.97200000000001 - task: type: Classification dataset: name: MTEB ImdbClassification type: mteb/imdb config: default split: test revision: 3d86128a09e091d6018b6d26cad27f2739fc2db7 metrics: - type: accuracy value: 85.312 - type: ap value: 80.36296867333715 - type: f1 value: 85.26613311552218 - task: type: Retrieval dataset: name: MTEB MSMARCO type: msmarco config: default split: dev revision: None metrics: - type: map_at_1 value: 23.363999999999997 - type: map_at_10 value: 35.711999999999996 - type: map_at_100 value: 36.876999999999995 - type: map_at_1000 value: 36.923 - type: map_at_3 value: 32.034 - type: map_at_5 value: 34.159 - type: mrr_at_1 value: 24.04 - type: mrr_at_10 value: 36.345 - type: mrr_at_100 value: 37.441 - type: mrr_at_1000 value: 37.480000000000004 - type: mrr_at_3 value: 32.713 - type: mrr_at_5 value: 34.824 - type: ndcg_at_1 value: 24.026 - type: ndcg_at_10 value: 42.531 - type: ndcg_at_100 value: 48.081 - type: ndcg_at_1000 value: 49.213 - type: ndcg_at_3 value: 35.044 - type: ndcg_at_5 value: 38.834 - type: precision_at_1 value: 24.026 - type: precision_at_10 value: 6.622999999999999 - type: precision_at_100 value: 0.941 - type: precision_at_1000 value: 0.104 - type: precision_at_3 value: 14.909 - type: precision_at_5 value: 10.871 - type: recall_at_1 value: 23.363999999999997 - type: recall_at_10 value: 63.426 - type: recall_at_100 value: 88.96300000000001 - type: recall_at_1000 value: 97.637 - type: recall_at_3 value: 43.095 - type: recall_at_5 value: 52.178000000000004 - task: type: Classification dataset: name: MTEB MTOPDomainClassification (en) type: mteb/mtop_domain config: en split: test revision: d80d48c1eb48d3562165c59d59d0034df9fff0bf metrics: - type: accuracy value: 93.0095759233926 - type: f1 value: 92.78387794667408 - task: type: Classification dataset: name: MTEB MTOPIntentClassification (en) type: mteb/mtop_intent config: en split: test revision: ae001d0e6b1228650b7bd1c2c65fb50ad11a8aba metrics: - type: accuracy value: 75.0296397628819 - type: f1 value: 58.45699589820874 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (en) type: mteb/amazon_massive_intent config: en split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 73.45662407531944 - type: f1 value: 71.42364781421813 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (en) type: mteb/amazon_massive_scenario config: en split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 77.07800941492937 - type: f1 value: 77.22799045640845 - task: type: Clustering dataset: name: MTEB MedrxivClusteringP2P type: mteb/medrxiv-clustering-p2p config: default split: test revision: e7a26af6f3ae46b30dde8737f02c07b1505bcc73 metrics: - type: v_measure value: 34.531234379250606 - task: type: Clustering dataset: name: MTEB MedrxivClusteringS2S type: mteb/medrxiv-clustering-s2s config: default split: test revision: 35191c8c0dca72d8ff3efcd72aa802307d469663 metrics: - type: v_measure value: 30.941490381193802 - task: type: Reranking dataset: name: MTEB 
MindSmallReranking type: mteb/mind_small config: default split: test revision: 3bdac13927fdc888b903db93b2ffdbd90b295a69 metrics: - type: map value: 30.3115090856725 - type: mrr value: 31.290667638675757 - task: type: Retrieval dataset: name: MTEB NFCorpus type: nfcorpus config: default split: test revision: None metrics: - type: map_at_1 value: 5.465 - type: map_at_10 value: 13.03 - type: map_at_100 value: 16.057 - type: map_at_1000 value: 17.49 - type: map_at_3 value: 9.553 - type: map_at_5 value: 11.204 - type: mrr_at_1 value: 43.653 - type: mrr_at_10 value: 53.269 - type: mrr_at_100 value: 53.72 - type: mrr_at_1000 value: 53.761 - type: mrr_at_3 value: 50.929 - type: mrr_at_5 value: 52.461 - type: ndcg_at_1 value: 42.26 - type: ndcg_at_10 value: 34.673 - type: ndcg_at_100 value: 30.759999999999998 - type: ndcg_at_1000 value: 39.728 - type: ndcg_at_3 value: 40.349000000000004 - type: ndcg_at_5 value: 37.915 - type: precision_at_1 value: 43.653 - type: precision_at_10 value: 25.789 - type: precision_at_100 value: 7.754999999999999 - type: precision_at_1000 value: 2.07 - type: precision_at_3 value: 38.596000000000004 - type: precision_at_5 value: 33.251 - type: recall_at_1 value: 5.465 - type: recall_at_10 value: 17.148 - type: recall_at_100 value: 29.768 - type: recall_at_1000 value: 62.239 - type: recall_at_3 value: 10.577 - type: recall_at_5 value: 13.315 - task: type: Retrieval dataset: name: MTEB NQ type: nq config: default split: test revision: None metrics: - type: map_at_1 value: 37.008 - type: map_at_10 value: 52.467 - type: map_at_100 value: 53.342999999999996 - type: map_at_1000 value: 53.366 - type: map_at_3 value: 48.412 - type: map_at_5 value: 50.875 - type: mrr_at_1 value: 41.541 - type: mrr_at_10 value: 54.967 - type: mrr_at_100 value: 55.611 - type: mrr_at_1000 value: 55.627 - type: mrr_at_3 value: 51.824999999999996 - type: mrr_at_5 value: 53.763000000000005 - type: ndcg_at_1 value: 41.541 - type: ndcg_at_10 value: 59.724999999999994 - type: ndcg_at_100 value: 63.38700000000001 - type: ndcg_at_1000 value: 63.883 - type: ndcg_at_3 value: 52.331 - type: ndcg_at_5 value: 56.327000000000005 - type: precision_at_1 value: 41.541 - type: precision_at_10 value: 9.447 - type: precision_at_100 value: 1.1520000000000001 - type: precision_at_1000 value: 0.12 - type: precision_at_3 value: 23.262 - type: precision_at_5 value: 16.314999999999998 - type: recall_at_1 value: 37.008 - type: recall_at_10 value: 79.145 - type: recall_at_100 value: 94.986 - type: recall_at_1000 value: 98.607 - type: recall_at_3 value: 60.277 - type: recall_at_5 value: 69.407 - task: type: Retrieval dataset: name: MTEB QuoraRetrieval type: quora config: default split: test revision: None metrics: - type: map_at_1 value: 70.402 - type: map_at_10 value: 84.181 - type: map_at_100 value: 84.796 - type: map_at_1000 value: 84.81400000000001 - type: map_at_3 value: 81.209 - type: map_at_5 value: 83.085 - type: mrr_at_1 value: 81.02000000000001 - type: mrr_at_10 value: 87.263 - type: mrr_at_100 value: 87.36 - type: mrr_at_1000 value: 87.36 - type: mrr_at_3 value: 86.235 - type: mrr_at_5 value: 86.945 - type: ndcg_at_1 value: 81.01 - type: ndcg_at_10 value: 87.99900000000001 - type: ndcg_at_100 value: 89.217 - type: ndcg_at_1000 value: 89.33 - type: ndcg_at_3 value: 85.053 - type: ndcg_at_5 value: 86.703 - type: precision_at_1 value: 81.01 - type: precision_at_10 value: 13.336 - type: precision_at_100 value: 1.52 - type: precision_at_1000 value: 0.156 - type: precision_at_3 value: 37.14 - type: precision_at_5 value: 
24.44 - type: recall_at_1 value: 70.402 - type: recall_at_10 value: 95.214 - type: recall_at_100 value: 99.438 - type: recall_at_1000 value: 99.928 - type: recall_at_3 value: 86.75699999999999 - type: recall_at_5 value: 91.44099999999999 - task: type: Clustering dataset: name: MTEB RedditClustering type: mteb/reddit-clustering config: default split: test revision: 24640382cdbf8abc73003fb0fa6d111a705499eb metrics: - type: v_measure value: 56.51721502758904 - task: type: Clustering dataset: name: MTEB RedditClusteringP2P type: mteb/reddit-clustering-p2p config: default split: test revision: 282350215ef01743dc01b456c7f5241fa8937f16 metrics: - type: v_measure value: 61.054808572333016 - task: type: Retrieval dataset: name: MTEB SCIDOCS type: scidocs config: default split: test revision: None metrics: - type: map_at_1 value: 4.578 - type: map_at_10 value: 11.036999999999999 - type: map_at_100 value: 12.879999999999999 - type: map_at_1000 value: 13.150999999999998 - type: map_at_3 value: 8.133 - type: map_at_5 value: 9.559 - type: mrr_at_1 value: 22.6 - type: mrr_at_10 value: 32.68 - type: mrr_at_100 value: 33.789 - type: mrr_at_1000 value: 33.854 - type: mrr_at_3 value: 29.7 - type: mrr_at_5 value: 31.480000000000004 - type: ndcg_at_1 value: 22.6 - type: ndcg_at_10 value: 18.616 - type: ndcg_at_100 value: 25.883 - type: ndcg_at_1000 value: 30.944 - type: ndcg_at_3 value: 18.136 - type: ndcg_at_5 value: 15.625 - type: precision_at_1 value: 22.6 - type: precision_at_10 value: 9.48 - type: precision_at_100 value: 1.991 - type: precision_at_1000 value: 0.321 - type: precision_at_3 value: 16.8 - type: precision_at_5 value: 13.54 - type: recall_at_1 value: 4.578 - type: recall_at_10 value: 19.213 - type: recall_at_100 value: 40.397 - type: recall_at_1000 value: 65.2 - type: recall_at_3 value: 10.208 - type: recall_at_5 value: 13.718 - task: type: STS dataset: name: MTEB SICK-R type: mteb/sickr-sts config: default split: test revision: a6ea5a8cab320b040a23452cc28066d9beae2cee metrics: - type: cos_sim_pearson value: 83.44288351714071 - type: cos_sim_spearman value: 79.37995604564952 - type: euclidean_pearson value: 81.1078874670718 - type: euclidean_spearman value: 79.37995905980499 - type: manhattan_pearson value: 81.03697527288986 - type: manhattan_spearman value: 79.33490235296236 - task: type: STS dataset: name: MTEB STS12 type: mteb/sts12-sts config: default split: test revision: a0d554a64d88156834ff5ae9920b964011b16384 metrics: - type: cos_sim_pearson value: 84.95557650436523 - type: cos_sim_spearman value: 78.5190672399868 - type: euclidean_pearson value: 81.58064025904707 - type: euclidean_spearman value: 78.5190672399868 - type: manhattan_pearson value: 81.52857930619889 - type: manhattan_spearman value: 78.50421361308034 - task: type: STS dataset: name: MTEB STS13 type: mteb/sts13-sts config: default split: test revision: 7e90230a92c190f1bf69ae9002b8cea547a64cca metrics: - type: cos_sim_pearson value: 84.79128416228737 - type: cos_sim_spearman value: 86.05402451477147 - type: euclidean_pearson value: 85.46280267054289 - type: euclidean_spearman value: 86.05402451477147 - type: manhattan_pearson value: 85.46278563858236 - type: manhattan_spearman value: 86.08079590861004 - task: type: STS dataset: name: MTEB STS14 type: mteb/sts14-sts config: default split: test revision: 6031580fec1f6af667f0bd2da0a551cf4f0b2375 metrics: - type: cos_sim_pearson value: 83.20623089568763 - type: cos_sim_spearman value: 81.53786907061009 - type: euclidean_pearson value: 82.82272250091494 - type: 
euclidean_spearman value: 81.53786907061009 - type: manhattan_pearson value: 82.78850494027013 - type: manhattan_spearman value: 81.5135618083407 - task: type: STS dataset: name: MTEB STS15 type: mteb/sts15-sts config: default split: test revision: ae752c7c21bf194d8b67fd573edf7ae58183cbe3 metrics: - type: cos_sim_pearson value: 85.46366618397936 - type: cos_sim_spearman value: 86.96566013336908 - type: euclidean_pearson value: 86.62651697548931 - type: euclidean_spearman value: 86.96565526364454 - type: manhattan_pearson value: 86.58812160258009 - type: manhattan_spearman value: 86.9336484321288 - task: type: STS dataset: name: MTEB STS16 type: mteb/sts16-sts config: default split: test revision: 4d8694f8f0e0100860b497b999b3dbed754a0513 metrics: - type: cos_sim_pearson value: 82.51858358641559 - type: cos_sim_spearman value: 84.7652527954999 - type: euclidean_pearson value: 84.23914783766861 - type: euclidean_spearman value: 84.7652527954999 - type: manhattan_pearson value: 84.22749648503171 - type: manhattan_spearman value: 84.74527996746386 - task: type: STS dataset: name: MTEB STS17 (en-en) type: mteb/sts17-crosslingual-sts config: en-en split: test revision: af5e6fb845001ecf41f4c1e033ce921939a2a68d metrics: - type: cos_sim_pearson value: 87.28026563313065 - type: cos_sim_spearman value: 87.46928143824915 - type: euclidean_pearson value: 88.30558762000372 - type: euclidean_spearman value: 87.46928143824915 - type: manhattan_pearson value: 88.10513330809331 - type: manhattan_spearman value: 87.21069787834173 - task: type: STS dataset: name: MTEB STS22 (en) type: mteb/sts22-crosslingual-sts config: en split: test revision: 6d1ba47164174a496b7fa5d3569dae26a6813b80 metrics: - type: cos_sim_pearson value: 62.376497134587375 - type: cos_sim_spearman value: 65.0159550112516 - type: euclidean_pearson value: 65.64572120879598 - type: euclidean_spearman value: 65.0159550112516 - type: manhattan_pearson value: 65.88143604989976 - type: manhattan_spearman value: 65.17547297222434 - task: type: STS dataset: name: MTEB STSBenchmark type: mteb/stsbenchmark-sts config: default split: test revision: b0fddb56ed78048fa8b90373c8a3cfc37b684831 metrics: - type: cos_sim_pearson value: 84.22876368947644 - type: cos_sim_spearman value: 85.46935577445318 - type: euclidean_pearson value: 85.32830231392005 - type: euclidean_spearman value: 85.46935577445318 - type: manhattan_pearson value: 85.30353211758495 - type: manhattan_spearman value: 85.42821085956945 - task: type: Reranking dataset: name: MTEB SciDocsRR type: mteb/scidocs-reranking config: default split: test revision: d3c5e1fc0b855ab6097bf1cda04dd73947d7caab metrics: - type: map value: 80.60986667767133 - type: mrr value: 94.29432314236236 - task: type: Retrieval dataset: name: MTEB SciFact type: scifact config: default split: test revision: None metrics: - type: map_at_1 value: 54.528 - type: map_at_10 value: 65.187 - type: map_at_100 value: 65.62599999999999 - type: map_at_1000 value: 65.657 - type: map_at_3 value: 62.352 - type: map_at_5 value: 64.025 - type: mrr_at_1 value: 57.333 - type: mrr_at_10 value: 66.577 - type: mrr_at_100 value: 66.88 - type: mrr_at_1000 value: 66.908 - type: mrr_at_3 value: 64.556 - type: mrr_at_5 value: 65.739 - type: ndcg_at_1 value: 57.333 - type: ndcg_at_10 value: 70.275 - type: ndcg_at_100 value: 72.136 - type: ndcg_at_1000 value: 72.963 - type: ndcg_at_3 value: 65.414 - type: ndcg_at_5 value: 67.831 - type: precision_at_1 value: 57.333 - type: precision_at_10 value: 9.5 - type: precision_at_100 value: 1.057 - type: 
precision_at_1000 value: 0.11199999999999999 - type: precision_at_3 value: 25.778000000000002 - type: precision_at_5 value: 17.2 - type: recall_at_1 value: 54.528 - type: recall_at_10 value: 84.356 - type: recall_at_100 value: 92.833 - type: recall_at_1000 value: 99.333 - type: recall_at_3 value: 71.283 - type: recall_at_5 value: 77.14999999999999 - task: type: PairClassification dataset: name: MTEB SprintDuplicateQuestions type: mteb/sprintduplicatequestions-pairclassification config: default split: test revision: d66bd1f72af766a5cc4b0ca5e00c162f89e8cc46 metrics: - type: cos_sim_accuracy value: 99.74158415841585 - type: cos_sim_ap value: 92.90048959850317 - type: cos_sim_f1 value: 86.35650810245687 - type: cos_sim_precision value: 90.4709748083242 - type: cos_sim_recall value: 82.6 - type: dot_accuracy value: 99.74158415841585 - type: dot_ap value: 92.90048959850317 - type: dot_f1 value: 86.35650810245687 - type: dot_precision value: 90.4709748083242 - type: dot_recall value: 82.6 - type: euclidean_accuracy value: 99.74158415841585 - type: euclidean_ap value: 92.90048959850317 - type: euclidean_f1 value: 86.35650810245687 - type: euclidean_precision value: 90.4709748083242 - type: euclidean_recall value: 82.6 - type: manhattan_accuracy value: 99.74158415841585 - type: manhattan_ap value: 92.87344692947894 - type: manhattan_f1 value: 86.38497652582159 - type: manhattan_precision value: 90.29443838604145 - type: manhattan_recall value: 82.8 - type: max_accuracy value: 99.74158415841585 - type: max_ap value: 92.90048959850317 - type: max_f1 value: 86.38497652582159 - task: type: Clustering dataset: name: MTEB StackExchangeClustering type: mteb/stackexchange-clustering config: default split: test revision: 6cbc1f7b2bc0622f2e39d2c77fa502909748c259 metrics: - type: v_measure value: 63.191648770424216 - task: type: Clustering dataset: name: MTEB StackExchangeClusteringP2P type: mteb/stackexchange-clustering-p2p config: default split: test revision: 815ca46b2622cec33ccafc3735d572c266efdb44 metrics: - type: v_measure value: 34.02944668730218 - task: type: Reranking dataset: name: MTEB StackOverflowDupQuestions type: mteb/stackoverflowdupquestions-reranking config: default split: test revision: e185fbe320c72810689fc5848eb6114e1ef5ec69 metrics: - type: map value: 50.466386167525265 - type: mrr value: 51.19071492233257 - task: type: Summarization dataset: name: MTEB SummEval type: mteb/summeval config: default split: test revision: cda12ad7615edc362dbf25a00fdd61d3b1eaf93c metrics: - type: cos_sim_pearson value: 30.198022505886435 - type: cos_sim_spearman value: 30.40170257939193 - type: dot_pearson value: 30.198015316402614 - type: dot_spearman value: 30.40170257939193 - task: type: Retrieval dataset: name: MTEB TRECCOVID type: trec-covid config: default split: test revision: None metrics: - type: map_at_1 value: 0.242 - type: map_at_10 value: 2.17 - type: map_at_100 value: 12.221 - type: map_at_1000 value: 28.63 - type: map_at_3 value: 0.728 - type: map_at_5 value: 1.185 - type: mrr_at_1 value: 94 - type: mrr_at_10 value: 97 - type: mrr_at_100 value: 97 - type: mrr_at_1000 value: 97 - type: mrr_at_3 value: 97 - type: mrr_at_5 value: 97 - type: ndcg_at_1 value: 89 - type: ndcg_at_10 value: 82.30499999999999 - type: ndcg_at_100 value: 61.839999999999996 - type: ndcg_at_1000 value: 53.381 - type: ndcg_at_3 value: 88.877 - type: ndcg_at_5 value: 86.05199999999999 - type: precision_at_1 value: 94 - type: precision_at_10 value: 87 - type: precision_at_100 value: 63.38 - type: precision_at_1000 value: 
23.498 - type: precision_at_3 value: 94 - type: precision_at_5 value: 92 - type: recall_at_1 value: 0.242 - type: recall_at_10 value: 2.302 - type: recall_at_100 value: 14.979000000000001 - type: recall_at_1000 value: 49.638 - type: recall_at_3 value: 0.753 - type: recall_at_5 value: 1.226 - task: type: Retrieval dataset: name: MTEB Touche2020 type: webis-touche2020 config: default split: test revision: None metrics: - type: map_at_1 value: 3.006 - type: map_at_10 value: 11.805 - type: map_at_100 value: 18.146 - type: map_at_1000 value: 19.788 - type: map_at_3 value: 5.914 - type: map_at_5 value: 8.801 - type: mrr_at_1 value: 40.816 - type: mrr_at_10 value: 56.36600000000001 - type: mrr_at_100 value: 56.721999999999994 - type: mrr_at_1000 value: 56.721999999999994 - type: mrr_at_3 value: 52.041000000000004 - type: mrr_at_5 value: 54.796 - type: ndcg_at_1 value: 37.755 - type: ndcg_at_10 value: 29.863 - type: ndcg_at_100 value: 39.571 - type: ndcg_at_1000 value: 51.385999999999996 - type: ndcg_at_3 value: 32.578 - type: ndcg_at_5 value: 32.351 - type: precision_at_1 value: 40.816 - type: precision_at_10 value: 26.531 - type: precision_at_100 value: 7.796 - type: precision_at_1000 value: 1.555 - type: precision_at_3 value: 32.653 - type: precision_at_5 value: 33.061 - type: recall_at_1 value: 3.006 - type: recall_at_10 value: 18.738 - type: recall_at_100 value: 48.058 - type: recall_at_1000 value: 83.41300000000001 - type: recall_at_3 value: 7.166 - type: recall_at_5 value: 12.102 - task: type: Classification dataset: name: MTEB ToxicConversationsClassification type: mteb/toxic_conversations_50k config: default split: test revision: d7c0de2777da35d6aae2200a62c6e0e5af397c4c metrics: - type: accuracy value: 71.4178 - type: ap value: 14.648781342150446 - type: f1 value: 55.07299194946378 - task: type: Classification dataset: name: MTEB TweetSentimentExtractionClassification type: mteb/tweet_sentiment_extraction config: default split: test revision: d604517c81ca91fe16a244d1248fc021f9ecee7a metrics: - type: accuracy value: 60.919637804187886 - type: f1 value: 61.24122013967399 - task: type: Clustering dataset: name: MTEB TwentyNewsgroupsClustering type: mteb/twentynewsgroups-clustering config: default split: test revision: 6125ec4e24fa026cec8a478383ee943acfbd5449 metrics: - type: v_measure value: 49.207896583685695 - task: type: PairClassification dataset: name: MTEB TwitterSemEval2015 type: mteb/twittersemeval2015-pairclassification config: default split: test revision: 70970daeab8776df92f5ea462b6173c0b46fd2d1 metrics: - type: cos_sim_accuracy value: 86.23114978840078 - type: cos_sim_ap value: 74.26624727825818 - type: cos_sim_f1 value: 68.72377190817083 - type: cos_sim_precision value: 64.56400742115028 - type: cos_sim_recall value: 73.45646437994723 - type: dot_accuracy value: 86.23114978840078 - type: dot_ap value: 74.26624032659652 - type: dot_f1 value: 68.72377190817083 - type: dot_precision value: 64.56400742115028 - type: dot_recall value: 73.45646437994723 - type: euclidean_accuracy value: 86.23114978840078 - type: euclidean_ap value: 74.26624714480556 - type: euclidean_f1 value: 68.72377190817083 - type: euclidean_precision value: 64.56400742115028 - type: euclidean_recall value: 73.45646437994723 - type: manhattan_accuracy value: 86.16558383501221 - type: manhattan_ap value: 74.2091943976357 - type: manhattan_f1 value: 68.64221520524654 - type: manhattan_precision value: 63.59135913591359 - type: manhattan_recall value: 74.5646437994723 - type: max_accuracy value: 86.23114978840078 - 
type: max_ap value: 74.26624727825818 - type: max_f1 value: 68.72377190817083 - task: type: PairClassification dataset: name: MTEB TwitterURLCorpus type: mteb/twitterurlcorpus-pairclassification config: default split: test revision: 8b6510b0b1fa4e4c4f879467980e9be563ec1cdf metrics: - type: cos_sim_accuracy value: 89.3681841114604 - type: cos_sim_ap value: 86.65166387498546 - type: cos_sim_f1 value: 79.02581944698774 - type: cos_sim_precision value: 75.35796605434099 - type: cos_sim_recall value: 83.06898675700647 - type: dot_accuracy value: 89.3681841114604 - type: dot_ap value: 86.65166019802056 - type: dot_f1 value: 79.02581944698774 - type: dot_precision value: 75.35796605434099 - type: dot_recall value: 83.06898675700647 - type: euclidean_accuracy value: 89.3681841114604 - type: euclidean_ap value: 86.65166462876266 - type: euclidean_f1 value: 79.02581944698774 - type: euclidean_precision value: 75.35796605434099 - type: euclidean_recall value: 83.06898675700647 - type: manhattan_accuracy value: 89.36624364497226 - type: manhattan_ap value: 86.65076471274106 - type: manhattan_f1 value: 79.07408783532733 - type: manhattan_precision value: 76.41102972856527 - type: manhattan_recall value: 81.92947336002464 - type: max_accuracy value: 89.3681841114604 - type: max_ap value: 86.65166462876266 - type: max_f1 value: 79.07408783532733 --- # nomic-embed-text-v1.5: Resizable Production Embeddings with Matryoshka Representation Learning **Exciting Update!**: `nomic-embed-text-v1.5` is now multimodal! [nomic-embed-vision-v1](https://huggingface.co/nomic-ai/nomic-embed-vision-v1.5) is aligned to the embedding space of `nomic-embed-text-v1.5`, meaning any text embedding is multimodal! ## Usage **Important**: the text prompt *must* include a *task instruction prefix*, instructing the model which task is being performed. For example, if you are implementing a RAG application, you embed your documents as `search_document: <text here>` and embed your user queries as `search_query: <text here>`. ## Task instruction prefixes ### `search_document` #### Purpose: embed texts as documents from a dataset This prefix is used for embedding texts as documents, for example as documents for a RAG index. ```python from sentence_transformers import SentenceTransformer model = SentenceTransformer("nomic-ai/nomic-embed-text-v1", trust_remote_code=True) sentences = ['search_document: TSNE is a dimensionality reduction algorithm created by Laurens van Der Maaten'] embeddings = model.encode(sentences) print(embeddings) ``` ### `search_query` #### Purpose: embed texts as questions to answer This prefix is used for embedding texts as questions that documents from a dataset could resolve, for example as queries to be answered by a RAG application. ```python from sentence_transformers import SentenceTransformer model = SentenceTransformer("nomic-ai/nomic-embed-text-v1", trust_remote_code=True) sentences = ['search_query: Who is Laurens van Der Maaten?'] embeddings = model.encode(sentences) print(embeddings) ``` ### `clustering` #### Purpose: embed texts to group them into clusters This prefix is used for embedding texts in order to group them into clusters, discover common topics, or remove semantic duplicates. 
```python from sentence_transformers import SentenceTransformer model = SentenceTransformer("nomic-ai/nomic-embed-text-v1", trust_remote_code=True) sentences = ['clustering: the quick brown fox'] embeddings = model.encode(sentences) print(embeddings) ``` ### `classification` #### Purpose: embed texts to classify them This prefix is used for embedding texts into vectors that will be used as features for a classification model ```python from sentence_transformers import SentenceTransformer model = SentenceTransformer("nomic-ai/nomic-embed-text-v1", trust_remote_code=True) sentences = ['classification: the quick brown fox'] embeddings = model.encode(sentences) print(embeddings) ``` ### Sentence Transformers ```python import torch.nn.functional as F from sentence_transformers import SentenceTransformer matryoshka_dim = 512 model = SentenceTransformer("nomic-ai/nomic-embed-text-v1.5", trust_remote_code=True) sentences = ['search_query: What is TSNE?', 'search_query: Who is Laurens van der Maaten?'] embeddings = model.encode(sentences, convert_to_tensor=True) embeddings = F.layer_norm(embeddings, normalized_shape=(embeddings.shape[1],)) embeddings = embeddings[:, :matryoshka_dim] embeddings = F.normalize(embeddings, p=2, dim=1) print(embeddings) ``` ### Transformers ```diff import torch import torch.nn.functional as F from transformers import AutoTokenizer, AutoModel def mean_pooling(model_output, attention_mask): token_embeddings = model_output[0] input_mask_expanded = attention_mask.unsqueeze(-1).expand(token_embeddings.size()).float() return torch.sum(token_embeddings * input_mask_expanded, 1) / torch.clamp(input_mask_expanded.sum(1), min=1e-9) sentences = ['search_query: What is TSNE?', 'search_query: Who is Laurens van der Maaten?'] tokenizer = AutoTokenizer.from_pretrained('bert-base-uncased') model = AutoModel.from_pretrained('nomic-ai/nomic-embed-text-v1.5', trust_remote_code=True, safe_serialization=True) model.eval() encoded_input = tokenizer(sentences, padding=True, truncation=True, return_tensors='pt') + matryoshka_dim = 512 with torch.no_grad(): model_output = model(**encoded_input) embeddings = mean_pooling(model_output, encoded_input['attention_mask']) + embeddings = F.layer_norm(embeddings, normalized_shape=(embeddings.shape[1],)) + embeddings = embeddings[:, :matryoshka_dim] embeddings = F.normalize(embeddings, p=2, dim=1) print(embeddings) ``` The model natively supports scaling of the sequence length past 2048 tokens. 
To do so, ```diff - tokenizer = AutoTokenizer.from_pretrained('bert-base-uncased') + tokenizer = AutoTokenizer.from_pretrained('bert-base-uncased', model_max_length=8192) - model = AutoModel.from_pretrained('nomic-ai/nomic-embed-text-v1', trust_remote_code=True) + model = AutoModel.from_pretrained('nomic-ai/nomic-embed-text-v1', trust_remote_code=True, rotary_scaling_factor=2) ``` ### Transformers.js ```js import { pipeline, layer_norm } from '@huggingface/transformers'; // Create a feature extraction pipeline const extractor = await pipeline('feature-extraction', 'nomic-ai/nomic-embed-text-v1.5'); // Define sentences const texts = ['search_query: What is TSNE?', 'search_query: Who is Laurens van der Maaten?']; // Compute sentence embeddings let embeddings = await extractor(texts, { pooling: 'mean' }); console.log(embeddings); // Tensor of shape [2, 768] const matryoshka_dim = 512; embeddings = layer_norm(embeddings, [embeddings.dims[1]]) .slice(null, [0, matryoshka_dim]) .normalize(2, -1); console.log(embeddings.tolist()); ``` ## Nomic API The easiest way to use Nomic Embed is through the Nomic Embedding API. Generating embeddings with the `nomic` Python client is as easy as ```python from nomic import embed output = embed.text( texts=['Nomic Embedding API', '#keepAIOpen'], model='nomic-embed-text-v1.5', task_type='search_document', dimensionality=256, ) print(output) ``` For more information, see the [API reference](https://docs.nomic.ai/reference/endpoints/nomic-embed-text) ## Infinity Usage with [Infinity](https://github.com/michaelfeil/infinity). ```bash docker run --gpus all -v $PWD/data:/app/.cache -e HF_TOKEN=$HF_TOKEN -p "7997":"7997" \ michaelf34/infinity:0.0.70 \ v2 --model-id nomic-ai/nomic-embed-text-v1.5 --revision "main" --dtype float16 --batch-size 8 --engine torch --port 7997 --no-bettertransformer ``` ## Adjusting Dimensionality `nomic-embed-text-v1.5` is an improvement upon [Nomic Embed](https://huggingface.co/nomic-ai/nomic-embed-text-v1) that utilizes [Matryoshka Representation Learning](https://arxiv.org/abs/2205.13147) which gives developers the flexibility to trade off the embedding size for a negligible reduction in performance. | Name | SeqLen | Dimension | MTEB | | :-------------------------------:| :----- | :-------- | :------: | | nomic-embed-text-v1 | 8192 | 768 | **62.39** | | nomic-embed-text-v1.5 | 8192 | 768 | 62.28 | | nomic-embed-text-v1.5 | 8192 | 512 | 61.96 | | nomic-embed-text-v1.5 | 8192 | 256 | 61.04 | | nomic-embed-text-v1.5 | 8192 | 128 | 59.34 | | nomic-embed-text-v1.5 | 8192 | 64 | 56.10 | ![image/png](https://cdn-uploads.huggingface.co/production/uploads/607997c83a565c15675055b3/CRnaHV-c2wMUMZKw72q85.png) ## Training Click the Nomic Atlas map below to visualize a 5M sample of our contrastive pretraining data! [![image/webp](https://cdn-uploads.huggingface.co/production/uploads/607997c83a565c15675055b3/pjhJhuNyRfPagRd_c_iUz.webp)](https://atlas.nomic.ai/map/nomic-text-embed-v1-5m-sample) We train our embedder using a multi-stage training pipeline. Starting from a long-context [BERT model](https://huggingface.co/nomic-ai/nomic-bert-2048), the first unsupervised contrastive stage trains on a dataset generated from weakly related text pairs, such as question-answer pairs from forums like StackExchange and Quora, title-body pairs from Amazon reviews, and summarizations from news articles. In the second finetuning stage, higher quality labeled datasets such as search queries and answers from web searches are leveraged. 
Data curation and hard-example mining are crucial in this stage. For more details, see the Nomic Embed [Technical Report](https://static.nomic.ai/reports/2024_Nomic_Embed_Text_Technical_Report.pdf) and the corresponding [blog post](https://blog.nomic.ai/posts/nomic-embed-matryoshka).

The training data used to train the models is released in its entirety. For more details, see the `contrastors` [repository](https://github.com/nomic-ai/contrastors).

# Join the Nomic Community

- Nomic: [https://nomic.ai](https://nomic.ai)
- Discord: [https://discord.gg/myY5YDR8z8](https://discord.gg/myY5YDR8z8)
- Twitter: [https://twitter.com/nomic_ai](https://twitter.com/nomic_ai)

# Citation

If you find the model, dataset, or training code useful, please cite our work:

```bibtex
@misc{nussbaum2024nomic,
      title={Nomic Embed: Training a Reproducible Long Context Text Embedder},
      author={Zach Nussbaum and John X. Morris and Brandon Duderstadt and Andriy Mulyar},
      year={2024},
      eprint={2402.01613},
      archivePrefix={arXiv},
      primaryClass={cs.CL}
}
```
[ "BIOSSES", "SCIFACT" ]
BAAI/bge-reranker-base
BAAI
text-classification
[ "sentence-transformers", "pytorch", "onnx", "safetensors", "xlm-roberta", "mteb", "text-embeddings-inference", "text-classification", "en", "zh", "arxiv:2401.03462", "arxiv:2312.15503", "arxiv:2311.13534", "arxiv:2310.07554", "arxiv:2309.07597", "license:mit", "model-index", "region:us" ]
"2023-09-11T12:30:04Z"
2024-06-24T14:10:03+00:00
979,604
176
--- language: - en - zh library_name: sentence-transformers license: mit pipeline_tag: text-classification tags: - mteb - text-embeddings-inference model-index: - name: bge-reranker-base results: - task: type: Reranking dataset: name: MTEB CMedQAv1 type: C-MTEB/CMedQAv1-reranking config: default split: test revision: None metrics: - type: map value: 81.27206722525007 - type: mrr value: 84.14238095238095 - task: type: Reranking dataset: name: MTEB CMedQAv2 type: C-MTEB/CMedQAv2-reranking config: default split: test revision: None metrics: - type: map value: 84.10369934291236 - type: mrr value: 86.79376984126984 - task: type: Reranking dataset: name: MTEB MMarcoReranking type: C-MTEB/Mmarco-reranking config: default split: dev revision: None metrics: - type: map value: 35.4600511272538 - type: mrr value: 34.60238095238095 - task: type: Reranking dataset: name: MTEB T2Reranking type: C-MTEB/T2Reranking config: default split: dev revision: None metrics: - type: map value: 67.27728847727172 - type: mrr value: 77.1315192743764 --- **We have updated the [new reranker](https://github.com/FlagOpen/FlagEmbedding/tree/master/FlagEmbedding/llm_reranker), supporting larger lengths, more languages, and achieving better performance.** <h1 align="center">FlagEmbedding</h1> <h4 align="center"> <p> <a href=#model-list>Model List</a> | <a href=#frequently-asked-questions>FAQ</a> | <a href=#usage>Usage</a> | <a href="#evaluation">Evaluation</a> | <a href="#train">Train</a> | <a href="#citation">Citation</a> | <a href="#license">License</a> <p> </h4> **More details please refer to our Github: [FlagEmbedding](https://github.com/FlagOpen/FlagEmbedding).** [English](README.md) | [中文](https://github.com/FlagOpen/FlagEmbedding/blob/master/README_zh.md) FlagEmbedding focuses on retrieval-augmented LLMs, consisting of the following projects currently: - **Long-Context LLM**: [Activation Beacon](https://github.com/FlagOpen/FlagEmbedding/tree/master/Long_LLM/activation_beacon) - **Fine-tuning of LM** : [LM-Cocktail](https://github.com/FlagOpen/FlagEmbedding/tree/master/LM_Cocktail) - **Embedding Model**: [Visualized-BGE](https://github.com/FlagOpen/FlagEmbedding/tree/master/FlagEmbedding/visual), [BGE-M3](https://github.com/FlagOpen/FlagEmbedding/tree/master/FlagEmbedding/BGE_M3), [LLM Embedder](https://github.com/FlagOpen/FlagEmbedding/tree/master/FlagEmbedding/llm_embedder), [BGE Embedding](https://github.com/FlagOpen/FlagEmbedding/tree/master/FlagEmbedding/baai_general_embedding) - **Reranker Model**: [llm rerankers](https://github.com/FlagOpen/FlagEmbedding/tree/master/FlagEmbedding/llm_reranker), [BGE Reranker](https://github.com/FlagOpen/FlagEmbedding/tree/master/FlagEmbedding/reranker) - **Benchmark**: [C-MTEB](https://github.com/FlagOpen/FlagEmbedding/tree/master/C_MTEB) ## News - 3/18/2024: Release new [rerankers](https://github.com/FlagOpen/FlagEmbedding/tree/master/FlagEmbedding/llm_reranker), built upon powerful M3 and LLM (GEMMA and MiniCPM, not so large actually) backbones, supporitng multi-lingual processing and larger inputs, massive improvements of ranking performances on BEIR, C-MTEB/Retrieval, MIRACL, LlamaIndex Evaluation. - 3/18/2024: Release [Visualized-BGE](https://github.com/FlagOpen/FlagEmbedding/tree/master/FlagEmbedding/visual), equipping BGE with visual capabilities. Visualized-BGE can be utilized to generate embeddings for hybrid image-text data. - 1/30/2024: Release **BGE-M3**, a new member to BGE model series! 
M3 stands for **M**ulti-linguality (100+ languages), **M**ulti-granularity (input length up to 8192), and **M**ulti-functionality (unification of dense, lexical, and multi-vector/ColBERT retrieval). It is the first embedding model that supports all three retrieval methods, achieving new SOTA on multi-lingual (MIRACL) and cross-lingual (MKQA) benchmarks. [Technical Report](https://github.com/FlagOpen/FlagEmbedding/blob/master/FlagEmbedding/BGE_M3/BGE_M3.pdf) and [Code](https://github.com/FlagOpen/FlagEmbedding/tree/master/FlagEmbedding/BGE_M3). :fire:
- 1/9/2024: Release [Activation-Beacon](https://github.com/FlagOpen/FlagEmbedding/tree/master/Long_LLM/activation_beacon), an effective, efficient, compatible, and low-cost (training) method to extend the context length of LLMs. [Technical Report](https://arxiv.org/abs/2401.03462) :fire:
- 12/24/2023: Release **LLaRA**, a LLaMA-7B-based dense retriever that reaches state-of-the-art performance on MS MARCO and BEIR. The model and code will be open-sourced; please stay tuned. [Technical Report](https://arxiv.org/abs/2312.15503)
- 11/23/2023: Release [LM-Cocktail](https://github.com/FlagOpen/FlagEmbedding/tree/master/LM_Cocktail), a method to maintain general capabilities during fine-tuning by merging multiple language models. [Technical Report](https://arxiv.org/abs/2311.13534) :fire:
- 10/12/2023: Release [LLM-Embedder](https://github.com/FlagOpen/FlagEmbedding/tree/master/FlagEmbedding/llm_embedder), a unified embedding model that supports the diverse retrieval-augmentation needs of LLMs. [Technical Report](https://arxiv.org/pdf/2310.07554.pdf)
- 09/15/2023: The [technical report](https://arxiv.org/pdf/2309.07597.pdf) of BGE has been released.
- 09/15/2023: The [massive training data](https://data.baai.ac.cn/details/BAAI-MTP) of BGE has been released.
- 09/12/2023: New models:
  - **New reranker model**: release the cross-encoder models `BAAI/bge-reranker-base` and `BAAI/bge-reranker-large`, which are more powerful than the embedding models. We recommend using or fine-tuning them to re-rank the top-k documents returned by embedding models.
  - **Updated embedding model**: release the `bge-*-v1.5` embedding models to alleviate the issue of the similarity distribution and enhance retrieval ability without instruction.

<details>
<summary>More</summary>
<!-- ### More -->

- 09/07/2023: Update the [fine-tune code](https://github.com/FlagOpen/FlagEmbedding/blob/master/FlagEmbedding/baai_general_embedding/README.md): add a script to mine hard negatives and support adding an instruction during fine-tuning.
- 08/09/2023: BGE models are integrated into **LangChain**; you can use them like [this](#using-langchain). The C-MTEB **leaderboard** is [available](https://huggingface.co/spaces/mteb/leaderboard).
- 08/05/2023: Release base-scale and small-scale models, the **best performance among models of the same size 🤗**
- 08/02/2023: Release the `bge-large-*` models (short for BAAI General Embedding), which **rank 1st on the MTEB and C-MTEB benchmarks!** :tada: :tada:
- 08/01/2023: We release the [Chinese Massive Text Embedding Benchmark](https://github.com/FlagOpen/FlagEmbedding/blob/master/C_MTEB) (**C-MTEB**), consisting of 31 test datasets.
</details>

## Model List

`bge` is short for `BAAI general embedding`.
| Model | Language | | Description | query instruction for retrieval [1] | |:-------------------------------|:--------:| :--------:| :--------:|:--------:| | [BAAI/bge-m3](https://huggingface.co/BAAI/bge-m3) | Multilingual | [Inference](https://github.com/FlagOpen/FlagEmbedding/tree/master/FlagEmbedding/BGE_M3#usage) [Fine-tune](https://github.com/FlagOpen/FlagEmbedding/tree/master/FlagEmbedding/BGE_M3) | Multi-Functionality(dense retrieval, sparse retrieval, multi-vector(colbert)), Multi-Linguality, and Multi-Granularity(8192 tokens) | | | [BAAI/llm-embedder](https://huggingface.co/BAAI/llm-embedder) | English | [Inference](./FlagEmbedding/llm_embedder/README.md) [Fine-tune](./FlagEmbedding/llm_embedder/README.md) | a unified embedding model to support diverse retrieval augmentation needs for LLMs | See [README](./FlagEmbedding/llm_embedder/README.md) | | [BAAI/bge-reranker-large](https://huggingface.co/BAAI/bge-reranker-large) | Chinese and English | [Inference](#usage-for-reranker) [Fine-tune](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/reranker) | a cross-encoder model which is more accurate but less efficient [2] | | | [BAAI/bge-reranker-base](https://huggingface.co/BAAI/bge-reranker-base) | Chinese and English | [Inference](#usage-for-reranker) [Fine-tune](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/reranker) | a cross-encoder model which is more accurate but less efficient [2] | | | [BAAI/bge-large-en-v1.5](https://huggingface.co/BAAI/bge-large-en-v1.5) | English | [Inference](#usage-for-embedding-model) [Fine-tune](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/finetune) | version 1.5 with more reasonable similarity distribution | `Represent this sentence for searching relevant passages: ` | | [BAAI/bge-base-en-v1.5](https://huggingface.co/BAAI/bge-base-en-v1.5) | English | [Inference](#usage-for-embedding-model) [Fine-tune](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/finetune) | version 1.5 with more reasonable similarity distribution | `Represent this sentence for searching relevant passages: ` | | [BAAI/bge-small-en-v1.5](https://huggingface.co/BAAI/bge-small-en-v1.5) | English | [Inference](#usage-for-embedding-model) [Fine-tune](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/finetune) | version 1.5 with more reasonable similarity distribution | `Represent this sentence for searching relevant passages: ` | | [BAAI/bge-large-zh-v1.5](https://huggingface.co/BAAI/bge-large-zh-v1.5) | Chinese | [Inference](#usage-for-embedding-model) [Fine-tune](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/finetune) | version 1.5 with more reasonable similarity distribution | `为这个句子生成表示以用于检索相关文章:` | | [BAAI/bge-base-zh-v1.5](https://huggingface.co/BAAI/bge-base-zh-v1.5) | Chinese | [Inference](#usage-for-embedding-model) [Fine-tune](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/finetune) | version 1.5 with more reasonable similarity distribution | `为这个句子生成表示以用于检索相关文章:` | | [BAAI/bge-small-zh-v1.5](https://huggingface.co/BAAI/bge-small-zh-v1.5) | Chinese | [Inference](#usage-for-embedding-model) [Fine-tune](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/finetune) | version 1.5 with more reasonable similarity distribution | `为这个句子生成表示以用于检索相关文章:` | | [BAAI/bge-large-en](https://huggingface.co/BAAI/bge-large-en) | English | [Inference](#usage-for-embedding-model) [Fine-tune](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/finetune) | :trophy: rank 
**1st** in [MTEB](https://huggingface.co/spaces/mteb/leaderboard) leaderboard | `Represent this sentence for searching relevant passages: ` | | [BAAI/bge-base-en](https://huggingface.co/BAAI/bge-base-en) | English | [Inference](#usage-for-embedding-model) [Fine-tune](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/finetune) | a base-scale model but with similar ability to `bge-large-en` | `Represent this sentence for searching relevant passages: ` | | [BAAI/bge-small-en](https://huggingface.co/BAAI/bge-small-en) | English | [Inference](#usage-for-embedding-model) [Fine-tune](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/finetune) |a small-scale model but with competitive performance | `Represent this sentence for searching relevant passages: ` | | [BAAI/bge-large-zh](https://huggingface.co/BAAI/bge-large-zh) | Chinese | [Inference](#usage-for-embedding-model) [Fine-tune](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/finetune) | :trophy: rank **1st** in [C-MTEB](https://github.com/FlagOpen/FlagEmbedding/tree/master/C_MTEB) benchmark | `为这个句子生成表示以用于检索相关文章:` | | [BAAI/bge-base-zh](https://huggingface.co/BAAI/bge-base-zh) | Chinese | [Inference](#usage-for-embedding-model) [Fine-tune](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/finetune) | a base-scale model but with similar ability to `bge-large-zh` | `为这个句子生成表示以用于检索相关文章:` | | [BAAI/bge-small-zh](https://huggingface.co/BAAI/bge-small-zh) | Chinese | [Inference](#usage-for-embedding-model) [Fine-tune](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/finetune) | a small-scale model but with competitive performance | `为这个句子生成表示以用于检索相关文章:` | [1\]: If you need to search the relevant passages to a query, we suggest to add the instruction to the query; in other cases, no instruction is needed, just use the original query directly. In all cases, **no instruction** needs to be added to passages. [2\]: Different from embedding model, reranker uses question and document as input and directly output similarity instead of embedding. To balance the accuracy and time cost, cross-encoder is widely used to re-rank top-k documents retrieved by other simple models. For examples, use bge embedding model to retrieve top 100 relevant documents, and then use bge reranker to re-rank the top 100 document to get the final top-3 results. All models have been uploaded to Huggingface Hub, and you can see them at https://huggingface.co/BAAI. If you cannot open the Huggingface Hub, you also can download the models at https://model.baai.ac.cn/models . ## Frequently asked questions <details> <summary>1. How to fine-tune bge embedding model?</summary> <!-- ### How to fine-tune bge embedding model? --> Following this [example](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/finetune) to prepare data and fine-tune your model. Some suggestions: - Mine hard negatives following this [example](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/finetune#hard-negatives), which can improve the retrieval performance. - If you pre-train bge on your data, the pre-trained model cannot be directly used to calculate similarity, and it must be fine-tuned with contrastive learning before computing similarity. - If the accuracy of the fine-tuned model is still not high, it is recommended to use/fine-tune the cross-encoder model (bge-reranker) to re-rank top-k results. Hard negatives also are needed to fine-tune reranker. 
Refer to this [example](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/reranker) for the fine-tuning for reranker </details> <details> <summary>2. The similarity score between two dissimilar sentences is higher than 0.5</summary> <!-- ### The similarity score between two dissimilar sentences is higher than 0.5 --> **Suggest to use bge v1.5, which alleviates the issue of the similarity distribution.** Since we finetune the models by contrastive learning with a temperature of 0.01, the similarity distribution of the current BGE model is about in the interval \[0.6, 1\]. So a similarity score greater than 0.5 does not indicate that the two sentences are similar. For downstream tasks, such as passage retrieval or semantic similarity, **what matters is the relative order of the scores, not the absolute value.** If you need to filter similar sentences based on a similarity threshold, please select an appropriate similarity threshold based on the similarity distribution on your data (such as 0.8, 0.85, or even 0.9). </details> <details> <summary>3. When does the query instruction need to be used</summary> <!-- ### When does the query instruction need to be used --> For the `bge-*-v1.5`, we improve its retrieval ability when not using instruction. No instruction only has a slight degradation in retrieval performance compared with using instruction. So you can generate embedding without instruction in all cases for convenience. For a retrieval task that uses short queries to find long related documents, it is recommended to add instructions for these short queries. **The best method to decide whether to add instructions for queries is choosing the setting that achieves better performance on your task.** In all cases, the documents/passages do not need to add the instruction. </details> ## Usage ### Usage for Embedding Model Here are some examples for using `bge` models with [FlagEmbedding](#using-flagembedding), [Sentence-Transformers](#using-sentence-transformers), [Langchain](#using-langchain), or [Huggingface Transformers](#using-huggingface-transformers). #### Using FlagEmbedding ``` pip install -U FlagEmbedding ``` If it doesn't work for you, you can see [FlagEmbedding](https://github.com/FlagOpen/FlagEmbedding/blob/master/FlagEmbedding/baai_general_embedding/README.md) for more methods to install FlagEmbedding. ```python from FlagEmbedding import FlagModel sentences_1 = ["样例数据-1", "样例数据-2"] sentences_2 = ["样例数据-3", "样例数据-4"] model = FlagModel('BAAI/bge-large-zh-v1.5', query_instruction_for_retrieval="为这个句子生成表示以用于检索相关文章:", use_fp16=True) # Setting use_fp16 to True speeds up computation with a slight performance degradation embeddings_1 = model.encode(sentences_1) embeddings_2 = model.encode(sentences_2) similarity = embeddings_1 @ embeddings_2.T print(similarity) # for s2p(short query to long passage) retrieval task, suggest to use encode_queries() which will automatically add the instruction to each query # corpus in retrieval task can still use encode() or encode_corpus(), since they don't need instruction queries = ['query_1', 'query_2'] passages = ["样例文档-1", "样例文档-2"] q_embeddings = model.encode_queries(queries) p_embeddings = model.encode(passages) scores = q_embeddings @ p_embeddings.T ``` For the value of the argument `query_instruction_for_retrieval`, see [Model List](https://github.com/FlagOpen/FlagEmbedding/tree/master#model-list). By default, FlagModel will use all available GPUs when encoding. Please set `os.environ["CUDA_VISIBLE_DEVICES"]` to select specific GPUs. 
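For example, a minimal sketch of restricting encoding to two specific GPUs (the device ids `0,1` are placeholders for whatever GPUs you want to use); the variable has to be set before the model is created:

```python
import os

# Placeholder device ids; CUDA_VISIBLE_DEVICES must be set before CUDA is
# initialized, i.e. before the model is constructed.
os.environ["CUDA_VISIBLE_DEVICES"] = "0,1"

from FlagEmbedding import FlagModel

model = FlagModel(
    'BAAI/bge-large-zh-v1.5',
    query_instruction_for_retrieval="为这个句子生成表示以用于检索相关文章:",
    use_fp16=True,
)
embeddings = model.encode(["样例数据-1", "样例数据-2"])
print(embeddings.shape)
```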
You also can set `os.environ["CUDA_VISIBLE_DEVICES"]=""` to make all GPUs unavailable. #### Using Sentence-Transformers You can also use the `bge` models with [sentence-transformers](https://www.SBERT.net): ``` pip install -U sentence-transformers ``` ```python from sentence_transformers import SentenceTransformer sentences_1 = ["样例数据-1", "样例数据-2"] sentences_2 = ["样例数据-3", "样例数据-4"] model = SentenceTransformer('BAAI/bge-large-zh-v1.5') embeddings_1 = model.encode(sentences_1, normalize_embeddings=True) embeddings_2 = model.encode(sentences_2, normalize_embeddings=True) similarity = embeddings_1 @ embeddings_2.T print(similarity) ``` For s2p(short query to long passage) retrieval task, each short query should start with an instruction (instructions see [Model List](https://github.com/FlagOpen/FlagEmbedding/tree/master#model-list)). But the instruction is not needed for passages. ```python from sentence_transformers import SentenceTransformer queries = ['query_1', 'query_2'] passages = ["样例文档-1", "样例文档-2"] instruction = "为这个句子生成表示以用于检索相关文章:" model = SentenceTransformer('BAAI/bge-large-zh-v1.5') q_embeddings = model.encode([instruction+q for q in queries], normalize_embeddings=True) p_embeddings = model.encode(passages, normalize_embeddings=True) scores = q_embeddings @ p_embeddings.T ``` #### Using Langchain You can use `bge` in langchain like this: ```python from langchain.embeddings import HuggingFaceBgeEmbeddings model_name = "BAAI/bge-large-en-v1.5" model_kwargs = {'device': 'cuda'} encode_kwargs = {'normalize_embeddings': True} # set True to compute cosine similarity model = HuggingFaceBgeEmbeddings( model_name=model_name, model_kwargs=model_kwargs, encode_kwargs=encode_kwargs, query_instruction="为这个句子生成表示以用于检索相关文章:" ) model.query_instruction = "为这个句子生成表示以用于检索相关文章:" ``` #### Using HuggingFace Transformers With the transformers package, you can use the model like this: First, you pass your input through the transformer model, then you select the last hidden state of the first token (i.e., [CLS]) as the sentence embedding. ```python from transformers import AutoTokenizer, AutoModel import torch # Sentences we want sentence embeddings for sentences = ["样例数据-1", "样例数据-2"] # Load model from HuggingFace Hub tokenizer = AutoTokenizer.from_pretrained('BAAI/bge-large-zh-v1.5') model = AutoModel.from_pretrained('BAAI/bge-large-zh-v1.5') model.eval() # Tokenize sentences encoded_input = tokenizer(sentences, padding=True, truncation=True, return_tensors='pt') # for s2p(short query to long passage) retrieval task, add an instruction to query (not add instruction for passages) # encoded_input = tokenizer([instruction + q for q in queries], padding=True, truncation=True, return_tensors='pt') # Compute token embeddings with torch.no_grad(): model_output = model(**encoded_input) # Perform pooling. In this case, cls pooling. sentence_embeddings = model_output[0][:, 0] # normalize embeddings sentence_embeddings = torch.nn.functional.normalize(sentence_embeddings, p=2, dim=1) print("Sentence embeddings:", sentence_embeddings) ``` ### Usage for Reranker Different from embedding model, reranker uses question and document as input and directly output similarity instead of embedding. You can get a relevance score by inputting query and passage to the reranker. The reranker is optimized based cross-entropy loss, so the relevance score is not bounded to a specific range. 
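If a score in a fixed range is more convenient (for example, to apply a cut-off), one common option is to map the raw logits through a sigmoid. This preserves the ranking, and any particular threshold is a choice you make for your data rather than a property of the model. A minimal sketch, reusing the Transformers-based loading shown further below:

```python
import torch
from transformers import AutoModelForSequenceClassification, AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained('BAAI/bge-reranker-base')
model = AutoModelForSequenceClassification.from_pretrained('BAAI/bge-reranker-base')
model.eval()

pairs = [['what is panda?', 'hi'],
         ['what is panda?', 'The giant panda (Ailuropoda melanoleuca) is a bear species endemic to China.']]

with torch.no_grad():
    inputs = tokenizer(pairs, padding=True, truncation=True, return_tensors='pt', max_length=512)
    logits = model(**inputs, return_dict=True).logits.view(-1).float()

# Sigmoid maps the unbounded logits into (0, 1) without changing their relative order.
probs = torch.sigmoid(logits)
print(probs)
```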
#### Using FlagEmbedding ``` pip install -U FlagEmbedding ``` Get relevance scores (higher scores indicate more relevance): ```python from FlagEmbedding import FlagReranker reranker = FlagReranker('BAAI/bge-reranker-large', use_fp16=True) # Setting use_fp16 to True speeds up computation with a slight performance degradation score = reranker.compute_score(['query', 'passage']) print(score) scores = reranker.compute_score([['what is panda?', 'hi'], ['what is panda?', 'The giant panda (Ailuropoda melanoleuca), sometimes called a panda bear or simply panda, is a bear species endemic to China.']]) print(scores) ``` #### Using Huggingface transformers ```python import torch from transformers import AutoModelForSequenceClassification, AutoTokenizer tokenizer = AutoTokenizer.from_pretrained('BAAI/bge-reranker-large') model = AutoModelForSequenceClassification.from_pretrained('BAAI/bge-reranker-large') model.eval() pairs = [['what is panda?', 'hi'], ['what is panda?', 'The giant panda (Ailuropoda melanoleuca), sometimes called a panda bear or simply panda, is a bear species endemic to China.']] with torch.no_grad(): inputs = tokenizer(pairs, padding=True, truncation=True, return_tensors='pt', max_length=512) scores = model(**inputs, return_dict=True).logits.view(-1, ).float() print(scores) ``` #### Usage reranker with the ONNX files ```python from optimum.onnxruntime import ORTModelForSequenceClassification # type: ignore import torch from transformers import AutoModelForSequenceClassification, AutoTokenizer tokenizer = AutoTokenizer.from_pretrained('BAAI/bge-reranker-large') model = AutoModelForSequenceClassification.from_pretrained('BAAI/bge-reranker-base') model_ort = ORTModelForSequenceClassification.from_pretrained('BAAI/bge-reranker-base', file_name="onnx/model.onnx") # Sentences we want sentence embeddings for pairs = [['what is panda?', 'hi'], ['what is panda?', 'The giant panda (Ailuropoda melanoleuca), sometimes called a panda bear or simply panda, is a bear species endemic to China.']] # Tokenize sentences encoded_input = tokenizer(pairs, padding=True, truncation=True, return_tensors='pt') scores_ort = model_ort(**encoded_input, return_dict=True).logits.view(-1, ).float() # Compute token embeddings with torch.inference_mode(): scores = model_ort(**encoded_input, return_dict=True).logits.view(-1, ).float() # scores and scores_ort are identical ``` #### Usage reranker with infinity Its also possible to deploy the onnx/torch files with the [infinity_emb](https://github.com/michaelfeil/infinity) pip package. ```python import asyncio from infinity_emb import AsyncEmbeddingEngine, EngineArgs query='what is a panda?' docs = ['The giant panda (Ailuropoda melanoleuca), sometimes called a panda bear', "Paris is in France."] engine = AsyncEmbeddingEngine.from_args( EngineArgs(model_name_or_path = "BAAI/bge-reranker-base", device="cpu", engine="torch" # or engine="optimum" for onnx )) async def main(): async with engine: ranking, usage = await engine.rerank(query=query, docs=docs) print(list(zip(ranking, docs))) asyncio.run(main()) ``` ## Evaluation `baai-general-embedding` models achieve **state-of-the-art performance on both MTEB and C-MTEB leaderboard!** For more details and evaluation tools see our [scripts](https://github.com/FlagOpen/FlagEmbedding/blob/master/C_MTEB/README.md). 
- **MTEB**: | Model Name | Dimension | Sequence Length | Average (56) | Retrieval (15) |Clustering (11) | Pair Classification (3) | Reranking (4) | STS (10) | Summarization (1) | Classification (12) | |:----:|:---:|:---:|:---:|:---:|:---:|:---:|:---:|:---:|:---:|:---:| | [BAAI/bge-large-en-v1.5](https://huggingface.co/BAAI/bge-large-en-v1.5) | 1024 | 512 | **64.23** | **54.29** | 46.08 | 87.12 | 60.03 | 83.11 | 31.61 | 75.97 | | [BAAI/bge-base-en-v1.5](https://huggingface.co/BAAI/bge-base-en-v1.5) | 768 | 512 | 63.55 | 53.25 | 45.77 | 86.55 | 58.86 | 82.4 | 31.07 | 75.53 | | [BAAI/bge-small-en-v1.5](https://huggingface.co/BAAI/bge-small-en-v1.5) | 384 | 512 | 62.17 |51.68 | 43.82 | 84.92 | 58.36 | 81.59 | 30.12 | 74.14 | | [bge-large-en](https://huggingface.co/BAAI/bge-large-en) | 1024 | 512 | 63.98 | 53.9 | 46.98 | 85.8 | 59.48 | 81.56 | 32.06 | 76.21 | | [bge-base-en](https://huggingface.co/BAAI/bge-base-en) | 768 | 512 | 63.36 | 53.0 | 46.32 | 85.86 | 58.7 | 81.84 | 29.27 | 75.27 | | [gte-large](https://huggingface.co/thenlper/gte-large) | 1024 | 512 | 63.13 | 52.22 | 46.84 | 85.00 | 59.13 | 83.35 | 31.66 | 73.33 | | [gte-base](https://huggingface.co/thenlper/gte-base) | 768 | 512 | 62.39 | 51.14 | 46.2 | 84.57 | 58.61 | 82.3 | 31.17 | 73.01 | | [e5-large-v2](https://huggingface.co/intfloat/e5-large-v2) | 1024| 512 | 62.25 | 50.56 | 44.49 | 86.03 | 56.61 | 82.05 | 30.19 | 75.24 | | [bge-small-en](https://huggingface.co/BAAI/bge-small-en) | 384 | 512 | 62.11 | 51.82 | 44.31 | 83.78 | 57.97 | 80.72 | 30.53 | 74.37 | | [instructor-xl](https://huggingface.co/hkunlp/instructor-xl) | 768 | 512 | 61.79 | 49.26 | 44.74 | 86.62 | 57.29 | 83.06 | 32.32 | 61.79 | | [e5-base-v2](https://huggingface.co/intfloat/e5-base-v2) | 768 | 512 | 61.5 | 50.29 | 43.80 | 85.73 | 55.91 | 81.05 | 30.28 | 73.84 | | [gte-small](https://huggingface.co/thenlper/gte-small) | 384 | 512 | 61.36 | 49.46 | 44.89 | 83.54 | 57.7 | 82.07 | 30.42 | 72.31 | | [text-embedding-ada-002](https://platform.openai.com/docs/guides/embeddings) | 1536 | 8192 | 60.99 | 49.25 | 45.9 | 84.89 | 56.32 | 80.97 | 30.8 | 70.93 | | [e5-small-v2](https://huggingface.co/intfloat/e5-base-v2) | 384 | 512 | 59.93 | 49.04 | 39.92 | 84.67 | 54.32 | 80.39 | 31.16 | 72.94 | | [sentence-t5-xxl](https://huggingface.co/sentence-transformers/sentence-t5-xxl) | 768 | 512 | 59.51 | 42.24 | 43.72 | 85.06 | 56.42 | 82.63 | 30.08 | 73.42 | | [all-mpnet-base-v2](https://huggingface.co/sentence-transformers/all-mpnet-base-v2) | 768 | 514 | 57.78 | 43.81 | 43.69 | 83.04 | 59.36 | 80.28 | 27.49 | 65.07 | | [sgpt-bloom-7b1-msmarco](https://huggingface.co/bigscience/sgpt-bloom-7b1-msmarco) | 4096 | 2048 | 57.59 | 48.22 | 38.93 | 81.9 | 55.65 | 77.74 | 33.6 | 66.19 | - **C-MTEB**: We create the benchmark C-MTEB for Chinese text embedding which consists of 31 datasets from 6 tasks. Please refer to [C_MTEB](https://github.com/FlagOpen/FlagEmbedding/blob/master/C_MTEB/README.md) for a detailed introduction. 
| Model | Embedding dimension | Avg | Retrieval | STS | PairClassification | Classification | Reranking | Clustering | |:-------------------------------|:--------:|:--------:|:--------:|:--------:|:--------:|:--------:|:--------:|:--------:| | [**BAAI/bge-large-zh-v1.5**](https://huggingface.co/BAAI/bge-large-zh-v1.5) | 1024 | **64.53** | 70.46 | 56.25 | 81.6 | 69.13 | 65.84 | 48.99 | | [BAAI/bge-base-zh-v1.5](https://huggingface.co/BAAI/bge-base-zh-v1.5) | 768 | 63.13 | 69.49 | 53.72 | 79.75 | 68.07 | 65.39 | 47.53 | | [BAAI/bge-small-zh-v1.5](https://huggingface.co/BAAI/bge-small-zh-v1.5) | 512 | 57.82 | 61.77 | 49.11 | 70.41 | 63.96 | 60.92 | 44.18 | | [BAAI/bge-large-zh](https://huggingface.co/BAAI/bge-large-zh) | 1024 | 64.20 | 71.53 | 54.98 | 78.94 | 68.32 | 65.11 | 48.39 | | [bge-large-zh-noinstruct](https://huggingface.co/BAAI/bge-large-zh-noinstruct) | 1024 | 63.53 | 70.55 | 53 | 76.77 | 68.58 | 64.91 | 50.01 | | [BAAI/bge-base-zh](https://huggingface.co/BAAI/bge-base-zh) | 768 | 62.96 | 69.53 | 54.12 | 77.5 | 67.07 | 64.91 | 47.63 | | [multilingual-e5-large](https://huggingface.co/intfloat/multilingual-e5-large) | 1024 | 58.79 | 63.66 | 48.44 | 69.89 | 67.34 | 56.00 | 48.23 | | [BAAI/bge-small-zh](https://huggingface.co/BAAI/bge-small-zh) | 512 | 58.27 | 63.07 | 49.45 | 70.35 | 63.64 | 61.48 | 45.09 | | [m3e-base](https://huggingface.co/moka-ai/m3e-base) | 768 | 57.10 | 56.91 | 50.47 | 63.99 | 67.52 | 59.34 | 47.68 | | [m3e-large](https://huggingface.co/moka-ai/m3e-large) | 1024 | 57.05 | 54.75 | 50.42 | 64.3 | 68.2 | 59.66 | 48.88 | | [multilingual-e5-base](https://huggingface.co/intfloat/multilingual-e5-base) | 768 | 55.48 | 61.63 | 46.49 | 67.07 | 65.35 | 54.35 | 40.68 | | [multilingual-e5-small](https://huggingface.co/intfloat/multilingual-e5-small) | 384 | 55.38 | 59.95 | 45.27 | 66.45 | 65.85 | 53.86 | 45.26 | | [text-embedding-ada-002(OpenAI)](https://platform.openai.com/docs/guides/embeddings/what-are-embeddings) | 1536 | 53.02 | 52.0 | 43.35 | 69.56 | 64.31 | 54.28 | 45.68 | | [luotuo](https://huggingface.co/silk-road/luotuo-bert-medium) | 1024 | 49.37 | 44.4 | 42.78 | 66.62 | 61 | 49.25 | 44.39 | | [text2vec-base](https://huggingface.co/shibing624/text2vec-base-chinese) | 768 | 47.63 | 38.79 | 43.41 | 67.41 | 62.19 | 49.45 | 37.66 | | [text2vec-large](https://huggingface.co/GanymedeNil/text2vec-large-chinese) | 1024 | 47.36 | 41.94 | 44.97 | 70.86 | 60.66 | 49.16 | 30.02 | - **Reranking**: See [C_MTEB](https://github.com/FlagOpen/FlagEmbedding/blob/master/C_MTEB/) for evaluation script. 
| Model | T2Reranking | T2RerankingZh2En\* | T2RerankingEn2Zh\* | MMarcoReranking | CMedQAv1 | CMedQAv2 | Avg |
|:-------------------------------|:--------:|:--------:|:--------:|:--------:|:--------:|:--------:|:--------:|
| text2vec-base-multilingual | 64.66 | 62.94 | 62.51 | 14.37 | 48.46 | 48.6 | 50.26 |
| multilingual-e5-small | 65.62 | 60.94 | 56.41 | 29.91 | 67.26 | 66.54 | 57.78 |
| multilingual-e5-large | 64.55 | 61.61 | 54.28 | 28.6 | 67.42 | 67.92 | 57.4 |
| multilingual-e5-base | 64.21 | 62.13 | 54.68 | 29.5 | 66.23 | 66.98 | 57.29 |
| m3e-base | 66.03 | 62.74 | 56.07 | 17.51 | 77.05 | 76.76 | 59.36 |
| m3e-large | 66.13 | 62.72 | 56.1 | 16.46 | 77.76 | 78.27 | 59.57 |
| bge-base-zh-v1.5 | 66.49 | 63.25 | 57.02 | 29.74 | 80.47 | 84.88 | 63.64 |
| bge-large-zh-v1.5 | 65.74 | 63.39 | 57.03 | 28.74 | 83.45 | 85.44 | 63.97 |
| [BAAI/bge-reranker-base](https://huggingface.co/BAAI/bge-reranker-base) | 67.28 | 63.95 | 60.45 | 35.46 | 81.26 | 84.1 | 65.42 |
| [BAAI/bge-reranker-large](https://huggingface.co/BAAI/bge-reranker-large) | 67.6 | 64.03 | 61.44 | 37.16 | 82.15 | 84.18 | 66.09 |

\* : T2RerankingZh2En and T2RerankingEn2Zh are cross-language retrieval tasks.

## Train

### BAAI Embedding

We pre-train the models using [retromae](https://github.com/staoxiao/RetroMAE) and train them on large-scale pair data using contrastive learning.
**You can fine-tune the embedding model on your data following our [examples](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/finetune).**
We also provide a [pre-train example](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/pretrain).
Note that the goal of pre-training is to reconstruct the text; the pre-trained model cannot be used for similarity calculation directly and needs to be fine-tuned first.
For more training details for bge, see [baai_general_embedding](https://github.com/FlagOpen/FlagEmbedding/blob/master/FlagEmbedding/baai_general_embedding/README.md).

### BGE Reranker

A cross-encoder performs full attention over the input pair, which is more accurate than an embedding model (i.e., a bi-encoder) but more time-consuming. Therefore, it can be used to re-rank the top-k documents returned by an embedding model.
We train the cross-encoder on multilingual pair data. The data format is the same as for the embedding model, so you can fine-tune it easily following our [example](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/reranker).
For more details, please refer to [./FlagEmbedding/reranker/README.md](https://github.com/FlagOpen/FlagEmbedding/tree/master/FlagEmbedding/reranker).

## Citation

If you find this repository useful, please consider giving a star :star: and a citation:

```
@misc{bge_embedding,
      title={C-Pack: Packaged Resources To Advance General Chinese Embedding},
      author={Shitao Xiao and Zheng Liu and Peitian Zhang and Niklas Muennighoff},
      year={2023},
      eprint={2309.07597},
      archivePrefix={arXiv},
      primaryClass={cs.CL}
}
```

## License

FlagEmbedding is licensed under the [MIT License](https://github.com/FlagOpen/FlagEmbedding/blob/master/LICENSE). The released models can be used for commercial purposes free of charge.
[ "BEAR" ]
BAAI/bge-reranker-large
BAAI
feature-extraction
[ "transformers", "pytorch", "onnx", "safetensors", "xlm-roberta", "text-classification", "mteb", "feature-extraction", "en", "zh", "arxiv:2401.03462", "arxiv:2312.15503", "arxiv:2311.13534", "arxiv:2310.07554", "arxiv:2309.07597", "license:mit", "model-index", "autotrain_compatible", "text-embeddings-inference", "endpoints_compatible", "region:us" ]
"2023-09-12T07:39:18Z"
2024-05-11T13:39:02+00:00
917,242
383
--- language: - en - zh license: mit pipeline_tag: feature-extraction tags: - mteb model-index: - name: bge-reranker-base results: - task: type: Reranking dataset: name: MTEB CMedQAv1 type: C-MTEB/CMedQAv1-reranking config: default split: test revision: None metrics: - type: map value: 81.27206722525007 - type: mrr value: 84.14238095238095 - task: type: Reranking dataset: name: MTEB CMedQAv2 type: C-MTEB/CMedQAv2-reranking config: default split: test revision: None metrics: - type: map value: 84.10369934291236 - type: mrr value: 86.79376984126984 - task: type: Reranking dataset: name: MTEB MMarcoReranking type: C-MTEB/Mmarco-reranking config: default split: dev revision: None metrics: - type: map value: 35.4600511272538 - type: mrr value: 34.60238095238095 - task: type: Reranking dataset: name: MTEB T2Reranking type: C-MTEB/T2Reranking config: default split: dev revision: None metrics: - type: map value: 67.27728847727172 - type: mrr value: 77.1315192743764 --- **We have updated the [new reranker](https://github.com/FlagOpen/FlagEmbedding/tree/master/FlagEmbedding/llm_reranker), supporting larger lengths, more languages, and achieving better performance.** <h1 align="center">FlagEmbedding</h1> <h4 align="center"> <p> <a href=#model-list>Model List</a> | <a href=#frequently-asked-questions>FAQ</a> | <a href=#usage>Usage</a> | <a href="#evaluation">Evaluation</a> | <a href="#train">Train</a> | <a href="#citation">Citation</a> | <a href="#license">License</a> <p> </h4> **More details please refer to our Github: [FlagEmbedding](https://github.com/FlagOpen/FlagEmbedding).** [English](README.md) | [中文](https://github.com/FlagOpen/FlagEmbedding/blob/master/README_zh.md) FlagEmbedding focuses on retrieval-augmented LLMs, consisting of the following projects currently: - **Long-Context LLM**: [Activation Beacon](https://github.com/FlagOpen/FlagEmbedding/tree/master/Long_LLM/activation_beacon) - **Fine-tuning of LM** : [LM-Cocktail](https://github.com/FlagOpen/FlagEmbedding/tree/master/LM_Cocktail) - **Embedding Model**: [Visualized-BGE](https://github.com/FlagOpen/FlagEmbedding/tree/master/FlagEmbedding/visual), [BGE-M3](https://github.com/FlagOpen/FlagEmbedding/tree/master/FlagEmbedding/BGE_M3), [LLM Embedder](https://github.com/FlagOpen/FlagEmbedding/tree/master/FlagEmbedding/llm_embedder), [BGE Embedding](https://github.com/FlagOpen/FlagEmbedding/tree/master/FlagEmbedding/baai_general_embedding) - **Reranker Model**: [llm rerankers](https://github.com/FlagOpen/FlagEmbedding/tree/master/FlagEmbedding/llm_reranker), [BGE Reranker](https://github.com/FlagOpen/FlagEmbedding/tree/master/FlagEmbedding/reranker) - **Benchmark**: [C-MTEB](https://github.com/FlagOpen/FlagEmbedding/tree/master/C_MTEB) ## News - 3/18/2024: Release new [rerankers](https://github.com/FlagOpen/FlagEmbedding/tree/master/FlagEmbedding/llm_reranker), built upon powerful M3 and LLM (GEMMA and MiniCPM, not so large actually) backbones, supporitng multi-lingual processing and larger inputs, massive improvements of ranking performances on BEIR, C-MTEB/Retrieval, MIRACL, LlamaIndex Evaluation. - 3/18/2024: Release [Visualized-BGE](https://github.com/FlagOpen/FlagEmbedding/tree/master/FlagEmbedding/visual), equipping BGE with visual capabilities. Visualized-BGE can be utilized to generate embeddings for hybrid image-text data. - 1/30/2024: Release **BGE-M3**, a new member to BGE model series! 
M3 stands for **M**ulti-linguality (100+ languages), **M**ulti-granularity (input length up to 8192), and **M**ulti-functionality (unification of dense, lexical, and multi-vector/ColBERT retrieval). It is the first embedding model that supports all three retrieval methods, achieving new SOTA on multi-lingual (MIRACL) and cross-lingual (MKQA) benchmarks. [Technical Report](https://github.com/FlagOpen/FlagEmbedding/blob/master/FlagEmbedding/BGE_M3/BGE_M3.pdf) and [Code](https://github.com/FlagOpen/FlagEmbedding/tree/master/FlagEmbedding/BGE_M3). :fire:
- 1/9/2024: Release [Activation-Beacon](https://github.com/FlagOpen/FlagEmbedding/tree/master/Long_LLM/activation_beacon), an effective, efficient, compatible, and low-cost (training) method to extend the context length of LLMs. [Technical Report](https://arxiv.org/abs/2401.03462) :fire:
- 12/24/2023: Release **LLaRA**, a LLaMA-7B-based dense retriever that reaches state-of-the-art performance on MS MARCO and BEIR. The model and code will be open-sourced; please stay tuned. [Technical Report](https://arxiv.org/abs/2312.15503)
- 11/23/2023: Release [LM-Cocktail](https://github.com/FlagOpen/FlagEmbedding/tree/master/LM_Cocktail), a method to maintain general capabilities during fine-tuning by merging multiple language models. [Technical Report](https://arxiv.org/abs/2311.13534) :fire:
- 10/12/2023: Release [LLM-Embedder](https://github.com/FlagOpen/FlagEmbedding/tree/master/FlagEmbedding/llm_embedder), a unified embedding model that supports the diverse retrieval-augmentation needs of LLMs. [Technical Report](https://arxiv.org/pdf/2310.07554.pdf)
- 09/15/2023: The [technical report](https://arxiv.org/pdf/2309.07597.pdf) of BGE has been released.
- 09/15/2023: The [massive training data](https://data.baai.ac.cn/details/BAAI-MTP) of BGE has been released.
- 09/12/2023: New models:
  - **New reranker model**: release the cross-encoder models `BAAI/bge-reranker-base` and `BAAI/bge-reranker-large`, which are more powerful than the embedding models. We recommend using or fine-tuning them to re-rank the top-k documents returned by embedding models.
  - **Updated embedding model**: release the `bge-*-v1.5` embedding models to alleviate the issue of the similarity distribution and enhance retrieval ability without instruction.

<details>
<summary>More</summary>
<!-- ### More -->

- 09/07/2023: Update the [fine-tune code](https://github.com/FlagOpen/FlagEmbedding/blob/master/FlagEmbedding/baai_general_embedding/README.md): add a script to mine hard negatives and support adding an instruction during fine-tuning.
- 08/09/2023: BGE models are integrated into **LangChain**; you can use them like [this](#using-langchain). The C-MTEB **leaderboard** is [available](https://huggingface.co/spaces/mteb/leaderboard).
- 08/05/2023: Release base-scale and small-scale models, the **best performance among models of the same size 🤗**
- 08/02/2023: Release the `bge-large-*` models (short for BAAI General Embedding), which **rank 1st on the MTEB and C-MTEB benchmarks!** :tada: :tada:
- 08/01/2023: We release the [Chinese Massive Text Embedding Benchmark](https://github.com/FlagOpen/FlagEmbedding/blob/master/C_MTEB) (**C-MTEB**), consisting of 31 test datasets.
</details>

## Model List

`bge` is short for `BAAI general embedding`.
| Model | Language | | Description | query instruction for retrieval [1] | |:-------------------------------|:--------:| :--------:| :--------:|:--------:| | [BAAI/bge-m3](https://huggingface.co/BAAI/bge-m3) | Multilingual | [Inference](https://github.com/FlagOpen/FlagEmbedding/tree/master/FlagEmbedding/BGE_M3#usage) [Fine-tune](https://github.com/FlagOpen/FlagEmbedding/tree/master/FlagEmbedding/BGE_M3) | Multi-Functionality(dense retrieval, sparse retrieval, multi-vector(colbert)), Multi-Linguality, and Multi-Granularity(8192 tokens) | | | [BAAI/llm-embedder](https://huggingface.co/BAAI/llm-embedder) | English | [Inference](./FlagEmbedding/llm_embedder/README.md) [Fine-tune](./FlagEmbedding/llm_embedder/README.md) | a unified embedding model to support diverse retrieval augmentation needs for LLMs | See [README](./FlagEmbedding/llm_embedder/README.md) | | [BAAI/bge-reranker-large](https://huggingface.co/BAAI/bge-reranker-large) | Chinese and English | [Inference](#usage-for-reranker) [Fine-tune](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/reranker) | a cross-encoder model which is more accurate but less efficient [2] | | | [BAAI/bge-reranker-base](https://huggingface.co/BAAI/bge-reranker-base) | Chinese and English | [Inference](#usage-for-reranker) [Fine-tune](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/reranker) | a cross-encoder model which is more accurate but less efficient [2] | | | [BAAI/bge-large-en-v1.5](https://huggingface.co/BAAI/bge-large-en-v1.5) | English | [Inference](#usage-for-embedding-model) [Fine-tune](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/finetune) | version 1.5 with more reasonable similarity distribution | `Represent this sentence for searching relevant passages: ` | | [BAAI/bge-base-en-v1.5](https://huggingface.co/BAAI/bge-base-en-v1.5) | English | [Inference](#usage-for-embedding-model) [Fine-tune](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/finetune) | version 1.5 with more reasonable similarity distribution | `Represent this sentence for searching relevant passages: ` | | [BAAI/bge-small-en-v1.5](https://huggingface.co/BAAI/bge-small-en-v1.5) | English | [Inference](#usage-for-embedding-model) [Fine-tune](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/finetune) | version 1.5 with more reasonable similarity distribution | `Represent this sentence for searching relevant passages: ` | | [BAAI/bge-large-zh-v1.5](https://huggingface.co/BAAI/bge-large-zh-v1.5) | Chinese | [Inference](#usage-for-embedding-model) [Fine-tune](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/finetune) | version 1.5 with more reasonable similarity distribution | `为这个句子生成表示以用于检索相关文章:` | | [BAAI/bge-base-zh-v1.5](https://huggingface.co/BAAI/bge-base-zh-v1.5) | Chinese | [Inference](#usage-for-embedding-model) [Fine-tune](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/finetune) | version 1.5 with more reasonable similarity distribution | `为这个句子生成表示以用于检索相关文章:` | | [BAAI/bge-small-zh-v1.5](https://huggingface.co/BAAI/bge-small-zh-v1.5) | Chinese | [Inference](#usage-for-embedding-model) [Fine-tune](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/finetune) | version 1.5 with more reasonable similarity distribution | `为这个句子生成表示以用于检索相关文章:` | | [BAAI/bge-large-en](https://huggingface.co/BAAI/bge-large-en) | English | [Inference](#usage-for-embedding-model) [Fine-tune](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/finetune) | :trophy: rank 
**1st** in [MTEB](https://huggingface.co/spaces/mteb/leaderboard) leaderboard | `Represent this sentence for searching relevant passages: ` | | [BAAI/bge-base-en](https://huggingface.co/BAAI/bge-base-en) | English | [Inference](#usage-for-embedding-model) [Fine-tune](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/finetune) | a base-scale model but with similar ability to `bge-large-en` | `Represent this sentence for searching relevant passages: ` | | [BAAI/bge-small-en](https://huggingface.co/BAAI/bge-small-en) | English | [Inference](#usage-for-embedding-model) [Fine-tune](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/finetune) |a small-scale model but with competitive performance | `Represent this sentence for searching relevant passages: ` | | [BAAI/bge-large-zh](https://huggingface.co/BAAI/bge-large-zh) | Chinese | [Inference](#usage-for-embedding-model) [Fine-tune](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/finetune) | :trophy: rank **1st** in [C-MTEB](https://github.com/FlagOpen/FlagEmbedding/tree/master/C_MTEB) benchmark | `为这个句子生成表示以用于检索相关文章:` | | [BAAI/bge-base-zh](https://huggingface.co/BAAI/bge-base-zh) | Chinese | [Inference](#usage-for-embedding-model) [Fine-tune](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/finetune) | a base-scale model but with similar ability to `bge-large-zh` | `为这个句子生成表示以用于检索相关文章:` | | [BAAI/bge-small-zh](https://huggingface.co/BAAI/bge-small-zh) | Chinese | [Inference](#usage-for-embedding-model) [Fine-tune](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/finetune) | a small-scale model but with competitive performance | `为这个句子生成表示以用于检索相关文章:` | [1\]: If you need to search the relevant passages to a query, we suggest to add the instruction to the query; in other cases, no instruction is needed, just use the original query directly. In all cases, **no instruction** needs to be added to passages. [2\]: Different from embedding model, reranker uses question and document as input and directly output similarity instead of embedding. To balance the accuracy and time cost, cross-encoder is widely used to re-rank top-k documents retrieved by other simple models. For examples, use bge embedding model to retrieve top 100 relevant documents, and then use bge reranker to re-rank the top 100 document to get the final top-3 results. All models have been uploaded to Huggingface Hub, and you can see them at https://huggingface.co/BAAI. If you cannot open the Huggingface Hub, you also can download the models at https://model.baai.ac.cn/models . ## Frequently asked questions <details> <summary>1. How to fine-tune bge embedding model?</summary> <!-- ### How to fine-tune bge embedding model? --> Following this [example](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/finetune) to prepare data and fine-tune your model. Some suggestions: - Mine hard negatives following this [example](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/finetune#hard-negatives), which can improve the retrieval performance. - If you pre-train bge on your data, the pre-trained model cannot be directly used to calculate similarity, and it must be fine-tuned with contrastive learning before computing similarity. - If the accuracy of the fine-tuned model is still not high, it is recommended to use/fine-tune the cross-encoder model (bge-reranker) to re-rank top-k results. Hard negatives also are needed to fine-tune reranker. 
Refer to this [example](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/reranker) for the fine-tuning for reranker </details> <details> <summary>2. The similarity score between two dissimilar sentences is higher than 0.5</summary> <!-- ### The similarity score between two dissimilar sentences is higher than 0.5 --> **Suggest to use bge v1.5, which alleviates the issue of the similarity distribution.** Since we finetune the models by contrastive learning with a temperature of 0.01, the similarity distribution of the current BGE model is about in the interval \[0.6, 1\]. So a similarity score greater than 0.5 does not indicate that the two sentences are similar. For downstream tasks, such as passage retrieval or semantic similarity, **what matters is the relative order of the scores, not the absolute value.** If you need to filter similar sentences based on a similarity threshold, please select an appropriate similarity threshold based on the similarity distribution on your data (such as 0.8, 0.85, or even 0.9). </details> <details> <summary>3. When does the query instruction need to be used</summary> <!-- ### When does the query instruction need to be used --> For the `bge-*-v1.5`, we improve its retrieval ability when not using instruction. No instruction only has a slight degradation in retrieval performance compared with using instruction. So you can generate embedding without instruction in all cases for convenience. For a retrieval task that uses short queries to find long related documents, it is recommended to add instructions for these short queries. **The best method to decide whether to add instructions for queries is choosing the setting that achieves better performance on your task.** In all cases, the documents/passages do not need to add the instruction. </details> ## Usage ### Usage for Embedding Model Here are some examples for using `bge` models with [FlagEmbedding](#using-flagembedding), [Sentence-Transformers](#using-sentence-transformers), [Langchain](#using-langchain), or [Huggingface Transformers](#using-huggingface-transformers). #### Using FlagEmbedding ``` pip install -U FlagEmbedding ``` If it doesn't work for you, you can see [FlagEmbedding](https://github.com/FlagOpen/FlagEmbedding/blob/master/FlagEmbedding/baai_general_embedding/README.md) for more methods to install FlagEmbedding. ```python from FlagEmbedding import FlagModel sentences_1 = ["样例数据-1", "样例数据-2"] sentences_2 = ["样例数据-3", "样例数据-4"] model = FlagModel('BAAI/bge-large-zh-v1.5', query_instruction_for_retrieval="为这个句子生成表示以用于检索相关文章:", use_fp16=True) # Setting use_fp16 to True speeds up computation with a slight performance degradation embeddings_1 = model.encode(sentences_1) embeddings_2 = model.encode(sentences_2) similarity = embeddings_1 @ embeddings_2.T print(similarity) # for s2p(short query to long passage) retrieval task, suggest to use encode_queries() which will automatically add the instruction to each query # corpus in retrieval task can still use encode() or encode_corpus(), since they don't need instruction queries = ['query_1', 'query_2'] passages = ["样例文档-1", "样例文档-2"] q_embeddings = model.encode_queries(queries) p_embeddings = model.encode(passages) scores = q_embeddings @ p_embeddings.T ``` For the value of the argument `query_instruction_for_retrieval`, see [Model List](https://github.com/FlagOpen/FlagEmbedding/tree/master#model-list). By default, FlagModel will use all available GPUs when encoding. Please set `os.environ["CUDA_VISIBLE_DEVICES"]` to select specific GPUs. 
You also can set `os.environ["CUDA_VISIBLE_DEVICES"]=""` to make all GPUs unavailable. #### Using Sentence-Transformers You can also use the `bge` models with [sentence-transformers](https://www.SBERT.net): ``` pip install -U sentence-transformers ``` ```python from sentence_transformers import SentenceTransformer sentences_1 = ["样例数据-1", "样例数据-2"] sentences_2 = ["样例数据-3", "样例数据-4"] model = SentenceTransformer('BAAI/bge-large-zh-v1.5') embeddings_1 = model.encode(sentences_1, normalize_embeddings=True) embeddings_2 = model.encode(sentences_2, normalize_embeddings=True) similarity = embeddings_1 @ embeddings_2.T print(similarity) ``` For s2p(short query to long passage) retrieval task, each short query should start with an instruction (instructions see [Model List](https://github.com/FlagOpen/FlagEmbedding/tree/master#model-list)). But the instruction is not needed for passages. ```python from sentence_transformers import SentenceTransformer queries = ['query_1', 'query_2'] passages = ["样例文档-1", "样例文档-2"] instruction = "为这个句子生成表示以用于检索相关文章:" model = SentenceTransformer('BAAI/bge-large-zh-v1.5') q_embeddings = model.encode([instruction+q for q in queries], normalize_embeddings=True) p_embeddings = model.encode(passages, normalize_embeddings=True) scores = q_embeddings @ p_embeddings.T ``` #### Using Langchain You can use `bge` in langchain like this: ```python from langchain.embeddings import HuggingFaceBgeEmbeddings model_name = "BAAI/bge-large-en-v1.5" model_kwargs = {'device': 'cuda'} encode_kwargs = {'normalize_embeddings': True} # set True to compute cosine similarity model = HuggingFaceBgeEmbeddings( model_name=model_name, model_kwargs=model_kwargs, encode_kwargs=encode_kwargs, query_instruction="为这个句子生成表示以用于检索相关文章:" ) model.query_instruction = "为这个句子生成表示以用于检索相关文章:" ``` #### Using HuggingFace Transformers With the transformers package, you can use the model like this: First, you pass your input through the transformer model, then you select the last hidden state of the first token (i.e., [CLS]) as the sentence embedding. ```python from transformers import AutoTokenizer, AutoModel import torch # Sentences we want sentence embeddings for sentences = ["样例数据-1", "样例数据-2"] # Load model from HuggingFace Hub tokenizer = AutoTokenizer.from_pretrained('BAAI/bge-large-zh-v1.5') model = AutoModel.from_pretrained('BAAI/bge-large-zh-v1.5') model.eval() # Tokenize sentences encoded_input = tokenizer(sentences, padding=True, truncation=True, return_tensors='pt') # for s2p(short query to long passage) retrieval task, add an instruction to query (not add instruction for passages) # encoded_input = tokenizer([instruction + q for q in queries], padding=True, truncation=True, return_tensors='pt') # Compute token embeddings with torch.no_grad(): model_output = model(**encoded_input) # Perform pooling. In this case, cls pooling. sentence_embeddings = model_output[0][:, 0] # normalize embeddings sentence_embeddings = torch.nn.functional.normalize(sentence_embeddings, p=2, dim=1) print("Sentence embeddings:", sentence_embeddings) ``` ### Usage for Reranker Different from embedding model, reranker uses question and document as input and directly output similarity instead of embedding. You can get a relevance score by inputting query and passage to the reranker. The reranker is optimized based cross-entropy loss, so the relevance score is not bounded to a specific range. 
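Before the per-library examples below, here is a hedged end-to-end sketch of the retrieve-then-rerank workflow described earlier: a BGE embedding model retrieves a candidate set, and the cross-encoder re-orders it. The corpus, query, and `top_k` value are illustrative placeholders, not recommendations:

```python
import numpy as np
from FlagEmbedding import FlagModel, FlagReranker

# Toy corpus and query; replace with your own data.
corpus = [
    "The giant panda is a bear species endemic to China.",
    "Paris is the capital of France.",
    "Pandas feed almost exclusively on bamboo.",
]
query = "what do pandas eat?"

# Stage 1: bi-encoder retrieval with a BGE embedding model
# (the instruction is added to the query only, not to the passages).
embedder = FlagModel(
    'BAAI/bge-large-en-v1.5',
    query_instruction_for_retrieval="Represent this sentence for searching relevant passages: ",
)
doc_emb = embedder.encode(corpus)
query_emb = embedder.encode_queries([query])
top_k = 2  # placeholder candidate-set size
candidate_ids = np.argsort(-(query_emb @ doc_emb.T)[0])[:top_k]

# Stage 2: cross-encoder re-ranking of the retrieved candidates.
reranker = FlagReranker('BAAI/bge-reranker-large', use_fp16=True)
scores = reranker.compute_score([[query, corpus[int(i)]] for i in candidate_ids])
for i, s in sorted(zip(candidate_ids, scores), key=lambda x: -x[1]):
    print(round(float(s), 3), corpus[int(i)])
```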
#### Using FlagEmbedding ``` pip install -U FlagEmbedding ``` Get relevance scores (higher scores indicate more relevance): ```python from FlagEmbedding import FlagReranker reranker = FlagReranker('BAAI/bge-reranker-large', use_fp16=True) # Setting use_fp16 to True speeds up computation with a slight performance degradation score = reranker.compute_score(['query', 'passage']) print(score) scores = reranker.compute_score([['what is panda?', 'hi'], ['what is panda?', 'The giant panda (Ailuropoda melanoleuca), sometimes called a panda bear or simply panda, is a bear species endemic to China.']]) print(scores) ``` #### Using Huggingface transformers ```python import torch from transformers import AutoModelForSequenceClassification, AutoTokenizer tokenizer = AutoTokenizer.from_pretrained('BAAI/bge-reranker-large') model = AutoModelForSequenceClassification.from_pretrained('BAAI/bge-reranker-large') model.eval() pairs = [['what is panda?', 'hi'], ['what is panda?', 'The giant panda (Ailuropoda melanoleuca), sometimes called a panda bear or simply panda, is a bear species endemic to China.']] with torch.no_grad(): inputs = tokenizer(pairs, padding=True, truncation=True, return_tensors='pt', max_length=512) scores = model(**inputs, return_dict=True).logits.view(-1, ).float() print(scores) ``` #### Usage reranker with the ONNX files ```python from optimum.onnxruntime import ORTModelForSequenceClassification # type: ignore import torch from transformers import AutoModelForSequenceClassification, AutoTokenizer tokenizer = AutoTokenizer.from_pretrained('BAAI/bge-reranker-large') model = AutoModelForSequenceClassification.from_pretrained('BAAI/bge-reranker-base') model_ort = ORTModelForSequenceClassification.from_pretrained('BAAI/bge-reranker-base', file_name="onnx/model.onnx") # Sentences we want sentence embeddings for pairs = [['what is panda?', 'hi'], ['what is panda?', 'The giant panda (Ailuropoda melanoleuca), sometimes called a panda bear or simply panda, is a bear species endemic to China.']] # Tokenize sentences encoded_input = tokenizer(pairs, padding=True, truncation=True, return_tensors='pt') scores_ort = model_ort(**encoded_input, return_dict=True).logits.view(-1, ).float() # Compute token embeddings with torch.inference_mode(): scores = model_ort(**encoded_input, return_dict=True).logits.view(-1, ).float() # scores and scores_ort are identical ``` #### Usage reranker with infinity Its also possible to deploy the onnx/torch files with the [infinity_emb](https://github.com/michaelfeil/infinity) pip package. ```python import asyncio from infinity_emb import AsyncEmbeddingEngine, EngineArgs query='what is a panda?' docs = ['The giant panda (Ailuropoda melanoleuca), sometimes called a panda bear', "Paris is in France."] engine = AsyncEmbeddingEngine.from_args( EngineArgs(model_name_or_path = "BAAI/bge-reranker-base", device="cpu", engine="torch" # or engine="optimum" for onnx )) async def main(): async with engine: ranking, usage = await engine.rerank(query=query, docs=docs) print(list(zip(ranking, docs))) asyncio.run(main()) ``` ## Evaluation `baai-general-embedding` models achieve **state-of-the-art performance on both MTEB and C-MTEB leaderboard!** For more details and evaluation tools see our [scripts](https://github.com/FlagOpen/FlagEmbedding/blob/master/C_MTEB/README.md). 
- **MTEB**: | Model Name | Dimension | Sequence Length | Average (56) | Retrieval (15) |Clustering (11) | Pair Classification (3) | Reranking (4) | STS (10) | Summarization (1) | Classification (12) | |:----:|:---:|:---:|:---:|:---:|:---:|:---:|:---:|:---:|:---:|:---:| | [BAAI/bge-large-en-v1.5](https://huggingface.co/BAAI/bge-large-en-v1.5) | 1024 | 512 | **64.23** | **54.29** | 46.08 | 87.12 | 60.03 | 83.11 | 31.61 | 75.97 | | [BAAI/bge-base-en-v1.5](https://huggingface.co/BAAI/bge-base-en-v1.5) | 768 | 512 | 63.55 | 53.25 | 45.77 | 86.55 | 58.86 | 82.4 | 31.07 | 75.53 | | [BAAI/bge-small-en-v1.5](https://huggingface.co/BAAI/bge-small-en-v1.5) | 384 | 512 | 62.17 |51.68 | 43.82 | 84.92 | 58.36 | 81.59 | 30.12 | 74.14 | | [bge-large-en](https://huggingface.co/BAAI/bge-large-en) | 1024 | 512 | 63.98 | 53.9 | 46.98 | 85.8 | 59.48 | 81.56 | 32.06 | 76.21 | | [bge-base-en](https://huggingface.co/BAAI/bge-base-en) | 768 | 512 | 63.36 | 53.0 | 46.32 | 85.86 | 58.7 | 81.84 | 29.27 | 75.27 | | [gte-large](https://huggingface.co/thenlper/gte-large) | 1024 | 512 | 63.13 | 52.22 | 46.84 | 85.00 | 59.13 | 83.35 | 31.66 | 73.33 | | [gte-base](https://huggingface.co/thenlper/gte-base) | 768 | 512 | 62.39 | 51.14 | 46.2 | 84.57 | 58.61 | 82.3 | 31.17 | 73.01 | | [e5-large-v2](https://huggingface.co/intfloat/e5-large-v2) | 1024| 512 | 62.25 | 50.56 | 44.49 | 86.03 | 56.61 | 82.05 | 30.19 | 75.24 | | [bge-small-en](https://huggingface.co/BAAI/bge-small-en) | 384 | 512 | 62.11 | 51.82 | 44.31 | 83.78 | 57.97 | 80.72 | 30.53 | 74.37 | | [instructor-xl](https://huggingface.co/hkunlp/instructor-xl) | 768 | 512 | 61.79 | 49.26 | 44.74 | 86.62 | 57.29 | 83.06 | 32.32 | 61.79 | | [e5-base-v2](https://huggingface.co/intfloat/e5-base-v2) | 768 | 512 | 61.5 | 50.29 | 43.80 | 85.73 | 55.91 | 81.05 | 30.28 | 73.84 | | [gte-small](https://huggingface.co/thenlper/gte-small) | 384 | 512 | 61.36 | 49.46 | 44.89 | 83.54 | 57.7 | 82.07 | 30.42 | 72.31 | | [text-embedding-ada-002](https://platform.openai.com/docs/guides/embeddings) | 1536 | 8192 | 60.99 | 49.25 | 45.9 | 84.89 | 56.32 | 80.97 | 30.8 | 70.93 | | [e5-small-v2](https://huggingface.co/intfloat/e5-base-v2) | 384 | 512 | 59.93 | 49.04 | 39.92 | 84.67 | 54.32 | 80.39 | 31.16 | 72.94 | | [sentence-t5-xxl](https://huggingface.co/sentence-transformers/sentence-t5-xxl) | 768 | 512 | 59.51 | 42.24 | 43.72 | 85.06 | 56.42 | 82.63 | 30.08 | 73.42 | | [all-mpnet-base-v2](https://huggingface.co/sentence-transformers/all-mpnet-base-v2) | 768 | 514 | 57.78 | 43.81 | 43.69 | 83.04 | 59.36 | 80.28 | 27.49 | 65.07 | | [sgpt-bloom-7b1-msmarco](https://huggingface.co/bigscience/sgpt-bloom-7b1-msmarco) | 4096 | 2048 | 57.59 | 48.22 | 38.93 | 81.9 | 55.65 | 77.74 | 33.6 | 66.19 | - **C-MTEB**: We create the benchmark C-MTEB for Chinese text embedding which consists of 31 datasets from 6 tasks. Please refer to [C_MTEB](https://github.com/FlagOpen/FlagEmbedding/blob/master/C_MTEB/README.md) for a detailed introduction. 
| Model | Embedding dimension | Avg | Retrieval | STS | PairClassification | Classification | Reranking | Clustering | |:-------------------------------|:--------:|:--------:|:--------:|:--------:|:--------:|:--------:|:--------:|:--------:| | [**BAAI/bge-large-zh-v1.5**](https://huggingface.co/BAAI/bge-large-zh-v1.5) | 1024 | **64.53** | 70.46 | 56.25 | 81.6 | 69.13 | 65.84 | 48.99 | | [BAAI/bge-base-zh-v1.5](https://huggingface.co/BAAI/bge-base-zh-v1.5) | 768 | 63.13 | 69.49 | 53.72 | 79.75 | 68.07 | 65.39 | 47.53 | | [BAAI/bge-small-zh-v1.5](https://huggingface.co/BAAI/bge-small-zh-v1.5) | 512 | 57.82 | 61.77 | 49.11 | 70.41 | 63.96 | 60.92 | 44.18 | | [BAAI/bge-large-zh](https://huggingface.co/BAAI/bge-large-zh) | 1024 | 64.20 | 71.53 | 54.98 | 78.94 | 68.32 | 65.11 | 48.39 | | [bge-large-zh-noinstruct](https://huggingface.co/BAAI/bge-large-zh-noinstruct) | 1024 | 63.53 | 70.55 | 53 | 76.77 | 68.58 | 64.91 | 50.01 | | [BAAI/bge-base-zh](https://huggingface.co/BAAI/bge-base-zh) | 768 | 62.96 | 69.53 | 54.12 | 77.5 | 67.07 | 64.91 | 47.63 | | [multilingual-e5-large](https://huggingface.co/intfloat/multilingual-e5-large) | 1024 | 58.79 | 63.66 | 48.44 | 69.89 | 67.34 | 56.00 | 48.23 | | [BAAI/bge-small-zh](https://huggingface.co/BAAI/bge-small-zh) | 512 | 58.27 | 63.07 | 49.45 | 70.35 | 63.64 | 61.48 | 45.09 | | [m3e-base](https://huggingface.co/moka-ai/m3e-base) | 768 | 57.10 | 56.91 | 50.47 | 63.99 | 67.52 | 59.34 | 47.68 | | [m3e-large](https://huggingface.co/moka-ai/m3e-large) | 1024 | 57.05 | 54.75 | 50.42 | 64.3 | 68.2 | 59.66 | 48.88 | | [multilingual-e5-base](https://huggingface.co/intfloat/multilingual-e5-base) | 768 | 55.48 | 61.63 | 46.49 | 67.07 | 65.35 | 54.35 | 40.68 | | [multilingual-e5-small](https://huggingface.co/intfloat/multilingual-e5-small) | 384 | 55.38 | 59.95 | 45.27 | 66.45 | 65.85 | 53.86 | 45.26 | | [text-embedding-ada-002(OpenAI)](https://platform.openai.com/docs/guides/embeddings/what-are-embeddings) | 1536 | 53.02 | 52.0 | 43.35 | 69.56 | 64.31 | 54.28 | 45.68 | | [luotuo](https://huggingface.co/silk-road/luotuo-bert-medium) | 1024 | 49.37 | 44.4 | 42.78 | 66.62 | 61 | 49.25 | 44.39 | | [text2vec-base](https://huggingface.co/shibing624/text2vec-base-chinese) | 768 | 47.63 | 38.79 | 43.41 | 67.41 | 62.19 | 49.45 | 37.66 | | [text2vec-large](https://huggingface.co/GanymedeNil/text2vec-large-chinese) | 1024 | 47.36 | 41.94 | 44.97 | 70.86 | 60.66 | 49.16 | 30.02 | - **Reranking**: See [C_MTEB](https://github.com/FlagOpen/FlagEmbedding/blob/master/C_MTEB/) for evaluation script. 
| Model | T2Reranking | T2RerankingZh2En\* | T2RerankingEn2Zh\* | MMarcoReranking | CMedQAv1 | CMedQAv2 | Avg |
|:-------------------------------|:--------:|:--------:|:--------:|:--------:|:--------:|:--------:|:--------:|
| text2vec-base-multilingual | 64.66 | 62.94 | 62.51 | 14.37 | 48.46 | 48.6 | 50.26 |
| multilingual-e5-small | 65.62 | 60.94 | 56.41 | 29.91 | 67.26 | 66.54 | 57.78 |
| multilingual-e5-large | 64.55 | 61.61 | 54.28 | 28.6 | 67.42 | 67.92 | 57.4 |
| multilingual-e5-base | 64.21 | 62.13 | 54.68 | 29.5 | 66.23 | 66.98 | 57.29 |
| m3e-base | 66.03 | 62.74 | 56.07 | 17.51 | 77.05 | 76.76 | 59.36 |
| m3e-large | 66.13 | 62.72 | 56.1 | 16.46 | 77.76 | 78.27 | 59.57 |
| bge-base-zh-v1.5 | 66.49 | 63.25 | 57.02 | 29.74 | 80.47 | 84.88 | 63.64 |
| bge-large-zh-v1.5 | 65.74 | 63.39 | 57.03 | 28.74 | 83.45 | 85.44 | 63.97 |
| [BAAI/bge-reranker-base](https://huggingface.co/BAAI/bge-reranker-base) | 67.28 | 63.95 | 60.45 | 35.46 | 81.26 | 84.1 | 65.42 |
| [BAAI/bge-reranker-large](https://huggingface.co/BAAI/bge-reranker-large) | 67.6 | 64.03 | 61.44 | 37.16 | 82.15 | 84.18 | 66.09 |

\* : T2RerankingZh2En and T2RerankingEn2Zh are cross-language retrieval tasks.

## Train

### BAAI Embedding

We pre-train the models using [retromae](https://github.com/staoxiao/RetroMAE) and train them on large-scale pair data using contrastive learning.
**You can fine-tune the embedding model on your data following our [examples](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/finetune).**
We also provide a [pre-train example](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/pretrain).
Note that the goal of pre-training is to reconstruct the text; the pre-trained model cannot be used for similarity calculation directly and needs to be fine-tuned first.
For more training details of bge, see [baai_general_embedding](https://github.com/FlagOpen/FlagEmbedding/blob/master/FlagEmbedding/baai_general_embedding/README.md).

### BGE Reranker

A cross-encoder performs full attention over the input pair, which is more accurate than an embedding model (i.e., a bi-encoder) but also more time-consuming. Therefore, it can be used to re-rank the top-k documents returned by the embedding model.
We train the cross-encoder on multilingual pair data. The data format is the same as for the embedding model, so you can fine-tune it easily following our [example](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/reranker).
For more details, please refer to [./FlagEmbedding/reranker/README.md](https://github.com/FlagOpen/FlagEmbedding/tree/master/FlagEmbedding/reranker).

## Citation

If you find this repository useful, please consider giving it a star :star: and a citation

```
@misc{bge_embedding,
      title={C-Pack: Packaged Resources To Advance General Chinese Embedding},
      author={Shitao Xiao and Zheng Liu and Peitian Zhang and Niklas Muennighoff},
      year={2023},
      eprint={2309.07597},
      archivePrefix={arXiv},
      primaryClass={cs.CL}
}
```

## License

FlagEmbedding is licensed under the [MIT License](https://github.com/FlagOpen/FlagEmbedding/blob/master/LICENSE). The released models can be used for commercial purposes free of charge.
[ "BEAR" ]
microsoft/Phi-3-mini-4k-instruct
microsoft
text-generation
[ "transformers", "safetensors", "phi3", "text-generation", "nlp", "code", "conversational", "custom_code", "en", "fr", "license:mit", "autotrain_compatible", "text-generation-inference", "endpoints_compatible", "region:us" ]
"2024-04-22T16:18:17Z"
2024-09-20T18:09:38+00:00
903,680
1,154
---
language:
- en
- fr
license: mit
license_link: https://huggingface.co/microsoft/Phi-3-mini-4k-instruct/resolve/main/LICENSE
pipeline_tag: text-generation
tags:
- nlp
- code
inference:
  parameters:
    temperature: 0
widget:
- messages:
  - role: user
    content: Can you provide ways to eat combinations of bananas and dragonfruits?
---

🎉 **Phi-3.5**: [[mini-instruct]](https://huggingface.co/microsoft/Phi-3.5-mini-instruct); [[MoE-instruct]](https://huggingface.co/microsoft/Phi-3.5-MoE-instruct) ; [[vision-instruct]](https://huggingface.co/microsoft/Phi-3.5-vision-instruct)

## Model Summary

The Phi-3-Mini-4K-Instruct is a 3.8B-parameter, lightweight, state-of-the-art open model trained with the Phi-3 datasets, which include both synthetic data and filtered publicly available website data, with a focus on high-quality and reasoning-dense properties.
The model belongs to the Phi-3 family, Mini version, and comes in two variants, [4K](https://huggingface.co/microsoft/Phi-3-mini-4k-instruct) and [128K](https://huggingface.co/microsoft/Phi-3-mini-128k-instruct), which is the context length (in tokens) that each can support.

The model has undergone a post-training process that incorporates both supervised fine-tuning and direct preference optimization for instruction following and safety. When assessed against benchmarks testing common sense, language understanding, math, code, long context and logical reasoning, Phi-3 Mini-4K-Instruct showcased robust, state-of-the-art performance among models with less than 13 billion parameters.

Resources and Technical Documentation:

🏡 [Phi-3 Portal](https://azure.microsoft.com/en-us/products/phi-3) <br>
📰 [Phi-3 Microsoft Blog](https://aka.ms/Phi-3Build2024) <br>
📖 [Phi-3 Technical Report](https://aka.ms/phi3-tech-report) <br>
🛠️ [Phi-3 on Azure AI Studio](https://aka.ms/phi3-azure-ai) <br>
👩‍🍳 [Phi-3 Cookbook](https://github.com/microsoft/Phi-3CookBook) <br>
🖥️ [Try It](https://aka.ms/try-phi3)

| | Short Context | Long Context |
| :------- | :------------- | :------------ |
| Mini | 4K [[HF]](https://huggingface.co/microsoft/Phi-3-mini-4k-instruct) ; [[ONNX]](https://huggingface.co/microsoft/Phi-3-mini-4k-instruct-onnx) ; [[GGUF]](https://huggingface.co/microsoft/Phi-3-mini-4k-instruct-gguf) | 128K [[HF]](https://huggingface.co/microsoft/Phi-3-mini-128k-instruct) ; [[ONNX]](https://huggingface.co/microsoft/Phi-3-mini-128k-instruct-onnx)|
| Small | 8K [[HF]](https://huggingface.co/microsoft/Phi-3-small-8k-instruct) ; [[ONNX]](https://huggingface.co/microsoft/Phi-3-small-8k-instruct-onnx-cuda) | 128K [[HF]](https://huggingface.co/microsoft/Phi-3-small-128k-instruct) ; [[ONNX]](https://huggingface.co/microsoft/Phi-3-small-128k-instruct-onnx-cuda)|
| Medium | 4K [[HF]](https://huggingface.co/microsoft/Phi-3-medium-4k-instruct) ; [[ONNX]](https://huggingface.co/microsoft/Phi-3-medium-4k-instruct-onnx-cuda) | 128K [[HF]](https://huggingface.co/microsoft/Phi-3-medium-128k-instruct) ; [[ONNX]](https://huggingface.co/microsoft/Phi-3-medium-128k-instruct-onnx-cuda)|
| Vision | | 128K [[HF]](https://huggingface.co/microsoft/Phi-3-vision-128k-instruct) ; [[ONNX]](https://huggingface.co/microsoft/Phi-3-vision-128k-instruct-onnx-cuda)|

## Intended Uses

**Primary use cases**

The model is intended for broad commercial and research use in English. The model is suited for general-purpose AI systems and applications which require 1) memory/compute constrained environments; 2) latency-bound scenarios; 3) strong reasoning (especially math and logic).
Our model is designed to accelerate research on language and multimodal models, for use as a building block for generative AI-powered features.

**Out-of-scope use cases**

Our models are not specifically designed or evaluated for all downstream purposes. Developers should consider common limitations of language models as they select use cases, and evaluate and mitigate for accuracy, safety, and fairness before using within a specific downstream use case, particularly for high-risk scenarios. Developers should be aware of and adhere to applicable laws or regulations (including privacy, trade compliance laws, etc.) that are relevant to their use case.

**Nothing contained in this Model Card should be interpreted as or deemed a restriction or modification to the license the model is released under.**

## Release Notes

This is an update over the original instruction-tuned Phi-3-mini release based on valuable customer feedback.
The model used additional post-training data, leading to substantial gains on instruction following and structured output.
We also improved multi-turn conversation quality, added explicit support for the <|system|> tag, and significantly improved reasoning capability.
We believe most use cases will benefit from this release, but we encourage users to test it in their particular AI applications.
We appreciate the enthusiastic adoption of the Phi-3 model family and continue to welcome all feedback from the community.

The table below highlights improvements on instruction following, structured output, and reasoning of the new release on public and internal benchmark datasets.

| Benchmarks | Original | June 2024 Update |
|:------------|:----------|:------------------|
| Instruction Extra Hard | 5.7 | 6.0 |
| Instruction Hard | 4.9 | 5.1 |
| Instructions Challenge | 24.6 | 42.3 |
| JSON Structure Output | 11.5 | 52.3 |
| XML Structure Output | 14.4 | 49.8 |
| GPQA | 23.7 | 30.6 |
| MMLU | 68.8 | 70.9 |
| **Average** | **21.9** | **36.7** |

Notes: If users would like to check out the previous version, use the git commit id **ff07dc01615f8113924aed013115ab2abd32115b**. For model conversion, e.g. to GGUF and other formats, we invite the community to experiment with various approaches and share your valuable feedback. Let's innovate together!

## How to Use

Phi-3 Mini-4K-Instruct has been integrated into the `4.41.2` version of `transformers`. The current `transformers` version can be verified with: `pip list | grep transformers`.

Examples of required packages:
```
flash_attn==2.5.8
torch==2.3.1
accelerate==0.31.0
transformers==4.41.2
```

Phi-3 Mini-4K-Instruct is also available in [Azure AI Studio](https://aka.ms/try-phi3).

### Tokenizer

Phi-3 Mini-4K-Instruct supports a vocabulary size of up to `32064` tokens. The [tokenizer files](https://huggingface.co/microsoft/Phi-3-mini-4k-instruct/blob/main/added_tokens.json) already provide placeholder tokens that can be used for downstream fine-tuning, but they can also be extended up to the model's vocabulary size.

### Chat Format

Given the nature of the training data, the Phi-3 Mini-4K-Instruct model is best suited for prompts using the chat format as follows.
You can provide the prompt as a question with a generic template as follows:
```markdown
<|system|>
You are a helpful assistant.<|end|>
<|user|>
Question?<|end|>
<|assistant|>
```

For example:
```markdown
<|system|>
You are a helpful assistant.<|end|>
<|user|>
How to explain Internet for a medieval knight?<|end|>
<|assistant|>
```

where the model generates the text after `<|assistant|>`.
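If you would rather build this prompt string programmatically than type it by hand, one possible sketch (assuming a recent `transformers` release in which the Phi-3 tokenizer ships with a chat template) is:

```python
from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("microsoft/Phi-3-mini-4k-instruct")

messages = [
    {"role": "system", "content": "You are a helpful assistant."},
    {"role": "user", "content": "How to explain Internet for a medieval knight?"},
]

# add_generation_prompt=True appends the trailing <|assistant|> marker so the model knows to respond.
prompt = tokenizer.apply_chat_template(messages, tokenize=False, add_generation_prompt=True)
print(prompt)
```

The few-shot format shown next can be produced the same way by simply appending more user/assistant turns to `messages`.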
In case of few-shots prompt, the prompt can be formatted as the following: ```markdown <|system|> You are a helpful travel assistant.<|end|> <|user|> I am going to Paris, what should I see?<|end|> <|assistant|> Paris, the capital of France, is known for its stunning architecture, art museums, historical landmarks, and romantic atmosphere. Here are some of the top attractions to see in Paris:\n\n1. The Eiffel Tower: The iconic Eiffel Tower is one of the most recognizable landmarks in the world and offers breathtaking views of the city.\n2. The Louvre Museum: The Louvre is one of the world's largest and most famous museums, housing an impressive collection of art and artifacts, including the Mona Lisa.\n3. Notre-Dame Cathedral: This beautiful cathedral is one of the most famous landmarks in Paris and is known for its Gothic architecture and stunning stained glass windows.\n\nThese are just a few of the many attractions that Paris has to offer. With so much to see and do, it's no wonder that Paris is one of the most popular tourist destinations in the world."<|end|> <|user|> What is so great about #1?<|end|> <|assistant|> ``` ### Sample inference code This code snippets show how to get quickly started with running the model on a GPU: ```python import torch from transformers import AutoModelForCausalLM, AutoTokenizer, pipeline torch.random.manual_seed(0) model = AutoModelForCausalLM.from_pretrained( "microsoft/Phi-3-mini-4k-instruct", device_map="cuda", torch_dtype="auto", trust_remote_code=True, ) tokenizer = AutoTokenizer.from_pretrained("microsoft/Phi-3-mini-4k-instruct") messages = [ {"role": "system", "content": "You are a helpful AI assistant."}, {"role": "user", "content": "Can you provide ways to eat combinations of bananas and dragonfruits?"}, {"role": "assistant", "content": "Sure! Here are some ways to eat bananas and dragonfruits together: 1. Banana and dragonfruit smoothie: Blend bananas and dragonfruits together with some milk and honey. 2. Banana and dragonfruit salad: Mix sliced bananas and dragonfruits together with some lemon juice and honey."}, {"role": "user", "content": "What about solving an 2x + 3 = 7 equation?"}, ] pipe = pipeline( "text-generation", model=model, tokenizer=tokenizer, ) generation_args = { "max_new_tokens": 500, "return_full_text": False, "temperature": 0.0, "do_sample": False, } output = pipe(messages, **generation_args) print(output[0]['generated_text']) ``` Note: If you want to use flash attention, call _AutoModelForCausalLM.from_pretrained()_ with _attn_implementation="flash_attention_2"_ ## Responsible AI Considerations Like other language models, the Phi series models can potentially behave in ways that are unfair, unreliable, or offensive. Some of the limiting behaviors to be aware of include: + Quality of Service: the Phi models are trained primarily on English text. Languages other than English will experience worse performance. English language varieties with less representation in the training data might experience worse performance than standard American English. + Representation of Harms & Perpetuation of Stereotypes: These models can over- or under-represent groups of people, erase representation of some groups, or reinforce demeaning or negative stereotypes. Despite safety post-training, these limitations may still be present due to differing levels of representation of different groups or prevalence of examples of negative stereotypes in training data that reflect real-world patterns and societal biases. 
+ Inappropriate or Offensive Content: These models may produce other types of inappropriate or offensive content, which may make it inappropriate to deploy for sensitive contexts without additional mitigations that are specific to the use case.
+ Information Reliability: Language models can generate nonsensical content or fabricate content that might sound reasonable but is inaccurate or outdated.
+ Limited Scope for Code: The majority of Phi-3 training data is based in Python and uses common packages such as "typing, math, random, collections, datetime, itertools". If the model generates Python scripts that utilize other packages or scripts in other languages, we strongly recommend users manually verify all API uses.

Developers should apply responsible AI best practices and are responsible for ensuring that a specific use case complies with relevant laws and regulations (e.g. privacy, trade, etc.). Important areas for consideration include:

+ Allocation: Models may not be suitable for scenarios that could have consequential impact on legal status or the allocation of resources or life opportunities (ex: housing, employment, credit, etc.) without further assessments and additional debiasing techniques.
+ High-Risk Scenarios: Developers should assess the suitability of using models in high-risk scenarios where unfair, unreliable or offensive outputs might be extremely costly or lead to harm. This includes providing advice in sensitive or expert domains where accuracy and reliability are critical (ex: legal or health advice). Additional safeguards should be implemented at the application level according to the deployment context.
+ Misinformation: Models may produce inaccurate information. Developers should follow transparency best practices and inform end-users they are interacting with an AI system. At the application level, developers can build feedback mechanisms and pipelines to ground responses in use-case-specific, contextual information, a technique known as Retrieval Augmented Generation (RAG).
+ Generation of Harmful Content: Developers should assess outputs for their context and use available safety classifiers or custom solutions appropriate for their use case.
+ Misuse: Other forms of misuse such as fraud, spam, or malware production may be possible, and developers should ensure that their applications do not violate applicable laws and regulations.

## Training

### Model

* Architecture: Phi-3 Mini-4K-Instruct has 3.8B parameters and is a dense decoder-only Transformer model. The model is fine-tuned with supervised fine-tuning (SFT) and Direct Preference Optimization (DPO) to ensure alignment with human preferences and safety guidelines.
* Inputs: Text. It is best suited for prompts using the chat format.
* Context length: 4K tokens
* GPUs: 512 H100-80G
* Training time: 10 days
* Training data: 4.9T tokens
* Outputs: Generated text in response to the input
* Dates: Our models were trained between May and June 2024
* Status: This is a static model trained on an offline dataset with a cutoff date of October 2023. Future versions of the tuned models may be released as we improve models.
* Release dates: June 2024.
### Datasets Our training data includes a wide variety of sources, totaling 4.9 trillion tokens, and is a combination of 1) Publicly available documents filtered rigorously for quality, selected high-quality educational data, and code; 2) Newly created synthetic, “textbook-like” data for the purpose of teaching math, coding, common sense reasoning, general knowledge of the world (science, daily activities, theory of mind, etc.); 3) High quality chat format supervised data covering various topics to reflect human preferences on different aspects such as instruct-following, truthfulness, honesty and helpfulness. We are focusing on the quality of data that could potentially improve the reasoning ability for the model, and we filter the publicly available documents to contain the correct level of knowledge. As an example, the result of a game in premier league in a particular day might be good training data for frontier models, but we need to remove such information to leave more model capacity for reasoning for the small size models. More details about data can be found in the [Phi-3 Technical Report](https://aka.ms/phi3-tech-report). ### Fine-tuning A basic example of multi-GPUs supervised fine-tuning (SFT) with TRL and Accelerate modules is provided [here](https://huggingface.co/microsoft/Phi-3-mini-4k-instruct/resolve/main/sample_finetune.py). ## Benchmarks We report the results under completion format for Phi-3-Mini-4K-Instruct on standard open-source benchmarks measuring the model's reasoning ability (both common sense reasoning and logical reasoning). We compare to Mistral-7b-v0.1, Mixtral-8x7b, Gemma 7B, Llama-3-8B-Instruct, and GPT3.5-Turbo-1106. All the reported numbers are produced with the exact same pipeline to ensure that the numbers are comparable. These numbers might differ from other published numbers due to slightly different choices in the evaluation. As is now standard, we use few-shot prompts to evaluate the models, at temperature 0. The prompts and number of shots are part of a Microsoft internal tool to evaluate language models, and in particular we did no optimization to the pipeline for Phi-3. More specifically, we do not change prompts, pick different few-shot examples, change prompt format, or do any other form of optimization for the model. The number of k–shot examples is listed per-benchmark. 
| Category | Benchmark | Phi-3-Mini-4K-Ins | Gemma-7B | Mistral-7b | Mixtral-8x7b | Llama-3-8B-Ins | GPT3.5-Turbo-1106 | |:----------|:-----------|:-------------------|:----------|:------------|:--------------|:----------------|:-------------------| | Popular aggregated benchmark | AGI Eval <br>5-shot| 39.0 | 42.1 | 35.1 | 45.2 | 42 | 48.4 | | | MMLU <br>5-shot | 70.9 | 63.6 | 61.7 | 70.5 | 66.5 | 71.4 | | | BigBench Hard CoT<br>3-shot| 73.5 | 59.6 | 57.3 | 69.7 | 51.5 | 68.3 | | Language Understanding | ANLI <br>7-shot | 53.6 | 48.7 | 47.1 | 55.2 | 57.3 | 58.1 | | | HellaSwag <br>5-shot| 75.3 | 49.8 | 58.5 | 70.4 | 71.1 | 78.8 | | Reasoning | ARC Challenge <br>10-shot | 86.3 | 78.3 | 78.6 | 87.3 | 82.8 | 87.4 | | | BoolQ <br>0-shot | 78.1 | 66 | 72.2 | 76.6 | 80.9 | 79.1 | | | MedQA <br>2-shot| 56.5 | 49.6 | 50 | 62.2 | 60.5 | 63.4 | | | OpenBookQA <br>10-shot| 82.2 | 78.6 | 79.8 | 85.8 | 82.6 | 86 | | | PIQA <br>5-shot| 83.5 | 78.1 | 77.7 | 86 | 75.7 | 86.6 | | | GPQA <br>0-shot| 30.6 | 2.9 | 15 | 6.9 | 32.4 | 30.8 | | | Social IQA <br>5-shot| 77.6 | 65.5 | 74.6 | 75.9 | 73.9 | 68.3 | | | TruthfulQA (MC2) <br>10-shot| 64.7 | 52.1 | 53 | 60.1 | 63.2 | 67.7 | | | WinoGrande <br>5-shot| 71.6 | 55.6 | 54.2 | 62 | 65 | 68.8 | | Factual Knowledge | TriviaQA <br>5-shot| 61.4 | 72.3 | 75.2 | 82.2 | 67.7 | 85.8 | | Math | GSM8K CoT <br>8-shot| 85.7 | 59.8 | 46.4 | 64.7 | 77.4 | 78.1 | | Code Generation | HumanEval <br>0-shot| 57.3 | 34.1 | 28.0 | 37.8 | 60.4 | 62.2 | | | MBPP <br>3-shot| 69.8 | 51.5 | 50.8 | 60.2 | 67.7 | 77.8 | | **Average** | | **67.6** | **56.0** | **56.4** | **64.4** | **65.5** | **70.4** | We take a closer look at different categories across 100 public benchmark datasets at the table below: | Category | Phi-3-Mini-4K-Instruct | Gemma-7B | Mistral-7B | Mixtral 8x7B | Llama-3-8B-Instruct | GPT-3.5-Turbo | |:----------|:------------------------|:----------|:------------|:--------------|:---------------------|:---------------| | Popular aggregated benchmark | 61.1 | 59.4 | 56.5 | 66.2 | 59.9 | 67.0 | | Reasoning | 70.8 | 60.3 | 62.8 | 68.1 | 69.6 | 71.8 | | Language understanding | 60.5 | 57.6 | 52.5 | 66.1 | 63.2 | 67.7 | | Code generation | 60.7 | 45.6 | 42.9 | 52.7 | 56.4 | 70.4 | | Math | 50.6 | 35.8 | 25.4 | 40.3 | 41.1 | 52.8 | | Factual knowledge | 38.4 | 46.7 | 49.8 | 58.6 | 43.1 | 63.4 | | Multilingual | 56.7 | 66.5 | 57.4 | 66.7 | 66.6 | 71.0 | | Robustness | 61.1 | 38.4 | 40.6 | 51.0 | 64.5 | 69.3 | Overall, the model with only 3.8B-param achieves a similar level of language understanding and reasoning ability as much larger models. However, it is still fundamentally limited by its size for certain tasks. The model simply does not have the capacity to store too much world knowledge, which can be seen for example with low performance on TriviaQA. However, we believe such weakness can be resolved by augmenting Phi-3-Mini with a search engine. ## Cross Platform Support [ONNX runtime](https://onnxruntime.ai/blogs/accelerating-phi-3) now supports Phi-3 mini models across platforms and hardware. Optimized phi-3 models are also published here in ONNX format, to run with ONNX Runtime on CPU and GPU across devices, including server platforms, Windows, Linux and Mac desktops, and mobile CPUs, with the precision best suited to each of these targets. DirectML GPU acceleration is supported for Windows desktops GPUs (AMD, Intel, and NVIDIA). Along with DML, ONNX Runtime provides cross platform support for Phi3 mini across a range of devices CPU, GPU, and mobile. 
Here are some of the optimized configurations we have added: 1. ONNX models for int4 DML: Quantized to int4 via AWQ 2. ONNX model for fp16 CUDA 3. ONNX model for int4 CUDA: Quantized to int4 via RTN 4. ONNX model for int4 CPU and Mobile: Quantized to int4 via R ## Software * [PyTorch](https://github.com/pytorch/pytorch) * [Transformers](https://github.com/huggingface/transformers) * [Flash-Attention](https://github.com/HazyResearch/flash-attention) ## Hardware Note that by default, the Phi-3 Mini-4K-Instruct model uses flash attention, which requires certain types of GPU hardware to run. We have tested on the following GPU types: * NVIDIA A100 * NVIDIA A6000 * NVIDIA H100 If you want to run the model on: * NVIDIA V100 or earlier generation GPUs: call AutoModelForCausalLM.from_pretrained() with attn_implementation="eager" * CPU: use the **GGUF** quantized models [4K](https://aka.ms/Phi3-mini-4k-instruct-gguf) + Optimized inference on GPU, CPU, and Mobile: use the **ONNX** models [4K](https://aka.ms/Phi3-mini-4k-instruct-onnx) ## License The model is licensed under the [MIT license](https://huggingface.co/microsoft/Phi-3-mini-4k/resolve/main/LICENSE). ## Trademarks This project may contain trademarks or logos for projects, products, or services. Authorized use of Microsoft trademarks or logos is subject to and must follow [Microsoft’s Trademark & Brand Guidelines](https://www.microsoft.com/en-us/legal/intellectualproperty/trademarks). Use of Microsoft trademarks or logos in modified versions of this project must not cause confusion or imply Microsoft sponsorship. Any use of third-party trademarks or logos are subject to those third-party’s policies.
[ "MEDQA" ]
Xenova/bge-base-en-v1.5
Xenova
feature-extraction
[ "transformers.js", "onnx", "bert", "feature-extraction", "base_model:BAAI/bge-base-en-v1.5", "base_model:quantized:BAAI/bge-base-en-v1.5", "license:mit", "region:us" ]
"2023-09-13T15:48:03Z"
2024-12-02T23:46:44+00:00
890,755
7
--- base_model: BAAI/bge-base-en-v1.5 library_name: transformers.js license: mit --- https://huggingface.co/BAAI/bge-base-en-v1.5 with ONNX weights to be compatible with Transformers.js. ## Usage (Transformers.js) If you haven't already, you can install the [Transformers.js](https://huggingface.co/docs/transformers.js) JavaScript library from [NPM](https://www.npmjs.com/package/@huggingface/transformers) using: ```bash npm i @huggingface/transformers ``` You can then use the model to compute embeddings, as follows: ```js import { pipeline } from '@huggingface/transformers'; // Create a feature-extraction pipeline const extractor = await pipeline('feature-extraction', 'Xenova/bge-base-en-v1.5'); // Compute sentence embeddings const texts = ['Hello world.', 'Example sentence.']; const embeddings = await extractor(texts, { pooling: 'mean', normalize: true }); console.log(embeddings); // Tensor { // dims: [ 2, 768 ], // type: 'float32', // data: Float32Array(1536) [ 0.019079938530921936, 0.041718777269124985, ... ], // size: 1536 // } console.log(embeddings.tolist()); // Convert embeddings to a JavaScript list // [ // [ 0.019079938530921936, 0.041718777269124985, 0.037672195583581924, ... ], // [ 0.020936904475092888, 0.020080938935279846, -0.00787576474249363, ... ] // ] ``` You can also use the model for retrieval. For example: ```js import { pipeline, cos_sim } from '@huggingface/transformers'; // Create a feature-extraction pipeline const extractor = await pipeline('feature-extraction', 'Xenova/bge-small-en-v1.5'); // List of documents you want to embed const texts = [ 'Hello world.', 'The giant panda (Ailuropoda melanoleuca), sometimes called a panda bear or simply panda, is a bear species endemic to China.', 'I love pandas so much!', ]; // Compute sentence embeddings const embeddings = await extractor(texts, { pooling: 'mean', normalize: true }); // Prepend recommended query instruction for retrieval. const query_prefix = 'Represent this sentence for searching relevant passages: ' const query = query_prefix + 'What is a panda?'; const query_embeddings = await extractor(query, { pooling: 'mean', normalize: true }); // Sort by cosine similarity score const scores = embeddings.tolist().map( (embedding, i) => ({ id: i, score: cos_sim(query_embeddings.data, embedding), text: texts[i], }) ).sort((a, b) => b.score - a.score); console.log(scores); // [ // { id: 1, score: 0.7787772374597298, text: 'The giant panda (Ailuropoda melanoleuca), sometimes called a panda bear or simply panda, is a bear species endemic to China.' }, // { id: 2, score: 0.7071589521880506, text: 'I love pandas so much!' }, // { id: 0, score: 0.4252782730390429, text: 'Hello world.' } // ] ``` --- Note: Having a separate repo for ONNX weights is intended to be a temporary solution until WebML gains more traction. If you would like to make your models web-ready, we recommend converting to ONNX using [🤗 Optimum](https://huggingface.co/docs/optimum/index) and structuring your repo like this one (with ONNX weights located in a subfolder named `onnx`).
[ "BEAR" ]
jinaai/jina-embeddings-v2-small-en
jinaai
feature-extraction
[ "sentence-transformers", "pytorch", "coreml", "onnx", "safetensors", "bert", "feature-extraction", "sentence-similarity", "mteb", "custom_code", "en", "dataset:jinaai/negation-dataset", "arxiv:2108.12409", "arxiv:2310.19923", "license:apache-2.0", "model-index", "autotrain_compatible", "text-embeddings-inference", "region:us" ]
"2023-09-27T20:17:27Z"
2025-01-06T16:26:03+00:00
842,417
133
--- datasets: - jinaai/negation-dataset language: en license: apache-2.0 tags: - sentence-transformers - feature-extraction - sentence-similarity - mteb inference: false model-index: - name: jina-embedding-s-en-v2 results: - task: type: Classification dataset: name: MTEB AmazonCounterfactualClassification (en) type: mteb/amazon_counterfactual config: en split: test revision: e8379541af4e31359cca9fbcf4b00f2671dba205 metrics: - type: accuracy value: 71.35820895522387 - type: ap value: 33.99931933598115 - type: f1 value: 65.3853685535555 - task: type: Classification dataset: name: MTEB AmazonPolarityClassification type: mteb/amazon_polarity config: default split: test revision: e2d317d38cd51312af73b3d32a06d1a08b442046 metrics: - type: accuracy value: 82.90140000000001 - type: ap value: 78.01434597815617 - type: f1 value: 82.83357802722676 - task: type: Classification dataset: name: MTEB AmazonReviewsClassification (en) type: mteb/amazon_reviews_multi config: en split: test revision: 1399c76144fd37290681b995c656ef9b2e06e26d metrics: - type: accuracy value: 40.88999999999999 - type: f1 value: 39.209432767163456 - task: type: Retrieval dataset: name: MTEB ArguAna type: arguana config: default split: test revision: None metrics: - type: map_at_1 value: 23.257 - type: map_at_10 value: 37.946000000000005 - type: map_at_100 value: 39.17 - type: map_at_1000 value: 39.181 - type: map_at_3 value: 32.99 - type: map_at_5 value: 35.467999999999996 - type: mrr_at_1 value: 23.541999999999998 - type: mrr_at_10 value: 38.057 - type: mrr_at_100 value: 39.289 - type: mrr_at_1000 value: 39.299 - type: mrr_at_3 value: 33.096 - type: mrr_at_5 value: 35.628 - type: ndcg_at_1 value: 23.257 - type: ndcg_at_10 value: 46.729 - type: ndcg_at_100 value: 51.900999999999996 - type: ndcg_at_1000 value: 52.16 - type: ndcg_at_3 value: 36.323 - type: ndcg_at_5 value: 40.766999999999996 - type: precision_at_1 value: 23.257 - type: precision_at_10 value: 7.510999999999999 - type: precision_at_100 value: 0.976 - type: precision_at_1000 value: 0.1 - type: precision_at_3 value: 15.339 - type: precision_at_5 value: 11.350999999999999 - type: recall_at_1 value: 23.257 - type: recall_at_10 value: 75.107 - type: recall_at_100 value: 97.58200000000001 - type: recall_at_1000 value: 99.57300000000001 - type: recall_at_3 value: 46.017 - type: recall_at_5 value: 56.757000000000005 - task: type: Clustering dataset: name: MTEB ArxivClusteringP2P type: mteb/arxiv-clustering-p2p config: default split: test revision: a122ad7f3f0291bf49cc6f4d32aa80929df69d5d metrics: - type: v_measure value: 44.02420878391967 - task: type: Clustering dataset: name: MTEB ArxivClusteringS2S type: mteb/arxiv-clustering-s2s config: default split: test revision: f910caf1a6075f7329cdf8c1a6135696f37dbd53 metrics: - type: v_measure value: 35.16136856000258 - task: type: Reranking dataset: name: MTEB AskUbuntuDupQuestions type: mteb/askubuntudupquestions-reranking config: default split: test revision: 2000358ca161889fa9c082cb41daa8dcfb161a54 metrics: - type: map value: 59.61809790513646 - type: mrr value: 73.07215406938397 - task: type: STS dataset: name: MTEB BIOSSES type: mteb/biosses-sts config: default split: test revision: d3fb88f8f02e40887cd149695127462bbcf29b4a metrics: - type: cos_sim_pearson value: 82.0167350090749 - type: cos_sim_spearman value: 80.51569002630401 - type: euclidean_pearson value: 81.46820525099726 - type: euclidean_spearman value: 80.51569002630401 - type: manhattan_pearson value: 81.35596555056757 - type: manhattan_spearman value: 
80.12592210903303 - task: type: Classification dataset: name: MTEB Banking77Classification type: mteb/banking77 config: default split: test revision: 0fd18e25b25c072e09e0d92ab615fda904d66300 metrics: - type: accuracy value: 78.25 - type: f1 value: 77.34950913540605 - task: type: Clustering dataset: name: MTEB BiorxivClusteringP2P type: mteb/biorxiv-clustering-p2p config: default split: test revision: 65b79d1d13f80053f67aca9498d9402c2d9f1f40 metrics: - type: v_measure value: 35.57238596005698 - task: type: Clustering dataset: name: MTEB BiorxivClusteringS2S type: mteb/biorxiv-clustering-s2s config: default split: test revision: 258694dd0231531bc1fd9de6ceb52a0853c6d908 metrics: - type: v_measure value: 29.066444306196683 - task: type: Retrieval dataset: name: MTEB CQADupstackAndroidRetrieval type: BeIR/cqadupstack config: default split: test revision: None metrics: - type: map_at_1 value: 31.891000000000002 - type: map_at_10 value: 42.772 - type: map_at_100 value: 44.108999999999995 - type: map_at_1000 value: 44.236 - type: map_at_3 value: 39.289 - type: map_at_5 value: 41.113 - type: mrr_at_1 value: 39.342 - type: mrr_at_10 value: 48.852000000000004 - type: mrr_at_100 value: 49.534 - type: mrr_at_1000 value: 49.582 - type: mrr_at_3 value: 46.089999999999996 - type: mrr_at_5 value: 47.685 - type: ndcg_at_1 value: 39.342 - type: ndcg_at_10 value: 48.988 - type: ndcg_at_100 value: 53.854 - type: ndcg_at_1000 value: 55.955 - type: ndcg_at_3 value: 43.877 - type: ndcg_at_5 value: 46.027 - type: precision_at_1 value: 39.342 - type: precision_at_10 value: 9.285 - type: precision_at_100 value: 1.488 - type: precision_at_1000 value: 0.194 - type: precision_at_3 value: 20.696 - type: precision_at_5 value: 14.878 - type: recall_at_1 value: 31.891000000000002 - type: recall_at_10 value: 60.608 - type: recall_at_100 value: 81.025 - type: recall_at_1000 value: 94.883 - type: recall_at_3 value: 45.694 - type: recall_at_5 value: 51.684 - type: map_at_1 value: 28.778 - type: map_at_10 value: 37.632 - type: map_at_100 value: 38.800000000000004 - type: map_at_1000 value: 38.934999999999995 - type: map_at_3 value: 35.293 - type: map_at_5 value: 36.547000000000004 - type: mrr_at_1 value: 35.35 - type: mrr_at_10 value: 42.936 - type: mrr_at_100 value: 43.69 - type: mrr_at_1000 value: 43.739 - type: mrr_at_3 value: 41.062 - type: mrr_at_5 value: 42.097 - type: ndcg_at_1 value: 35.35 - type: ndcg_at_10 value: 42.528 - type: ndcg_at_100 value: 46.983000000000004 - type: ndcg_at_1000 value: 49.187999999999995 - type: ndcg_at_3 value: 39.271 - type: ndcg_at_5 value: 40.654 - type: precision_at_1 value: 35.35 - type: precision_at_10 value: 7.828 - type: precision_at_100 value: 1.3010000000000002 - type: precision_at_1000 value: 0.17700000000000002 - type: precision_at_3 value: 18.96 - type: precision_at_5 value: 13.120999999999999 - type: recall_at_1 value: 28.778 - type: recall_at_10 value: 50.775000000000006 - type: recall_at_100 value: 69.66799999999999 - type: recall_at_1000 value: 83.638 - type: recall_at_3 value: 40.757 - type: recall_at_5 value: 44.86 - type: map_at_1 value: 37.584 - type: map_at_10 value: 49.69 - type: map_at_100 value: 50.639 - type: map_at_1000 value: 50.702999999999996 - type: map_at_3 value: 46.61 - type: map_at_5 value: 48.486000000000004 - type: mrr_at_1 value: 43.009 - type: mrr_at_10 value: 52.949999999999996 - type: mrr_at_100 value: 53.618 - type: mrr_at_1000 value: 53.65299999999999 - type: mrr_at_3 value: 50.605999999999995 - type: mrr_at_5 value: 52.095 - type: ndcg_at_1 value: 
43.009 - type: ndcg_at_10 value: 55.278000000000006 - type: ndcg_at_100 value: 59.134 - type: ndcg_at_1000 value: 60.528999999999996 - type: ndcg_at_3 value: 50.184 - type: ndcg_at_5 value: 52.919000000000004 - type: precision_at_1 value: 43.009 - type: precision_at_10 value: 8.821 - type: precision_at_100 value: 1.161 - type: precision_at_1000 value: 0.133 - type: precision_at_3 value: 22.424 - type: precision_at_5 value: 15.436 - type: recall_at_1 value: 37.584 - type: recall_at_10 value: 68.514 - type: recall_at_100 value: 85.099 - type: recall_at_1000 value: 95.123 - type: recall_at_3 value: 55.007 - type: recall_at_5 value: 61.714999999999996 - type: map_at_1 value: 24.7 - type: map_at_10 value: 32.804 - type: map_at_100 value: 33.738 - type: map_at_1000 value: 33.825 - type: map_at_3 value: 30.639 - type: map_at_5 value: 31.781 - type: mrr_at_1 value: 26.328000000000003 - type: mrr_at_10 value: 34.679 - type: mrr_at_100 value: 35.510000000000005 - type: mrr_at_1000 value: 35.577999999999996 - type: mrr_at_3 value: 32.58 - type: mrr_at_5 value: 33.687 - type: ndcg_at_1 value: 26.328000000000003 - type: ndcg_at_10 value: 37.313 - type: ndcg_at_100 value: 42.004000000000005 - type: ndcg_at_1000 value: 44.232 - type: ndcg_at_3 value: 33.076 - type: ndcg_at_5 value: 34.966 - type: precision_at_1 value: 26.328000000000003 - type: precision_at_10 value: 5.627 - type: precision_at_100 value: 0.8410000000000001 - type: precision_at_1000 value: 0.106 - type: precision_at_3 value: 14.011000000000001 - type: precision_at_5 value: 9.582 - type: recall_at_1 value: 24.7 - type: recall_at_10 value: 49.324 - type: recall_at_100 value: 71.018 - type: recall_at_1000 value: 87.905 - type: recall_at_3 value: 37.7 - type: recall_at_5 value: 42.281 - type: map_at_1 value: 14.350999999999999 - type: map_at_10 value: 21.745 - type: map_at_100 value: 22.731 - type: map_at_1000 value: 22.852 - type: map_at_3 value: 19.245 - type: map_at_5 value: 20.788 - type: mrr_at_1 value: 18.159 - type: mrr_at_10 value: 25.833000000000002 - type: mrr_at_100 value: 26.728 - type: mrr_at_1000 value: 26.802 - type: mrr_at_3 value: 23.383000000000003 - type: mrr_at_5 value: 24.887999999999998 - type: ndcg_at_1 value: 18.159 - type: ndcg_at_10 value: 26.518000000000004 - type: ndcg_at_100 value: 31.473000000000003 - type: ndcg_at_1000 value: 34.576 - type: ndcg_at_3 value: 21.907 - type: ndcg_at_5 value: 24.39 - type: precision_at_1 value: 18.159 - type: precision_at_10 value: 4.938 - type: precision_at_100 value: 0.853 - type: precision_at_1000 value: 0.125 - type: precision_at_3 value: 10.655000000000001 - type: precision_at_5 value: 7.985 - type: recall_at_1 value: 14.350999999999999 - type: recall_at_10 value: 37.284 - type: recall_at_100 value: 59.11300000000001 - type: recall_at_1000 value: 81.634 - type: recall_at_3 value: 24.753 - type: recall_at_5 value: 30.979 - type: map_at_1 value: 26.978 - type: map_at_10 value: 36.276 - type: map_at_100 value: 37.547000000000004 - type: map_at_1000 value: 37.678 - type: map_at_3 value: 33.674 - type: map_at_5 value: 35.119 - type: mrr_at_1 value: 32.916000000000004 - type: mrr_at_10 value: 41.798 - type: mrr_at_100 value: 42.72 - type: mrr_at_1000 value: 42.778 - type: mrr_at_3 value: 39.493 - type: mrr_at_5 value: 40.927 - type: ndcg_at_1 value: 32.916000000000004 - type: ndcg_at_10 value: 41.81 - type: ndcg_at_100 value: 47.284 - type: ndcg_at_1000 value: 49.702 - type: ndcg_at_3 value: 37.486999999999995 - type: ndcg_at_5 value: 39.597 - type: precision_at_1 value: 
32.916000000000004 - type: precision_at_10 value: 7.411 - type: precision_at_100 value: 1.189 - type: precision_at_1000 value: 0.158 - type: precision_at_3 value: 17.581 - type: precision_at_5 value: 12.397 - type: recall_at_1 value: 26.978 - type: recall_at_10 value: 52.869 - type: recall_at_100 value: 75.78399999999999 - type: recall_at_1000 value: 91.545 - type: recall_at_3 value: 40.717 - type: recall_at_5 value: 46.168 - type: map_at_1 value: 24.641 - type: map_at_10 value: 32.916000000000004 - type: map_at_100 value: 34.165 - type: map_at_1000 value: 34.286 - type: map_at_3 value: 30.335 - type: map_at_5 value: 31.569000000000003 - type: mrr_at_1 value: 30.593999999999998 - type: mrr_at_10 value: 38.448 - type: mrr_at_100 value: 39.299 - type: mrr_at_1000 value: 39.362 - type: mrr_at_3 value: 36.244 - type: mrr_at_5 value: 37.232 - type: ndcg_at_1 value: 30.593999999999998 - type: ndcg_at_10 value: 38.2 - type: ndcg_at_100 value: 43.742 - type: ndcg_at_1000 value: 46.217000000000006 - type: ndcg_at_3 value: 33.925 - type: ndcg_at_5 value: 35.394 - type: precision_at_1 value: 30.593999999999998 - type: precision_at_10 value: 6.895 - type: precision_at_100 value: 1.1320000000000001 - type: precision_at_1000 value: 0.153 - type: precision_at_3 value: 16.096 - type: precision_at_5 value: 11.05 - type: recall_at_1 value: 24.641 - type: recall_at_10 value: 48.588 - type: recall_at_100 value: 72.841 - type: recall_at_1000 value: 89.535 - type: recall_at_3 value: 36.087 - type: recall_at_5 value: 40.346 - type: map_at_1 value: 24.79425 - type: map_at_10 value: 33.12033333333333 - type: map_at_100 value: 34.221333333333334 - type: map_at_1000 value: 34.3435 - type: map_at_3 value: 30.636583333333338 - type: map_at_5 value: 31.974083333333326 - type: mrr_at_1 value: 29.242416666666664 - type: mrr_at_10 value: 37.11675 - type: mrr_at_100 value: 37.93783333333334 - type: mrr_at_1000 value: 38.003083333333336 - type: mrr_at_3 value: 34.904666666666664 - type: mrr_at_5 value: 36.12916666666667 - type: ndcg_at_1 value: 29.242416666666664 - type: ndcg_at_10 value: 38.03416666666667 - type: ndcg_at_100 value: 42.86674999999999 - type: ndcg_at_1000 value: 45.34550000000001 - type: ndcg_at_3 value: 33.76466666666666 - type: ndcg_at_5 value: 35.668666666666674 - type: precision_at_1 value: 29.242416666666664 - type: precision_at_10 value: 6.589833333333334 - type: precision_at_100 value: 1.0693333333333332 - type: precision_at_1000 value: 0.14641666666666667 - type: precision_at_3 value: 15.430749999999998 - type: precision_at_5 value: 10.833833333333333 - type: recall_at_1 value: 24.79425 - type: recall_at_10 value: 48.582916666666655 - type: recall_at_100 value: 69.88499999999999 - type: recall_at_1000 value: 87.211 - type: recall_at_3 value: 36.625499999999995 - type: recall_at_5 value: 41.553999999999995 - type: map_at_1 value: 22.767 - type: map_at_10 value: 28.450999999999997 - type: map_at_100 value: 29.332 - type: map_at_1000 value: 29.426000000000002 - type: map_at_3 value: 26.379 - type: map_at_5 value: 27.584999999999997 - type: mrr_at_1 value: 25.46 - type: mrr_at_10 value: 30.974 - type: mrr_at_100 value: 31.784000000000002 - type: mrr_at_1000 value: 31.857999999999997 - type: mrr_at_3 value: 28.962 - type: mrr_at_5 value: 30.066 - type: ndcg_at_1 value: 25.46 - type: ndcg_at_10 value: 32.041 - type: ndcg_at_100 value: 36.522 - type: ndcg_at_1000 value: 39.101 - type: ndcg_at_3 value: 28.152 - type: ndcg_at_5 value: 30.03 - type: precision_at_1 value: 25.46 - type: precision_at_10 value: 
4.893 - type: precision_at_100 value: 0.77 - type: precision_at_1000 value: 0.107 - type: precision_at_3 value: 11.605 - type: precision_at_5 value: 8.19 - type: recall_at_1 value: 22.767 - type: recall_at_10 value: 40.71 - type: recall_at_100 value: 61.334999999999994 - type: recall_at_1000 value: 80.567 - type: recall_at_3 value: 30.198000000000004 - type: recall_at_5 value: 34.803 - type: map_at_1 value: 16.722 - type: map_at_10 value: 22.794 - type: map_at_100 value: 23.7 - type: map_at_1000 value: 23.822 - type: map_at_3 value: 20.781 - type: map_at_5 value: 22.024 - type: mrr_at_1 value: 20.061999999999998 - type: mrr_at_10 value: 26.346999999999998 - type: mrr_at_100 value: 27.153 - type: mrr_at_1000 value: 27.233 - type: mrr_at_3 value: 24.375 - type: mrr_at_5 value: 25.593 - type: ndcg_at_1 value: 20.061999999999998 - type: ndcg_at_10 value: 26.785999999999998 - type: ndcg_at_100 value: 31.319999999999997 - type: ndcg_at_1000 value: 34.346 - type: ndcg_at_3 value: 23.219 - type: ndcg_at_5 value: 25.107000000000003 - type: precision_at_1 value: 20.061999999999998 - type: precision_at_10 value: 4.78 - type: precision_at_100 value: 0.83 - type: precision_at_1000 value: 0.125 - type: precision_at_3 value: 10.874 - type: precision_at_5 value: 7.956 - type: recall_at_1 value: 16.722 - type: recall_at_10 value: 35.204 - type: recall_at_100 value: 55.797 - type: recall_at_1000 value: 77.689 - type: recall_at_3 value: 25.245 - type: recall_at_5 value: 30.115 - type: map_at_1 value: 24.842 - type: map_at_10 value: 32.917 - type: map_at_100 value: 33.961000000000006 - type: map_at_1000 value: 34.069 - type: map_at_3 value: 30.595 - type: map_at_5 value: 31.837 - type: mrr_at_1 value: 29.011 - type: mrr_at_10 value: 36.977 - type: mrr_at_100 value: 37.814 - type: mrr_at_1000 value: 37.885999999999996 - type: mrr_at_3 value: 34.966 - type: mrr_at_5 value: 36.043 - type: ndcg_at_1 value: 29.011 - type: ndcg_at_10 value: 37.735 - type: ndcg_at_100 value: 42.683 - type: ndcg_at_1000 value: 45.198 - type: ndcg_at_3 value: 33.650000000000006 - type: ndcg_at_5 value: 35.386 - type: precision_at_1 value: 29.011 - type: precision_at_10 value: 6.259 - type: precision_at_100 value: 0.984 - type: precision_at_1000 value: 0.13 - type: precision_at_3 value: 15.329999999999998 - type: precision_at_5 value: 10.541 - type: recall_at_1 value: 24.842 - type: recall_at_10 value: 48.304 - type: recall_at_100 value: 70.04899999999999 - type: recall_at_1000 value: 87.82600000000001 - type: recall_at_3 value: 36.922 - type: recall_at_5 value: 41.449999999999996 - type: map_at_1 value: 24.252000000000002 - type: map_at_10 value: 32.293 - type: map_at_100 value: 33.816 - type: map_at_1000 value: 34.053 - type: map_at_3 value: 29.781999999999996 - type: map_at_5 value: 31.008000000000003 - type: mrr_at_1 value: 29.051 - type: mrr_at_10 value: 36.722 - type: mrr_at_100 value: 37.663000000000004 - type: mrr_at_1000 value: 37.734 - type: mrr_at_3 value: 34.354 - type: mrr_at_5 value: 35.609 - type: ndcg_at_1 value: 29.051 - type: ndcg_at_10 value: 37.775999999999996 - type: ndcg_at_100 value: 43.221 - type: ndcg_at_1000 value: 46.116 - type: ndcg_at_3 value: 33.403 - type: ndcg_at_5 value: 35.118 - type: precision_at_1 value: 29.051 - type: precision_at_10 value: 7.332 - type: precision_at_100 value: 1.49 - type: precision_at_1000 value: 0.23600000000000002 - type: precision_at_3 value: 15.415000000000001 - type: precision_at_5 value: 11.107 - type: recall_at_1 value: 24.252000000000002 - type: recall_at_10 value: 47.861 
- type: recall_at_100 value: 72.21600000000001 - type: recall_at_1000 value: 90.886 - type: recall_at_3 value: 35.533 - type: recall_at_5 value: 39.959 - type: map_at_1 value: 20.025000000000002 - type: map_at_10 value: 27.154 - type: map_at_100 value: 28.118 - type: map_at_1000 value: 28.237000000000002 - type: map_at_3 value: 25.017 - type: map_at_5 value: 25.832 - type: mrr_at_1 value: 21.627 - type: mrr_at_10 value: 28.884999999999998 - type: mrr_at_100 value: 29.741 - type: mrr_at_1000 value: 29.831999999999997 - type: mrr_at_3 value: 26.741 - type: mrr_at_5 value: 27.628000000000004 - type: ndcg_at_1 value: 21.627 - type: ndcg_at_10 value: 31.436999999999998 - type: ndcg_at_100 value: 36.181000000000004 - type: ndcg_at_1000 value: 38.986 - type: ndcg_at_3 value: 27.025 - type: ndcg_at_5 value: 28.436 - type: precision_at_1 value: 21.627 - type: precision_at_10 value: 5.009 - type: precision_at_100 value: 0.7929999999999999 - type: precision_at_1000 value: 0.11299999999999999 - type: precision_at_3 value: 11.522 - type: precision_at_5 value: 7.763000000000001 - type: recall_at_1 value: 20.025000000000002 - type: recall_at_10 value: 42.954 - type: recall_at_100 value: 64.67500000000001 - type: recall_at_1000 value: 85.301 - type: recall_at_3 value: 30.892999999999997 - type: recall_at_5 value: 34.288000000000004 - task: type: Retrieval dataset: name: MTEB ClimateFEVER type: climate-fever config: default split: test revision: None metrics: - type: map_at_1 value: 10.079 - type: map_at_10 value: 16.930999999999997 - type: map_at_100 value: 18.398999999999997 - type: map_at_1000 value: 18.561 - type: map_at_3 value: 14.294 - type: map_at_5 value: 15.579 - type: mrr_at_1 value: 22.606 - type: mrr_at_10 value: 32.513 - type: mrr_at_100 value: 33.463 - type: mrr_at_1000 value: 33.513999999999996 - type: mrr_at_3 value: 29.479 - type: mrr_at_5 value: 31.3 - type: ndcg_at_1 value: 22.606 - type: ndcg_at_10 value: 24.053 - type: ndcg_at_100 value: 30.258000000000003 - type: ndcg_at_1000 value: 33.516 - type: ndcg_at_3 value: 19.721 - type: ndcg_at_5 value: 21.144 - type: precision_at_1 value: 22.606 - type: precision_at_10 value: 7.55 - type: precision_at_100 value: 1.399 - type: precision_at_1000 value: 0.2 - type: precision_at_3 value: 14.701 - type: precision_at_5 value: 11.192 - type: recall_at_1 value: 10.079 - type: recall_at_10 value: 28.970000000000002 - type: recall_at_100 value: 50.805 - type: recall_at_1000 value: 69.378 - type: recall_at_3 value: 18.199 - type: recall_at_5 value: 22.442 - task: type: Retrieval dataset: name: MTEB DBPedia type: dbpedia-entity config: default split: test revision: None metrics: - type: map_at_1 value: 7.794 - type: map_at_10 value: 15.165999999999999 - type: map_at_100 value: 20.508000000000003 - type: map_at_1000 value: 21.809 - type: map_at_3 value: 11.568000000000001 - type: map_at_5 value: 13.059000000000001 - type: mrr_at_1 value: 56.49999999999999 - type: mrr_at_10 value: 65.90899999999999 - type: mrr_at_100 value: 66.352 - type: mrr_at_1000 value: 66.369 - type: mrr_at_3 value: 64.0 - type: mrr_at_5 value: 65.10000000000001 - type: ndcg_at_1 value: 44.25 - type: ndcg_at_10 value: 32.649 - type: ndcg_at_100 value: 36.668 - type: ndcg_at_1000 value: 43.918 - type: ndcg_at_3 value: 37.096000000000004 - type: ndcg_at_5 value: 34.048 - type: precision_at_1 value: 56.49999999999999 - type: precision_at_10 value: 25.45 - type: precision_at_100 value: 8.055 - type: precision_at_1000 value: 1.7489999999999999 - type: precision_at_3 value: 41.0 - type: 
precision_at_5 value: 32.85 - type: recall_at_1 value: 7.794 - type: recall_at_10 value: 20.101 - type: recall_at_100 value: 42.448 - type: recall_at_1000 value: 65.88000000000001 - type: recall_at_3 value: 12.753 - type: recall_at_5 value: 15.307 - task: type: Classification dataset: name: MTEB EmotionClassification type: mteb/emotion config: default split: test revision: 4f58c6b202a23cf9a4da393831edf4f9183cad37 metrics: - type: accuracy value: 44.01 - type: f1 value: 38.659680951114964 - task: type: Retrieval dataset: name: MTEB FEVER type: fever config: default split: test revision: None metrics: - type: map_at_1 value: 49.713 - type: map_at_10 value: 61.79 - type: map_at_100 value: 62.28 - type: map_at_1000 value: 62.297000000000004 - type: map_at_3 value: 59.361 - type: map_at_5 value: 60.92100000000001 - type: mrr_at_1 value: 53.405 - type: mrr_at_10 value: 65.79899999999999 - type: mrr_at_100 value: 66.219 - type: mrr_at_1000 value: 66.227 - type: mrr_at_3 value: 63.431000000000004 - type: mrr_at_5 value: 64.98 - type: ndcg_at_1 value: 53.405 - type: ndcg_at_10 value: 68.01899999999999 - type: ndcg_at_100 value: 70.197 - type: ndcg_at_1000 value: 70.571 - type: ndcg_at_3 value: 63.352 - type: ndcg_at_5 value: 66.018 - type: precision_at_1 value: 53.405 - type: precision_at_10 value: 9.119 - type: precision_at_100 value: 1.03 - type: precision_at_1000 value: 0.107 - type: precision_at_3 value: 25.602999999999998 - type: precision_at_5 value: 16.835 - type: recall_at_1 value: 49.713 - type: recall_at_10 value: 83.306 - type: recall_at_100 value: 92.92 - type: recall_at_1000 value: 95.577 - type: recall_at_3 value: 70.798 - type: recall_at_5 value: 77.254 - task: type: Retrieval dataset: name: MTEB FiQA2018 type: fiqa config: default split: test revision: None metrics: - type: map_at_1 value: 15.310000000000002 - type: map_at_10 value: 26.204 - type: map_at_100 value: 27.932000000000002 - type: map_at_1000 value: 28.121000000000002 - type: map_at_3 value: 22.481 - type: map_at_5 value: 24.678 - type: mrr_at_1 value: 29.784 - type: mrr_at_10 value: 39.582 - type: mrr_at_100 value: 40.52 - type: mrr_at_1000 value: 40.568 - type: mrr_at_3 value: 37.114000000000004 - type: mrr_at_5 value: 38.596000000000004 - type: ndcg_at_1 value: 29.784 - type: ndcg_at_10 value: 33.432 - type: ndcg_at_100 value: 40.281 - type: ndcg_at_1000 value: 43.653999999999996 - type: ndcg_at_3 value: 29.612 - type: ndcg_at_5 value: 31.223 - type: precision_at_1 value: 29.784 - type: precision_at_10 value: 9.645 - type: precision_at_100 value: 1.645 - type: precision_at_1000 value: 0.22499999999999998 - type: precision_at_3 value: 20.165 - type: precision_at_5 value: 15.401000000000002 - type: recall_at_1 value: 15.310000000000002 - type: recall_at_10 value: 40.499 - type: recall_at_100 value: 66.643 - type: recall_at_1000 value: 87.059 - type: recall_at_3 value: 27.492 - type: recall_at_5 value: 33.748 - task: type: Retrieval dataset: name: MTEB HotpotQA type: hotpotqa config: default split: test revision: None metrics: - type: map_at_1 value: 33.599000000000004 - type: map_at_10 value: 47.347 - type: map_at_100 value: 48.191 - type: map_at_1000 value: 48.263 - type: map_at_3 value: 44.698 - type: map_at_5 value: 46.278999999999996 - type: mrr_at_1 value: 67.19800000000001 - type: mrr_at_10 value: 74.054 - type: mrr_at_100 value: 74.376 - type: mrr_at_1000 value: 74.392 - type: mrr_at_3 value: 72.849 - type: mrr_at_5 value: 73.643 - type: ndcg_at_1 value: 67.19800000000001 - type: ndcg_at_10 value: 56.482 - type: 
ndcg_at_100 value: 59.694 - type: ndcg_at_1000 value: 61.204 - type: ndcg_at_3 value: 52.43299999999999 - type: ndcg_at_5 value: 54.608000000000004 - type: precision_at_1 value: 67.19800000000001 - type: precision_at_10 value: 11.613999999999999 - type: precision_at_100 value: 1.415 - type: precision_at_1000 value: 0.16199999999999998 - type: precision_at_3 value: 32.726 - type: precision_at_5 value: 21.349999999999998 - type: recall_at_1 value: 33.599000000000004 - type: recall_at_10 value: 58.069 - type: recall_at_100 value: 70.736 - type: recall_at_1000 value: 80.804 - type: recall_at_3 value: 49.088 - type: recall_at_5 value: 53.376000000000005 - task: type: Classification dataset: name: MTEB ImdbClassification type: mteb/imdb config: default split: test revision: 3d86128a09e091d6018b6d26cad27f2739fc2db7 metrics: - type: accuracy value: 73.64359999999999 - type: ap value: 67.54685976014599 - type: f1 value: 73.55148707559482 - task: type: Retrieval dataset: name: MTEB MSMARCO type: msmarco config: default split: dev revision: None metrics: - type: map_at_1 value: 19.502 - type: map_at_10 value: 30.816 - type: map_at_100 value: 32.007999999999996 - type: map_at_1000 value: 32.067 - type: map_at_3 value: 27.215 - type: map_at_5 value: 29.304000000000002 - type: mrr_at_1 value: 20.072000000000003 - type: mrr_at_10 value: 31.406 - type: mrr_at_100 value: 32.549 - type: mrr_at_1000 value: 32.602 - type: mrr_at_3 value: 27.839000000000002 - type: mrr_at_5 value: 29.926000000000002 - type: ndcg_at_1 value: 20.086000000000002 - type: ndcg_at_10 value: 37.282 - type: ndcg_at_100 value: 43.206 - type: ndcg_at_1000 value: 44.690000000000005 - type: ndcg_at_3 value: 29.932 - type: ndcg_at_5 value: 33.668 - type: precision_at_1 value: 20.086000000000002 - type: precision_at_10 value: 5.961 - type: precision_at_100 value: 0.898 - type: precision_at_1000 value: 0.10200000000000001 - type: precision_at_3 value: 12.856000000000002 - type: precision_at_5 value: 9.596 - type: recall_at_1 value: 19.502 - type: recall_at_10 value: 57.182 - type: recall_at_100 value: 84.952 - type: recall_at_1000 value: 96.34700000000001 - type: recall_at_3 value: 37.193 - type: recall_at_5 value: 46.157 - task: type: Classification dataset: name: MTEB MTOPDomainClassification (en) type: mteb/mtop_domain config: en split: test revision: d80d48c1eb48d3562165c59d59d0034df9fff0bf metrics: - type: accuracy value: 93.96488828089375 - type: f1 value: 93.32119260543482 - task: type: Classification dataset: name: MTEB MTOPIntentClassification (en) type: mteb/mtop_intent config: en split: test revision: ae001d0e6b1228650b7bd1c2c65fb50ad11a8aba metrics: - type: accuracy value: 72.4965800273598 - type: f1 value: 49.34896217536082 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (en) type: mteb/amazon_massive_intent config: en split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 67.60928043039678 - type: f1 value: 64.34244712074538 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (en) type: mteb/amazon_massive_scenario config: en split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 69.75453934095493 - type: f1 value: 68.39224867489249 - task: type: Clustering dataset: name: MTEB MedrxivClusteringP2P type: mteb/medrxiv-clustering-p2p config: default split: test revision: e7a26af6f3ae46b30dde8737f02c07b1505bcc73 metrics: - type: v_measure value: 31.862573504920082 - task: type: Clustering 
dataset: name: MTEB MedrxivClusteringS2S type: mteb/medrxiv-clustering-s2s config: default split: test revision: 35191c8c0dca72d8ff3efcd72aa802307d469663 metrics: - type: v_measure value: 27.511123551196803 - task: type: Reranking dataset: name: MTEB MindSmallReranking type: mteb/mind_small config: default split: test revision: 3bdac13927fdc888b903db93b2ffdbd90b295a69 metrics: - type: map value: 30.99145104942086 - type: mrr value: 32.03606480418627 - task: type: Retrieval dataset: name: MTEB NFCorpus type: nfcorpus config: default split: test revision: None metrics: - type: map_at_1 value: 5.015 - type: map_at_10 value: 11.054 - type: map_at_100 value: 13.773 - type: map_at_1000 value: 15.082999999999998 - type: map_at_3 value: 8.253 - type: map_at_5 value: 9.508999999999999 - type: mrr_at_1 value: 42.105 - type: mrr_at_10 value: 50.44499999999999 - type: mrr_at_100 value: 51.080000000000005 - type: mrr_at_1000 value: 51.129999999999995 - type: mrr_at_3 value: 48.555 - type: mrr_at_5 value: 49.84 - type: ndcg_at_1 value: 40.402 - type: ndcg_at_10 value: 30.403000000000002 - type: ndcg_at_100 value: 28.216 - type: ndcg_at_1000 value: 37.021 - type: ndcg_at_3 value: 35.53 - type: ndcg_at_5 value: 33.202999999999996 - type: precision_at_1 value: 42.105 - type: precision_at_10 value: 22.353 - type: precision_at_100 value: 7.266 - type: precision_at_1000 value: 2.011 - type: precision_at_3 value: 32.921 - type: precision_at_5 value: 28.297 - type: recall_at_1 value: 5.015 - type: recall_at_10 value: 14.393 - type: recall_at_100 value: 28.893 - type: recall_at_1000 value: 60.18 - type: recall_at_3 value: 9.184000000000001 - type: recall_at_5 value: 11.39 - task: type: Retrieval dataset: name: MTEB NQ type: nq config: default split: test revision: None metrics: - type: map_at_1 value: 29.524 - type: map_at_10 value: 44.182 - type: map_at_100 value: 45.228 - type: map_at_1000 value: 45.265 - type: map_at_3 value: 39.978 - type: map_at_5 value: 42.482 - type: mrr_at_1 value: 33.256 - type: mrr_at_10 value: 46.661 - type: mrr_at_100 value: 47.47 - type: mrr_at_1000 value: 47.496 - type: mrr_at_3 value: 43.187999999999995 - type: mrr_at_5 value: 45.330999999999996 - type: ndcg_at_1 value: 33.227000000000004 - type: ndcg_at_10 value: 51.589 - type: ndcg_at_100 value: 56.043 - type: ndcg_at_1000 value: 56.937000000000005 - type: ndcg_at_3 value: 43.751 - type: ndcg_at_5 value: 47.937000000000005 - type: precision_at_1 value: 33.227000000000004 - type: precision_at_10 value: 8.556999999999999 - type: precision_at_100 value: 1.103 - type: precision_at_1000 value: 0.11900000000000001 - type: precision_at_3 value: 19.921 - type: precision_at_5 value: 14.396999999999998 - type: recall_at_1 value: 29.524 - type: recall_at_10 value: 71.615 - type: recall_at_100 value: 91.056 - type: recall_at_1000 value: 97.72800000000001 - type: recall_at_3 value: 51.451 - type: recall_at_5 value: 61.119 - task: type: Retrieval dataset: name: MTEB QuoraRetrieval type: quora config: default split: test revision: None metrics: - type: map_at_1 value: 69.596 - type: map_at_10 value: 83.281 - type: map_at_100 value: 83.952 - type: map_at_1000 value: 83.97200000000001 - type: map_at_3 value: 80.315 - type: map_at_5 value: 82.223 - type: mrr_at_1 value: 80.17 - type: mrr_at_10 value: 86.522 - type: mrr_at_100 value: 86.644 - type: mrr_at_1000 value: 86.64500000000001 - type: mrr_at_3 value: 85.438 - type: mrr_at_5 value: 86.21799999999999 - type: ndcg_at_1 value: 80.19 - type: ndcg_at_10 value: 87.19 - type: ndcg_at_100 value: 
88.567 - type: ndcg_at_1000 value: 88.70400000000001 - type: ndcg_at_3 value: 84.17999999999999 - type: ndcg_at_5 value: 85.931 - type: precision_at_1 value: 80.19 - type: precision_at_10 value: 13.209000000000001 - type: precision_at_100 value: 1.518 - type: precision_at_1000 value: 0.157 - type: precision_at_3 value: 36.717 - type: precision_at_5 value: 24.248 - type: recall_at_1 value: 69.596 - type: recall_at_10 value: 94.533 - type: recall_at_100 value: 99.322 - type: recall_at_1000 value: 99.965 - type: recall_at_3 value: 85.911 - type: recall_at_5 value: 90.809 - task: type: Clustering dataset: name: MTEB RedditClustering type: mteb/reddit-clustering config: default split: test revision: 24640382cdbf8abc73003fb0fa6d111a705499eb metrics: - type: v_measure value: 49.27650627571912 - task: type: Clustering dataset: name: MTEB RedditClusteringP2P type: mteb/reddit-clustering-p2p config: default split: test revision: 282350215ef01743dc01b456c7f5241fa8937f16 metrics: - type: v_measure value: 57.08550946534183 - task: type: Retrieval dataset: name: MTEB SCIDOCS type: scidocs config: default split: test revision: None metrics: - type: map_at_1 value: 4.568 - type: map_at_10 value: 10.862 - type: map_at_100 value: 12.757 - type: map_at_1000 value: 13.031 - type: map_at_3 value: 7.960000000000001 - type: map_at_5 value: 9.337 - type: mrr_at_1 value: 22.5 - type: mrr_at_10 value: 32.6 - type: mrr_at_100 value: 33.603 - type: mrr_at_1000 value: 33.672000000000004 - type: mrr_at_3 value: 29.299999999999997 - type: mrr_at_5 value: 31.25 - type: ndcg_at_1 value: 22.5 - type: ndcg_at_10 value: 18.605 - type: ndcg_at_100 value: 26.029999999999998 - type: ndcg_at_1000 value: 31.256 - type: ndcg_at_3 value: 17.873 - type: ndcg_at_5 value: 15.511 - type: precision_at_1 value: 22.5 - type: precision_at_10 value: 9.58 - type: precision_at_100 value: 2.033 - type: precision_at_1000 value: 0.33 - type: precision_at_3 value: 16.633 - type: precision_at_5 value: 13.54 - type: recall_at_1 value: 4.568 - type: recall_at_10 value: 19.402 - type: recall_at_100 value: 41.277 - type: recall_at_1000 value: 66.963 - type: recall_at_3 value: 10.112 - type: recall_at_5 value: 13.712 - task: type: STS dataset: name: MTEB SICK-R type: mteb/sickr-sts config: default split: test revision: a6ea5a8cab320b040a23452cc28066d9beae2cee metrics: - type: cos_sim_pearson value: 83.31992291680787 - type: cos_sim_spearman value: 76.7212346922664 - type: euclidean_pearson value: 80.42189271706478 - type: euclidean_spearman value: 76.7212342532493 - type: manhattan_pearson value: 80.33171093031578 - type: manhattan_spearman value: 76.63192883074694 - task: type: STS dataset: name: MTEB STS12 type: mteb/sts12-sts config: default split: test revision: a0d554a64d88156834ff5ae9920b964011b16384 metrics: - type: cos_sim_pearson value: 83.16654278886763 - type: cos_sim_spearman value: 73.66390263429565 - type: euclidean_pearson value: 79.7485360086639 - type: euclidean_spearman value: 73.66389870373436 - type: manhattan_pearson value: 79.73652237443706 - type: manhattan_spearman value: 73.65296117151647 - task: type: STS dataset: name: MTEB STS13 type: mteb/sts13-sts config: default split: test revision: 7e90230a92c190f1bf69ae9002b8cea547a64cca metrics: - type: cos_sim_pearson value: 82.40389689929246 - type: cos_sim_spearman value: 83.29727595993955 - type: euclidean_pearson value: 82.23970587854079 - type: euclidean_spearman value: 83.29727595993955 - type: manhattan_pearson value: 82.18823600831897 - type: manhattan_spearman value: 
83.20746192209594 - task: type: STS dataset: name: MTEB STS14 type: mteb/sts14-sts config: default split: test revision: 6031580fec1f6af667f0bd2da0a551cf4f0b2375 metrics: - type: cos_sim_pearson value: 81.73505246913413 - type: cos_sim_spearman value: 79.1686548248754 - type: euclidean_pearson value: 80.48889135993412 - type: euclidean_spearman value: 79.16864112930354 - type: manhattan_pearson value: 80.40720651057302 - type: manhattan_spearman value: 79.0640155089286 - task: type: STS dataset: name: MTEB STS15 type: mteb/sts15-sts config: default split: test revision: ae752c7c21bf194d8b67fd573edf7ae58183cbe3 metrics: - type: cos_sim_pearson value: 86.3953512879065 - type: cos_sim_spearman value: 87.29947322714338 - type: euclidean_pearson value: 86.59759438529645 - type: euclidean_spearman value: 87.29947511092824 - type: manhattan_pearson value: 86.52097806169155 - type: manhattan_spearman value: 87.22987242146534 - task: type: STS dataset: name: MTEB STS16 type: mteb/sts16-sts config: default split: test revision: 4d8694f8f0e0100860b497b999b3dbed754a0513 metrics: - type: cos_sim_pearson value: 82.48565753792056 - type: cos_sim_spearman value: 83.6049720319893 - type: euclidean_pearson value: 82.56452023172913 - type: euclidean_spearman value: 83.60490168191697 - type: manhattan_pearson value: 82.58079941137872 - type: manhattan_spearman value: 83.60975807374051 - task: type: STS dataset: name: MTEB STS17 (en-en) type: mteb/sts17-crosslingual-sts config: en-en split: test revision: af5e6fb845001ecf41f4c1e033ce921939a2a68d metrics: - type: cos_sim_pearson value: 88.18239976618212 - type: cos_sim_spearman value: 88.23061724730616 - type: euclidean_pearson value: 87.78482472776658 - type: euclidean_spearman value: 88.23061724730616 - type: manhattan_pearson value: 87.75059641730239 - type: manhattan_spearman value: 88.22527413524622 - task: type: STS dataset: name: MTEB STS22 (en) type: mteb/sts22-crosslingual-sts config: en split: test revision: 6d1ba47164174a496b7fa5d3569dae26a6813b80 metrics: - type: cos_sim_pearson value: 63.42816418706765 - type: cos_sim_spearman value: 63.4569864520124 - type: euclidean_pearson value: 64.35405409953853 - type: euclidean_spearman value: 63.4569864520124 - type: manhattan_pearson value: 63.96649236073056 - type: manhattan_spearman value: 63.01448583722708 - task: type: STS dataset: name: MTEB STSBenchmark type: mteb/stsbenchmark-sts config: default split: test revision: b0fddb56ed78048fa8b90373c8a3cfc37b684831 metrics: - type: cos_sim_pearson value: 83.41659638047614 - type: cos_sim_spearman value: 84.03893866106175 - type: euclidean_pearson value: 84.2251203953798 - type: euclidean_spearman value: 84.03893866106175 - type: manhattan_pearson value: 84.22733643205514 - type: manhattan_spearman value: 84.06504411263612 - task: type: Reranking dataset: name: MTEB SciDocsRR type: mteb/scidocs-reranking config: default split: test revision: d3c5e1fc0b855ab6097bf1cda04dd73947d7caab metrics: - type: map value: 79.75608022582414 - type: mrr value: 94.0947732369301 - task: type: Retrieval dataset: name: MTEB SciFact type: scifact config: default split: test revision: None metrics: - type: map_at_1 value: 50.161 - type: map_at_10 value: 59.458999999999996 - type: map_at_100 value: 60.156 - type: map_at_1000 value: 60.194 - type: map_at_3 value: 56.45400000000001 - type: map_at_5 value: 58.165 - type: mrr_at_1 value: 53.333 - type: mrr_at_10 value: 61.050000000000004 - type: mrr_at_100 value: 61.586 - type: mrr_at_1000 value: 61.624 - type: mrr_at_3 value: 58.889 
- type: mrr_at_5 value: 60.122 - type: ndcg_at_1 value: 53.333 - type: ndcg_at_10 value: 63.888999999999996 - type: ndcg_at_100 value: 66.963 - type: ndcg_at_1000 value: 68.062 - type: ndcg_at_3 value: 59.01 - type: ndcg_at_5 value: 61.373999999999995 - type: precision_at_1 value: 53.333 - type: precision_at_10 value: 8.633000000000001 - type: precision_at_100 value: 1.027 - type: precision_at_1000 value: 0.11199999999999999 - type: precision_at_3 value: 23.111 - type: precision_at_5 value: 15.467 - type: recall_at_1 value: 50.161 - type: recall_at_10 value: 75.922 - type: recall_at_100 value: 90.0 - type: recall_at_1000 value: 98.667 - type: recall_at_3 value: 62.90599999999999 - type: recall_at_5 value: 68.828 - task: type: PairClassification dataset: name: MTEB SprintDuplicateQuestions type: mteb/sprintduplicatequestions-pairclassification config: default split: test revision: d66bd1f72af766a5cc4b0ca5e00c162f89e8cc46 metrics: - type: cos_sim_accuracy value: 99.81188118811882 - type: cos_sim_ap value: 95.11619225962413 - type: cos_sim_f1 value: 90.35840484603736 - type: cos_sim_precision value: 91.23343527013252 - type: cos_sim_recall value: 89.5 - type: dot_accuracy value: 99.81188118811882 - type: dot_ap value: 95.11619225962413 - type: dot_f1 value: 90.35840484603736 - type: dot_precision value: 91.23343527013252 - type: dot_recall value: 89.5 - type: euclidean_accuracy value: 99.81188118811882 - type: euclidean_ap value: 95.11619225962413 - type: euclidean_f1 value: 90.35840484603736 - type: euclidean_precision value: 91.23343527013252 - type: euclidean_recall value: 89.5 - type: manhattan_accuracy value: 99.80891089108911 - type: manhattan_ap value: 95.07294266220966 - type: manhattan_f1 value: 90.21794221996959 - type: manhattan_precision value: 91.46968139773895 - type: manhattan_recall value: 89.0 - type: max_accuracy value: 99.81188118811882 - type: max_ap value: 95.11619225962413 - type: max_f1 value: 90.35840484603736 - task: type: Clustering dataset: name: MTEB StackExchangeClustering type: mteb/stackexchange-clustering config: default split: test revision: 6cbc1f7b2bc0622f2e39d2c77fa502909748c259 metrics: - type: v_measure value: 55.3481874105239 - task: type: Clustering dataset: name: MTEB StackExchangeClusteringP2P type: mteb/stackexchange-clustering-p2p config: default split: test revision: 815ca46b2622cec33ccafc3735d572c266efdb44 metrics: - type: v_measure value: 34.421291695525 - task: type: Reranking dataset: name: MTEB StackOverflowDupQuestions type: mteb/stackoverflowdupquestions-reranking config: default split: test revision: e185fbe320c72810689fc5848eb6114e1ef5ec69 metrics: - type: map value: 49.98746633276634 - type: mrr value: 50.63143249724133 - task: type: Summarization dataset: name: MTEB SummEval type: mteb/summeval config: default split: test revision: cda12ad7615edc362dbf25a00fdd61d3b1eaf93c metrics: - type: cos_sim_pearson value: 31.009961979844036 - type: cos_sim_spearman value: 30.558416108881044 - type: dot_pearson value: 31.009964941134253 - type: dot_spearman value: 30.545760761761393 - task: type: Retrieval dataset: name: MTEB TRECCOVID type: trec-covid config: default split: test revision: None metrics: - type: map_at_1 value: 0.207 - type: map_at_10 value: 1.6 - type: map_at_100 value: 8.594 - type: map_at_1000 value: 20.213 - type: map_at_3 value: 0.585 - type: map_at_5 value: 0.9039999999999999 - type: mrr_at_1 value: 78.0 - type: mrr_at_10 value: 87.4 - type: mrr_at_100 value: 87.4 - type: mrr_at_1000 value: 87.4 - type: mrr_at_3 value: 86.667 - 
type: mrr_at_5 value: 87.06700000000001 - type: ndcg_at_1 value: 73.0 - type: ndcg_at_10 value: 65.18 - type: ndcg_at_100 value: 49.631 - type: ndcg_at_1000 value: 43.498999999999995 - type: ndcg_at_3 value: 71.83800000000001 - type: ndcg_at_5 value: 69.271 - type: precision_at_1 value: 78.0 - type: precision_at_10 value: 69.19999999999999 - type: precision_at_100 value: 50.980000000000004 - type: precision_at_1000 value: 19.426 - type: precision_at_3 value: 77.333 - type: precision_at_5 value: 74.0 - type: recall_at_1 value: 0.207 - type: recall_at_10 value: 1.822 - type: recall_at_100 value: 11.849 - type: recall_at_1000 value: 40.492 - type: recall_at_3 value: 0.622 - type: recall_at_5 value: 0.9809999999999999 - task: type: Retrieval dataset: name: MTEB Touche2020 type: webis-touche2020 config: default split: test revision: None metrics: - type: map_at_1 value: 2.001 - type: map_at_10 value: 10.376000000000001 - type: map_at_100 value: 16.936999999999998 - type: map_at_1000 value: 18.615000000000002 - type: map_at_3 value: 5.335999999999999 - type: map_at_5 value: 7.374 - type: mrr_at_1 value: 20.408 - type: mrr_at_10 value: 38.29 - type: mrr_at_100 value: 39.33 - type: mrr_at_1000 value: 39.347 - type: mrr_at_3 value: 32.993 - type: mrr_at_5 value: 36.973 - type: ndcg_at_1 value: 17.347 - type: ndcg_at_10 value: 23.515 - type: ndcg_at_100 value: 37.457 - type: ndcg_at_1000 value: 49.439 - type: ndcg_at_3 value: 22.762999999999998 - type: ndcg_at_5 value: 22.622 - type: precision_at_1 value: 20.408 - type: precision_at_10 value: 22.448999999999998 - type: precision_at_100 value: 8.184 - type: precision_at_1000 value: 1.608 - type: precision_at_3 value: 25.85 - type: precision_at_5 value: 25.306 - type: recall_at_1 value: 2.001 - type: recall_at_10 value: 17.422 - type: recall_at_100 value: 51.532999999999994 - type: recall_at_1000 value: 87.466 - type: recall_at_3 value: 6.861000000000001 - type: recall_at_5 value: 10.502 - task: type: Classification dataset: name: MTEB ToxicConversationsClassification type: mteb/toxic_conversations_50k config: default split: test revision: d7c0de2777da35d6aae2200a62c6e0e5af397c4c metrics: - type: accuracy value: 71.54419999999999 - type: ap value: 14.372170450843907 - type: f1 value: 54.94420257390529 - task: type: Classification dataset: name: MTEB TweetSentimentExtractionClassification type: mteb/tweet_sentiment_extraction config: default split: test revision: d604517c81ca91fe16a244d1248fc021f9ecee7a metrics: - type: accuracy value: 59.402942840973395 - type: f1 value: 59.4166538875571 - task: type: Clustering dataset: name: MTEB TwentyNewsgroupsClustering type: mteb/twentynewsgroups-clustering config: default split: test revision: 6125ec4e24fa026cec8a478383ee943acfbd5449 metrics: - type: v_measure value: 41.569064336457906 - task: type: PairClassification dataset: name: MTEB TwitterSemEval2015 type: mteb/twittersemeval2015-pairclassification config: default split: test revision: 70970daeab8776df92f5ea462b6173c0b46fd2d1 metrics: - type: cos_sim_accuracy value: 85.31322644096085 - type: cos_sim_ap value: 72.14518894837381 - type: cos_sim_f1 value: 66.67489813557229 - type: cos_sim_precision value: 62.65954977953121 - type: cos_sim_recall value: 71.2401055408971 - type: dot_accuracy value: 85.31322644096085 - type: dot_ap value: 72.14521480685293 - type: dot_f1 value: 66.67489813557229 - type: dot_precision value: 62.65954977953121 - type: dot_recall value: 71.2401055408971 - type: euclidean_accuracy value: 85.31322644096085 - type: euclidean_ap 
value: 72.14520820485349 - type: euclidean_f1 value: 66.67489813557229 - type: euclidean_precision value: 62.65954977953121 - type: euclidean_recall value: 71.2401055408971 - type: manhattan_accuracy value: 85.21785778148656 - type: manhattan_ap value: 72.01177147657364 - type: manhattan_f1 value: 66.62594673833374 - type: manhattan_precision value: 62.0336669699727 - type: manhattan_recall value: 71.95250659630607 - type: max_accuracy value: 85.31322644096085 - type: max_ap value: 72.14521480685293 - type: max_f1 value: 66.67489813557229 - task: type: PairClassification dataset: name: MTEB TwitterURLCorpus type: mteb/twitterurlcorpus-pairclassification config: default split: test revision: 8b6510b0b1fa4e4c4f879467980e9be563ec1cdf metrics: - type: cos_sim_accuracy value: 89.12756626693057 - type: cos_sim_ap value: 86.05430786440826 - type: cos_sim_f1 value: 78.27759692216631 - type: cos_sim_precision value: 75.33466248931929 - type: cos_sim_recall value: 81.45980905451185 - type: dot_accuracy value: 89.12950673341872 - type: dot_ap value: 86.05431161145492 - type: dot_f1 value: 78.27759692216631 - type: dot_precision value: 75.33466248931929 - type: dot_recall value: 81.45980905451185 - type: euclidean_accuracy value: 89.12756626693057 - type: euclidean_ap value: 86.05431303247397 - type: euclidean_f1 value: 78.27759692216631 - type: euclidean_precision value: 75.33466248931929 - type: euclidean_recall value: 81.45980905451185 - type: manhattan_accuracy value: 89.04994760740482 - type: manhattan_ap value: 86.00860610892074 - type: manhattan_f1 value: 78.1846776005392 - type: manhattan_precision value: 76.10438839480975 - type: manhattan_recall value: 80.3818909762858 - type: max_accuracy value: 89.12950673341872 - type: max_ap value: 86.05431303247397 - type: max_f1 value: 78.27759692216631 --- <!-- TODO: add evaluation results here --> <br><br> <p align="center"> <img src="https://huggingface.co/datasets/jinaai/documentation-images/resolve/main/logo.webp" alt="Jina AI: Your Search Foundation, Supercharged!" width="150px"> </p> <p align="center"> <b>The text embedding set trained by <a href="https://jina.ai/"><b>Jina AI</b></a>.</b> </p> ## Quick Start The easiest way to starting using `jina-embeddings-v2-small-en` is to use Jina AI's [Embedding API](https://jina.ai/embeddings/). ## Intended Usage & Model Info `jina-embeddings-v2-small-en` is an English, monolingual **embedding model** supporting **8192 sequence length**. It is based on a BERT architecture (JinaBERT) that supports the symmetric bidirectional variant of [ALiBi](https://arxiv.org/abs/2108.12409) to allow longer sequence length. The backbone `jina-bert-v2-small-en` is pretrained on the C4 dataset. The model is further trained on Jina AI's collection of more than 400 millions of sentence pairs and hard negatives. These pairs were obtained from various domains and were carefully selected through a thorough cleaning process. The embedding model was trained using 512 sequence length, but extrapolates to 8k sequence length (or even longer) thanks to ALiBi. This makes our model useful for a range of use cases, especially when processing long documents is needed, including long document retrieval, semantic textual similarity, text reranking, recommendation, RAG and LLM-based generative search, etc. This model has 33 million parameters, which enables lightning-fast and memory efficient inference, while still delivering impressive performance. 
Additionally, we provide the following embedding models:

- [`jina-embeddings-v2-small-en`](https://huggingface.co/jinaai/jina-embeddings-v2-small-en): 33 million parameters **(you are here)**.
- [`jina-embeddings-v2-base-en`](https://huggingface.co/jinaai/jina-embeddings-v2-base-en): 137 million parameters.
- [`jina-embeddings-v2-base-zh`](https://huggingface.co/jinaai/jina-embeddings-v2-base-zh): 161 million parameters, Chinese-English bilingual embeddings.
- [`jina-embeddings-v2-base-de`](https://huggingface.co/jinaai/jina-embeddings-v2-base-de): 161 million parameters, German-English bilingual embeddings.
- [`jina-embeddings-v2-base-es`](): Spanish-English bilingual embeddings (soon).

## Data & Parameters

Training data and parameters are described in the Jina Embeddings V2 [technical report](https://arxiv.org/abs/2310.19923).

## Usage

**<details><summary>Please apply mean pooling when integrating the model.</summary>**
<p>

### Why mean pooling?

Mean pooling takes all token embeddings from the model output and averages them at the sentence/paragraph level. It has proven to be the most effective way to produce high-quality sentence embeddings. We offer an `encode` function to deal with this. However, if you would like to do it without using the default `encode` function:

```python
import torch
import torch.nn.functional as F
from transformers import AutoTokenizer, AutoModel

def mean_pooling(model_output, attention_mask):
    token_embeddings = model_output[0]
    input_mask_expanded = attention_mask.unsqueeze(-1).expand(token_embeddings.size()).float()
    return torch.sum(token_embeddings * input_mask_expanded, 1) / torch.clamp(input_mask_expanded.sum(1), min=1e-9)

sentences = ['How is the weather today?', 'What is the current weather like today?']

tokenizer = AutoTokenizer.from_pretrained('jinaai/jina-embeddings-v2-small-en')
model = AutoModel.from_pretrained('jinaai/jina-embeddings-v2-small-en', trust_remote_code=True)

encoded_input = tokenizer(sentences, padding=True, truncation=True, return_tensors='pt')

with torch.no_grad():
    model_output = model(**encoded_input)

embeddings = mean_pooling(model_output, encoded_input['attention_mask'])
embeddings = F.normalize(embeddings, p=2, dim=1)
```

</p>
</details>

You can use Jina Embedding models directly from the transformers package.

```python
!pip install transformers
from transformers import AutoModel
from numpy.linalg import norm

cos_sim = lambda a,b: (a @ b.T) / (norm(a)*norm(b))

model = AutoModel.from_pretrained('jinaai/jina-embeddings-v2-small-en', trust_remote_code=True) # trust_remote_code is needed to use the encode method

embeddings = model.encode(['How is the weather today?', 'What is the current weather like today?'])
print(cos_sim(embeddings[0], embeddings[1]))
```

If you only want to handle shorter sequences, such as 2k, pass the `max_length` parameter to the `encode` function:

```python
embeddings = model.encode(
    ['Very long ... document'],
    max_length=2048
)
```

The latest sentence-transformers also supports Jina embeddings:

```python
!pip install -U sentence-transformers
from sentence_transformers import SentenceTransformer
from sentence_transformers.util import cos_sim

model = SentenceTransformer(
    "jinaai/jina-embeddings-v2-small-en", # switch to en/zh for English or Chinese
    trust_remote_code=True
)

# control your input sequence length up to 8192
model.max_seq_length = 1024

embeddings = model.encode([
    'How is the weather today?',
    'What is the current weather like today?'
])
print(cos_sim(embeddings[0], embeddings[1]))
```

## Alternatives to Using Transformers Package
1. _Managed SaaS_: Get started with a free key on Jina AI's [Embedding API](https://jina.ai/embeddings/).
2. _Private and high-performance deployment_: Get started by picking from our suite of models and deploying them on [AWS Sagemaker](https://aws.amazon.com/marketplace/seller-profile?id=seller-stch2ludm6vgy).

## RAG Performance

According to the latest blog post from [LlamaIndex](https://blog.llamaindex.ai/boosting-rag-picking-the-best-embedding-reranker-models-42d079022e83),

> In summary, to achieve the peak performance in both hit rate and MRR, the combination of OpenAI or JinaAI-Base embeddings with the CohereRerank/bge-reranker-large reranker stands out.

<img src="https://miro.medium.com/v2/resize:fit:4800/format:webp/1*ZP2RVejCZovF3FDCg-Bx3A.png" width="780px">

## Plans

1. Bilingual embedding models supporting more European & Asian languages, including Spanish, French, Italian and Japanese.
2. Multimodal embedding models to enable multimodal RAG applications.
3. High-performance rerankers.

## Troubleshooting

**Loading of Model Code failed**

If you forgot to pass the `trust_remote_code=True` flag when calling `AutoModel.from_pretrained` or initializing the model via the `SentenceTransformer` class, you will receive an error that the model weights could not be initialized. This is caused by transformers falling back to creating a default BERT model instead of a jina-embeddings model (see the loading sketch at the end of this card):

```bash
Some weights of the model checkpoint at jinaai/jina-embeddings-v2-base-en were not used when initializing BertModel:
['encoder.layer.2.mlp.layernorm.weight', 'encoder.layer.3.mlp.layernorm.weight',
'encoder.layer.10.mlp.wo.bias', 'encoder.layer.5.mlp.wo.bias',
'encoder.layer.2.mlp.layernorm.bias', 'encoder.layer.1.mlp.gated_layers.weight',
'encoder.layer.5.mlp.gated_layers.weight', 'encoder.layer.8.mlp.layernorm.bias',
...
```

## Contact

Join our [Discord community](https://discord.jina.ai) and chat with other community members about ideas.

## Citation

If you find Jina Embeddings useful in your research, please cite the following paper:

```
@misc{günther2023jina,
      title={Jina Embeddings 2: 8192-Token General-Purpose Text Embeddings for Long Documents},
      author={Michael Günther and Jackmin Ong and Isabelle Mohr and Alaeddine Abdessalem and Tanguy Abel and Mohammad Kalim Akram and Susana Guzman and Georgios Mastrapas and Saba Sturua and Bo Wang and Maximilian Werk and Nan Wang and Han Xiao},
      year={2023},
      eprint={2310.19923},
      archivePrefix={arXiv},
      primaryClass={cs.CL}
}
```
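To make the troubleshooting note above concrete, here is a minimal loading sketch rather than an official snippet: it only recombines the two loading paths already shown in the usage section, with `trust_remote_code=True` passed in both, so that transformers picks up the custom JinaBERT code instead of falling back to a plain `BertModel`. The final shape check is an illustrative addition, not part of the original card.

```python
# Minimal sketch: pass trust_remote_code=True on both loading paths so the
# custom JinaBERT code is used and the checkpoint weights map correctly.
from transformers import AutoModel
from sentence_transformers import SentenceTransformer

# transformers path
hf_model = AutoModel.from_pretrained(
    "jinaai/jina-embeddings-v2-small-en",
    trust_remote_code=True,  # omitting this triggers the BertModel fallback warning above
)

# sentence-transformers path
st_model = SentenceTransformer(
    "jinaai/jina-embeddings-v2-small-en",
    trust_remote_code=True,
)

embeddings = st_model.encode(["How is the weather today?"])
print(embeddings.shape)  # illustrative check that encoding works
```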
[ "BIOSSES", "SCIFACT" ]
intfloat/multilingual-e5-large-instruct
intfloat
feature-extraction
[ "sentence-transformers", "onnx", "safetensors", "xlm-roberta", "feature-extraction", "mteb", "transformers", "multilingual", "af", "am", "ar", "as", "az", "be", "bg", "bn", "br", "bs", "ca", "cs", "cy", "da", "de", "el", "en", "eo", "es", "et", "eu", "fa", "fi", "fr", "fy", "ga", "gd", "gl", "gu", "ha", "he", "hi", "hr", "hu", "hy", "id", "is", "it", "ja", "jv", "ka", "kk", "km", "kn", "ko", "ku", "ky", "la", "lo", "lt", "lv", "mg", "mk", "ml", "mn", "mr", "ms", "my", "ne", "nl", "no", "om", "or", "pa", "pl", "ps", "pt", "ro", "ru", "sa", "sd", "si", "sk", "sl", "so", "sq", "sr", "su", "sv", "sw", "ta", "te", "th", "tl", "tr", "ug", "uk", "ur", "uz", "vi", "xh", "yi", "zh", "arxiv:2402.05672", "arxiv:2401.00368", "arxiv:2104.08663", "arxiv:2210.07316", "license:mit", "model-index", "autotrain_compatible", "text-embeddings-inference", "endpoints_compatible", "region:us" ]
"2024-02-08T12:59:32Z"
2025-02-17T04:16:16+00:00
812,767
382
--- language: - multilingual - af - am - ar - as - az - be - bg - bn - br - bs - ca - cs - cy - da - de - el - en - eo - es - et - eu - fa - fi - fr - fy - ga - gd - gl - gu - ha - he - hi - hr - hu - hy - id - is - it - ja - jv - ka - kk - km - kn - ko - ku - ky - la - lo - lt - lv - mg - mk - ml - mn - mr - ms - my - ne - nl - 'no' - om - or - pa - pl - ps - pt - ro - ru - sa - sd - si - sk - sl - so - sq - sr - su - sv - sw - ta - te - th - tl - tr - ug - uk - ur - uz - vi - xh - yi - zh license: mit tags: - mteb - sentence-transformers - transformers model-index: - name: multilingual-e5-large-instruct results: - task: type: Classification dataset: name: MTEB AmazonCounterfactualClassification (en) type: mteb/amazon_counterfactual config: en split: test revision: e8379541af4e31359cca9fbcf4b00f2671dba205 metrics: - type: accuracy value: 76.23880597014924 - type: ap value: 39.07351965022687 - type: f1 value: 70.04836733862683 - task: type: Classification dataset: name: MTEB AmazonCounterfactualClassification (de) type: mteb/amazon_counterfactual config: de split: test revision: e8379541af4e31359cca9fbcf4b00f2671dba205 metrics: - type: accuracy value: 66.71306209850107 - type: ap value: 79.01499914759529 - type: f1 value: 64.81951817560703 - task: type: Classification dataset: name: MTEB AmazonCounterfactualClassification (en-ext) type: mteb/amazon_counterfactual config: en-ext split: test revision: e8379541af4e31359cca9fbcf4b00f2671dba205 metrics: - type: accuracy value: 73.85307346326837 - type: ap value: 22.447519885878737 - type: f1 value: 61.0162730745633 - task: type: Classification dataset: name: MTEB AmazonCounterfactualClassification (ja) type: mteb/amazon_counterfactual config: ja split: test revision: e8379541af4e31359cca9fbcf4b00f2671dba205 metrics: - type: accuracy value: 76.04925053533191 - type: ap value: 23.44983217128922 - type: f1 value: 62.5723230907759 - task: type: Classification dataset: name: MTEB AmazonPolarityClassification type: mteb/amazon_polarity config: default split: test revision: e2d317d38cd51312af73b3d32a06d1a08b442046 metrics: - type: accuracy value: 96.28742500000001 - type: ap value: 94.8449918887462 - type: f1 value: 96.28680923610432 - task: type: Classification dataset: name: MTEB AmazonReviewsClassification (en) type: mteb/amazon_reviews_multi config: en split: test revision: 1399c76144fd37290681b995c656ef9b2e06e26d metrics: - type: accuracy value: 56.716 - type: f1 value: 55.76510398266401 - task: type: Classification dataset: name: MTEB AmazonReviewsClassification (de) type: mteb/amazon_reviews_multi config: de split: test revision: 1399c76144fd37290681b995c656ef9b2e06e26d metrics: - type: accuracy value: 52.99999999999999 - type: f1 value: 52.00829994765178 - task: type: Classification dataset: name: MTEB AmazonReviewsClassification (es) type: mteb/amazon_reviews_multi config: es split: test revision: 1399c76144fd37290681b995c656ef9b2e06e26d metrics: - type: accuracy value: 48.806000000000004 - type: f1 value: 48.082345914983634 - task: type: Classification dataset: name: MTEB AmazonReviewsClassification (fr) type: mteb/amazon_reviews_multi config: fr split: test revision: 1399c76144fd37290681b995c656ef9b2e06e26d metrics: - type: accuracy value: 48.507999999999996 - type: f1 value: 47.68752844642045 - task: type: Classification dataset: name: MTEB AmazonReviewsClassification (ja) type: mteb/amazon_reviews_multi config: ja split: test revision: 1399c76144fd37290681b995c656ef9b2e06e26d metrics: - type: accuracy value: 47.709999999999994 - type: f1 
value: 47.05870376637181 - task: type: Classification dataset: name: MTEB AmazonReviewsClassification (zh) type: mteb/amazon_reviews_multi config: zh split: test revision: 1399c76144fd37290681b995c656ef9b2e06e26d metrics: - type: accuracy value: 44.662000000000006 - type: f1 value: 43.42371965372771 - task: type: Retrieval dataset: name: MTEB ArguAna type: arguana config: default split: test revision: None metrics: - type: map_at_1 value: 31.721 - type: map_at_10 value: 49.221 - type: map_at_100 value: 49.884 - type: map_at_1000 value: 49.888 - type: map_at_3 value: 44.31 - type: map_at_5 value: 47.276 - type: mrr_at_1 value: 32.432 - type: mrr_at_10 value: 49.5 - type: mrr_at_100 value: 50.163000000000004 - type: mrr_at_1000 value: 50.166 - type: mrr_at_3 value: 44.618 - type: mrr_at_5 value: 47.541 - type: ndcg_at_1 value: 31.721 - type: ndcg_at_10 value: 58.384 - type: ndcg_at_100 value: 61.111000000000004 - type: ndcg_at_1000 value: 61.187999999999995 - type: ndcg_at_3 value: 48.386 - type: ndcg_at_5 value: 53.708999999999996 - type: precision_at_1 value: 31.721 - type: precision_at_10 value: 8.741 - type: precision_at_100 value: 0.991 - type: precision_at_1000 value: 0.1 - type: precision_at_3 value: 20.057 - type: precision_at_5 value: 14.609 - type: recall_at_1 value: 31.721 - type: recall_at_10 value: 87.411 - type: recall_at_100 value: 99.075 - type: recall_at_1000 value: 99.644 - type: recall_at_3 value: 60.171 - type: recall_at_5 value: 73.044 - task: type: Clustering dataset: name: MTEB ArxivClusteringP2P type: mteb/arxiv-clustering-p2p config: default split: test revision: a122ad7f3f0291bf49cc6f4d32aa80929df69d5d metrics: - type: v_measure value: 46.40419580759799 - task: type: Clustering dataset: name: MTEB ArxivClusteringS2S type: mteb/arxiv-clustering-s2s config: default split: test revision: f910caf1a6075f7329cdf8c1a6135696f37dbd53 metrics: - type: v_measure value: 40.48593255007969 - task: type: Reranking dataset: name: MTEB AskUbuntuDupQuestions type: mteb/askubuntudupquestions-reranking config: default split: test revision: 2000358ca161889fa9c082cb41daa8dcfb161a54 metrics: - type: map value: 63.889179122289995 - type: mrr value: 77.61146286769556 - task: type: STS dataset: name: MTEB BIOSSES type: mteb/biosses-sts config: default split: test revision: d3fb88f8f02e40887cd149695127462bbcf29b4a metrics: - type: cos_sim_pearson value: 88.15075203727929 - type: cos_sim_spearman value: 86.9622224570873 - type: euclidean_pearson value: 86.70473853624121 - type: euclidean_spearman value: 86.9622224570873 - type: manhattan_pearson value: 86.21089380980065 - type: manhattan_spearman value: 86.75318154937008 - task: type: BitextMining dataset: name: MTEB BUCC (de-en) type: mteb/bucc-bitext-mining config: de-en split: test revision: d51519689f32196a32af33b075a01d0e7c51e252 metrics: - type: accuracy value: 99.65553235908142 - type: f1 value: 99.60681976339595 - type: precision value: 99.58246346555325 - type: recall value: 99.65553235908142 - task: type: BitextMining dataset: name: MTEB BUCC (fr-en) type: mteb/bucc-bitext-mining config: fr-en split: test revision: d51519689f32196a32af33b075a01d0e7c51e252 metrics: - type: accuracy value: 99.26260180497468 - type: f1 value: 99.14520507740848 - type: precision value: 99.08650671362535 - type: recall value: 99.26260180497468 - task: type: BitextMining dataset: name: MTEB BUCC (ru-en) type: mteb/bucc-bitext-mining config: ru-en split: test revision: d51519689f32196a32af33b075a01d0e7c51e252 metrics: - type: accuracy value: 
98.07412538967787 - type: f1 value: 97.86629719431936 - type: precision value: 97.76238309664012 - type: recall value: 98.07412538967787 - task: type: BitextMining dataset: name: MTEB BUCC (zh-en) type: mteb/bucc-bitext-mining config: zh-en split: test revision: d51519689f32196a32af33b075a01d0e7c51e252 metrics: - type: accuracy value: 99.42074776197998 - type: f1 value: 99.38564156573635 - type: precision value: 99.36808846761454 - type: recall value: 99.42074776197998 - task: type: Classification dataset: name: MTEB Banking77Classification type: mteb/banking77 config: default split: test revision: 0fd18e25b25c072e09e0d92ab615fda904d66300 metrics: - type: accuracy value: 85.73376623376623 - type: f1 value: 85.68480707214599 - task: type: Clustering dataset: name: MTEB BiorxivClusteringP2P type: mteb/biorxiv-clustering-p2p config: default split: test revision: 65b79d1d13f80053f67aca9498d9402c2d9f1f40 metrics: - type: v_measure value: 40.935218072113855 - task: type: Clustering dataset: name: MTEB BiorxivClusteringS2S type: mteb/biorxiv-clustering-s2s config: default split: test revision: 258694dd0231531bc1fd9de6ceb52a0853c6d908 metrics: - type: v_measure value: 36.276389017675264 - task: type: Retrieval dataset: name: MTEB CQADupstackRetrieval type: BeIR/cqadupstack config: default split: test revision: None metrics: - type: map_at_1 value: 27.764166666666668 - type: map_at_10 value: 37.298166666666674 - type: map_at_100 value: 38.530166666666666 - type: map_at_1000 value: 38.64416666666667 - type: map_at_3 value: 34.484833333333334 - type: map_at_5 value: 36.0385 - type: mrr_at_1 value: 32.93558333333333 - type: mrr_at_10 value: 41.589749999999995 - type: mrr_at_100 value: 42.425333333333334 - type: mrr_at_1000 value: 42.476333333333336 - type: mrr_at_3 value: 39.26825 - type: mrr_at_5 value: 40.567083333333336 - type: ndcg_at_1 value: 32.93558333333333 - type: ndcg_at_10 value: 42.706583333333334 - type: ndcg_at_100 value: 47.82483333333333 - type: ndcg_at_1000 value: 49.95733333333334 - type: ndcg_at_3 value: 38.064750000000004 - type: ndcg_at_5 value: 40.18158333333333 - type: precision_at_1 value: 32.93558333333333 - type: precision_at_10 value: 7.459833333333334 - type: precision_at_100 value: 1.1830833333333335 - type: precision_at_1000 value: 0.15608333333333332 - type: precision_at_3 value: 17.5235 - type: precision_at_5 value: 12.349833333333333 - type: recall_at_1 value: 27.764166666666668 - type: recall_at_10 value: 54.31775 - type: recall_at_100 value: 76.74350000000001 - type: recall_at_1000 value: 91.45208333333332 - type: recall_at_3 value: 41.23425 - type: recall_at_5 value: 46.73983333333334 - task: type: Retrieval dataset: name: MTEB ClimateFEVER type: climate-fever config: default split: test revision: None metrics: - type: map_at_1 value: 12.969 - type: map_at_10 value: 21.584999999999997 - type: map_at_100 value: 23.3 - type: map_at_1000 value: 23.5 - type: map_at_3 value: 18.218999999999998 - type: map_at_5 value: 19.983 - type: mrr_at_1 value: 29.316 - type: mrr_at_10 value: 40.033 - type: mrr_at_100 value: 40.96 - type: mrr_at_1000 value: 41.001 - type: mrr_at_3 value: 37.123 - type: mrr_at_5 value: 38.757999999999996 - type: ndcg_at_1 value: 29.316 - type: ndcg_at_10 value: 29.858 - type: ndcg_at_100 value: 36.756 - type: ndcg_at_1000 value: 40.245999999999995 - type: ndcg_at_3 value: 24.822 - type: ndcg_at_5 value: 26.565 - type: precision_at_1 value: 29.316 - type: precision_at_10 value: 9.186 - type: precision_at_100 value: 1.6549999999999998 - type: 
precision_at_1000 value: 0.22999999999999998 - type: precision_at_3 value: 18.436 - type: precision_at_5 value: 13.876 - type: recall_at_1 value: 12.969 - type: recall_at_10 value: 35.142 - type: recall_at_100 value: 59.143 - type: recall_at_1000 value: 78.594 - type: recall_at_3 value: 22.604 - type: recall_at_5 value: 27.883000000000003 - task: type: Retrieval dataset: name: MTEB DBPedia type: dbpedia-entity config: default split: test revision: None metrics: - type: map_at_1 value: 8.527999999999999 - type: map_at_10 value: 17.974999999999998 - type: map_at_100 value: 25.665 - type: map_at_1000 value: 27.406000000000002 - type: map_at_3 value: 13.017999999999999 - type: map_at_5 value: 15.137 - type: mrr_at_1 value: 62.5 - type: mrr_at_10 value: 71.891 - type: mrr_at_100 value: 72.294 - type: mrr_at_1000 value: 72.296 - type: mrr_at_3 value: 69.958 - type: mrr_at_5 value: 71.121 - type: ndcg_at_1 value: 50.875 - type: ndcg_at_10 value: 38.36 - type: ndcg_at_100 value: 44.235 - type: ndcg_at_1000 value: 52.154 - type: ndcg_at_3 value: 43.008 - type: ndcg_at_5 value: 40.083999999999996 - type: precision_at_1 value: 62.5 - type: precision_at_10 value: 30.0 - type: precision_at_100 value: 10.038 - type: precision_at_1000 value: 2.0869999999999997 - type: precision_at_3 value: 46.833000000000006 - type: precision_at_5 value: 38.800000000000004 - type: recall_at_1 value: 8.527999999999999 - type: recall_at_10 value: 23.828 - type: recall_at_100 value: 52.322 - type: recall_at_1000 value: 77.143 - type: recall_at_3 value: 14.136000000000001 - type: recall_at_5 value: 17.761 - task: type: Classification dataset: name: MTEB EmotionClassification type: mteb/emotion config: default split: test revision: 4f58c6b202a23cf9a4da393831edf4f9183cad37 metrics: - type: accuracy value: 51.51 - type: f1 value: 47.632159862049896 - task: type: Retrieval dataset: name: MTEB FEVER type: fever config: default split: test revision: None metrics: - type: map_at_1 value: 60.734 - type: map_at_10 value: 72.442 - type: map_at_100 value: 72.735 - type: map_at_1000 value: 72.75 - type: map_at_3 value: 70.41199999999999 - type: map_at_5 value: 71.80499999999999 - type: mrr_at_1 value: 65.212 - type: mrr_at_10 value: 76.613 - type: mrr_at_100 value: 76.79899999999999 - type: mrr_at_1000 value: 76.801 - type: mrr_at_3 value: 74.8 - type: mrr_at_5 value: 76.12400000000001 - type: ndcg_at_1 value: 65.212 - type: ndcg_at_10 value: 77.988 - type: ndcg_at_100 value: 79.167 - type: ndcg_at_1000 value: 79.452 - type: ndcg_at_3 value: 74.362 - type: ndcg_at_5 value: 76.666 - type: precision_at_1 value: 65.212 - type: precision_at_10 value: 10.003 - type: precision_at_100 value: 1.077 - type: precision_at_1000 value: 0.11199999999999999 - type: precision_at_3 value: 29.518 - type: precision_at_5 value: 19.016 - type: recall_at_1 value: 60.734 - type: recall_at_10 value: 90.824 - type: recall_at_100 value: 95.71600000000001 - type: recall_at_1000 value: 97.577 - type: recall_at_3 value: 81.243 - type: recall_at_5 value: 86.90299999999999 - task: type: Retrieval dataset: name: MTEB FiQA2018 type: fiqa config: default split: test revision: None metrics: - type: map_at_1 value: 23.845 - type: map_at_10 value: 39.281 - type: map_at_100 value: 41.422 - type: map_at_1000 value: 41.593 - type: map_at_3 value: 34.467 - type: map_at_5 value: 37.017 - type: mrr_at_1 value: 47.531 - type: mrr_at_10 value: 56.204 - type: mrr_at_100 value: 56.928999999999995 - type: mrr_at_1000 value: 56.962999999999994 - type: mrr_at_3 value: 54.115 - type: 
mrr_at_5 value: 55.373000000000005 - type: ndcg_at_1 value: 47.531 - type: ndcg_at_10 value: 47.711999999999996 - type: ndcg_at_100 value: 54.510999999999996 - type: ndcg_at_1000 value: 57.103 - type: ndcg_at_3 value: 44.145 - type: ndcg_at_5 value: 45.032 - type: precision_at_1 value: 47.531 - type: precision_at_10 value: 13.194 - type: precision_at_100 value: 2.045 - type: precision_at_1000 value: 0.249 - type: precision_at_3 value: 29.424 - type: precision_at_5 value: 21.451 - type: recall_at_1 value: 23.845 - type: recall_at_10 value: 54.967 - type: recall_at_100 value: 79.11399999999999 - type: recall_at_1000 value: 94.56700000000001 - type: recall_at_3 value: 40.256 - type: recall_at_5 value: 46.215 - task: type: Retrieval dataset: name: MTEB HotpotQA type: hotpotqa config: default split: test revision: None metrics: - type: map_at_1 value: 37.819 - type: map_at_10 value: 60.889 - type: map_at_100 value: 61.717999999999996 - type: map_at_1000 value: 61.778 - type: map_at_3 value: 57.254000000000005 - type: map_at_5 value: 59.541 - type: mrr_at_1 value: 75.638 - type: mrr_at_10 value: 82.173 - type: mrr_at_100 value: 82.362 - type: mrr_at_1000 value: 82.37 - type: mrr_at_3 value: 81.089 - type: mrr_at_5 value: 81.827 - type: ndcg_at_1 value: 75.638 - type: ndcg_at_10 value: 69.317 - type: ndcg_at_100 value: 72.221 - type: ndcg_at_1000 value: 73.382 - type: ndcg_at_3 value: 64.14 - type: ndcg_at_5 value: 67.07600000000001 - type: precision_at_1 value: 75.638 - type: precision_at_10 value: 14.704999999999998 - type: precision_at_100 value: 1.698 - type: precision_at_1000 value: 0.185 - type: precision_at_3 value: 41.394999999999996 - type: precision_at_5 value: 27.162999999999997 - type: recall_at_1 value: 37.819 - type: recall_at_10 value: 73.52499999999999 - type: recall_at_100 value: 84.875 - type: recall_at_1000 value: 92.559 - type: recall_at_3 value: 62.092999999999996 - type: recall_at_5 value: 67.907 - task: type: Classification dataset: name: MTEB ImdbClassification type: mteb/imdb config: default split: test revision: 3d86128a09e091d6018b6d26cad27f2739fc2db7 metrics: - type: accuracy value: 94.60079999999999 - type: ap value: 92.67396345347356 - type: f1 value: 94.5988098167121 - task: type: Retrieval dataset: name: MTEB MSMARCO type: msmarco config: default split: dev revision: None metrics: - type: map_at_1 value: 21.285 - type: map_at_10 value: 33.436 - type: map_at_100 value: 34.63 - type: map_at_1000 value: 34.681 - type: map_at_3 value: 29.412 - type: map_at_5 value: 31.715 - type: mrr_at_1 value: 21.848 - type: mrr_at_10 value: 33.979 - type: mrr_at_100 value: 35.118 - type: mrr_at_1000 value: 35.162 - type: mrr_at_3 value: 30.036 - type: mrr_at_5 value: 32.298 - type: ndcg_at_1 value: 21.862000000000002 - type: ndcg_at_10 value: 40.43 - type: ndcg_at_100 value: 46.17 - type: ndcg_at_1000 value: 47.412 - type: ndcg_at_3 value: 32.221 - type: ndcg_at_5 value: 36.332 - type: precision_at_1 value: 21.862000000000002 - type: precision_at_10 value: 6.491 - type: precision_at_100 value: 0.935 - type: precision_at_1000 value: 0.104 - type: precision_at_3 value: 13.744 - type: precision_at_5 value: 10.331999999999999 - type: recall_at_1 value: 21.285 - type: recall_at_10 value: 62.083 - type: recall_at_100 value: 88.576 - type: recall_at_1000 value: 98.006 - type: recall_at_3 value: 39.729 - type: recall_at_5 value: 49.608000000000004 - task: type: Classification dataset: name: MTEB MTOPDomainClassification (en) type: mteb/mtop_domain config: en split: test revision: 
d80d48c1eb48d3562165c59d59d0034df9fff0bf metrics: - type: accuracy value: 93.92612859097127 - type: f1 value: 93.82370333372853 - task: type: Classification dataset: name: MTEB MTOPDomainClassification (de) type: mteb/mtop_domain config: de split: test revision: d80d48c1eb48d3562165c59d59d0034df9fff0bf metrics: - type: accuracy value: 92.67681036911807 - type: f1 value: 92.14191382411472 - task: type: Classification dataset: name: MTEB MTOPDomainClassification (es) type: mteb/mtop_domain config: es split: test revision: d80d48c1eb48d3562165c59d59d0034df9fff0bf metrics: - type: accuracy value: 92.26817878585723 - type: f1 value: 91.92824250337878 - task: type: Classification dataset: name: MTEB MTOPDomainClassification (fr) type: mteb/mtop_domain config: fr split: test revision: d80d48c1eb48d3562165c59d59d0034df9fff0bf metrics: - type: accuracy value: 89.96554963983714 - type: f1 value: 90.02859329630792 - task: type: Classification dataset: name: MTEB MTOPDomainClassification (hi) type: mteb/mtop_domain config: hi split: test revision: d80d48c1eb48d3562165c59d59d0034df9fff0bf metrics: - type: accuracy value: 90.02509860164935 - type: f1 value: 89.30665159182062 - task: type: Classification dataset: name: MTEB MTOPDomainClassification (th) type: mteb/mtop_domain config: th split: test revision: d80d48c1eb48d3562165c59d59d0034df9fff0bf metrics: - type: accuracy value: 87.55515370705244 - type: f1 value: 87.94449232331907 - task: type: Classification dataset: name: MTEB MTOPIntentClassification (en) type: mteb/mtop_intent config: en split: test revision: ae001d0e6b1228650b7bd1c2c65fb50ad11a8aba metrics: - type: accuracy value: 82.4623803009576 - type: f1 value: 66.06738378772725 - task: type: Classification dataset: name: MTEB MTOPIntentClassification (de) type: mteb/mtop_intent config: de split: test revision: ae001d0e6b1228650b7bd1c2c65fb50ad11a8aba metrics: - type: accuracy value: 79.3716539870386 - type: f1 value: 60.37614033396853 - task: type: Classification dataset: name: MTEB MTOPIntentClassification (es) type: mteb/mtop_intent config: es split: test revision: ae001d0e6b1228650b7bd1c2c65fb50ad11a8aba metrics: - type: accuracy value: 80.34022681787857 - type: f1 value: 58.302008026952 - task: type: Classification dataset: name: MTEB MTOPIntentClassification (fr) type: mteb/mtop_intent config: fr split: test revision: ae001d0e6b1228650b7bd1c2c65fb50ad11a8aba metrics: - type: accuracy value: 76.72095208268087 - type: f1 value: 59.64524724009049 - task: type: Classification dataset: name: MTEB MTOPIntentClassification (hi) type: mteb/mtop_intent config: hi split: test revision: ae001d0e6b1228650b7bd1c2c65fb50ad11a8aba metrics: - type: accuracy value: 77.87020437432773 - type: f1 value: 57.80202694670567 - task: type: Classification dataset: name: MTEB MTOPIntentClassification (th) type: mteb/mtop_intent config: th split: test revision: ae001d0e6b1228650b7bd1c2c65fb50ad11a8aba metrics: - type: accuracy value: 77.73598553345387 - type: f1 value: 58.19628250675031 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (af) type: mteb/amazon_massive_intent config: af split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 67.6630800268998 - type: f1 value: 65.00996668051691 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (am) type: mteb/amazon_massive_intent config: am split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 60.7128446536651 - type: f1 value: 
57.95860594874963 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (ar) type: mteb/amazon_massive_intent config: ar split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 63.61129791526563 - type: f1 value: 59.75328290206483 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (az) type: mteb/amazon_massive_intent config: az split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 69.00134498991257 - type: f1 value: 67.0230483991802 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (bn) type: mteb/amazon_massive_intent config: bn split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 68.54068594485541 - type: f1 value: 65.54604628946976 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (cy) type: mteb/amazon_massive_intent config: cy split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 63.032952252858095 - type: f1 value: 58.715741857057104 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (da) type: mteb/amazon_massive_intent config: da split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 71.80901143241427 - type: f1 value: 68.33963989243877 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (de) type: mteb/amazon_massive_intent config: de split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 72.47141896435777 - type: f1 value: 69.56765020308262 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (el) type: mteb/amazon_massive_intent config: el split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 71.2373907195696 - type: f1 value: 69.04529836036467 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (en) type: mteb/amazon_massive_intent config: en split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 77.05783456624076 - type: f1 value: 74.69430584708174 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (es) type: mteb/amazon_massive_intent config: es split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 72.82111634162744 - type: f1 value: 70.77228952803762 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (fa) type: mteb/amazon_massive_intent config: fa split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 74.25353059852051 - type: f1 value: 71.05310103416411 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (fi) type: mteb/amazon_massive_intent config: fi split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 72.28648285137861 - type: f1 value: 69.08020473732226 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (fr) type: mteb/amazon_massive_intent config: fr split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 73.31540013449899 - type: f1 value: 70.9426355465791 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (he) type: mteb/amazon_massive_intent config: he split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 
70.2151983860121 - type: f1 value: 67.52541755908858 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (hi) type: mteb/amazon_massive_intent config: hi split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 71.58372562205784 - type: f1 value: 69.49769064229827 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (hu) type: mteb/amazon_massive_intent config: hu split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 71.9233355749832 - type: f1 value: 69.36311548259593 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (hy) type: mteb/amazon_massive_intent config: hy split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 68.07330195023538 - type: f1 value: 64.99882022345572 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (id) type: mteb/amazon_massive_intent config: id split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 72.62273032952253 - type: f1 value: 70.6394885471001 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (is) type: mteb/amazon_massive_intent config: is split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 65.77000672494957 - type: f1 value: 62.9368944815065 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (it) type: mteb/amazon_massive_intent config: it split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 73.453261600538 - type: f1 value: 70.85069934666681 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (ja) type: mteb/amazon_massive_intent config: ja split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 74.6906523201076 - type: f1 value: 72.03249740074217 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (jv) type: mteb/amazon_massive_intent config: jv split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 63.03631472763953 - type: f1 value: 59.3165215571852 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (ka) type: mteb/amazon_massive_intent config: ka split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 58.913920645595155 - type: f1 value: 57.367337711611285 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (km) type: mteb/amazon_massive_intent config: km split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 54.42837928715535 - type: f1 value: 52.60527294970906 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (kn) type: mteb/amazon_massive_intent config: kn split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 66.33490248823135 - type: f1 value: 63.213340969404065 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (ko) type: mteb/amazon_massive_intent config: ko split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 70.58507061197041 - type: f1 value: 68.40256628040486 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (lv) type: mteb/amazon_massive_intent config: lv split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 
metrics: - type: accuracy value: 69.11230665770006 - type: f1 value: 66.44863577842305 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (ml) type: mteb/amazon_massive_intent config: ml split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 69.70073974445192 - type: f1 value: 67.21291337273702 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (mn) type: mteb/amazon_massive_intent config: mn split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 66.43913920645595 - type: f1 value: 64.09838087422806 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (ms) type: mteb/amazon_massive_intent config: ms split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 70.80026899798251 - type: f1 value: 68.76986742962444 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (my) type: mteb/amazon_massive_intent config: my split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 64.78816408876934 - type: f1 value: 62.18781873428972 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (nb) type: mteb/amazon_massive_intent config: nb split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 71.6577000672495 - type: f1 value: 68.75171511133003 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (nl) type: mteb/amazon_massive_intent config: nl split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 74.42501681237391 - type: f1 value: 71.18434963451544 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (pl) type: mteb/amazon_massive_intent config: pl split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 73.64828513786146 - type: f1 value: 70.67741914007422 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (pt) type: mteb/amazon_massive_intent config: pt split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 73.62811028917284 - type: f1 value: 71.36402039740959 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (ro) type: mteb/amazon_massive_intent config: ro split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 71.88634835238736 - type: f1 value: 69.23701923480677 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (ru) type: mteb/amazon_massive_intent config: ru split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 74.15938130464022 - type: f1 value: 71.87792218993388 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (sl) type: mteb/amazon_massive_intent config: sl split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 69.96301277740416 - type: f1 value: 67.29584200202983 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (sq) type: mteb/amazon_massive_intent config: sq split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 69.49562878278412 - type: f1 value: 66.91716685679431 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (sv) type: mteb/amazon_massive_intent config: sv split: test revision: 
31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 74.6805648957633 - type: f1 value: 72.02723592594374 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (sw) type: mteb/amazon_massive_intent config: sw split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 63.00605245460659 - type: f1 value: 60.16716669482932 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (ta) type: mteb/amazon_massive_intent config: ta split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 66.90988567585742 - type: f1 value: 63.99405488777784 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (te) type: mteb/amazon_massive_intent config: te split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 67.62273032952253 - type: f1 value: 65.17213906909481 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (th) type: mteb/amazon_massive_intent config: th split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 69.50907868190988 - type: f1 value: 69.15165697194853 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (tl) type: mteb/amazon_massive_intent config: tl split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 69.30733019502352 - type: f1 value: 66.69024007380474 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (tr) type: mteb/amazon_massive_intent config: tr split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 72.24277067921989 - type: f1 value: 68.80515408492947 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (ur) type: mteb/amazon_massive_intent config: ur split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 67.49831876260929 - type: f1 value: 64.83778567111116 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (vi) type: mteb/amazon_massive_intent config: vi split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 71.28782784129119 - type: f1 value: 69.3294186700733 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (zh-CN) type: mteb/amazon_massive_intent config: zh-CN split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 73.315400134499 - type: f1 value: 71.22674385243207 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (zh-TW) type: mteb/amazon_massive_intent config: zh-TW split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 69.37794216543377 - type: f1 value: 68.96962492838232 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (af) type: mteb/amazon_massive_scenario config: af split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 73.33557498318764 - type: f1 value: 72.28949738478356 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (am) type: mteb/amazon_massive_scenario config: am split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 65.84398117014123 - type: f1 value: 64.71026362091463 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (ar) type: 
mteb/amazon_massive_scenario config: ar split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 69.76462676529925 - type: f1 value: 69.8229667407667 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (az) type: mteb/amazon_massive_scenario config: az split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 72.02420981842636 - type: f1 value: 71.76576384895898 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (bn) type: mteb/amazon_massive_scenario config: bn split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 72.7572293207801 - type: f1 value: 72.76840765295256 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (cy) type: mteb/amazon_massive_scenario config: cy split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 68.02286482851379 - type: f1 value: 66.17237947327872 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (da) type: mteb/amazon_massive_scenario config: da split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 77.60928043039678 - type: f1 value: 77.27094731234773 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (de) type: mteb/amazon_massive_scenario config: de split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 77.68325487558843 - type: f1 value: 77.97530399082261 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (el) type: mteb/amazon_massive_scenario config: el split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 76.13315400134498 - type: f1 value: 75.97558584796424 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (en) type: mteb/amazon_massive_scenario config: en split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 80.47410894418292 - type: f1 value: 80.52244841473792 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (es) type: mteb/amazon_massive_scenario config: es split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 76.9670477471419 - type: f1 value: 77.37318805793146 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (fa) type: mteb/amazon_massive_scenario config: fa split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 78.09683927370544 - type: f1 value: 77.69773737430847 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (fi) type: mteb/amazon_massive_scenario config: fi split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 75.20847343644922 - type: f1 value: 75.17071738727348 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (fr) type: mteb/amazon_massive_scenario config: fr split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 77.07464694014796 - type: f1 value: 77.16136207698571 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (he) type: mteb/amazon_massive_scenario config: he split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 73.53396099529255 - type: f1 value: 73.58296404484122 - task: 
type: Classification dataset: name: MTEB MassiveScenarioClassification (hi) type: mteb/amazon_massive_scenario config: hi split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 75.75319435104237 - type: f1 value: 75.24674707850833 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (hu) type: mteb/amazon_massive_scenario config: hu split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 77.0948217888366 - type: f1 value: 76.47559490205028 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (hy) type: mteb/amazon_massive_scenario config: hy split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 71.07599193006052 - type: f1 value: 70.76028043093511 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (id) type: mteb/amazon_massive_scenario config: id split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 77.10490921318089 - type: f1 value: 77.01215275283272 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (is) type: mteb/amazon_massive_scenario config: is split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 71.25756556825824 - type: f1 value: 70.20605314648762 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (it) type: mteb/amazon_massive_scenario config: it split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 77.08137188971082 - type: f1 value: 77.3899269057439 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (ja) type: mteb/amazon_massive_scenario config: ja split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 79.35440484196369 - type: f1 value: 79.58964690002772 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (jv) type: mteb/amazon_massive_scenario config: jv split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 68.42299932750504 - type: f1 value: 68.07844356925413 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (ka) type: mteb/amazon_massive_scenario config: ka split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 66.15669132481507 - type: f1 value: 65.89383352608513 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (km) type: mteb/amazon_massive_scenario config: km split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 60.11432414256894 - type: f1 value: 57.69910594559806 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (kn) type: mteb/amazon_massive_scenario config: kn split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 71.24747814391392 - type: f1 value: 70.42455553830918 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (ko) type: mteb/amazon_massive_scenario config: ko split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 76.46267652992603 - type: f1 value: 76.8854559308316 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (lv) type: mteb/amazon_massive_scenario config: lv split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - 
type: accuracy value: 73.24815063887021 - type: f1 value: 72.77805034658074 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (ml) type: mteb/amazon_massive_scenario config: ml split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 74.11566913248151 - type: f1 value: 73.86147988001356 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (mn) type: mteb/amazon_massive_scenario config: mn split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 70.0168123739072 - type: f1 value: 69.38515920054571 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (ms) type: mteb/amazon_massive_scenario config: ms split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 74.41156691324814 - type: f1 value: 73.43474953408237 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (my) type: mteb/amazon_massive_scenario config: my split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 68.39609952925353 - type: f1 value: 67.29731681109291 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (nb) type: mteb/amazon_massive_scenario config: nb split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 77.20914593140552 - type: f1 value: 77.07066497935367 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (nl) type: mteb/amazon_massive_scenario config: nl split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 78.52387357094821 - type: f1 value: 78.5259569473291 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (pl) type: mteb/amazon_massive_scenario config: pl split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 76.6913248150639 - type: f1 value: 76.91201656350455 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (pt) type: mteb/amazon_massive_scenario config: pt split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 77.1217215870881 - type: f1 value: 77.41179937912504 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (ro) type: mteb/amazon_massive_scenario config: ro split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 75.25891055817083 - type: f1 value: 75.8089244542887 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (ru) type: mteb/amazon_massive_scenario config: ru split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 77.70679219905851 - type: f1 value: 78.21459594517711 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (sl) type: mteb/amazon_massive_scenario config: sl split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 74.83523873570948 - type: f1 value: 74.86847028401978 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (sq) type: mteb/amazon_massive_scenario config: sq split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 74.71755211835911 - type: f1 value: 74.0214326485662 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (sv) type: mteb/amazon_massive_scenario 
config: sv split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 79.06523201075991 - type: f1 value: 79.10545620325138 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (sw) type: mteb/amazon_massive_scenario config: sw split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 67.91862811028918 - type: f1 value: 66.50386121217983 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (ta) type: mteb/amazon_massive_scenario config: ta split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 70.93140551445865 - type: f1 value: 70.755435928495 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (te) type: mteb/amazon_massive_scenario config: te split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 72.40753194351042 - type: f1 value: 71.61816115782923 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (th) type: mteb/amazon_massive_scenario config: th split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 75.1815736381977 - type: f1 value: 75.08016717887205 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (tl) type: mteb/amazon_massive_scenario config: tl split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 72.86482851378614 - type: f1 value: 72.39521180006291 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (tr) type: mteb/amazon_massive_scenario config: tr split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 76.46940147948891 - type: f1 value: 76.70044085362349 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (ur) type: mteb/amazon_massive_scenario config: ur split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 71.89307330195024 - type: f1 value: 71.5721825332298 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (vi) type: mteb/amazon_massive_scenario config: vi split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 74.7511768661735 - type: f1 value: 75.17918654541515 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (zh-CN) type: mteb/amazon_massive_scenario config: zh-CN split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 78.69535978480162 - type: f1 value: 78.90019070153316 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (zh-TW) type: mteb/amazon_massive_scenario config: zh-TW split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 75.45729657027572 - type: f1 value: 76.19578371794672 - task: type: Clustering dataset: name: MTEB MedrxivClusteringP2P type: mteb/medrxiv-clustering-p2p config: default split: test revision: e7a26af6f3ae46b30dde8737f02c07b1505bcc73 metrics: - type: v_measure value: 36.92715354123554 - task: type: Clustering dataset: name: MTEB MedrxivClusteringS2S type: mteb/medrxiv-clustering-s2s config: default split: test revision: 35191c8c0dca72d8ff3efcd72aa802307d469663 metrics: - type: v_measure value: 35.53536244162518 - task: type: Reranking dataset: name: MTEB MindSmallReranking type: mteb/mind_small config: default split: test revision: 
3bdac13927fdc888b903db93b2ffdbd90b295a69 metrics: - type: map value: 33.08507884504006 - type: mrr value: 34.32436977159129 - task: type: Retrieval dataset: name: MTEB NFCorpus type: nfcorpus config: default split: test revision: None metrics: - type: map_at_1 value: 5.935 - type: map_at_10 value: 13.297 - type: map_at_100 value: 16.907 - type: map_at_1000 value: 18.391 - type: map_at_3 value: 9.626999999999999 - type: map_at_5 value: 11.190999999999999 - type: mrr_at_1 value: 46.129999999999995 - type: mrr_at_10 value: 54.346000000000004 - type: mrr_at_100 value: 55.067 - type: mrr_at_1000 value: 55.1 - type: mrr_at_3 value: 51.961 - type: mrr_at_5 value: 53.246 - type: ndcg_at_1 value: 44.118 - type: ndcg_at_10 value: 35.534 - type: ndcg_at_100 value: 32.946999999999996 - type: ndcg_at_1000 value: 41.599000000000004 - type: ndcg_at_3 value: 40.25 - type: ndcg_at_5 value: 37.978 - type: precision_at_1 value: 46.129999999999995 - type: precision_at_10 value: 26.842 - type: precision_at_100 value: 8.427 - type: precision_at_1000 value: 2.128 - type: precision_at_3 value: 37.977 - type: precision_at_5 value: 32.879000000000005 - type: recall_at_1 value: 5.935 - type: recall_at_10 value: 17.211000000000002 - type: recall_at_100 value: 34.33 - type: recall_at_1000 value: 65.551 - type: recall_at_3 value: 10.483 - type: recall_at_5 value: 13.078999999999999 - task: type: Retrieval dataset: name: MTEB NQ type: nq config: default split: test revision: None metrics: - type: map_at_1 value: 35.231 - type: map_at_10 value: 50.202000000000005 - type: map_at_100 value: 51.154999999999994 - type: map_at_1000 value: 51.181 - type: map_at_3 value: 45.774 - type: map_at_5 value: 48.522 - type: mrr_at_1 value: 39.687 - type: mrr_at_10 value: 52.88 - type: mrr_at_100 value: 53.569 - type: mrr_at_1000 value: 53.58500000000001 - type: mrr_at_3 value: 49.228 - type: mrr_at_5 value: 51.525 - type: ndcg_at_1 value: 39.687 - type: ndcg_at_10 value: 57.754000000000005 - type: ndcg_at_100 value: 61.597 - type: ndcg_at_1000 value: 62.18900000000001 - type: ndcg_at_3 value: 49.55 - type: ndcg_at_5 value: 54.11899999999999 - type: precision_at_1 value: 39.687 - type: precision_at_10 value: 9.313 - type: precision_at_100 value: 1.146 - type: precision_at_1000 value: 0.12 - type: precision_at_3 value: 22.229 - type: precision_at_5 value: 15.939 - type: recall_at_1 value: 35.231 - type: recall_at_10 value: 78.083 - type: recall_at_100 value: 94.42099999999999 - type: recall_at_1000 value: 98.81 - type: recall_at_3 value: 57.047000000000004 - type: recall_at_5 value: 67.637 - task: type: Retrieval dataset: name: MTEB QuoraRetrieval type: quora config: default split: test revision: None metrics: - type: map_at_1 value: 71.241 - type: map_at_10 value: 85.462 - type: map_at_100 value: 86.083 - type: map_at_1000 value: 86.09700000000001 - type: map_at_3 value: 82.49499999999999 - type: map_at_5 value: 84.392 - type: mrr_at_1 value: 82.09 - type: mrr_at_10 value: 88.301 - type: mrr_at_100 value: 88.383 - type: mrr_at_1000 value: 88.384 - type: mrr_at_3 value: 87.37 - type: mrr_at_5 value: 88.035 - type: ndcg_at_1 value: 82.12 - type: ndcg_at_10 value: 89.149 - type: ndcg_at_100 value: 90.235 - type: ndcg_at_1000 value: 90.307 - type: ndcg_at_3 value: 86.37599999999999 - type: ndcg_at_5 value: 87.964 - type: precision_at_1 value: 82.12 - type: precision_at_10 value: 13.56 - type: precision_at_100 value: 1.539 - type: precision_at_1000 value: 0.157 - type: precision_at_3 value: 37.88 - type: precision_at_5 value: 24.92 - type: 
recall_at_1 value: 71.241 - type: recall_at_10 value: 96.128 - type: recall_at_100 value: 99.696 - type: recall_at_1000 value: 99.994 - type: recall_at_3 value: 88.181 - type: recall_at_5 value: 92.694 - task: type: Clustering dataset: name: MTEB RedditClustering type: mteb/reddit-clustering config: default split: test revision: 24640382cdbf8abc73003fb0fa6d111a705499eb metrics: - type: v_measure value: 56.59757799655151 - task: type: Clustering dataset: name: MTEB RedditClusteringP2P type: mteb/reddit-clustering-p2p config: default split: test revision: 282350215ef01743dc01b456c7f5241fa8937f16 metrics: - type: v_measure value: 64.27391998854624 - task: type: Retrieval dataset: name: MTEB SCIDOCS type: scidocs config: default split: test revision: None metrics: - type: map_at_1 value: 4.243 - type: map_at_10 value: 10.965 - type: map_at_100 value: 12.934999999999999 - type: map_at_1000 value: 13.256 - type: map_at_3 value: 7.907 - type: map_at_5 value: 9.435 - type: mrr_at_1 value: 20.9 - type: mrr_at_10 value: 31.849 - type: mrr_at_100 value: 32.964 - type: mrr_at_1000 value: 33.024 - type: mrr_at_3 value: 28.517 - type: mrr_at_5 value: 30.381999999999998 - type: ndcg_at_1 value: 20.9 - type: ndcg_at_10 value: 18.723 - type: ndcg_at_100 value: 26.384999999999998 - type: ndcg_at_1000 value: 32.114 - type: ndcg_at_3 value: 17.753 - type: ndcg_at_5 value: 15.558 - type: precision_at_1 value: 20.9 - type: precision_at_10 value: 9.8 - type: precision_at_100 value: 2.078 - type: precision_at_1000 value: 0.345 - type: precision_at_3 value: 16.900000000000002 - type: precision_at_5 value: 13.88 - type: recall_at_1 value: 4.243 - type: recall_at_10 value: 19.885 - type: recall_at_100 value: 42.17 - type: recall_at_1000 value: 70.12 - type: recall_at_3 value: 10.288 - type: recall_at_5 value: 14.072000000000001 - task: type: STS dataset: name: MTEB SICK-R type: mteb/sickr-sts config: default split: test revision: a6ea5a8cab320b040a23452cc28066d9beae2cee metrics: - type: cos_sim_pearson value: 85.84209174935282 - type: cos_sim_spearman value: 81.73248048438833 - type: euclidean_pearson value: 83.02810070308149 - type: euclidean_spearman value: 81.73248295679514 - type: manhattan_pearson value: 82.95368060376002 - type: manhattan_spearman value: 81.60277910998718 - task: type: STS dataset: name: MTEB STS12 type: mteb/sts12-sts config: default split: test revision: a0d554a64d88156834ff5ae9920b964011b16384 metrics: - type: cos_sim_pearson value: 88.52628804556943 - type: cos_sim_spearman value: 82.5713913555672 - type: euclidean_pearson value: 85.8796774746988 - type: euclidean_spearman value: 82.57137506803424 - type: manhattan_pearson value: 85.79671002960058 - type: manhattan_spearman value: 82.49445981618027 - task: type: STS dataset: name: MTEB STS13 type: mteb/sts13-sts config: default split: test revision: 7e90230a92c190f1bf69ae9002b8cea547a64cca metrics: - type: cos_sim_pearson value: 86.23682503505542 - type: cos_sim_spearman value: 87.15008956711806 - type: euclidean_pearson value: 86.79805401524959 - type: euclidean_spearman value: 87.15008956711806 - type: manhattan_pearson value: 86.65298502699244 - type: manhattan_spearman value: 86.97677821948562 - task: type: STS dataset: name: MTEB STS14 type: mteb/sts14-sts config: default split: test revision: 6031580fec1f6af667f0bd2da0a551cf4f0b2375 metrics: - type: cos_sim_pearson value: 85.63370304677802 - type: cos_sim_spearman value: 84.97105553540318 - type: euclidean_pearson value: 85.28896108687721 - type: euclidean_spearman value: 
84.97105553540318 - type: manhattan_pearson value: 85.09663190337331 - type: manhattan_spearman value: 84.79126831644619 - task: type: STS dataset: name: MTEB STS15 type: mteb/sts15-sts config: default split: test revision: ae752c7c21bf194d8b67fd573edf7ae58183cbe3 metrics: - type: cos_sim_pearson value: 90.2614838800733 - type: cos_sim_spearman value: 91.0509162991835 - type: euclidean_pearson value: 90.33098317533373 - type: euclidean_spearman value: 91.05091625871644 - type: manhattan_pearson value: 90.26250435151107 - type: manhattan_spearman value: 90.97999594417519 - task: type: STS dataset: name: MTEB STS16 type: mteb/sts16-sts config: default split: test revision: 4d8694f8f0e0100860b497b999b3dbed754a0513 metrics: - type: cos_sim_pearson value: 85.80480973335091 - type: cos_sim_spearman value: 87.313695492969 - type: euclidean_pearson value: 86.49267251576939 - type: euclidean_spearman value: 87.313695492969 - type: manhattan_pearson value: 86.44019901831935 - type: manhattan_spearman value: 87.24205395460392 - task: type: STS dataset: name: MTEB STS17 (en-en) type: mteb/sts17-crosslingual-sts config: en-en split: test revision: af5e6fb845001ecf41f4c1e033ce921939a2a68d metrics: - type: cos_sim_pearson value: 90.05662789380672 - type: cos_sim_spearman value: 90.02759424426651 - type: euclidean_pearson value: 90.4042483422981 - type: euclidean_spearman value: 90.02759424426651 - type: manhattan_pearson value: 90.51446975000226 - type: manhattan_spearman value: 90.08832889933616 - task: type: STS dataset: name: MTEB STS22 (en) type: mteb/sts22-crosslingual-sts config: en split: test revision: 6d1ba47164174a496b7fa5d3569dae26a6813b80 metrics: - type: cos_sim_pearson value: 67.5975528273532 - type: cos_sim_spearman value: 67.62969861411354 - type: euclidean_pearson value: 69.224275734323 - type: euclidean_spearman value: 67.62969861411354 - type: manhattan_pearson value: 69.3761447059927 - type: manhattan_spearman value: 67.90921005611467 - task: type: STS dataset: name: MTEB STSBenchmark type: mteb/stsbenchmark-sts config: default split: test revision: b0fddb56ed78048fa8b90373c8a3cfc37b684831 metrics: - type: cos_sim_pearson value: 87.11244327231684 - type: cos_sim_spearman value: 88.37902438979035 - type: euclidean_pearson value: 87.86054279847336 - type: euclidean_spearman value: 88.37902438979035 - type: manhattan_pearson value: 87.77257757320378 - type: manhattan_spearman value: 88.25208966098123 - task: type: Reranking dataset: name: MTEB SciDocsRR type: mteb/scidocs-reranking config: default split: test revision: d3c5e1fc0b855ab6097bf1cda04dd73947d7caab metrics: - type: map value: 85.87174608143563 - type: mrr value: 96.12836872640794 - task: type: Retrieval dataset: name: MTEB SciFact type: scifact config: default split: test revision: None metrics: - type: map_at_1 value: 57.760999999999996 - type: map_at_10 value: 67.258 - type: map_at_100 value: 67.757 - type: map_at_1000 value: 67.78800000000001 - type: map_at_3 value: 64.602 - type: map_at_5 value: 65.64 - type: mrr_at_1 value: 60.667 - type: mrr_at_10 value: 68.441 - type: mrr_at_100 value: 68.825 - type: mrr_at_1000 value: 68.853 - type: mrr_at_3 value: 66.444 - type: mrr_at_5 value: 67.26100000000001 - type: ndcg_at_1 value: 60.667 - type: ndcg_at_10 value: 71.852 - type: ndcg_at_100 value: 73.9 - type: ndcg_at_1000 value: 74.628 - type: ndcg_at_3 value: 67.093 - type: ndcg_at_5 value: 68.58 - type: precision_at_1 value: 60.667 - type: precision_at_10 value: 9.6 - type: precision_at_100 value: 1.0670000000000002 - type: 
precision_at_1000 value: 0.11199999999999999 - type: precision_at_3 value: 26.111 - type: precision_at_5 value: 16.733 - type: recall_at_1 value: 57.760999999999996 - type: recall_at_10 value: 84.967 - type: recall_at_100 value: 93.833 - type: recall_at_1000 value: 99.333 - type: recall_at_3 value: 71.589 - type: recall_at_5 value: 75.483 - task: type: PairClassification dataset: name: MTEB SprintDuplicateQuestions type: mteb/sprintduplicatequestions-pairclassification config: default split: test revision: d66bd1f72af766a5cc4b0ca5e00c162f89e8cc46 metrics: - type: cos_sim_accuracy value: 99.66633663366336 - type: cos_sim_ap value: 91.17685358899108 - type: cos_sim_f1 value: 82.16818642350559 - type: cos_sim_precision value: 83.26488706365504 - type: cos_sim_recall value: 81.10000000000001 - type: dot_accuracy value: 99.66633663366336 - type: dot_ap value: 91.17663411119032 - type: dot_f1 value: 82.16818642350559 - type: dot_precision value: 83.26488706365504 - type: dot_recall value: 81.10000000000001 - type: euclidean_accuracy value: 99.66633663366336 - type: euclidean_ap value: 91.17685189882275 - type: euclidean_f1 value: 82.16818642350559 - type: euclidean_precision value: 83.26488706365504 - type: euclidean_recall value: 81.10000000000001 - type: manhattan_accuracy value: 99.66633663366336 - type: manhattan_ap value: 91.2241619496737 - type: manhattan_f1 value: 82.20472440944883 - type: manhattan_precision value: 86.51933701657458 - type: manhattan_recall value: 78.3 - type: max_accuracy value: 99.66633663366336 - type: max_ap value: 91.2241619496737 - type: max_f1 value: 82.20472440944883 - task: type: Clustering dataset: name: MTEB StackExchangeClustering type: mteb/stackexchange-clustering config: default split: test revision: 6cbc1f7b2bc0622f2e39d2c77fa502909748c259 metrics: - type: v_measure value: 66.85101268897951 - task: type: Clustering dataset: name: MTEB StackExchangeClusteringP2P type: mteb/stackexchange-clustering-p2p config: default split: test revision: 815ca46b2622cec33ccafc3735d572c266efdb44 metrics: - type: v_measure value: 42.461184054706905 - task: type: Reranking dataset: name: MTEB StackOverflowDupQuestions type: mteb/stackoverflowdupquestions-reranking config: default split: test revision: e185fbe320c72810689fc5848eb6114e1ef5ec69 metrics: - type: map value: 51.44542568873886 - type: mrr value: 52.33656151854681 - task: type: Summarization dataset: name: MTEB SummEval type: mteb/summeval config: default split: test revision: cda12ad7615edc362dbf25a00fdd61d3b1eaf93c metrics: - type: cos_sim_pearson value: 30.75982974997539 - type: cos_sim_spearman value: 30.385405026539914 - type: dot_pearson value: 30.75982433546523 - type: dot_spearman value: 30.385405026539914 - task: type: Retrieval dataset: name: MTEB TRECCOVID type: trec-covid config: default split: test revision: None metrics: - type: map_at_1 value: 0.22799999999999998 - type: map_at_10 value: 2.064 - type: map_at_100 value: 13.056000000000001 - type: map_at_1000 value: 31.747999999999998 - type: map_at_3 value: 0.67 - type: map_at_5 value: 1.097 - type: mrr_at_1 value: 90.0 - type: mrr_at_10 value: 94.667 - type: mrr_at_100 value: 94.667 - type: mrr_at_1000 value: 94.667 - type: mrr_at_3 value: 94.667 - type: mrr_at_5 value: 94.667 - type: ndcg_at_1 value: 86.0 - type: ndcg_at_10 value: 82.0 - type: ndcg_at_100 value: 64.307 - type: ndcg_at_1000 value: 57.023999999999994 - type: ndcg_at_3 value: 85.816 - type: ndcg_at_5 value: 84.904 - type: precision_at_1 value: 90.0 - type: precision_at_10 value: 85.8 - 
type: precision_at_100 value: 66.46 - type: precision_at_1000 value: 25.202 - type: precision_at_3 value: 90.0 - type: precision_at_5 value: 89.2 - type: recall_at_1 value: 0.22799999999999998 - type: recall_at_10 value: 2.235 - type: recall_at_100 value: 16.185 - type: recall_at_1000 value: 53.620999999999995 - type: recall_at_3 value: 0.7040000000000001 - type: recall_at_5 value: 1.172 - task: type: BitextMining dataset: name: MTEB Tatoeba (sqi-eng) type: mteb/tatoeba-bitext-mining config: sqi-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 97.39999999999999 - type: f1 value: 96.75 - type: precision value: 96.45 - type: recall value: 97.39999999999999 - task: type: BitextMining dataset: name: MTEB Tatoeba (fry-eng) type: mteb/tatoeba-bitext-mining config: fry-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 85.54913294797689 - type: f1 value: 82.46628131021194 - type: precision value: 81.1175337186898 - type: recall value: 85.54913294797689 - task: type: BitextMining dataset: name: MTEB Tatoeba (kur-eng) type: mteb/tatoeba-bitext-mining config: kur-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 81.21951219512195 - type: f1 value: 77.33333333333334 - type: precision value: 75.54878048780488 - type: recall value: 81.21951219512195 - task: type: BitextMining dataset: name: MTEB Tatoeba (tur-eng) type: mteb/tatoeba-bitext-mining config: tur-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 98.6 - type: f1 value: 98.26666666666665 - type: precision value: 98.1 - type: recall value: 98.6 - task: type: BitextMining dataset: name: MTEB Tatoeba (deu-eng) type: mteb/tatoeba-bitext-mining config: deu-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 99.5 - type: f1 value: 99.33333333333333 - type: precision value: 99.25 - type: recall value: 99.5 - task: type: BitextMining dataset: name: MTEB Tatoeba (nld-eng) type: mteb/tatoeba-bitext-mining config: nld-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 97.8 - type: f1 value: 97.2 - type: precision value: 96.89999999999999 - type: recall value: 97.8 - task: type: BitextMining dataset: name: MTEB Tatoeba (ron-eng) type: mteb/tatoeba-bitext-mining config: ron-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 97.8 - type: f1 value: 97.18333333333334 - type: precision value: 96.88333333333333 - type: recall value: 97.8 - task: type: BitextMining dataset: name: MTEB Tatoeba (ang-eng) type: mteb/tatoeba-bitext-mining config: ang-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 77.61194029850746 - type: f1 value: 72.81094527363183 - type: precision value: 70.83333333333333 - type: recall value: 77.61194029850746 - task: type: BitextMining dataset: name: MTEB Tatoeba (ido-eng) type: mteb/tatoeba-bitext-mining config: ido-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 93.7 - type: f1 value: 91.91666666666667 - type: precision value: 91.08333333333334 - type: recall value: 93.7 - task: type: BitextMining dataset: name: MTEB Tatoeba (jav-eng) type: mteb/tatoeba-bitext-mining config: jav-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 88.29268292682927 - 
type: f1 value: 85.27642276422765 - type: precision value: 84.01277584204414 - type: recall value: 88.29268292682927 - task: type: BitextMining dataset: name: MTEB Tatoeba (isl-eng) type: mteb/tatoeba-bitext-mining config: isl-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 96.1 - type: f1 value: 95.0 - type: precision value: 94.46666666666668 - type: recall value: 96.1 - task: type: BitextMining dataset: name: MTEB Tatoeba (slv-eng) type: mteb/tatoeba-bitext-mining config: slv-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 93.681652490887 - type: f1 value: 91.90765492102065 - type: precision value: 91.05913325232888 - type: recall value: 93.681652490887 - task: type: BitextMining dataset: name: MTEB Tatoeba (cym-eng) type: mteb/tatoeba-bitext-mining config: cym-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 92.17391304347827 - type: f1 value: 89.97101449275361 - type: precision value: 88.96811594202899 - type: recall value: 92.17391304347827 - task: type: BitextMining dataset: name: MTEB Tatoeba (kaz-eng) type: mteb/tatoeba-bitext-mining config: kaz-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 90.43478260869566 - type: f1 value: 87.72173913043478 - type: precision value: 86.42028985507245 - type: recall value: 90.43478260869566 - task: type: BitextMining dataset: name: MTEB Tatoeba (est-eng) type: mteb/tatoeba-bitext-mining config: est-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 90.4 - type: f1 value: 88.03 - type: precision value: 86.95 - type: recall value: 90.4 - task: type: BitextMining dataset: name: MTEB Tatoeba (heb-eng) type: mteb/tatoeba-bitext-mining config: heb-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 93.4 - type: f1 value: 91.45666666666666 - type: precision value: 90.525 - type: recall value: 93.4 - task: type: BitextMining dataset: name: MTEB Tatoeba (gla-eng) type: mteb/tatoeba-bitext-mining config: gla-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 81.9059107358263 - type: f1 value: 78.32557872364869 - type: precision value: 76.78260286824823 - type: recall value: 81.9059107358263 - task: type: BitextMining dataset: name: MTEB Tatoeba (mar-eng) type: mteb/tatoeba-bitext-mining config: mar-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 94.3 - type: f1 value: 92.58333333333333 - type: precision value: 91.73333333333332 - type: recall value: 94.3 - task: type: BitextMining dataset: name: MTEB Tatoeba (lat-eng) type: mteb/tatoeba-bitext-mining config: lat-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 79.10000000000001 - type: f1 value: 74.50500000000001 - type: precision value: 72.58928571428571 - type: recall value: 79.10000000000001 - task: type: BitextMining dataset: name: MTEB Tatoeba (bel-eng) type: mteb/tatoeba-bitext-mining config: bel-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 96.6 - type: f1 value: 95.55 - type: precision value: 95.05 - type: recall value: 96.6 - task: type: BitextMining dataset: name: MTEB Tatoeba (pms-eng) type: mteb/tatoeba-bitext-mining config: pms-eng split: test revision: 
9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 82.0952380952381 - type: f1 value: 77.98458049886621 - type: precision value: 76.1968253968254 - type: recall value: 82.0952380952381 - task: type: BitextMining dataset: name: MTEB Tatoeba (gle-eng) type: mteb/tatoeba-bitext-mining config: gle-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 87.9 - type: f1 value: 84.99190476190476 - type: precision value: 83.65 - type: recall value: 87.9 - task: type: BitextMining dataset: name: MTEB Tatoeba (pes-eng) type: mteb/tatoeba-bitext-mining config: pes-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 95.7 - type: f1 value: 94.56666666666666 - type: precision value: 94.01666666666667 - type: recall value: 95.7 - task: type: BitextMining dataset: name: MTEB Tatoeba (nob-eng) type: mteb/tatoeba-bitext-mining config: nob-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 98.6 - type: f1 value: 98.2 - type: precision value: 98.0 - type: recall value: 98.6 - task: type: BitextMining dataset: name: MTEB Tatoeba (bul-eng) type: mteb/tatoeba-bitext-mining config: bul-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 95.6 - type: f1 value: 94.38333333333334 - type: precision value: 93.78333333333335 - type: recall value: 95.6 - task: type: BitextMining dataset: name: MTEB Tatoeba (cbk-eng) type: mteb/tatoeba-bitext-mining config: cbk-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 87.4 - type: f1 value: 84.10380952380952 - type: precision value: 82.67 - type: recall value: 87.4 - task: type: BitextMining dataset: name: MTEB Tatoeba (hun-eng) type: mteb/tatoeba-bitext-mining config: hun-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 95.5 - type: f1 value: 94.33333333333334 - type: precision value: 93.78333333333333 - type: recall value: 95.5 - task: type: BitextMining dataset: name: MTEB Tatoeba (uig-eng) type: mteb/tatoeba-bitext-mining config: uig-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 89.4 - type: f1 value: 86.82000000000001 - type: precision value: 85.64500000000001 - type: recall value: 89.4 - task: type: BitextMining dataset: name: MTEB Tatoeba (rus-eng) type: mteb/tatoeba-bitext-mining config: rus-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 95.1 - type: f1 value: 93.56666666666668 - type: precision value: 92.81666666666666 - type: recall value: 95.1 - task: type: BitextMining dataset: name: MTEB Tatoeba (spa-eng) type: mteb/tatoeba-bitext-mining config: spa-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 98.9 - type: f1 value: 98.6 - type: precision value: 98.45 - type: recall value: 98.9 - task: type: BitextMining dataset: name: MTEB Tatoeba (hye-eng) type: mteb/tatoeba-bitext-mining config: hye-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 95.01347708894879 - type: f1 value: 93.51752021563343 - type: precision value: 92.82794249775381 - type: recall value: 95.01347708894879 - task: type: BitextMining dataset: name: MTEB Tatoeba (tel-eng) type: mteb/tatoeba-bitext-mining config: tel-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: 
- type: accuracy value: 97.00854700854701 - type: f1 value: 96.08262108262107 - type: precision value: 95.65527065527067 - type: recall value: 97.00854700854701 - task: type: BitextMining dataset: name: MTEB Tatoeba (afr-eng) type: mteb/tatoeba-bitext-mining config: afr-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 96.5 - type: f1 value: 95.39999999999999 - type: precision value: 94.88333333333333 - type: recall value: 96.5 - task: type: BitextMining dataset: name: MTEB Tatoeba (mon-eng) type: mteb/tatoeba-bitext-mining config: mon-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 96.5909090909091 - type: f1 value: 95.49242424242425 - type: precision value: 94.9621212121212 - type: recall value: 96.5909090909091 - task: type: BitextMining dataset: name: MTEB Tatoeba (arz-eng) type: mteb/tatoeba-bitext-mining config: arz-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 84.90566037735849 - type: f1 value: 81.85883997204752 - type: precision value: 80.54507337526205 - type: recall value: 84.90566037735849 - task: type: BitextMining dataset: name: MTEB Tatoeba (hrv-eng) type: mteb/tatoeba-bitext-mining config: hrv-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 97.5 - type: f1 value: 96.75 - type: precision value: 96.38333333333333 - type: recall value: 97.5 - task: type: BitextMining dataset: name: MTEB Tatoeba (nov-eng) type: mteb/tatoeba-bitext-mining config: nov-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 86.7704280155642 - type: f1 value: 82.99610894941635 - type: precision value: 81.32295719844358 - type: recall value: 86.7704280155642 - task: type: BitextMining dataset: name: MTEB Tatoeba (gsw-eng) type: mteb/tatoeba-bitext-mining config: gsw-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 67.52136752136752 - type: f1 value: 61.89662189662191 - type: precision value: 59.68660968660969 - type: recall value: 67.52136752136752 - task: type: BitextMining dataset: name: MTEB Tatoeba (nds-eng) type: mteb/tatoeba-bitext-mining config: nds-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 89.2 - type: f1 value: 86.32 - type: precision value: 85.015 - type: recall value: 89.2 - task: type: BitextMining dataset: name: MTEB Tatoeba (ukr-eng) type: mteb/tatoeba-bitext-mining config: ukr-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 96.0 - type: f1 value: 94.78333333333333 - type: precision value: 94.18333333333334 - type: recall value: 96.0 - task: type: BitextMining dataset: name: MTEB Tatoeba (uzb-eng) type: mteb/tatoeba-bitext-mining config: uzb-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 83.8785046728972 - type: f1 value: 80.54517133956385 - type: precision value: 79.154984423676 - type: recall value: 83.8785046728972 - task: type: BitextMining dataset: name: MTEB Tatoeba (lit-eng) type: mteb/tatoeba-bitext-mining config: lit-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 93.60000000000001 - type: f1 value: 92.01333333333334 - type: precision value: 91.28333333333333 - type: recall value: 93.60000000000001 - task: type: BitextMining dataset: name: MTEB Tatoeba (ina-eng) type: 
mteb/tatoeba-bitext-mining config: ina-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 97.1 - type: f1 value: 96.26666666666667 - type: precision value: 95.85000000000001 - type: recall value: 97.1 - task: type: BitextMining dataset: name: MTEB Tatoeba (lfn-eng) type: mteb/tatoeba-bitext-mining config: lfn-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 84.3 - type: f1 value: 80.67833333333333 - type: precision value: 79.03928571428571 - type: recall value: 84.3 - task: type: BitextMining dataset: name: MTEB Tatoeba (zsm-eng) type: mteb/tatoeba-bitext-mining config: zsm-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 97.3 - type: f1 value: 96.48333333333332 - type: precision value: 96.08333333333331 - type: recall value: 97.3 - task: type: BitextMining dataset: name: MTEB Tatoeba (ita-eng) type: mteb/tatoeba-bitext-mining config: ita-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 95.7 - type: f1 value: 94.66666666666667 - type: precision value: 94.16666666666667 - type: recall value: 95.7 - task: type: BitextMining dataset: name: MTEB Tatoeba (cmn-eng) type: mteb/tatoeba-bitext-mining config: cmn-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 97.2 - type: f1 value: 96.36666666666667 - type: precision value: 95.96666666666668 - type: recall value: 97.2 - task: type: BitextMining dataset: name: MTEB Tatoeba (lvs-eng) type: mteb/tatoeba-bitext-mining config: lvs-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 94.3 - type: f1 value: 92.80666666666667 - type: precision value: 92.12833333333333 - type: recall value: 94.3 - task: type: BitextMining dataset: name: MTEB Tatoeba (glg-eng) type: mteb/tatoeba-bitext-mining config: glg-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 97.0 - type: f1 value: 96.22333333333334 - type: precision value: 95.875 - type: recall value: 97.0 - task: type: BitextMining dataset: name: MTEB Tatoeba (ceb-eng) type: mteb/tatoeba-bitext-mining config: ceb-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 74.33333333333333 - type: f1 value: 70.78174603174602 - type: precision value: 69.28333333333332 - type: recall value: 74.33333333333333 - task: type: BitextMining dataset: name: MTEB Tatoeba (bre-eng) type: mteb/tatoeba-bitext-mining config: bre-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 37.6 - type: f1 value: 32.938348952090365 - type: precision value: 31.2811038961039 - type: recall value: 37.6 - task: type: BitextMining dataset: name: MTEB Tatoeba (ben-eng) type: mteb/tatoeba-bitext-mining config: ben-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 91.5 - type: f1 value: 89.13333333333333 - type: precision value: 88.03333333333333 - type: recall value: 91.5 - task: type: BitextMining dataset: name: MTEB Tatoeba (swg-eng) type: mteb/tatoeba-bitext-mining config: swg-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 82.14285714285714 - type: f1 value: 77.67857142857143 - type: precision value: 75.59523809523809 - type: recall value: 82.14285714285714 - task: type: BitextMining dataset: name: MTEB 
Tatoeba (arq-eng) type: mteb/tatoeba-bitext-mining config: arq-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 69.0450054884742 - type: f1 value: 63.070409283362075 - type: precision value: 60.58992781824835 - type: recall value: 69.0450054884742 - task: type: BitextMining dataset: name: MTEB Tatoeba (kab-eng) type: mteb/tatoeba-bitext-mining config: kab-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 63.1 - type: f1 value: 57.848333333333336 - type: precision value: 55.69500000000001 - type: recall value: 63.1 - task: type: BitextMining dataset: name: MTEB Tatoeba (fra-eng) type: mteb/tatoeba-bitext-mining config: fra-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 96.1 - type: f1 value: 95.01666666666667 - type: precision value: 94.5 - type: recall value: 96.1 - task: type: BitextMining dataset: name: MTEB Tatoeba (por-eng) type: mteb/tatoeba-bitext-mining config: por-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 95.89999999999999 - type: f1 value: 94.90666666666667 - type: precision value: 94.425 - type: recall value: 95.89999999999999 - task: type: BitextMining dataset: name: MTEB Tatoeba (tat-eng) type: mteb/tatoeba-bitext-mining config: tat-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 87.6 - type: f1 value: 84.61333333333333 - type: precision value: 83.27 - type: recall value: 87.6 - task: type: BitextMining dataset: name: MTEB Tatoeba (oci-eng) type: mteb/tatoeba-bitext-mining config: oci-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 76.4 - type: f1 value: 71.90746031746032 - type: precision value: 70.07027777777778 - type: recall value: 76.4 - task: type: BitextMining dataset: name: MTEB Tatoeba (pol-eng) type: mteb/tatoeba-bitext-mining config: pol-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 97.89999999999999 - type: f1 value: 97.26666666666667 - type: precision value: 96.95 - type: recall value: 97.89999999999999 - task: type: BitextMining dataset: name: MTEB Tatoeba (war-eng) type: mteb/tatoeba-bitext-mining config: war-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 78.8 - type: f1 value: 74.39555555555555 - type: precision value: 72.59416666666667 - type: recall value: 78.8 - task: type: BitextMining dataset: name: MTEB Tatoeba (aze-eng) type: mteb/tatoeba-bitext-mining config: aze-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 95.19999999999999 - type: f1 value: 93.78999999999999 - type: precision value: 93.125 - type: recall value: 95.19999999999999 - task: type: BitextMining dataset: name: MTEB Tatoeba (vie-eng) type: mteb/tatoeba-bitext-mining config: vie-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 97.8 - type: f1 value: 97.1 - type: precision value: 96.75 - type: recall value: 97.8 - task: type: BitextMining dataset: name: MTEB Tatoeba (nno-eng) type: mteb/tatoeba-bitext-mining config: nno-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 95.6 - type: f1 value: 94.25666666666666 - type: precision value: 93.64166666666668 - type: recall value: 95.6 - task: type: BitextMining dataset: name: MTEB 
Tatoeba (cha-eng) type: mteb/tatoeba-bitext-mining config: cha-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 56.934306569343065 - type: f1 value: 51.461591936044485 - type: precision value: 49.37434827945776 - type: recall value: 56.934306569343065 - task: type: BitextMining dataset: name: MTEB Tatoeba (mhr-eng) type: mteb/tatoeba-bitext-mining config: mhr-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 20.200000000000003 - type: f1 value: 16.91799284049284 - type: precision value: 15.791855158730158 - type: recall value: 20.200000000000003 - task: type: BitextMining dataset: name: MTEB Tatoeba (dan-eng) type: mteb/tatoeba-bitext-mining config: dan-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 96.2 - type: f1 value: 95.3 - type: precision value: 94.85 - type: recall value: 96.2 - task: type: BitextMining dataset: name: MTEB Tatoeba (ell-eng) type: mteb/tatoeba-bitext-mining config: ell-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 96.3 - type: f1 value: 95.11666666666667 - type: precision value: 94.53333333333333 - type: recall value: 96.3 - task: type: BitextMining dataset: name: MTEB Tatoeba (amh-eng) type: mteb/tatoeba-bitext-mining config: amh-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 89.88095238095238 - type: f1 value: 87.14285714285714 - type: precision value: 85.96230158730161 - type: recall value: 89.88095238095238 - task: type: BitextMining dataset: name: MTEB Tatoeba (pam-eng) type: mteb/tatoeba-bitext-mining config: pam-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 24.099999999999998 - type: f1 value: 19.630969083349783 - type: precision value: 18.275094905094907 - type: recall value: 24.099999999999998 - task: type: BitextMining dataset: name: MTEB Tatoeba (hsb-eng) type: mteb/tatoeba-bitext-mining config: hsb-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 83.4368530020704 - type: f1 value: 79.45183870649709 - type: precision value: 77.7432712215321 - type: recall value: 83.4368530020704 - task: type: BitextMining dataset: name: MTEB Tatoeba (srp-eng) type: mteb/tatoeba-bitext-mining config: srp-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 95.8 - type: f1 value: 94.53333333333333 - type: precision value: 93.91666666666666 - type: recall value: 95.8 - task: type: BitextMining dataset: name: MTEB Tatoeba (epo-eng) type: mteb/tatoeba-bitext-mining config: epo-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 98.8 - type: f1 value: 98.48333333333332 - type: precision value: 98.33333333333334 - type: recall value: 98.8 - task: type: BitextMining dataset: name: MTEB Tatoeba (kzj-eng) type: mteb/tatoeba-bitext-mining config: kzj-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 17.5 - type: f1 value: 14.979285714285714 - type: precision value: 14.23235060690943 - type: recall value: 17.5 - task: type: BitextMining dataset: name: MTEB Tatoeba (awa-eng) type: mteb/tatoeba-bitext-mining config: awa-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 93.93939393939394 - type: f1 value: 91.991341991342 - type: 
precision value: 91.05339105339105 - type: recall value: 93.93939393939394 - task: type: BitextMining dataset: name: MTEB Tatoeba (fao-eng) type: mteb/tatoeba-bitext-mining config: fao-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 89.31297709923665 - type: f1 value: 86.76844783715012 - type: precision value: 85.63613231552164 - type: recall value: 89.31297709923665 - task: type: BitextMining dataset: name: MTEB Tatoeba (mal-eng) type: mteb/tatoeba-bitext-mining config: mal-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 99.12663755458514 - type: f1 value: 98.93255701115964 - type: precision value: 98.83551673944687 - type: recall value: 99.12663755458514 - task: type: BitextMining dataset: name: MTEB Tatoeba (ile-eng) type: mteb/tatoeba-bitext-mining config: ile-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 92.0 - type: f1 value: 89.77999999999999 - type: precision value: 88.78333333333333 - type: recall value: 92.0 - task: type: BitextMining dataset: name: MTEB Tatoeba (bos-eng) type: mteb/tatoeba-bitext-mining config: bos-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 96.89265536723164 - type: f1 value: 95.85687382297553 - type: precision value: 95.33898305084746 - type: recall value: 96.89265536723164 - task: type: BitextMining dataset: name: MTEB Tatoeba (cor-eng) type: mteb/tatoeba-bitext-mining config: cor-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 14.6 - type: f1 value: 11.820611790170615 - type: precision value: 11.022616224355355 - type: recall value: 14.6 - task: type: BitextMining dataset: name: MTEB Tatoeba (cat-eng) type: mteb/tatoeba-bitext-mining config: cat-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 95.89999999999999 - type: f1 value: 94.93333333333334 - type: precision value: 94.48666666666666 - type: recall value: 95.89999999999999 - task: type: BitextMining dataset: name: MTEB Tatoeba (eus-eng) type: mteb/tatoeba-bitext-mining config: eus-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 87.6 - type: f1 value: 84.72333333333334 - type: precision value: 83.44166666666666 - type: recall value: 87.6 - task: type: BitextMining dataset: name: MTEB Tatoeba (yue-eng) type: mteb/tatoeba-bitext-mining config: yue-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 94.8 - type: f1 value: 93.47333333333333 - type: precision value: 92.875 - type: recall value: 94.8 - task: type: BitextMining dataset: name: MTEB Tatoeba (swe-eng) type: mteb/tatoeba-bitext-mining config: swe-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 96.6 - type: f1 value: 95.71666666666665 - type: precision value: 95.28333333333335 - type: recall value: 96.6 - task: type: BitextMining dataset: name: MTEB Tatoeba (dtp-eng) type: mteb/tatoeba-bitext-mining config: dtp-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 17.8 - type: f1 value: 14.511074040901628 - type: precision value: 13.503791000666002 - type: recall value: 17.8 - task: type: BitextMining dataset: name: MTEB Tatoeba (kat-eng) type: mteb/tatoeba-bitext-mining config: kat-eng split: test revision: 
9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 94.10187667560321 - type: f1 value: 92.46648793565683 - type: precision value: 91.71134941912423 - type: recall value: 94.10187667560321 - task: type: BitextMining dataset: name: MTEB Tatoeba (jpn-eng) type: mteb/tatoeba-bitext-mining config: jpn-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 97.0 - type: f1 value: 96.11666666666666 - type: precision value: 95.68333333333334 - type: recall value: 97.0 - task: type: BitextMining dataset: name: MTEB Tatoeba (csb-eng) type: mteb/tatoeba-bitext-mining config: csb-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 72.72727272727273 - type: f1 value: 66.58949745906267 - type: precision value: 63.86693017127799 - type: recall value: 72.72727272727273 - task: type: BitextMining dataset: name: MTEB Tatoeba (xho-eng) type: mteb/tatoeba-bitext-mining config: xho-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 90.14084507042254 - type: f1 value: 88.26291079812206 - type: precision value: 87.32394366197182 - type: recall value: 90.14084507042254 - task: type: BitextMining dataset: name: MTEB Tatoeba (orv-eng) type: mteb/tatoeba-bitext-mining config: orv-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 64.67065868263472 - type: f1 value: 58.2876627696987 - type: precision value: 55.79255774165953 - type: recall value: 64.67065868263472 - task: type: BitextMining dataset: name: MTEB Tatoeba (ind-eng) type: mteb/tatoeba-bitext-mining config: ind-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 95.6 - type: f1 value: 94.41666666666667 - type: precision value: 93.85 - type: recall value: 95.6 - task: type: BitextMining dataset: name: MTEB Tatoeba (tuk-eng) type: mteb/tatoeba-bitext-mining config: tuk-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 55.172413793103445 - type: f1 value: 49.63992493549144 - type: precision value: 47.71405113769646 - type: recall value: 55.172413793103445 - task: type: BitextMining dataset: name: MTEB Tatoeba (max-eng) type: mteb/tatoeba-bitext-mining config: max-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 77.46478873239437 - type: f1 value: 73.4417616811983 - type: precision value: 71.91607981220658 - type: recall value: 77.46478873239437 - task: type: BitextMining dataset: name: MTEB Tatoeba (swh-eng) type: mteb/tatoeba-bitext-mining config: swh-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 84.61538461538461 - type: f1 value: 80.91452991452994 - type: precision value: 79.33760683760683 - type: recall value: 84.61538461538461 - task: type: BitextMining dataset: name: MTEB Tatoeba (hin-eng) type: mteb/tatoeba-bitext-mining config: hin-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 98.2 - type: f1 value: 97.6 - type: precision value: 97.3 - type: recall value: 98.2 - task: type: BitextMining dataset: name: MTEB Tatoeba (dsb-eng) type: mteb/tatoeba-bitext-mining config: dsb-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 75.5741127348643 - type: f1 value: 72.00417536534445 - type: precision value: 70.53467872883321 - type: recall value: 
75.5741127348643 - task: type: BitextMining dataset: name: MTEB Tatoeba (ber-eng) type: mteb/tatoeba-bitext-mining config: ber-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 62.2 - type: f1 value: 55.577460317460314 - type: precision value: 52.98583333333333 - type: recall value: 62.2 - task: type: BitextMining dataset: name: MTEB Tatoeba (tam-eng) type: mteb/tatoeba-bitext-mining config: tam-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 92.18241042345277 - type: f1 value: 90.6468124709167 - type: precision value: 89.95656894679696 - type: recall value: 92.18241042345277 - task: type: BitextMining dataset: name: MTEB Tatoeba (slk-eng) type: mteb/tatoeba-bitext-mining config: slk-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 96.1 - type: f1 value: 95.13333333333333 - type: precision value: 94.66666666666667 - type: recall value: 96.1 - task: type: BitextMining dataset: name: MTEB Tatoeba (tgl-eng) type: mteb/tatoeba-bitext-mining config: tgl-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 96.8 - type: f1 value: 95.85000000000001 - type: precision value: 95.39999999999999 - type: recall value: 96.8 - task: type: BitextMining dataset: name: MTEB Tatoeba (ast-eng) type: mteb/tatoeba-bitext-mining config: ast-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 92.1259842519685 - type: f1 value: 89.76377952755905 - type: precision value: 88.71391076115485 - type: recall value: 92.1259842519685 - task: type: BitextMining dataset: name: MTEB Tatoeba (mkd-eng) type: mteb/tatoeba-bitext-mining config: mkd-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 94.1 - type: f1 value: 92.49 - type: precision value: 91.725 - type: recall value: 94.1 - task: type: BitextMining dataset: name: MTEB Tatoeba (khm-eng) type: mteb/tatoeba-bitext-mining config: khm-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 77.5623268698061 - type: f1 value: 73.27364463791058 - type: precision value: 71.51947852086357 - type: recall value: 77.5623268698061 - task: type: BitextMining dataset: name: MTEB Tatoeba (ces-eng) type: mteb/tatoeba-bitext-mining config: ces-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 97.39999999999999 - type: f1 value: 96.56666666666666 - type: precision value: 96.16666666666667 - type: recall value: 97.39999999999999 - task: type: BitextMining dataset: name: MTEB Tatoeba (tzl-eng) type: mteb/tatoeba-bitext-mining config: tzl-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 66.34615384615384 - type: f1 value: 61.092032967032964 - type: precision value: 59.27197802197802 - type: recall value: 66.34615384615384 - task: type: BitextMining dataset: name: MTEB Tatoeba (urd-eng) type: mteb/tatoeba-bitext-mining config: urd-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 94.89999999999999 - type: f1 value: 93.41190476190476 - type: precision value: 92.7 - type: recall value: 94.89999999999999 - task: type: BitextMining dataset: name: MTEB Tatoeba (ara-eng) type: mteb/tatoeba-bitext-mining config: ara-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 
93.10000000000001 - type: f1 value: 91.10000000000001 - type: precision value: 90.13333333333333 - type: recall value: 93.10000000000001 - task: type: BitextMining dataset: name: MTEB Tatoeba (kor-eng) type: mteb/tatoeba-bitext-mining config: kor-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 93.7 - type: f1 value: 91.97333333333334 - type: precision value: 91.14166666666667 - type: recall value: 93.7 - task: type: BitextMining dataset: name: MTEB Tatoeba (yid-eng) type: mteb/tatoeba-bitext-mining config: yid-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 92.21698113207547 - type: f1 value: 90.3796046720575 - type: precision value: 89.56367924528303 - type: recall value: 92.21698113207547 - task: type: BitextMining dataset: name: MTEB Tatoeba (fin-eng) type: mteb/tatoeba-bitext-mining config: fin-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 97.6 - type: f1 value: 96.91666666666667 - type: precision value: 96.6 - type: recall value: 97.6 - task: type: BitextMining dataset: name: MTEB Tatoeba (tha-eng) type: mteb/tatoeba-bitext-mining config: tha-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 97.44525547445255 - type: f1 value: 96.71532846715328 - type: precision value: 96.35036496350365 - type: recall value: 97.44525547445255 - task: type: BitextMining dataset: name: MTEB Tatoeba (wuu-eng) type: mteb/tatoeba-bitext-mining config: wuu-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 94.1 - type: f1 value: 92.34000000000002 - type: precision value: 91.49166666666667 - type: recall value: 94.1 - task: type: Retrieval dataset: name: MTEB Touche2020 type: webis-touche2020 config: default split: test revision: None metrics: - type: map_at_1 value: 3.2910000000000004 - type: map_at_10 value: 10.373000000000001 - type: map_at_100 value: 15.612 - type: map_at_1000 value: 17.06 - type: map_at_3 value: 6.119 - type: map_at_5 value: 7.917000000000001 - type: mrr_at_1 value: 44.897999999999996 - type: mrr_at_10 value: 56.054 - type: mrr_at_100 value: 56.82000000000001 - type: mrr_at_1000 value: 56.82000000000001 - type: mrr_at_3 value: 52.381 - type: mrr_at_5 value: 53.81 - type: ndcg_at_1 value: 42.857 - type: ndcg_at_10 value: 27.249000000000002 - type: ndcg_at_100 value: 36.529 - type: ndcg_at_1000 value: 48.136 - type: ndcg_at_3 value: 33.938 - type: ndcg_at_5 value: 29.951 - type: precision_at_1 value: 44.897999999999996 - type: precision_at_10 value: 22.653000000000002 - type: precision_at_100 value: 7.000000000000001 - type: precision_at_1000 value: 1.48 - type: precision_at_3 value: 32.653 - type: precision_at_5 value: 27.755000000000003 - type: recall_at_1 value: 3.2910000000000004 - type: recall_at_10 value: 16.16 - type: recall_at_100 value: 43.908 - type: recall_at_1000 value: 79.823 - type: recall_at_3 value: 7.156 - type: recall_at_5 value: 10.204 - task: type: Classification dataset: name: MTEB ToxicConversationsClassification type: mteb/toxic_conversations_50k config: default split: test revision: d7c0de2777da35d6aae2200a62c6e0e5af397c4c metrics: - type: accuracy value: 71.05879999999999 - type: ap value: 14.609748142799111 - type: f1 value: 54.878956295843096 - task: type: Classification dataset: name: MTEB TweetSentimentExtractionClassification type: mteb/tweet_sentiment_extraction config: default split: test revision: 
d604517c81ca91fe16a244d1248fc021f9ecee7a metrics: - type: accuracy value: 64.61799660441426 - type: f1 value: 64.8698191961434 - task: type: Clustering dataset: name: MTEB TwentyNewsgroupsClustering type: mteb/twentynewsgroups-clustering config: default split: test revision: 6125ec4e24fa026cec8a478383ee943acfbd5449 metrics: - type: v_measure value: 51.32860036611885 - task: type: PairClassification dataset: name: MTEB TwitterSemEval2015 type: mteb/twittersemeval2015-pairclassification config: default split: test revision: 70970daeab8776df92f5ea462b6173c0b46fd2d1 metrics: - type: cos_sim_accuracy value: 88.34714192048638 - type: cos_sim_ap value: 80.26732975975634 - type: cos_sim_f1 value: 73.53415148134374 - type: cos_sim_precision value: 69.34767360299276 - type: cos_sim_recall value: 78.25857519788919 - type: dot_accuracy value: 88.34714192048638 - type: dot_ap value: 80.26733698491206 - type: dot_f1 value: 73.53415148134374 - type: dot_precision value: 69.34767360299276 - type: dot_recall value: 78.25857519788919 - type: euclidean_accuracy value: 88.34714192048638 - type: euclidean_ap value: 80.26734337771738 - type: euclidean_f1 value: 73.53415148134374 - type: euclidean_precision value: 69.34767360299276 - type: euclidean_recall value: 78.25857519788919 - type: manhattan_accuracy value: 88.30541813196639 - type: manhattan_ap value: 80.19415808104145 - type: manhattan_f1 value: 73.55143870713441 - type: manhattan_precision value: 73.25307511122743 - type: manhattan_recall value: 73.85224274406332 - type: max_accuracy value: 88.34714192048638 - type: max_ap value: 80.26734337771738 - type: max_f1 value: 73.55143870713441 - task: type: PairClassification dataset: name: MTEB TwitterURLCorpus type: mteb/twitterurlcorpus-pairclassification config: default split: test revision: 8b6510b0b1fa4e4c4f879467980e9be563ec1cdf metrics: - type: cos_sim_accuracy value: 89.81061047075717 - type: cos_sim_ap value: 87.11747055081017 - type: cos_sim_f1 value: 80.04355498817256 - type: cos_sim_precision value: 78.1165262000733 - type: cos_sim_recall value: 82.06806282722513 - type: dot_accuracy value: 89.81061047075717 - type: dot_ap value: 87.11746902745236 - type: dot_f1 value: 80.04355498817256 - type: dot_precision value: 78.1165262000733 - type: dot_recall value: 82.06806282722513 - type: euclidean_accuracy value: 89.81061047075717 - type: euclidean_ap value: 87.11746919324248 - type: euclidean_f1 value: 80.04355498817256 - type: euclidean_precision value: 78.1165262000733 - type: euclidean_recall value: 82.06806282722513 - type: manhattan_accuracy value: 89.79508673885202 - type: manhattan_ap value: 87.11074390832218 - type: manhattan_f1 value: 80.13002540726349 - type: manhattan_precision value: 77.83826945412311 - type: manhattan_recall value: 82.56082537727133 - type: max_accuracy value: 89.81061047075717 - type: max_ap value: 87.11747055081017 - type: max_f1 value: 80.13002540726349 --- ## Multilingual-E5-large-instruct [Multilingual E5 Text Embeddings: A Technical Report](https://arxiv.org/pdf/2402.05672). Liang Wang, Nan Yang, Xiaolong Huang, Linjun Yang, Rangan Majumder, Furu Wei, arXiv 2024 This model has 24 layers and the embedding size is 1024. ## Usage Below are examples to encode queries and passages from the MS-MARCO passage ranking dataset. 
### Transformers

```python
import torch.nn.functional as F

from torch import Tensor
from transformers import AutoTokenizer, AutoModel


def average_pool(last_hidden_states: Tensor,
                 attention_mask: Tensor) -> Tensor:
    last_hidden = last_hidden_states.masked_fill(~attention_mask[..., None].bool(), 0.0)
    return last_hidden.sum(dim=1) / attention_mask.sum(dim=1)[..., None]


def get_detailed_instruct(task_description: str, query: str) -> str:
    return f'Instruct: {task_description}\nQuery: {query}'


# Each query must come with a one-sentence instruction that describes the task
task = 'Given a web search query, retrieve relevant passages that answer the query'
queries = [
    get_detailed_instruct(task, 'how much protein should a female eat'),
    get_detailed_instruct(task, '南瓜的家常做法')
]
# No need to add instruction for retrieval documents
documents = [
    "As a general guideline, the CDC's average requirement of protein for women ages 19 to 70 is 46 grams per day. But, as you can see from this chart, you'll need to increase that if you're expecting or training for a marathon. Check out the chart below to see how much protein you should be eating each day.",
    "1.清炒南瓜丝 原料:嫩南瓜半个 调料:葱、盐、白糖、鸡精 做法: 1、南瓜用刀薄薄的削去表面一层皮,用勺子刮去瓤 2、擦成细丝(没有擦菜板就用刀慢慢切成细丝) 3、锅烧热放油,入葱花煸出香味 4、入南瓜丝快速翻炒一分钟左右,放盐、一点白糖和鸡精调味出锅 2.香葱炒南瓜 原料:南瓜1只 调料:香葱、蒜末、橄榄油、盐 做法: 1、将南瓜去皮,切成片 2、油锅8成热后,将蒜末放入爆香 3、爆香后,将南瓜片放入,翻炒 4、在翻炒的同时,可以不时地往锅里加水,但不要太多 5、放入盐,炒匀 6、南瓜差不多软和绵了之后,就可以关火 7、撒入香葱,即可出锅"
]
input_texts = queries + documents

tokenizer = AutoTokenizer.from_pretrained('intfloat/multilingual-e5-large-instruct')
model = AutoModel.from_pretrained('intfloat/multilingual-e5-large-instruct')

# Tokenize the input texts
batch_dict = tokenizer(input_texts, max_length=512, padding=True, truncation=True, return_tensors='pt')

outputs = model(**batch_dict)
embeddings = average_pool(outputs.last_hidden_state, batch_dict['attention_mask'])

# normalize embeddings
embeddings = F.normalize(embeddings, p=2, dim=1)
scores = (embeddings[:2] @ embeddings[2:].T) * 100
print(scores.tolist())
# => [[91.92852783203125, 67.580322265625], [70.3814468383789, 92.1330795288086]]
```

### Sentence Transformers

```python
from sentence_transformers import SentenceTransformer


def get_detailed_instruct(task_description: str, query: str) -> str:
    return f'Instruct: {task_description}\nQuery: {query}'


# Each query must come with a one-sentence instruction that describes the task
task = 'Given a web search query, retrieve relevant passages that answer the query'
queries = [
    get_detailed_instruct(task, 'how much protein should a female eat'),
    get_detailed_instruct(task, '南瓜的家常做法')
]
# No need to add instruction for retrieval documents
documents = [
    "As a general guideline, the CDC's average requirement of protein for women ages 19 to 70 is 46 grams per day. But, as you can see from this chart, you'll need to increase that if you're expecting or training for a marathon. Check out the chart below to see how much protein you should be eating each day.",
    "1.清炒南瓜丝 原料:嫩南瓜半个 调料:葱、盐、白糖、鸡精 做法: 1、南瓜用刀薄薄的削去表面一层皮,用勺子刮去瓤 2、擦成细丝(没有擦菜板就用刀慢慢切成细丝) 3、锅烧热放油,入葱花煸出香味 4、入南瓜丝快速翻炒一分钟左右,放盐、一点白糖和鸡精调味出锅 2.香葱炒南瓜 原料:南瓜1只 调料:香葱、蒜末、橄榄油、盐 做法: 1、将南瓜去皮,切成片 2、油锅8成热后,将蒜末放入爆香 3、爆香后,将南瓜片放入,翻炒 4、在翻炒的同时,可以不时地往锅里加水,但不要太多 5、放入盐,炒匀 6、南瓜差不多软和绵了之后,就可以关火 7、撒入香葱,即可出锅"
]
input_texts = queries + documents

model = SentenceTransformer('intfloat/multilingual-e5-large-instruct')

embeddings = model.encode(input_texts, convert_to_tensor=True, normalize_embeddings=True)
scores = (embeddings[:2] @ embeddings[2:].T) * 100
print(scores.tolist())
# [[91.92853546142578, 67.5802993774414], [70.38143157958984, 92.13307189941406]]
```
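For retrieval over a larger passage collection, the same Sentence Transformers model can be paired with `sentence_transformers.util.semantic_search`. The snippet below is a minimal sketch, not part of the original examples: the two-passage corpus, the `top_k` value, and the `batch_size` are illustrative placeholders you would replace with your own data and settings.

```python
from sentence_transformers import SentenceTransformer, util

model = SentenceTransformer('intfloat/multilingual-e5-large-instruct')

# Queries carry the instruction; documents do not
task = 'Given a web search query, retrieve relevant passages that answer the query'
query = f'Instruct: {task}\nQuery: how much protein should a female eat'

# Illustrative corpus; in practice this would be your own passage collection
corpus = [
    "Protein requirements vary by age, sex, and activity level.",
    "Pumpkin can be stir-fried with scallions and a little sugar.",
]

query_emb = model.encode(query, convert_to_tensor=True, normalize_embeddings=True)
corpus_emb = model.encode(corpus, convert_to_tensor=True, normalize_embeddings=True, batch_size=32)

# Retrieve the most similar passages by cosine similarity
hits = util.semantic_search(query_emb, corpus_emb, top_k=2)[0]
for hit in hits:
    print(corpus[hit['corpus_id']], hit['score'])
```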
### Infinity

Usage with [Infinity](https://github.com/michaelfeil/infinity):

```bash
docker run --gpus all -v $PWD/data:/app/.cache -e HF_TOKEN=$HF_TOKEN -p "7997":"7997" \
 michaelf34/infinity:0.0.68 \
 v2 --model-id intfloat/multilingual-e5-large-instruct --revision "main" --dtype float16 --batch-size 32 --engine torch --port 7997
```

## Supported Languages

This model is initialized from [xlm-roberta-large](https://huggingface.co/xlm-roberta-large) and continually trained on a mixture of multilingual datasets. It supports 100 languages from xlm-roberta, but low-resource languages may see performance degradation.

## Training Details

**Initialization**: [xlm-roberta-large](https://huggingface.co/xlm-roberta-large)

**First stage**: contrastive pre-training with 1 billion weakly supervised text pairs.

**Second stage**: fine-tuning on datasets from the [E5-mistral](https://arxiv.org/abs/2401.00368) paper.

## MTEB Benchmark Evaluation

Check out [unilm/e5](https://github.com/microsoft/unilm/tree/master/e5) to reproduce evaluation results on the [BEIR](https://arxiv.org/abs/2104.08663) and [MTEB benchmark](https://arxiv.org/abs/2210.07316).

## FAQ

**1. Do I need to add instructions to the query?**

Yes, this is how the model is trained; otherwise you will see a performance degradation. The task definition should be a one-sentence instruction that describes the task. This is a way to customize text embeddings for different scenarios through natural language instructions.

Please check out [unilm/e5/utils.py](https://github.com/microsoft/unilm/blob/9c0f1ff7ca53431fe47d2637dfe253643d94185b/e5/utils.py#L106) for instructions we used for evaluation.

On the other hand, there is no need to add instructions to the document side.

**2. Why are my reproduced results slightly different from those reported in the model card?**

Different versions of `transformers` and `pytorch` could cause negligible but non-zero performance differences.

**3. Why do the cosine similarity scores distribute around 0.7 to 1.0?**

This is a known and expected behavior, as we use a low temperature of 0.01 for the InfoNCE contrastive loss. For text embedding tasks like text retrieval or semantic similarity, what matters is the relative order of the scores instead of the absolute values, so this should not be an issue.

## Citation

If you find our paper or models helpful, please consider citing as follows:

```
@article{wang2024multilingual,
  title={Multilingual E5 Text Embeddings: A Technical Report},
  author={Wang, Liang and Yang, Nan and Huang, Xiaolong and Yang, Linjun and Majumder, Rangan and Wei, Furu},
  journal={arXiv preprint arXiv:2402.05672},
  year={2024}
}
```

## Limitations

Long texts will be truncated to at most 512 tokens.
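Because inputs beyond 512 tokens are silently truncated, you may want to check token counts before encoding, or split long documents into shorter passages and encode each passage separately. The sketch below is an illustrative workaround rather than part of the official usage; the helper name `split_into_passages` and the `max_tokens`/`stride` values are arbitrary choices, and how (or whether) to aggregate the resulting passage embeddings depends on your application.

```python
from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained('intfloat/multilingual-e5-large-instruct')


def split_into_passages(text: str, max_tokens: int = 480, stride: int = 50) -> list[str]:
    """Split a long document into overlapping passages that stay under the 512-token limit.

    max_tokens is kept below 512 to leave room for special tokens; both values are illustrative.
    """
    token_ids = tokenizer.encode(text, add_special_tokens=False)
    passages = []
    start = 0
    while start < len(token_ids):
        chunk = token_ids[start:start + max_tokens]
        passages.append(tokenizer.decode(chunk))
        if start + max_tokens >= len(token_ids):
            break
        start += max_tokens - stride  # overlap consecutive passages by `stride` tokens
    return passages


long_document = "your long document text here"  # placeholder for a document longer than 512 tokens
passages = split_into_passages(long_document)
print(f"{len(passages)} passage(s), each within the model's 512-token limit")
```

Each passage can then be encoded with either of the usage examples above.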
[ "BIOSSES", "SCIFACT" ]
intfloat/multilingual-e5-base
intfloat
sentence-similarity
[ "sentence-transformers", "pytorch", "onnx", "safetensors", "openvino", "xlm-roberta", "mteb", "Sentence Transformers", "sentence-similarity", "multilingual", "af", "am", "ar", "as", "az", "be", "bg", "bn", "br", "bs", "ca", "cs", "cy", "da", "de", "el", "en", "eo", "es", "et", "eu", "fa", "fi", "fr", "fy", "ga", "gd", "gl", "gu", "ha", "he", "hi", "hr", "hu", "hy", "id", "is", "it", "ja", "jv", "ka", "kk", "km", "kn", "ko", "ku", "ky", "la", "lo", "lt", "lv", "mg", "mk", "ml", "mn", "mr", "ms", "my", "ne", "nl", "no", "om", "or", "pa", "pl", "ps", "pt", "ro", "ru", "sa", "sd", "si", "sk", "sl", "so", "sq", "sr", "su", "sv", "sw", "ta", "te", "th", "tl", "tr", "ug", "uk", "ur", "uz", "vi", "xh", "yi", "zh", "arxiv:2402.05672", "arxiv:2108.08787", "arxiv:2104.08663", "arxiv:2210.07316", "license:mit", "model-index", "autotrain_compatible", "text-embeddings-inference", "endpoints_compatible", "region:us" ]
"2023-05-19T10:26:40Z"
2025-02-17T03:23:43+00:00
578,159
263
--- language: - multilingual - af - am - ar - as - az - be - bg - bn - br - bs - ca - cs - cy - da - de - el - en - eo - es - et - eu - fa - fi - fr - fy - ga - gd - gl - gu - ha - he - hi - hr - hu - hy - id - is - it - ja - jv - ka - kk - km - kn - ko - ku - ky - la - lo - lt - lv - mg - mk - ml - mn - mr - ms - my - ne - nl - 'no' - om - or - pa - pl - ps - pt - ro - ru - sa - sd - si - sk - sl - so - sq - sr - su - sv - sw - ta - te - th - tl - tr - ug - uk - ur - uz - vi - xh - yi - zh license: mit tags: - mteb - Sentence Transformers - sentence-similarity - sentence-transformers model-index: - name: multilingual-e5-base results: - task: type: Classification dataset: name: MTEB AmazonCounterfactualClassification (en) type: mteb/amazon_counterfactual config: en split: test revision: e8379541af4e31359cca9fbcf4b00f2671dba205 metrics: - type: accuracy value: 78.97014925373135 - type: ap value: 43.69351129103008 - type: f1 value: 73.38075030070492 - task: type: Classification dataset: name: MTEB AmazonCounterfactualClassification (de) type: mteb/amazon_counterfactual config: de split: test revision: e8379541af4e31359cca9fbcf4b00f2671dba205 metrics: - type: accuracy value: 71.7237687366167 - type: ap value: 82.22089859962671 - type: f1 value: 69.95532758884401 - task: type: Classification dataset: name: MTEB AmazonCounterfactualClassification (en-ext) type: mteb/amazon_counterfactual config: en-ext split: test revision: e8379541af4e31359cca9fbcf4b00f2671dba205 metrics: - type: accuracy value: 79.65517241379312 - type: ap value: 28.507918657094738 - type: f1 value: 66.84516013726119 - task: type: Classification dataset: name: MTEB AmazonCounterfactualClassification (ja) type: mteb/amazon_counterfactual config: ja split: test revision: e8379541af4e31359cca9fbcf4b00f2671dba205 metrics: - type: accuracy value: 73.32976445396146 - type: ap value: 20.720481637566014 - type: f1 value: 59.78002763416003 - task: type: Classification dataset: name: MTEB AmazonPolarityClassification type: mteb/amazon_polarity config: default split: test revision: e2d317d38cd51312af73b3d32a06d1a08b442046 metrics: - type: accuracy value: 90.63775 - type: ap value: 87.22277903861716 - type: f1 value: 90.60378636386807 - task: type: Classification dataset: name: MTEB AmazonReviewsClassification (en) type: mteb/amazon_reviews_multi config: en split: test revision: 1399c76144fd37290681b995c656ef9b2e06e26d metrics: - type: accuracy value: 44.546 - type: f1 value: 44.05666638370923 - task: type: Classification dataset: name: MTEB AmazonReviewsClassification (de) type: mteb/amazon_reviews_multi config: de split: test revision: 1399c76144fd37290681b995c656ef9b2e06e26d metrics: - type: accuracy value: 41.828 - type: f1 value: 41.2710255644252 - task: type: Classification dataset: name: MTEB AmazonReviewsClassification (es) type: mteb/amazon_reviews_multi config: es split: test revision: 1399c76144fd37290681b995c656ef9b2e06e26d metrics: - type: accuracy value: 40.534 - type: f1 value: 39.820743174270326 - task: type: Classification dataset: name: MTEB AmazonReviewsClassification (fr) type: mteb/amazon_reviews_multi config: fr split: test revision: 1399c76144fd37290681b995c656ef9b2e06e26d metrics: - type: accuracy value: 39.684 - type: f1 value: 39.11052682815307 - task: type: Classification dataset: name: MTEB AmazonReviewsClassification (ja) type: mteb/amazon_reviews_multi config: ja split: test revision: 1399c76144fd37290681b995c656ef9b2e06e26d metrics: - type: accuracy value: 37.436 - type: f1 value: 37.07082931930871 - task: 
type: Classification dataset: name: MTEB AmazonReviewsClassification (zh) type: mteb/amazon_reviews_multi config: zh split: test revision: 1399c76144fd37290681b995c656ef9b2e06e26d metrics: - type: accuracy value: 37.226000000000006 - type: f1 value: 36.65372077739185 - task: type: Retrieval dataset: name: MTEB ArguAna type: arguana config: default split: test revision: None metrics: - type: map_at_1 value: 22.831000000000003 - type: map_at_10 value: 36.42 - type: map_at_100 value: 37.699 - type: map_at_1000 value: 37.724000000000004 - type: map_at_3 value: 32.207 - type: map_at_5 value: 34.312 - type: mrr_at_1 value: 23.257 - type: mrr_at_10 value: 36.574 - type: mrr_at_100 value: 37.854 - type: mrr_at_1000 value: 37.878 - type: mrr_at_3 value: 32.385000000000005 - type: mrr_at_5 value: 34.48 - type: ndcg_at_1 value: 22.831000000000003 - type: ndcg_at_10 value: 44.230000000000004 - type: ndcg_at_100 value: 49.974000000000004 - type: ndcg_at_1000 value: 50.522999999999996 - type: ndcg_at_3 value: 35.363 - type: ndcg_at_5 value: 39.164 - type: precision_at_1 value: 22.831000000000003 - type: precision_at_10 value: 6.935 - type: precision_at_100 value: 0.9520000000000001 - type: precision_at_1000 value: 0.099 - type: precision_at_3 value: 14.841 - type: precision_at_5 value: 10.754 - type: recall_at_1 value: 22.831000000000003 - type: recall_at_10 value: 69.346 - type: recall_at_100 value: 95.235 - type: recall_at_1000 value: 99.36 - type: recall_at_3 value: 44.523 - type: recall_at_5 value: 53.769999999999996 - task: type: Clustering dataset: name: MTEB ArxivClusteringP2P type: mteb/arxiv-clustering-p2p config: default split: test revision: a122ad7f3f0291bf49cc6f4d32aa80929df69d5d metrics: - type: v_measure value: 40.27789869854063 - task: type: Clustering dataset: name: MTEB ArxivClusteringS2S type: mteb/arxiv-clustering-s2s config: default split: test revision: f910caf1a6075f7329cdf8c1a6135696f37dbd53 metrics: - type: v_measure value: 35.41979463347428 - task: type: Reranking dataset: name: MTEB AskUbuntuDupQuestions type: mteb/askubuntudupquestions-reranking config: default split: test revision: 2000358ca161889fa9c082cb41daa8dcfb161a54 metrics: - type: map value: 58.22752045109304 - type: mrr value: 71.51112430198303 - task: type: STS dataset: name: MTEB BIOSSES type: mteb/biosses-sts config: default split: test revision: d3fb88f8f02e40887cd149695127462bbcf29b4a metrics: - type: cos_sim_pearson value: 84.71147646622866 - type: cos_sim_spearman value: 85.059167046486 - type: euclidean_pearson value: 75.88421613600647 - type: euclidean_spearman value: 75.12821787150585 - type: manhattan_pearson value: 75.22005646957604 - type: manhattan_spearman value: 74.42880434453272 - task: type: BitextMining dataset: name: MTEB BUCC (de-en) type: mteb/bucc-bitext-mining config: de-en split: test revision: d51519689f32196a32af33b075a01d0e7c51e252 metrics: - type: accuracy value: 99.23799582463465 - type: f1 value: 99.12665274878218 - type: precision value: 99.07098121085595 - type: recall value: 99.23799582463465 - task: type: BitextMining dataset: name: MTEB BUCC (fr-en) type: mteb/bucc-bitext-mining config: fr-en split: test revision: d51519689f32196a32af33b075a01d0e7c51e252 metrics: - type: accuracy value: 97.88685890380806 - type: f1 value: 97.59336708489249 - type: precision value: 97.44662117543473 - type: recall value: 97.88685890380806 - task: type: BitextMining dataset: name: MTEB BUCC (ru-en) type: mteb/bucc-bitext-mining config: ru-en split: test revision: 
d51519689f32196a32af33b075a01d0e7c51e252 metrics: - type: accuracy value: 97.47142362313821 - type: f1 value: 97.1989377670015 - type: precision value: 97.06384944001847 - type: recall value: 97.47142362313821 - task: type: BitextMining dataset: name: MTEB BUCC (zh-en) type: mteb/bucc-bitext-mining config: zh-en split: test revision: d51519689f32196a32af33b075a01d0e7c51e252 metrics: - type: accuracy value: 98.4728804634018 - type: f1 value: 98.2973494821836 - type: precision value: 98.2095839915745 - type: recall value: 98.4728804634018 - task: type: Classification dataset: name: MTEB Banking77Classification type: mteb/banking77 config: default split: test revision: 0fd18e25b25c072e09e0d92ab615fda904d66300 metrics: - type: accuracy value: 82.74025974025975 - type: f1 value: 82.67420447730439 - task: type: Clustering dataset: name: MTEB BiorxivClusteringP2P type: mteb/biorxiv-clustering-p2p config: default split: test revision: 65b79d1d13f80053f67aca9498d9402c2d9f1f40 metrics: - type: v_measure value: 35.0380848063507 - task: type: Clustering dataset: name: MTEB BiorxivClusteringS2S type: mteb/biorxiv-clustering-s2s config: default split: test revision: 258694dd0231531bc1fd9de6ceb52a0853c6d908 metrics: - type: v_measure value: 29.45956405670166 - task: type: Retrieval dataset: name: MTEB CQADupstackAndroidRetrieval type: BeIR/cqadupstack config: default split: test revision: None metrics: - type: map_at_1 value: 32.122 - type: map_at_10 value: 42.03 - type: map_at_100 value: 43.364000000000004 - type: map_at_1000 value: 43.474000000000004 - type: map_at_3 value: 38.804 - type: map_at_5 value: 40.585 - type: mrr_at_1 value: 39.914 - type: mrr_at_10 value: 48.227 - type: mrr_at_100 value: 49.018 - type: mrr_at_1000 value: 49.064 - type: mrr_at_3 value: 45.994 - type: mrr_at_5 value: 47.396 - type: ndcg_at_1 value: 39.914 - type: ndcg_at_10 value: 47.825 - type: ndcg_at_100 value: 52.852 - type: ndcg_at_1000 value: 54.891 - type: ndcg_at_3 value: 43.517 - type: ndcg_at_5 value: 45.493 - type: precision_at_1 value: 39.914 - type: precision_at_10 value: 8.956 - type: precision_at_100 value: 1.388 - type: precision_at_1000 value: 0.182 - type: precision_at_3 value: 20.791999999999998 - type: precision_at_5 value: 14.821000000000002 - type: recall_at_1 value: 32.122 - type: recall_at_10 value: 58.294999999999995 - type: recall_at_100 value: 79.726 - type: recall_at_1000 value: 93.099 - type: recall_at_3 value: 45.017 - type: recall_at_5 value: 51.002 - type: map_at_1 value: 29.677999999999997 - type: map_at_10 value: 38.684000000000005 - type: map_at_100 value: 39.812999999999995 - type: map_at_1000 value: 39.945 - type: map_at_3 value: 35.831 - type: map_at_5 value: 37.446 - type: mrr_at_1 value: 37.771 - type: mrr_at_10 value: 44.936 - type: mrr_at_100 value: 45.583 - type: mrr_at_1000 value: 45.634 - type: mrr_at_3 value: 42.771 - type: mrr_at_5 value: 43.994 - type: ndcg_at_1 value: 37.771 - type: ndcg_at_10 value: 44.059 - type: ndcg_at_100 value: 48.192 - type: ndcg_at_1000 value: 50.375 - type: ndcg_at_3 value: 40.172000000000004 - type: ndcg_at_5 value: 41.899 - type: precision_at_1 value: 37.771 - type: precision_at_10 value: 8.286999999999999 - type: precision_at_100 value: 1.322 - type: precision_at_1000 value: 0.178 - type: precision_at_3 value: 19.406000000000002 - type: precision_at_5 value: 13.745 - type: recall_at_1 value: 29.677999999999997 - type: recall_at_10 value: 53.071 - type: recall_at_100 value: 70.812 - type: recall_at_1000 value: 84.841 - type: recall_at_3 value: 
41.016000000000005 - type: recall_at_5 value: 46.22 - type: map_at_1 value: 42.675000000000004 - type: map_at_10 value: 53.93599999999999 - type: map_at_100 value: 54.806999999999995 - type: map_at_1000 value: 54.867 - type: map_at_3 value: 50.934000000000005 - type: map_at_5 value: 52.583 - type: mrr_at_1 value: 48.339 - type: mrr_at_10 value: 57.265 - type: mrr_at_100 value: 57.873 - type: mrr_at_1000 value: 57.906 - type: mrr_at_3 value: 55.193000000000005 - type: mrr_at_5 value: 56.303000000000004 - type: ndcg_at_1 value: 48.339 - type: ndcg_at_10 value: 59.19799999999999 - type: ndcg_at_100 value: 62.743 - type: ndcg_at_1000 value: 63.99399999999999 - type: ndcg_at_3 value: 54.367 - type: ndcg_at_5 value: 56.548 - type: precision_at_1 value: 48.339 - type: precision_at_10 value: 9.216000000000001 - type: precision_at_100 value: 1.1809999999999998 - type: precision_at_1000 value: 0.134 - type: precision_at_3 value: 23.72 - type: precision_at_5 value: 16.025 - type: recall_at_1 value: 42.675000000000004 - type: recall_at_10 value: 71.437 - type: recall_at_100 value: 86.803 - type: recall_at_1000 value: 95.581 - type: recall_at_3 value: 58.434 - type: recall_at_5 value: 63.754 - type: map_at_1 value: 23.518 - type: map_at_10 value: 30.648999999999997 - type: map_at_100 value: 31.508999999999997 - type: map_at_1000 value: 31.604 - type: map_at_3 value: 28.247 - type: map_at_5 value: 29.65 - type: mrr_at_1 value: 25.650000000000002 - type: mrr_at_10 value: 32.771 - type: mrr_at_100 value: 33.554 - type: mrr_at_1000 value: 33.629999999999995 - type: mrr_at_3 value: 30.433 - type: mrr_at_5 value: 31.812 - type: ndcg_at_1 value: 25.650000000000002 - type: ndcg_at_10 value: 34.929 - type: ndcg_at_100 value: 39.382 - type: ndcg_at_1000 value: 41.913 - type: ndcg_at_3 value: 30.292 - type: ndcg_at_5 value: 32.629999999999995 - type: precision_at_1 value: 25.650000000000002 - type: precision_at_10 value: 5.311 - type: precision_at_100 value: 0.792 - type: precision_at_1000 value: 0.105 - type: precision_at_3 value: 12.58 - type: precision_at_5 value: 8.994 - type: recall_at_1 value: 23.518 - type: recall_at_10 value: 46.19 - type: recall_at_100 value: 67.123 - type: recall_at_1000 value: 86.442 - type: recall_at_3 value: 33.678000000000004 - type: recall_at_5 value: 39.244 - type: map_at_1 value: 15.891 - type: map_at_10 value: 22.464000000000002 - type: map_at_100 value: 23.483 - type: map_at_1000 value: 23.613 - type: map_at_3 value: 20.080000000000002 - type: map_at_5 value: 21.526 - type: mrr_at_1 value: 20.025000000000002 - type: mrr_at_10 value: 26.712999999999997 - type: mrr_at_100 value: 27.650000000000002 - type: mrr_at_1000 value: 27.737000000000002 - type: mrr_at_3 value: 24.274 - type: mrr_at_5 value: 25.711000000000002 - type: ndcg_at_1 value: 20.025000000000002 - type: ndcg_at_10 value: 27.028999999999996 - type: ndcg_at_100 value: 32.064 - type: ndcg_at_1000 value: 35.188 - type: ndcg_at_3 value: 22.512999999999998 - type: ndcg_at_5 value: 24.89 - type: precision_at_1 value: 20.025000000000002 - type: precision_at_10 value: 4.776 - type: precision_at_100 value: 0.8500000000000001 - type: precision_at_1000 value: 0.125 - type: precision_at_3 value: 10.531 - type: precision_at_5 value: 7.811 - type: recall_at_1 value: 15.891 - type: recall_at_10 value: 37.261 - type: recall_at_100 value: 59.12 - type: recall_at_1000 value: 81.356 - type: recall_at_3 value: 24.741 - type: recall_at_5 value: 30.753999999999998 - type: map_at_1 value: 27.544 - type: map_at_10 value: 36.283 - type: 
map_at_100 value: 37.467 - type: map_at_1000 value: 37.574000000000005 - type: map_at_3 value: 33.528999999999996 - type: map_at_5 value: 35.028999999999996 - type: mrr_at_1 value: 34.166999999999994 - type: mrr_at_10 value: 41.866 - type: mrr_at_100 value: 42.666 - type: mrr_at_1000 value: 42.716 - type: mrr_at_3 value: 39.541 - type: mrr_at_5 value: 40.768 - type: ndcg_at_1 value: 34.166999999999994 - type: ndcg_at_10 value: 41.577 - type: ndcg_at_100 value: 46.687 - type: ndcg_at_1000 value: 48.967 - type: ndcg_at_3 value: 37.177 - type: ndcg_at_5 value: 39.097 - type: precision_at_1 value: 34.166999999999994 - type: precision_at_10 value: 7.420999999999999 - type: precision_at_100 value: 1.165 - type: precision_at_1000 value: 0.154 - type: precision_at_3 value: 17.291999999999998 - type: precision_at_5 value: 12.166 - type: recall_at_1 value: 27.544 - type: recall_at_10 value: 51.99399999999999 - type: recall_at_100 value: 73.738 - type: recall_at_1000 value: 89.33 - type: recall_at_3 value: 39.179 - type: recall_at_5 value: 44.385999999999996 - type: map_at_1 value: 26.661 - type: map_at_10 value: 35.475 - type: map_at_100 value: 36.626999999999995 - type: map_at_1000 value: 36.741 - type: map_at_3 value: 32.818000000000005 - type: map_at_5 value: 34.397 - type: mrr_at_1 value: 32.647999999999996 - type: mrr_at_10 value: 40.784 - type: mrr_at_100 value: 41.602 - type: mrr_at_1000 value: 41.661 - type: mrr_at_3 value: 38.68 - type: mrr_at_5 value: 39.838 - type: ndcg_at_1 value: 32.647999999999996 - type: ndcg_at_10 value: 40.697 - type: ndcg_at_100 value: 45.799 - type: ndcg_at_1000 value: 48.235 - type: ndcg_at_3 value: 36.516 - type: ndcg_at_5 value: 38.515 - type: precision_at_1 value: 32.647999999999996 - type: precision_at_10 value: 7.202999999999999 - type: precision_at_100 value: 1.1360000000000001 - type: precision_at_1000 value: 0.151 - type: precision_at_3 value: 17.314 - type: precision_at_5 value: 12.145999999999999 - type: recall_at_1 value: 26.661 - type: recall_at_10 value: 50.995000000000005 - type: recall_at_100 value: 73.065 - type: recall_at_1000 value: 89.781 - type: recall_at_3 value: 39.073 - type: recall_at_5 value: 44.395 - type: map_at_1 value: 25.946583333333333 - type: map_at_10 value: 33.79725 - type: map_at_100 value: 34.86408333333333 - type: map_at_1000 value: 34.9795 - type: map_at_3 value: 31.259999999999998 - type: map_at_5 value: 32.71541666666666 - type: mrr_at_1 value: 30.863749999999996 - type: mrr_at_10 value: 37.99183333333333 - type: mrr_at_100 value: 38.790499999999994 - type: mrr_at_1000 value: 38.85575000000001 - type: mrr_at_3 value: 35.82083333333333 - type: mrr_at_5 value: 37.07533333333333 - type: ndcg_at_1 value: 30.863749999999996 - type: ndcg_at_10 value: 38.52141666666667 - type: ndcg_at_100 value: 43.17966666666667 - type: ndcg_at_1000 value: 45.64608333333333 - type: ndcg_at_3 value: 34.333000000000006 - type: ndcg_at_5 value: 36.34975 - type: precision_at_1 value: 30.863749999999996 - type: precision_at_10 value: 6.598999999999999 - type: precision_at_100 value: 1.0502500000000001 - type: precision_at_1000 value: 0.14400000000000002 - type: precision_at_3 value: 15.557583333333334 - type: precision_at_5 value: 11.020000000000001 - type: recall_at_1 value: 25.946583333333333 - type: recall_at_10 value: 48.36991666666666 - type: recall_at_100 value: 69.02408333333334 - type: recall_at_1000 value: 86.43858333333331 - type: recall_at_3 value: 36.4965 - type: recall_at_5 value: 41.76258333333334 - type: map_at_1 value: 22.431 - type: 
map_at_10 value: 28.889 - type: map_at_100 value: 29.642000000000003 - type: map_at_1000 value: 29.742 - type: map_at_3 value: 26.998 - type: map_at_5 value: 28.172000000000004 - type: mrr_at_1 value: 25.307000000000002 - type: mrr_at_10 value: 31.763 - type: mrr_at_100 value: 32.443 - type: mrr_at_1000 value: 32.531 - type: mrr_at_3 value: 29.959000000000003 - type: mrr_at_5 value: 31.063000000000002 - type: ndcg_at_1 value: 25.307000000000002 - type: ndcg_at_10 value: 32.586999999999996 - type: ndcg_at_100 value: 36.5 - type: ndcg_at_1000 value: 39.133 - type: ndcg_at_3 value: 29.25 - type: ndcg_at_5 value: 31.023 - type: precision_at_1 value: 25.307000000000002 - type: precision_at_10 value: 4.954 - type: precision_at_100 value: 0.747 - type: precision_at_1000 value: 0.104 - type: precision_at_3 value: 12.577 - type: precision_at_5 value: 8.741999999999999 - type: recall_at_1 value: 22.431 - type: recall_at_10 value: 41.134 - type: recall_at_100 value: 59.28600000000001 - type: recall_at_1000 value: 78.857 - type: recall_at_3 value: 31.926 - type: recall_at_5 value: 36.335 - type: map_at_1 value: 17.586 - type: map_at_10 value: 23.304 - type: map_at_100 value: 24.159 - type: map_at_1000 value: 24.281 - type: map_at_3 value: 21.316 - type: map_at_5 value: 22.383 - type: mrr_at_1 value: 21.645 - type: mrr_at_10 value: 27.365000000000002 - type: mrr_at_100 value: 28.108 - type: mrr_at_1000 value: 28.192 - type: mrr_at_3 value: 25.482 - type: mrr_at_5 value: 26.479999999999997 - type: ndcg_at_1 value: 21.645 - type: ndcg_at_10 value: 27.306 - type: ndcg_at_100 value: 31.496000000000002 - type: ndcg_at_1000 value: 34.53 - type: ndcg_at_3 value: 23.73 - type: ndcg_at_5 value: 25.294 - type: precision_at_1 value: 21.645 - type: precision_at_10 value: 4.797 - type: precision_at_100 value: 0.8059999999999999 - type: precision_at_1000 value: 0.121 - type: precision_at_3 value: 10.850999999999999 - type: precision_at_5 value: 7.736 - type: recall_at_1 value: 17.586 - type: recall_at_10 value: 35.481 - type: recall_at_100 value: 54.534000000000006 - type: recall_at_1000 value: 76.456 - type: recall_at_3 value: 25.335 - type: recall_at_5 value: 29.473 - type: map_at_1 value: 25.095 - type: map_at_10 value: 32.374 - type: map_at_100 value: 33.537 - type: map_at_1000 value: 33.634 - type: map_at_3 value: 30.089 - type: map_at_5 value: 31.433 - type: mrr_at_1 value: 29.198 - type: mrr_at_10 value: 36.01 - type: mrr_at_100 value: 37.022 - type: mrr_at_1000 value: 37.083 - type: mrr_at_3 value: 33.94 - type: mrr_at_5 value: 35.148 - type: ndcg_at_1 value: 29.198 - type: ndcg_at_10 value: 36.729 - type: ndcg_at_100 value: 42.114000000000004 - type: ndcg_at_1000 value: 44.592 - type: ndcg_at_3 value: 32.644 - type: ndcg_at_5 value: 34.652 - type: precision_at_1 value: 29.198 - type: precision_at_10 value: 5.970000000000001 - type: precision_at_100 value: 0.967 - type: precision_at_1000 value: 0.129 - type: precision_at_3 value: 14.396999999999998 - type: precision_at_5 value: 10.093 - type: recall_at_1 value: 25.095 - type: recall_at_10 value: 46.392 - type: recall_at_100 value: 69.706 - type: recall_at_1000 value: 87.738 - type: recall_at_3 value: 35.303000000000004 - type: recall_at_5 value: 40.441 - type: map_at_1 value: 26.857999999999997 - type: map_at_10 value: 34.066 - type: map_at_100 value: 35.671 - type: map_at_1000 value: 35.881 - type: map_at_3 value: 31.304 - type: map_at_5 value: 32.885 - type: mrr_at_1 value: 32.411 - type: mrr_at_10 value: 38.987 - type: mrr_at_100 value: 39.894 - type: 
mrr_at_1000 value: 39.959 - type: mrr_at_3 value: 36.626999999999995 - type: mrr_at_5 value: 38.011 - type: ndcg_at_1 value: 32.411 - type: ndcg_at_10 value: 39.208 - type: ndcg_at_100 value: 44.626 - type: ndcg_at_1000 value: 47.43 - type: ndcg_at_3 value: 35.091 - type: ndcg_at_5 value: 37.119 - type: precision_at_1 value: 32.411 - type: precision_at_10 value: 7.51 - type: precision_at_100 value: 1.486 - type: precision_at_1000 value: 0.234 - type: precision_at_3 value: 16.14 - type: precision_at_5 value: 11.976 - type: recall_at_1 value: 26.857999999999997 - type: recall_at_10 value: 47.407 - type: recall_at_100 value: 72.236 - type: recall_at_1000 value: 90.77 - type: recall_at_3 value: 35.125 - type: recall_at_5 value: 40.522999999999996 - type: map_at_1 value: 21.3 - type: map_at_10 value: 27.412999999999997 - type: map_at_100 value: 28.29 - type: map_at_1000 value: 28.398 - type: map_at_3 value: 25.169999999999998 - type: map_at_5 value: 26.496 - type: mrr_at_1 value: 23.29 - type: mrr_at_10 value: 29.215000000000003 - type: mrr_at_100 value: 30.073 - type: mrr_at_1000 value: 30.156 - type: mrr_at_3 value: 26.956000000000003 - type: mrr_at_5 value: 28.38 - type: ndcg_at_1 value: 23.29 - type: ndcg_at_10 value: 31.113000000000003 - type: ndcg_at_100 value: 35.701 - type: ndcg_at_1000 value: 38.505 - type: ndcg_at_3 value: 26.727 - type: ndcg_at_5 value: 29.037000000000003 - type: precision_at_1 value: 23.29 - type: precision_at_10 value: 4.787 - type: precision_at_100 value: 0.763 - type: precision_at_1000 value: 0.11100000000000002 - type: precision_at_3 value: 11.091 - type: precision_at_5 value: 7.985 - type: recall_at_1 value: 21.3 - type: recall_at_10 value: 40.782000000000004 - type: recall_at_100 value: 62.13999999999999 - type: recall_at_1000 value: 83.012 - type: recall_at_3 value: 29.131 - type: recall_at_5 value: 34.624 - task: type: Retrieval dataset: name: MTEB ClimateFEVER type: climate-fever config: default split: test revision: None metrics: - type: map_at_1 value: 9.631 - type: map_at_10 value: 16.634999999999998 - type: map_at_100 value: 18.23 - type: map_at_1000 value: 18.419 - type: map_at_3 value: 13.66 - type: map_at_5 value: 15.173 - type: mrr_at_1 value: 21.368000000000002 - type: mrr_at_10 value: 31.56 - type: mrr_at_100 value: 32.58 - type: mrr_at_1000 value: 32.633 - type: mrr_at_3 value: 28.241 - type: mrr_at_5 value: 30.225 - type: ndcg_at_1 value: 21.368000000000002 - type: ndcg_at_10 value: 23.855999999999998 - type: ndcg_at_100 value: 30.686999999999998 - type: ndcg_at_1000 value: 34.327000000000005 - type: ndcg_at_3 value: 18.781 - type: ndcg_at_5 value: 20.73 - type: precision_at_1 value: 21.368000000000002 - type: precision_at_10 value: 7.564 - type: precision_at_100 value: 1.496 - type: precision_at_1000 value: 0.217 - type: precision_at_3 value: 13.876 - type: precision_at_5 value: 11.062 - type: recall_at_1 value: 9.631 - type: recall_at_10 value: 29.517 - type: recall_at_100 value: 53.452 - type: recall_at_1000 value: 74.115 - type: recall_at_3 value: 17.605999999999998 - type: recall_at_5 value: 22.505 - task: type: Retrieval dataset: name: MTEB DBPedia type: dbpedia-entity config: default split: test revision: None metrics: - type: map_at_1 value: 8.885 - type: map_at_10 value: 18.798000000000002 - type: map_at_100 value: 26.316 - type: map_at_1000 value: 27.869 - type: map_at_3 value: 13.719000000000001 - type: map_at_5 value: 15.716 - type: mrr_at_1 value: 66 - type: mrr_at_10 value: 74.263 - type: mrr_at_100 value: 74.519 - type: 
mrr_at_1000 value: 74.531 - type: mrr_at_3 value: 72.458 - type: mrr_at_5 value: 73.321 - type: ndcg_at_1 value: 53.87499999999999 - type: ndcg_at_10 value: 40.355999999999995 - type: ndcg_at_100 value: 44.366 - type: ndcg_at_1000 value: 51.771 - type: ndcg_at_3 value: 45.195 - type: ndcg_at_5 value: 42.187000000000005 - type: precision_at_1 value: 66 - type: precision_at_10 value: 31.75 - type: precision_at_100 value: 10.11 - type: precision_at_1000 value: 1.9800000000000002 - type: precision_at_3 value: 48.167 - type: precision_at_5 value: 40.050000000000004 - type: recall_at_1 value: 8.885 - type: recall_at_10 value: 24.471999999999998 - type: recall_at_100 value: 49.669000000000004 - type: recall_at_1000 value: 73.383 - type: recall_at_3 value: 14.872 - type: recall_at_5 value: 18.262999999999998 - task: type: Classification dataset: name: MTEB EmotionClassification type: mteb/emotion config: default split: test revision: 4f58c6b202a23cf9a4da393831edf4f9183cad37 metrics: - type: accuracy value: 45.18 - type: f1 value: 40.26878691789978 - task: type: Retrieval dataset: name: MTEB FEVER type: fever config: default split: test revision: None metrics: - type: map_at_1 value: 62.751999999999995 - type: map_at_10 value: 74.131 - type: map_at_100 value: 74.407 - type: map_at_1000 value: 74.423 - type: map_at_3 value: 72.329 - type: map_at_5 value: 73.555 - type: mrr_at_1 value: 67.282 - type: mrr_at_10 value: 78.292 - type: mrr_at_100 value: 78.455 - type: mrr_at_1000 value: 78.458 - type: mrr_at_3 value: 76.755 - type: mrr_at_5 value: 77.839 - type: ndcg_at_1 value: 67.282 - type: ndcg_at_10 value: 79.443 - type: ndcg_at_100 value: 80.529 - type: ndcg_at_1000 value: 80.812 - type: ndcg_at_3 value: 76.281 - type: ndcg_at_5 value: 78.235 - type: precision_at_1 value: 67.282 - type: precision_at_10 value: 10.078 - type: precision_at_100 value: 1.082 - type: precision_at_1000 value: 0.11199999999999999 - type: precision_at_3 value: 30.178 - type: precision_at_5 value: 19.232 - type: recall_at_1 value: 62.751999999999995 - type: recall_at_10 value: 91.521 - type: recall_at_100 value: 95.997 - type: recall_at_1000 value: 97.775 - type: recall_at_3 value: 83.131 - type: recall_at_5 value: 87.93299999999999 - task: type: Retrieval dataset: name: MTEB FiQA2018 type: fiqa config: default split: test revision: None metrics: - type: map_at_1 value: 18.861 - type: map_at_10 value: 30.252000000000002 - type: map_at_100 value: 32.082 - type: map_at_1000 value: 32.261 - type: map_at_3 value: 25.909 - type: map_at_5 value: 28.296 - type: mrr_at_1 value: 37.346000000000004 - type: mrr_at_10 value: 45.802 - type: mrr_at_100 value: 46.611999999999995 - type: mrr_at_1000 value: 46.659 - type: mrr_at_3 value: 43.056 - type: mrr_at_5 value: 44.637 - type: ndcg_at_1 value: 37.346000000000004 - type: ndcg_at_10 value: 38.169 - type: ndcg_at_100 value: 44.864 - type: ndcg_at_1000 value: 47.974 - type: ndcg_at_3 value: 33.619 - type: ndcg_at_5 value: 35.317 - type: precision_at_1 value: 37.346000000000004 - type: precision_at_10 value: 10.693999999999999 - type: precision_at_100 value: 1.775 - type: precision_at_1000 value: 0.231 - type: precision_at_3 value: 22.325 - type: precision_at_5 value: 16.852 - type: recall_at_1 value: 18.861 - type: recall_at_10 value: 45.672000000000004 - type: recall_at_100 value: 70.60499999999999 - type: recall_at_1000 value: 89.216 - type: recall_at_3 value: 30.361 - type: recall_at_5 value: 36.998999999999995 - task: type: Retrieval dataset: name: MTEB HotpotQA type: hotpotqa config: 
default split: test revision: None metrics: - type: map_at_1 value: 37.852999999999994 - type: map_at_10 value: 59.961 - type: map_at_100 value: 60.78 - type: map_at_1000 value: 60.843 - type: map_at_3 value: 56.39999999999999 - type: map_at_5 value: 58.646 - type: mrr_at_1 value: 75.70599999999999 - type: mrr_at_10 value: 82.321 - type: mrr_at_100 value: 82.516 - type: mrr_at_1000 value: 82.525 - type: mrr_at_3 value: 81.317 - type: mrr_at_5 value: 81.922 - type: ndcg_at_1 value: 75.70599999999999 - type: ndcg_at_10 value: 68.557 - type: ndcg_at_100 value: 71.485 - type: ndcg_at_1000 value: 72.71600000000001 - type: ndcg_at_3 value: 63.524 - type: ndcg_at_5 value: 66.338 - type: precision_at_1 value: 75.70599999999999 - type: precision_at_10 value: 14.463000000000001 - type: precision_at_100 value: 1.677 - type: precision_at_1000 value: 0.184 - type: precision_at_3 value: 40.806 - type: precision_at_5 value: 26.709 - type: recall_at_1 value: 37.852999999999994 - type: recall_at_10 value: 72.316 - type: recall_at_100 value: 83.842 - type: recall_at_1000 value: 91.999 - type: recall_at_3 value: 61.209 - type: recall_at_5 value: 66.77199999999999 - task: type: Classification dataset: name: MTEB ImdbClassification type: mteb/imdb config: default split: test revision: 3d86128a09e091d6018b6d26cad27f2739fc2db7 metrics: - type: accuracy value: 85.46039999999999 - type: ap value: 79.9812521351881 - type: f1 value: 85.31722909702084 - task: type: Retrieval dataset: name: MTEB MSMARCO type: msmarco config: default split: dev revision: None metrics: - type: map_at_1 value: 22.704 - type: map_at_10 value: 35.329 - type: map_at_100 value: 36.494 - type: map_at_1000 value: 36.541000000000004 - type: map_at_3 value: 31.476 - type: map_at_5 value: 33.731 - type: mrr_at_1 value: 23.294999999999998 - type: mrr_at_10 value: 35.859 - type: mrr_at_100 value: 36.968 - type: mrr_at_1000 value: 37.008 - type: mrr_at_3 value: 32.085 - type: mrr_at_5 value: 34.299 - type: ndcg_at_1 value: 23.324 - type: ndcg_at_10 value: 42.274 - type: ndcg_at_100 value: 47.839999999999996 - type: ndcg_at_1000 value: 48.971 - type: ndcg_at_3 value: 34.454 - type: ndcg_at_5 value: 38.464 - type: precision_at_1 value: 23.324 - type: precision_at_10 value: 6.648 - type: precision_at_100 value: 0.9440000000000001 - type: precision_at_1000 value: 0.104 - type: precision_at_3 value: 14.674999999999999 - type: precision_at_5 value: 10.850999999999999 - type: recall_at_1 value: 22.704 - type: recall_at_10 value: 63.660000000000004 - type: recall_at_100 value: 89.29899999999999 - type: recall_at_1000 value: 97.88900000000001 - type: recall_at_3 value: 42.441 - type: recall_at_5 value: 52.04 - task: type: Classification dataset: name: MTEB MTOPDomainClassification (en) type: mteb/mtop_domain config: en split: test revision: d80d48c1eb48d3562165c59d59d0034df9fff0bf metrics: - type: accuracy value: 93.1326949384405 - type: f1 value: 92.89743579612082 - task: type: Classification dataset: name: MTEB MTOPDomainClassification (de) type: mteb/mtop_domain config: de split: test revision: d80d48c1eb48d3562165c59d59d0034df9fff0bf metrics: - type: accuracy value: 89.62524654832347 - type: f1 value: 88.65106082263151 - task: type: Classification dataset: name: MTEB MTOPDomainClassification (es) type: mteb/mtop_domain config: es split: test revision: d80d48c1eb48d3562165c59d59d0034df9fff0bf metrics: - type: accuracy value: 90.59039359573046 - type: f1 value: 90.31532892105662 - task: type: Classification dataset: name: MTEB MTOPDomainClassification (fr) 
type: mteb/mtop_domain config: fr split: test revision: d80d48c1eb48d3562165c59d59d0034df9fff0bf metrics: - type: accuracy value: 86.21046038208581 - type: f1 value: 86.41459529813113 - task: type: Classification dataset: name: MTEB MTOPDomainClassification (hi) type: mteb/mtop_domain config: hi split: test revision: d80d48c1eb48d3562165c59d59d0034df9fff0bf metrics: - type: accuracy value: 87.3180351380423 - type: f1 value: 86.71383078226444 - task: type: Classification dataset: name: MTEB MTOPDomainClassification (th) type: mteb/mtop_domain config: th split: test revision: d80d48c1eb48d3562165c59d59d0034df9fff0bf metrics: - type: accuracy value: 86.24231464737792 - type: f1 value: 86.31845567592403 - task: type: Classification dataset: name: MTEB MTOPIntentClassification (en) type: mteb/mtop_intent config: en split: test revision: ae001d0e6b1228650b7bd1c2c65fb50ad11a8aba metrics: - type: accuracy value: 75.27131782945736 - type: f1 value: 57.52079940417103 - task: type: Classification dataset: name: MTEB MTOPIntentClassification (de) type: mteb/mtop_intent config: de split: test revision: ae001d0e6b1228650b7bd1c2c65fb50ad11a8aba metrics: - type: accuracy value: 71.2341504649197 - type: f1 value: 51.349951558039244 - task: type: Classification dataset: name: MTEB MTOPIntentClassification (es) type: mteb/mtop_intent config: es split: test revision: ae001d0e6b1228650b7bd1c2c65fb50ad11a8aba metrics: - type: accuracy value: 71.27418278852569 - type: f1 value: 50.1714985749095 - task: type: Classification dataset: name: MTEB MTOPIntentClassification (fr) type: mteb/mtop_intent config: fr split: test revision: ae001d0e6b1228650b7bd1c2c65fb50ad11a8aba metrics: - type: accuracy value: 67.68243031631694 - type: f1 value: 50.1066160836192 - task: type: Classification dataset: name: MTEB MTOPIntentClassification (hi) type: mteb/mtop_intent config: hi split: test revision: ae001d0e6b1228650b7bd1c2c65fb50ad11a8aba metrics: - type: accuracy value: 69.2362854069559 - type: f1 value: 48.821279948766424 - task: type: Classification dataset: name: MTEB MTOPIntentClassification (th) type: mteb/mtop_intent config: th split: test revision: ae001d0e6b1228650b7bd1c2c65fb50ad11a8aba metrics: - type: accuracy value: 71.71428571428571 - type: f1 value: 53.94611389496195 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (af) type: mteb/amazon_massive_intent config: af split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 59.97646267652992 - type: f1 value: 57.26797883561521 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (am) type: mteb/amazon_massive_intent config: am split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 53.65501008742435 - type: f1 value: 50.416258382177034 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (ar) type: mteb/amazon_massive_intent config: ar split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 57.45796906523201 - type: f1 value: 53.306690547422185 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (az) type: mteb/amazon_massive_intent config: az split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 62.59246805648957 - type: f1 value: 59.818381969051494 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (bn) type: mteb/amazon_massive_intent config: bn split: test revision: 
31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 61.126429051782104 - type: f1 value: 58.25993593933026 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (cy) type: mteb/amazon_massive_intent config: cy split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 50.057162071284466 - type: f1 value: 46.96095728790911 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (da) type: mteb/amazon_massive_intent config: da split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 66.64425016812375 - type: f1 value: 62.858291698755764 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (de) type: mteb/amazon_massive_intent config: de split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 66.08944182918628 - type: f1 value: 62.44639030604241 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (el) type: mteb/amazon_massive_intent config: el split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 64.68056489576328 - type: f1 value: 61.775326758789504 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (en) type: mteb/amazon_massive_intent config: en split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 72.11163416274377 - type: f1 value: 69.70789096927015 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (es) type: mteb/amazon_massive_intent config: es split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 68.40282447881641 - type: f1 value: 66.38492065671895 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (fa) type: mteb/amazon_massive_intent config: fa split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 67.24613315400134 - type: f1 value: 64.3348019501336 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (fi) type: mteb/amazon_massive_intent config: fi split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 65.78345662407531 - type: f1 value: 62.21279452354622 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (fr) type: mteb/amazon_massive_intent config: fr split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 67.9455279085407 - type: f1 value: 65.48193124964094 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (he) type: mteb/amazon_massive_intent config: he split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 62.05110961667788 - type: f1 value: 58.097856564684534 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (hi) type: mteb/amazon_massive_intent config: hi split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 64.95292535305985 - type: f1 value: 62.09182174767901 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (hu) type: mteb/amazon_massive_intent config: hu split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 64.97310020174848 - type: f1 value: 61.14252567730396 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (hy) type: 
mteb/amazon_massive_intent config: hy split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 60.08069939475453 - type: f1 value: 57.044041742492034 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (id) type: mteb/amazon_massive_intent config: id split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 66.63752521856085 - type: f1 value: 63.889340907205316 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (is) type: mteb/amazon_massive_intent config: is split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 56.385339609952936 - type: f1 value: 53.449033750088304 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (it) type: mteb/amazon_massive_intent config: it split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 68.93073301950234 - type: f1 value: 65.9884357824104 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (ja) type: mteb/amazon_massive_intent config: ja split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 68.94418291862812 - type: f1 value: 66.48740222583132 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (jv) type: mteb/amazon_massive_intent config: jv split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 54.26025554808339 - type: f1 value: 50.19562815100793 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (ka) type: mteb/amazon_massive_intent config: ka split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 48.98789509078682 - type: f1 value: 46.65788438676836 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (km) type: mteb/amazon_massive_intent config: km split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 44.68728984532616 - type: f1 value: 41.642419349541996 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (kn) type: mteb/amazon_massive_intent config: kn split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 59.19300605245461 - type: f1 value: 55.8626492442437 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (ko) type: mteb/amazon_massive_intent config: ko split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 66.33826496301278 - type: f1 value: 63.89499791648792 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (lv) type: mteb/amazon_massive_intent config: lv split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 60.33960995292536 - type: f1 value: 57.15242464180892 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (ml) type: mteb/amazon_massive_intent config: ml split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 63.09347679892402 - type: f1 value: 59.64733214063841 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (mn) type: mteb/amazon_massive_intent config: mn split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 58.75924680564896 - type: f1 value: 55.96585692366827 - task: type: Classification dataset: name: MTEB 
MassiveIntentClassification (ms) type: mteb/amazon_massive_intent config: ms split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 62.48486886348352 - type: f1 value: 59.45143559032946 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (my) type: mteb/amazon_massive_intent config: my split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 58.56422326832549 - type: f1 value: 54.96368702901926 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (nb) type: mteb/amazon_massive_intent config: nb split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 66.18022864828512 - type: f1 value: 63.05369805040634 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (nl) type: mteb/amazon_massive_intent config: nl split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 67.30329522528581 - type: f1 value: 64.06084612020727 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (pl) type: mteb/amazon_massive_intent config: pl split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 68.36919973100201 - type: f1 value: 65.12154124788887 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (pt) type: mteb/amazon_massive_intent config: pt split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 68.98117014122394 - type: f1 value: 66.41847559806962 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (ro) type: mteb/amazon_massive_intent config: ro split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 65.53799596503026 - type: f1 value: 62.17067330740817 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (ru) type: mteb/amazon_massive_intent config: ru split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 69.01815736381977 - type: f1 value: 66.24988369607843 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (sl) type: mteb/amazon_massive_intent config: sl split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 62.34700739744452 - type: f1 value: 59.957933424941636 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (sq) type: mteb/amazon_massive_intent config: sq split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 61.23402824478815 - type: f1 value: 57.98836976018471 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (sv) type: mteb/amazon_massive_intent config: sv split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 68.54068594485541 - type: f1 value: 65.43849680666855 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (sw) type: mteb/amazon_massive_intent config: sw split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 55.998655010087425 - type: f1 value: 52.83737515406804 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (ta) type: mteb/amazon_massive_intent config: ta split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 58.71217215870882 - type: f1 value: 55.051794977833026 - task: 
type: Classification dataset: name: MTEB MassiveIntentClassification (te) type: mteb/amazon_massive_intent config: te split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 59.724277067921996 - type: f1 value: 56.33485571838306 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (th) type: mteb/amazon_massive_intent config: th split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 65.59515803631473 - type: f1 value: 64.96772366193588 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (tl) type: mteb/amazon_massive_intent config: tl split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 60.860793544048406 - type: f1 value: 58.148845819115394 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (tr) type: mteb/amazon_massive_intent config: tr split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 67.40753194351043 - type: f1 value: 63.18903778054698 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (ur) type: mteb/amazon_massive_intent config: ur split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 61.52320107599194 - type: f1 value: 58.356144563398516 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (vi) type: mteb/amazon_massive_intent config: vi split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 66.17014122394083 - type: f1 value: 63.919964062638925 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (zh-CN) type: mteb/amazon_massive_intent config: zh-CN split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 69.15601882985878 - type: f1 value: 67.01451905761371 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (zh-TW) type: mteb/amazon_massive_intent config: zh-TW split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 64.65030262273034 - type: f1 value: 64.14420425129063 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (af) type: mteb/amazon_massive_scenario config: af split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 65.08742434431743 - type: f1 value: 63.044060042311756 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (am) type: mteb/amazon_massive_scenario config: am split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 58.52387357094821 - type: f1 value: 56.82398588814534 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (ar) type: mteb/amazon_massive_scenario config: ar split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 62.239408204438476 - type: f1 value: 61.92570286170469 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (az) type: mteb/amazon_massive_scenario config: az split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 63.74915938130463 - type: f1 value: 62.130740689396276 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (bn) type: mteb/amazon_massive_scenario config: bn split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: 
accuracy value: 65.00336247478144 - type: f1 value: 63.71080635228055 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (cy) type: mteb/amazon_massive_scenario config: cy split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 52.837928715534645 - type: f1 value: 50.390741680320836 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (da) type: mteb/amazon_massive_scenario config: da split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 72.42098184263618 - type: f1 value: 71.41355113538995 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (de) type: mteb/amazon_massive_scenario config: de split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 71.95359784801613 - type: f1 value: 71.42699340156742 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (el) type: mteb/amazon_massive_scenario config: el split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 70.18157363819772 - type: f1 value: 69.74836113037671 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (en) type: mteb/amazon_massive_scenario config: en split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 77.08137188971082 - type: f1 value: 76.78000685068261 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (es) type: mteb/amazon_massive_scenario config: es split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 71.5030262273033 - type: f1 value: 71.71620130425673 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (fa) type: mteb/amazon_massive_scenario config: fa split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 70.24546065904505 - type: f1 value: 69.07638311730359 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (fi) type: mteb/amazon_massive_scenario config: fi split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 69.12911903160726 - type: f1 value: 68.32651736539815 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (fr) type: mteb/amazon_massive_scenario config: fr split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 71.89307330195025 - type: f1 value: 71.33986549860187 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (he) type: mteb/amazon_massive_scenario config: he split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 67.44451916610626 - type: f1 value: 66.90192664503866 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (hi) type: mteb/amazon_massive_scenario config: hi split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 69.16274377942166 - type: f1 value: 68.01090953775066 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (hu) type: mteb/amazon_massive_scenario config: hu split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 70.75319435104237 - type: f1 value: 70.18035309201403 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (hy) type: mteb/amazon_massive_scenario 
config: hy split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 63.14391392064559 - type: f1 value: 61.48286540778145 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (id) type: mteb/amazon_massive_scenario config: id split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 70.70275722932078 - type: f1 value: 70.26164779846495 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (is) type: mteb/amazon_massive_scenario config: is split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 60.93813046402153 - type: f1 value: 58.8852862116525 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (it) type: mteb/amazon_massive_scenario config: it split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 72.320107599193 - type: f1 value: 72.19836409602924 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (ja) type: mteb/amazon_massive_scenario config: ja split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 74.65366509751176 - type: f1 value: 74.55188288799579 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (jv) type: mteb/amazon_massive_scenario config: jv split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 59.694014794889036 - type: f1 value: 58.11353311721067 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (ka) type: mteb/amazon_massive_scenario config: ka split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 54.37457969065231 - type: f1 value: 52.81306134311697 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (km) type: mteb/amazon_massive_scenario config: km split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 48.3086751849361 - type: f1 value: 45.396449765419376 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (kn) type: mteb/amazon_massive_scenario config: kn split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 62.151983860121064 - type: f1 value: 60.31762544281696 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (ko) type: mteb/amazon_massive_scenario config: ko split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 72.44788164088769 - type: f1 value: 71.68150151736367 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (lv) type: mteb/amazon_massive_scenario config: lv split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 62.81439139206455 - type: f1 value: 62.06735559105593 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (ml) type: mteb/amazon_massive_scenario config: ml split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 68.04303967720242 - type: f1 value: 66.68298851670133 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (mn) type: mteb/amazon_massive_scenario config: mn split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 61.43913920645595 - type: f1 value: 60.25605977560783 - task: type: Classification 
dataset: name: MTEB MassiveScenarioClassification (ms) type: mteb/amazon_massive_scenario config: ms split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 66.90316072629456 - type: f1 value: 65.1325924692381 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (my) type: mteb/amazon_massive_scenario config: my split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 61.63752521856086 - type: f1 value: 59.14284778039585 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (nb) type: mteb/amazon_massive_scenario config: nb split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 71.63080026899797 - type: f1 value: 70.89771864626877 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (nl) type: mteb/amazon_massive_scenario config: nl split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 72.10827168796234 - type: f1 value: 71.71954219691159 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (pl) type: mteb/amazon_massive_scenario config: pl split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 70.59515803631471 - type: f1 value: 70.05040128099003 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (pt) type: mteb/amazon_massive_scenario config: pt split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 70.83389374579691 - type: f1 value: 70.84877936562735 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (ro) type: mteb/amazon_massive_scenario config: ro split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 69.18628110289173 - type: f1 value: 68.97232927921841 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (ru) type: mteb/amazon_massive_scenario config: ru split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 72.99260255548083 - type: f1 value: 72.85139492157732 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (sl) type: mteb/amazon_massive_scenario config: sl split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 65.26227303295225 - type: f1 value: 65.08833655469431 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (sq) type: mteb/amazon_massive_scenario config: sq split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 66.48621385339611 - type: f1 value: 64.43483199071298 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (sv) type: mteb/amazon_massive_scenario config: sv split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 73.14391392064559 - type: f1 value: 72.2580822579741 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (sw) type: mteb/amazon_massive_scenario config: sw split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 59.88567585743107 - type: f1 value: 58.3073765932569 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (ta) type: mteb/amazon_massive_scenario config: ta split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 
62.38399462004034 - type: f1 value: 60.82139544252606 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (te) type: mteb/amazon_massive_scenario config: te split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 62.58574310692671 - type: f1 value: 60.71443370385374 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (th) type: mteb/amazon_massive_scenario config: th split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 71.61398789509079 - type: f1 value: 70.99761812049401 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (tl) type: mteb/amazon_massive_scenario config: tl split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 62.73705447209146 - type: f1 value: 61.680849331794796 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (tr) type: mteb/amazon_massive_scenario config: tr split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 71.66778749159381 - type: f1 value: 71.17320646080115 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (ur) type: mteb/amazon_massive_scenario config: ur split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 64.640215198386 - type: f1 value: 63.301805157015444 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (vi) type: mteb/amazon_massive_scenario config: vi split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 70.00672494956288 - type: f1 value: 70.26005548582106 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (zh-CN) type: mteb/amazon_massive_scenario config: zh-CN split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 75.42030934767989 - type: f1 value: 75.2074842882598 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (zh-TW) type: mteb/amazon_massive_scenario config: zh-TW split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 70.69266980497646 - type: f1 value: 70.94103167391192 - task: type: Clustering dataset: name: MTEB MedrxivClusteringP2P type: mteb/medrxiv-clustering-p2p config: default split: test revision: e7a26af6f3ae46b30dde8737f02c07b1505bcc73 metrics: - type: v_measure value: 28.91697191169135 - task: type: Clustering dataset: name: MTEB MedrxivClusteringS2S type: mteb/medrxiv-clustering-s2s config: default split: test revision: 35191c8c0dca72d8ff3efcd72aa802307d469663 metrics: - type: v_measure value: 28.434000079573313 - task: type: Reranking dataset: name: MTEB MindSmallReranking type: mteb/mind_small config: default split: test revision: 3bdac13927fdc888b903db93b2ffdbd90b295a69 metrics: - type: map value: 30.96683513343383 - type: mrr value: 31.967364078714834 - task: type: Retrieval dataset: name: MTEB NFCorpus type: nfcorpus config: default split: test revision: None metrics: - type: map_at_1 value: 5.5280000000000005 - type: map_at_10 value: 11.793 - type: map_at_100 value: 14.496999999999998 - type: map_at_1000 value: 15.783 - type: map_at_3 value: 8.838 - type: map_at_5 value: 10.07 - type: mrr_at_1 value: 43.653 - type: mrr_at_10 value: 51.531000000000006 - type: mrr_at_100 value: 52.205 - type: mrr_at_1000 value: 52.242999999999995 - type: mrr_at_3 value: 49.431999999999995 - type: 
mrr_at_5 value: 50.470000000000006 - type: ndcg_at_1 value: 42.415000000000006 - type: ndcg_at_10 value: 32.464999999999996 - type: ndcg_at_100 value: 28.927999999999997 - type: ndcg_at_1000 value: 37.629000000000005 - type: ndcg_at_3 value: 37.845 - type: ndcg_at_5 value: 35.147 - type: precision_at_1 value: 43.653 - type: precision_at_10 value: 23.932000000000002 - type: precision_at_100 value: 7.17 - type: precision_at_1000 value: 1.967 - type: precision_at_3 value: 35.397 - type: precision_at_5 value: 29.907 - type: recall_at_1 value: 5.5280000000000005 - type: recall_at_10 value: 15.568000000000001 - type: recall_at_100 value: 28.54 - type: recall_at_1000 value: 59.864 - type: recall_at_3 value: 9.822000000000001 - type: recall_at_5 value: 11.726 - task: type: Retrieval dataset: name: MTEB NQ type: nq config: default split: test revision: None metrics: - type: map_at_1 value: 37.041000000000004 - type: map_at_10 value: 52.664 - type: map_at_100 value: 53.477 - type: map_at_1000 value: 53.505 - type: map_at_3 value: 48.510999999999996 - type: map_at_5 value: 51.036 - type: mrr_at_1 value: 41.338 - type: mrr_at_10 value: 55.071000000000005 - type: mrr_at_100 value: 55.672 - type: mrr_at_1000 value: 55.689 - type: mrr_at_3 value: 51.82 - type: mrr_at_5 value: 53.852 - type: ndcg_at_1 value: 41.338 - type: ndcg_at_10 value: 60.01800000000001 - type: ndcg_at_100 value: 63.409000000000006 - type: ndcg_at_1000 value: 64.017 - type: ndcg_at_3 value: 52.44799999999999 - type: ndcg_at_5 value: 56.571000000000005 - type: precision_at_1 value: 41.338 - type: precision_at_10 value: 9.531 - type: precision_at_100 value: 1.145 - type: precision_at_1000 value: 0.12 - type: precision_at_3 value: 23.416 - type: precision_at_5 value: 16.46 - type: recall_at_1 value: 37.041000000000004 - type: recall_at_10 value: 79.76299999999999 - type: recall_at_100 value: 94.39 - type: recall_at_1000 value: 98.851 - type: recall_at_3 value: 60.465 - type: recall_at_5 value: 69.906 - task: type: Retrieval dataset: name: MTEB QuoraRetrieval type: quora config: default split: test revision: None metrics: - type: map_at_1 value: 69.952 - type: map_at_10 value: 83.758 - type: map_at_100 value: 84.406 - type: map_at_1000 value: 84.425 - type: map_at_3 value: 80.839 - type: map_at_5 value: 82.646 - type: mrr_at_1 value: 80.62 - type: mrr_at_10 value: 86.947 - type: mrr_at_100 value: 87.063 - type: mrr_at_1000 value: 87.064 - type: mrr_at_3 value: 85.96000000000001 - type: mrr_at_5 value: 86.619 - type: ndcg_at_1 value: 80.63 - type: ndcg_at_10 value: 87.64800000000001 - type: ndcg_at_100 value: 88.929 - type: ndcg_at_1000 value: 89.054 - type: ndcg_at_3 value: 84.765 - type: ndcg_at_5 value: 86.291 - type: precision_at_1 value: 80.63 - type: precision_at_10 value: 13.314 - type: precision_at_100 value: 1.525 - type: precision_at_1000 value: 0.157 - type: precision_at_3 value: 37.1 - type: precision_at_5 value: 24.372 - type: recall_at_1 value: 69.952 - type: recall_at_10 value: 94.955 - type: recall_at_100 value: 99.38 - type: recall_at_1000 value: 99.96000000000001 - type: recall_at_3 value: 86.60600000000001 - type: recall_at_5 value: 90.997 - task: type: Clustering dataset: name: MTEB RedditClustering type: mteb/reddit-clustering config: default split: test revision: 24640382cdbf8abc73003fb0fa6d111a705499eb metrics: - type: v_measure value: 42.41329517878427 - task: type: Clustering dataset: name: MTEB RedditClusteringP2P type: mteb/reddit-clustering-p2p config: default split: test revision: 
282350215ef01743dc01b456c7f5241fa8937f16 metrics: - type: v_measure value: 55.171278362748666 - task: type: Retrieval dataset: name: MTEB SCIDOCS type: scidocs config: default split: test revision: None metrics: - type: map_at_1 value: 4.213 - type: map_at_10 value: 9.895 - type: map_at_100 value: 11.776 - type: map_at_1000 value: 12.084 - type: map_at_3 value: 7.2669999999999995 - type: map_at_5 value: 8.620999999999999 - type: mrr_at_1 value: 20.8 - type: mrr_at_10 value: 31.112000000000002 - type: mrr_at_100 value: 32.274 - type: mrr_at_1000 value: 32.35 - type: mrr_at_3 value: 28.133000000000003 - type: mrr_at_5 value: 29.892999999999997 - type: ndcg_at_1 value: 20.8 - type: ndcg_at_10 value: 17.163999999999998 - type: ndcg_at_100 value: 24.738 - type: ndcg_at_1000 value: 30.316 - type: ndcg_at_3 value: 16.665 - type: ndcg_at_5 value: 14.478 - type: precision_at_1 value: 20.8 - type: precision_at_10 value: 8.74 - type: precision_at_100 value: 1.963 - type: precision_at_1000 value: 0.33 - type: precision_at_3 value: 15.467 - type: precision_at_5 value: 12.6 - type: recall_at_1 value: 4.213 - type: recall_at_10 value: 17.698 - type: recall_at_100 value: 39.838 - type: recall_at_1000 value: 66.893 - type: recall_at_3 value: 9.418 - type: recall_at_5 value: 12.773000000000001 - task: type: STS dataset: name: MTEB SICK-R type: mteb/sickr-sts config: default split: test revision: a6ea5a8cab320b040a23452cc28066d9beae2cee metrics: - type: cos_sim_pearson value: 82.90453315738294 - type: cos_sim_spearman value: 78.51197850080254 - type: euclidean_pearson value: 80.09647123597748 - type: euclidean_spearman value: 78.63548011514061 - type: manhattan_pearson value: 80.10645285675231 - type: manhattan_spearman value: 78.57861806068901 - task: type: STS dataset: name: MTEB STS12 type: mteb/sts12-sts config: default split: test revision: a0d554a64d88156834ff5ae9920b964011b16384 metrics: - type: cos_sim_pearson value: 84.2616156846401 - type: cos_sim_spearman value: 76.69713867850156 - type: euclidean_pearson value: 77.97948563800394 - type: euclidean_spearman value: 74.2371211567807 - type: manhattan_pearson value: 77.69697879669705 - type: manhattan_spearman value: 73.86529778022278 - task: type: STS dataset: name: MTEB STS13 type: mteb/sts13-sts config: default split: test revision: 7e90230a92c190f1bf69ae9002b8cea547a64cca metrics: - type: cos_sim_pearson value: 77.0293269315045 - type: cos_sim_spearman value: 78.02555120584198 - type: euclidean_pearson value: 78.25398100379078 - type: euclidean_spearman value: 78.66963870599464 - type: manhattan_pearson value: 78.14314682167348 - type: manhattan_spearman value: 78.57692322969135 - task: type: STS dataset: name: MTEB STS14 type: mteb/sts14-sts config: default split: test revision: 6031580fec1f6af667f0bd2da0a551cf4f0b2375 metrics: - type: cos_sim_pearson value: 79.16989925136942 - type: cos_sim_spearman value: 76.5996225327091 - type: euclidean_pearson value: 77.8319003279786 - type: euclidean_spearman value: 76.42824009468998 - type: manhattan_pearson value: 77.69118862737736 - type: manhattan_spearman value: 76.25568104762812 - task: type: STS dataset: name: MTEB STS15 type: mteb/sts15-sts config: default split: test revision: ae752c7c21bf194d8b67fd573edf7ae58183cbe3 metrics: - type: cos_sim_pearson value: 87.42012286935325 - type: cos_sim_spearman value: 88.15654297884122 - type: euclidean_pearson value: 87.34082819427852 - type: euclidean_spearman value: 88.06333589547084 - type: manhattan_pearson value: 87.25115596784842 - type: 
manhattan_spearman value: 87.9559927695203 - task: type: STS dataset: name: MTEB STS16 type: mteb/sts16-sts config: default split: test revision: 4d8694f8f0e0100860b497b999b3dbed754a0513 metrics: - type: cos_sim_pearson value: 82.88222044996712 - type: cos_sim_spearman value: 84.28476589061077 - type: euclidean_pearson value: 83.17399758058309 - type: euclidean_spearman value: 83.85497357244542 - type: manhattan_pearson value: 83.0308397703786 - type: manhattan_spearman value: 83.71554539935046 - task: type: STS dataset: name: MTEB STS17 (ko-ko) type: mteb/sts17-crosslingual-sts config: ko-ko split: test revision: af5e6fb845001ecf41f4c1e033ce921939a2a68d metrics: - type: cos_sim_pearson value: 80.20682986257339 - type: cos_sim_spearman value: 79.94567120362092 - type: euclidean_pearson value: 79.43122480368902 - type: euclidean_spearman value: 79.94802077264987 - type: manhattan_pearson value: 79.32653021527081 - type: manhattan_spearman value: 79.80961146709178 - task: type: STS dataset: name: MTEB STS17 (ar-ar) type: mteb/sts17-crosslingual-sts config: ar-ar split: test revision: af5e6fb845001ecf41f4c1e033ce921939a2a68d metrics: - type: cos_sim_pearson value: 74.46578144394383 - type: cos_sim_spearman value: 74.52496637472179 - type: euclidean_pearson value: 72.2903807076809 - type: euclidean_spearman value: 73.55549359771645 - type: manhattan_pearson value: 72.09324837709393 - type: manhattan_spearman value: 73.36743103606581 - task: type: STS dataset: name: MTEB STS17 (en-ar) type: mteb/sts17-crosslingual-sts config: en-ar split: test revision: af5e6fb845001ecf41f4c1e033ce921939a2a68d metrics: - type: cos_sim_pearson value: 71.37272335116 - type: cos_sim_spearman value: 71.26702117766037 - type: euclidean_pearson value: 67.114829954434 - type: euclidean_spearman value: 66.37938893947761 - type: manhattan_pearson value: 66.79688574095246 - type: manhattan_spearman value: 66.17292828079667 - task: type: STS dataset: name: MTEB STS17 (en-de) type: mteb/sts17-crosslingual-sts config: en-de split: test revision: af5e6fb845001ecf41f4c1e033ce921939a2a68d metrics: - type: cos_sim_pearson value: 80.61016770129092 - type: cos_sim_spearman value: 82.08515426632214 - type: euclidean_pearson value: 80.557340361131 - type: euclidean_spearman value: 80.37585812266175 - type: manhattan_pearson value: 80.6782873404285 - type: manhattan_spearman value: 80.6678073032024 - task: type: STS dataset: name: MTEB STS17 (en-en) type: mteb/sts17-crosslingual-sts config: en-en split: test revision: af5e6fb845001ecf41f4c1e033ce921939a2a68d metrics: - type: cos_sim_pearson value: 87.00150745350108 - type: cos_sim_spearman value: 87.83441972211425 - type: euclidean_pearson value: 87.94826702308792 - type: euclidean_spearman value: 87.46143974860725 - type: manhattan_pearson value: 87.97560344306105 - type: manhattan_spearman value: 87.5267102829796 - task: type: STS dataset: name: MTEB STS17 (en-tr) type: mteb/sts17-crosslingual-sts config: en-tr split: test revision: af5e6fb845001ecf41f4c1e033ce921939a2a68d metrics: - type: cos_sim_pearson value: 64.76325252267235 - type: cos_sim_spearman value: 63.32615095463905 - type: euclidean_pearson value: 64.07920669155716 - type: euclidean_spearman value: 61.21409893072176 - type: manhattan_pearson value: 64.26308625680016 - type: manhattan_spearman value: 61.2438185254079 - task: type: STS dataset: name: MTEB STS17 (es-en) type: mteb/sts17-crosslingual-sts config: es-en split: test revision: af5e6fb845001ecf41f4c1e033ce921939a2a68d metrics: - type: cos_sim_pearson value: 
75.82644463022595 - type: cos_sim_spearman value: 76.50381269945073 - type: euclidean_pearson value: 75.1328548315934 - type: euclidean_spearman value: 75.63761139408453 - type: manhattan_pearson value: 75.18610101241407 - type: manhattan_spearman value: 75.30669266354164 - task: type: STS dataset: name: MTEB STS17 (es-es) type: mteb/sts17-crosslingual-sts config: es-es split: test revision: af5e6fb845001ecf41f4c1e033ce921939a2a68d metrics: - type: cos_sim_pearson value: 87.49994164686832 - type: cos_sim_spearman value: 86.73743986245549 - type: euclidean_pearson value: 86.8272894387145 - type: euclidean_spearman value: 85.97608491000507 - type: manhattan_pearson value: 86.74960140396779 - type: manhattan_spearman value: 85.79285984190273 - task: type: STS dataset: name: MTEB STS17 (fr-en) type: mteb/sts17-crosslingual-sts config: fr-en split: test revision: af5e6fb845001ecf41f4c1e033ce921939a2a68d metrics: - type: cos_sim_pearson value: 79.58172210788469 - type: cos_sim_spearman value: 80.17516468334607 - type: euclidean_pearson value: 77.56537843470504 - type: euclidean_spearman value: 77.57264627395521 - type: manhattan_pearson value: 78.09703521695943 - type: manhattan_spearman value: 78.15942760916954 - task: type: STS dataset: name: MTEB STS17 (it-en) type: mteb/sts17-crosslingual-sts config: it-en split: test revision: af5e6fb845001ecf41f4c1e033ce921939a2a68d metrics: - type: cos_sim_pearson value: 79.7589932931751 - type: cos_sim_spearman value: 80.15210089028162 - type: euclidean_pearson value: 77.54135223516057 - type: euclidean_spearman value: 77.52697996368764 - type: manhattan_pearson value: 77.65734439572518 - type: manhattan_spearman value: 77.77702992016121 - task: type: STS dataset: name: MTEB STS17 (nl-en) type: mteb/sts17-crosslingual-sts config: nl-en split: test revision: af5e6fb845001ecf41f4c1e033ce921939a2a68d metrics: - type: cos_sim_pearson value: 79.16682365511267 - type: cos_sim_spearman value: 79.25311267628506 - type: euclidean_pearson value: 77.54882036762244 - type: euclidean_spearman value: 77.33212935194827 - type: manhattan_pearson value: 77.98405516064015 - type: manhattan_spearman value: 77.85075717865719 - task: type: STS dataset: name: MTEB STS22 (en) type: mteb/sts22-crosslingual-sts config: en split: test revision: 6d1ba47164174a496b7fa5d3569dae26a6813b80 metrics: - type: cos_sim_pearson value: 59.10473294775917 - type: cos_sim_spearman value: 61.82780474476838 - type: euclidean_pearson value: 45.885111672377256 - type: euclidean_spearman value: 56.88306351932454 - type: manhattan_pearson value: 46.101218127323186 - type: manhattan_spearman value: 56.80953694186333 - task: type: STS dataset: name: MTEB STS22 (de) type: mteb/sts22-crosslingual-sts config: de split: test revision: 6d1ba47164174a496b7fa5d3569dae26a6813b80 metrics: - type: cos_sim_pearson value: 45.781923079584146 - type: cos_sim_spearman value: 55.95098449691107 - type: euclidean_pearson value: 25.4571031323205 - type: euclidean_spearman value: 49.859978118078935 - type: manhattan_pearson value: 25.624938455041384 - type: manhattan_spearman value: 49.99546185049401 - task: type: STS dataset: name: MTEB STS22 (es) type: mteb/sts22-crosslingual-sts config: es split: test revision: 6d1ba47164174a496b7fa5d3569dae26a6813b80 metrics: - type: cos_sim_pearson value: 60.00618133997907 - type: cos_sim_spearman value: 66.57896677718321 - type: euclidean_pearson value: 42.60118466388821 - type: euclidean_spearman value: 62.8210759715209 - type: manhattan_pearson value: 42.63446860604094 - type: 
manhattan_spearman value: 62.73803068925271 - task: type: STS dataset: name: MTEB STS22 (pl) type: mteb/sts22-crosslingual-sts config: pl split: test revision: 6d1ba47164174a496b7fa5d3569dae26a6813b80 metrics: - type: cos_sim_pearson value: 28.460759121626943 - type: cos_sim_spearman value: 34.13459007469131 - type: euclidean_pearson value: 6.0917739325525195 - type: euclidean_spearman value: 27.9947262664867 - type: manhattan_pearson value: 6.16877864169911 - type: manhattan_spearman value: 28.00664163971514 - task: type: STS dataset: name: MTEB STS22 (tr) type: mteb/sts22-crosslingual-sts config: tr split: test revision: 6d1ba47164174a496b7fa5d3569dae26a6813b80 metrics: - type: cos_sim_pearson value: 57.42546621771696 - type: cos_sim_spearman value: 63.699663168970474 - type: euclidean_pearson value: 38.12085278789738 - type: euclidean_spearman value: 58.12329140741536 - type: manhattan_pearson value: 37.97364549443335 - type: manhattan_spearman value: 57.81545502318733 - task: type: STS dataset: name: MTEB STS22 (ar) type: mteb/sts22-crosslingual-sts config: ar split: test revision: 6d1ba47164174a496b7fa5d3569dae26a6813b80 metrics: - type: cos_sim_pearson value: 46.82241380954213 - type: cos_sim_spearman value: 57.86569456006391 - type: euclidean_pearson value: 31.80480070178813 - type: euclidean_spearman value: 52.484000620130104 - type: manhattan_pearson value: 31.952708554646097 - type: manhattan_spearman value: 52.8560972356195 - task: type: STS dataset: name: MTEB STS22 (ru) type: mteb/sts22-crosslingual-sts config: ru split: test revision: 6d1ba47164174a496b7fa5d3569dae26a6813b80 metrics: - type: cos_sim_pearson value: 52.00447170498087 - type: cos_sim_spearman value: 60.664116225735164 - type: euclidean_pearson value: 33.87382555421702 - type: euclidean_spearman value: 55.74649067458667 - type: manhattan_pearson value: 33.99117246759437 - type: manhattan_spearman value: 55.98749034923899 - task: type: STS dataset: name: MTEB STS22 (zh) type: mteb/sts22-crosslingual-sts config: zh split: test revision: 6d1ba47164174a496b7fa5d3569dae26a6813b80 metrics: - type: cos_sim_pearson value: 58.06497233105448 - type: cos_sim_spearman value: 65.62968801135676 - type: euclidean_pearson value: 47.482076613243905 - type: euclidean_spearman value: 62.65137791498299 - type: manhattan_pearson value: 47.57052626104093 - type: manhattan_spearman value: 62.436916516613294 - task: type: STS dataset: name: MTEB STS22 (fr) type: mteb/sts22-crosslingual-sts config: fr split: test revision: 6d1ba47164174a496b7fa5d3569dae26a6813b80 metrics: - type: cos_sim_pearson value: 70.49397298562575 - type: cos_sim_spearman value: 74.79604041187868 - type: euclidean_pearson value: 49.661891561317795 - type: euclidean_spearman value: 70.31535537621006 - type: manhattan_pearson value: 49.553715741850006 - type: manhattan_spearman value: 70.24779344636806 - task: type: STS dataset: name: MTEB STS22 (de-en) type: mteb/sts22-crosslingual-sts config: de-en split: test revision: 6d1ba47164174a496b7fa5d3569dae26a6813b80 metrics: - type: cos_sim_pearson value: 55.640574515348696 - type: cos_sim_spearman value: 54.927959317689 - type: euclidean_pearson value: 29.00139666967476 - type: euclidean_spearman value: 41.86386566971605 - type: manhattan_pearson value: 29.47411067730344 - type: manhattan_spearman value: 42.337438424952786 - task: type: STS dataset: name: MTEB STS22 (es-en) type: mteb/sts22-crosslingual-sts config: es-en split: test revision: 6d1ba47164174a496b7fa5d3569dae26a6813b80 metrics: - type: cos_sim_pearson 
value: 68.14095292259312 - type: cos_sim_spearman value: 73.99017581234789 - type: euclidean_pearson value: 46.46304297872084 - type: euclidean_spearman value: 60.91834114800041 - type: manhattan_pearson value: 47.07072666338692 - type: manhattan_spearman value: 61.70415727977926 - task: type: STS dataset: name: MTEB STS22 (it) type: mteb/sts22-crosslingual-sts config: it split: test revision: 6d1ba47164174a496b7fa5d3569dae26a6813b80 metrics: - type: cos_sim_pearson value: 73.27184653359575 - type: cos_sim_spearman value: 77.76070252418626 - type: euclidean_pearson value: 62.30586577544778 - type: euclidean_spearman value: 75.14246629110978 - type: manhattan_pearson value: 62.328196884927046 - type: manhattan_spearman value: 75.1282792981433 - task: type: STS dataset: name: MTEB STS22 (pl-en) type: mteb/sts22-crosslingual-sts config: pl-en split: test revision: 6d1ba47164174a496b7fa5d3569dae26a6813b80 metrics: - type: cos_sim_pearson value: 71.59448528829957 - type: cos_sim_spearman value: 70.37277734222123 - type: euclidean_pearson value: 57.63145565721123 - type: euclidean_spearman value: 66.10113048304427 - type: manhattan_pearson value: 57.18897811586808 - type: manhattan_spearman value: 66.5595511215901 - task: type: STS dataset: name: MTEB STS22 (zh-en) type: mteb/sts22-crosslingual-sts config: zh-en split: test revision: 6d1ba47164174a496b7fa5d3569dae26a6813b80 metrics: - type: cos_sim_pearson value: 66.37520607720838 - type: cos_sim_spearman value: 69.92282148997948 - type: euclidean_pearson value: 40.55768770125291 - type: euclidean_spearman value: 55.189128944669605 - type: manhattan_pearson value: 41.03566433468883 - type: manhattan_spearman value: 55.61251893174558 - task: type: STS dataset: name: MTEB STS22 (es-it) type: mteb/sts22-crosslingual-sts config: es-it split: test revision: 6d1ba47164174a496b7fa5d3569dae26a6813b80 metrics: - type: cos_sim_pearson value: 57.791929533771835 - type: cos_sim_spearman value: 66.45819707662093 - type: euclidean_pearson value: 39.03686018511092 - type: euclidean_spearman value: 56.01282695640428 - type: manhattan_pearson value: 38.91586623619632 - type: manhattan_spearman value: 56.69394943612747 - task: type: STS dataset: name: MTEB STS22 (de-fr) type: mteb/sts22-crosslingual-sts config: de-fr split: test revision: 6d1ba47164174a496b7fa5d3569dae26a6813b80 metrics: - type: cos_sim_pearson value: 47.82224468473866 - type: cos_sim_spearman value: 59.467307194781164 - type: euclidean_pearson value: 27.428459190256145 - type: euclidean_spearman value: 60.83463107397519 - type: manhattan_pearson value: 27.487391578496638 - type: manhattan_spearman value: 61.281380460246496 - task: type: STS dataset: name: MTEB STS22 (de-pl) type: mteb/sts22-crosslingual-sts config: de-pl split: test revision: 6d1ba47164174a496b7fa5d3569dae26a6813b80 metrics: - type: cos_sim_pearson value: 16.306666792752644 - type: cos_sim_spearman value: 39.35486427252405 - type: euclidean_pearson value: -2.7887154897955435 - type: euclidean_spearman value: 27.1296051831719 - type: manhattan_pearson value: -3.202291270581297 - type: manhattan_spearman value: 26.32895849218158 - task: type: STS dataset: name: MTEB STS22 (fr-pl) type: mteb/sts22-crosslingual-sts config: fr-pl split: test revision: 6d1ba47164174a496b7fa5d3569dae26a6813b80 metrics: - type: cos_sim_pearson value: 59.67006803805076 - type: cos_sim_spearman value: 73.24670207647144 - type: euclidean_pearson value: 46.91884681500483 - type: euclidean_spearman value: 16.903085094570333 - type: manhattan_pearson value: 
46.88391675325812 - type: manhattan_spearman value: 28.17180849095055 - task: type: STS dataset: name: MTEB STSBenchmark type: mteb/stsbenchmark-sts config: default split: test revision: b0fddb56ed78048fa8b90373c8a3cfc37b684831 metrics: - type: cos_sim_pearson value: 83.79555591223837 - type: cos_sim_spearman value: 85.63658602085185 - type: euclidean_pearson value: 85.22080894037671 - type: euclidean_spearman value: 85.54113580167038 - type: manhattan_pearson value: 85.1639505960118 - type: manhattan_spearman value: 85.43502665436196 - task: type: Reranking dataset: name: MTEB SciDocsRR type: mteb/scidocs-reranking config: default split: test revision: d3c5e1fc0b855ab6097bf1cda04dd73947d7caab metrics: - type: map value: 80.73900991689766 - type: mrr value: 94.81624131133934 - task: type: Retrieval dataset: name: MTEB SciFact type: scifact config: default split: test revision: None metrics: - type: map_at_1 value: 55.678000000000004 - type: map_at_10 value: 65.135 - type: map_at_100 value: 65.824 - type: map_at_1000 value: 65.852 - type: map_at_3 value: 62.736000000000004 - type: map_at_5 value: 64.411 - type: mrr_at_1 value: 58.333 - type: mrr_at_10 value: 66.5 - type: mrr_at_100 value: 67.053 - type: mrr_at_1000 value: 67.08 - type: mrr_at_3 value: 64.944 - type: mrr_at_5 value: 65.89399999999999 - type: ndcg_at_1 value: 58.333 - type: ndcg_at_10 value: 69.34700000000001 - type: ndcg_at_100 value: 72.32 - type: ndcg_at_1000 value: 73.014 - type: ndcg_at_3 value: 65.578 - type: ndcg_at_5 value: 67.738 - type: precision_at_1 value: 58.333 - type: precision_at_10 value: 9.033 - type: precision_at_100 value: 1.0670000000000002 - type: precision_at_1000 value: 0.11199999999999999 - type: precision_at_3 value: 25.444 - type: precision_at_5 value: 16.933 - type: recall_at_1 value: 55.678000000000004 - type: recall_at_10 value: 80.72200000000001 - type: recall_at_100 value: 93.93299999999999 - type: recall_at_1000 value: 99.333 - type: recall_at_3 value: 70.783 - type: recall_at_5 value: 75.978 - task: type: PairClassification dataset: name: MTEB SprintDuplicateQuestions type: mteb/sprintduplicatequestions-pairclassification config: default split: test revision: d66bd1f72af766a5cc4b0ca5e00c162f89e8cc46 metrics: - type: cos_sim_accuracy value: 99.74653465346535 - type: cos_sim_ap value: 93.01476369929063 - type: cos_sim_f1 value: 86.93009118541033 - type: cos_sim_precision value: 88.09034907597535 - type: cos_sim_recall value: 85.8 - type: dot_accuracy value: 99.22970297029703 - type: dot_ap value: 51.58725659485144 - type: dot_f1 value: 53.51351351351352 - type: dot_precision value: 58.235294117647065 - type: dot_recall value: 49.5 - type: euclidean_accuracy value: 99.74356435643564 - type: euclidean_ap value: 92.40332894384368 - type: euclidean_f1 value: 86.97838109602817 - type: euclidean_precision value: 87.46208291203236 - type: euclidean_recall value: 86.5 - type: manhattan_accuracy value: 99.73069306930694 - type: manhattan_ap value: 92.01320815721121 - type: manhattan_f1 value: 86.4135864135864 - type: manhattan_precision value: 86.32734530938124 - type: manhattan_recall value: 86.5 - type: max_accuracy value: 99.74653465346535 - type: max_ap value: 93.01476369929063 - type: max_f1 value: 86.97838109602817 - task: type: Clustering dataset: name: MTEB StackExchangeClustering type: mteb/stackexchange-clustering config: default split: test revision: 6cbc1f7b2bc0622f2e39d2c77fa502909748c259 metrics: - type: v_measure value: 55.2660514302523 - task: type: Clustering dataset: name: MTEB 
StackExchangeClusteringP2P type: mteb/stackexchange-clustering-p2p config: default split: test revision: 815ca46b2622cec33ccafc3735d572c266efdb44 metrics: - type: v_measure value: 30.4637783572547 - task: type: Reranking dataset: name: MTEB StackOverflowDupQuestions type: mteb/stackoverflowdupquestions-reranking config: default split: test revision: e185fbe320c72810689fc5848eb6114e1ef5ec69 metrics: - type: map value: 49.41377758357637 - type: mrr value: 50.138451213818854 - task: type: Summarization dataset: name: MTEB SummEval type: mteb/summeval config: default split: test revision: cda12ad7615edc362dbf25a00fdd61d3b1eaf93c metrics: - type: cos_sim_pearson value: 28.887846011166594 - type: cos_sim_spearman value: 30.10823258355903 - type: dot_pearson value: 12.888049550236385 - type: dot_spearman value: 12.827495903098123 - task: type: Retrieval dataset: name: MTEB TRECCOVID type: trec-covid config: default split: test revision: None metrics: - type: map_at_1 value: 0.21 - type: map_at_10 value: 1.667 - type: map_at_100 value: 9.15 - type: map_at_1000 value: 22.927 - type: map_at_3 value: 0.573 - type: map_at_5 value: 0.915 - type: mrr_at_1 value: 80 - type: mrr_at_10 value: 87.167 - type: mrr_at_100 value: 87.167 - type: mrr_at_1000 value: 87.167 - type: mrr_at_3 value: 85.667 - type: mrr_at_5 value: 87.167 - type: ndcg_at_1 value: 76 - type: ndcg_at_10 value: 69.757 - type: ndcg_at_100 value: 52.402 - type: ndcg_at_1000 value: 47.737 - type: ndcg_at_3 value: 71.866 - type: ndcg_at_5 value: 72.225 - type: precision_at_1 value: 80 - type: precision_at_10 value: 75 - type: precision_at_100 value: 53.959999999999994 - type: precision_at_1000 value: 21.568 - type: precision_at_3 value: 76.667 - type: precision_at_5 value: 78 - type: recall_at_1 value: 0.21 - type: recall_at_10 value: 1.9189999999999998 - type: recall_at_100 value: 12.589 - type: recall_at_1000 value: 45.312000000000005 - type: recall_at_3 value: 0.61 - type: recall_at_5 value: 1.019 - task: type: BitextMining dataset: name: MTEB Tatoeba (sqi-eng) type: mteb/tatoeba-bitext-mining config: sqi-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 92.10000000000001 - type: f1 value: 90.06 - type: precision value: 89.17333333333333 - type: recall value: 92.10000000000001 - task: type: BitextMining dataset: name: MTEB Tatoeba (fry-eng) type: mteb/tatoeba-bitext-mining config: fry-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 56.06936416184971 - type: f1 value: 50.87508028259473 - type: precision value: 48.97398843930635 - type: recall value: 56.06936416184971 - task: type: BitextMining dataset: name: MTEB Tatoeba (kur-eng) type: mteb/tatoeba-bitext-mining config: kur-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 57.3170731707317 - type: f1 value: 52.96080139372822 - type: precision value: 51.67861124382864 - type: recall value: 57.3170731707317 - task: type: BitextMining dataset: name: MTEB Tatoeba (tur-eng) type: mteb/tatoeba-bitext-mining config: tur-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 94.3 - type: f1 value: 92.67333333333333 - type: precision value: 91.90833333333333 - type: recall value: 94.3 - task: type: BitextMining dataset: name: MTEB Tatoeba (deu-eng) type: mteb/tatoeba-bitext-mining config: deu-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 97.7 - type: 
f1 value: 97.07333333333332 - type: precision value: 96.79500000000002 - type: recall value: 97.7 - task: type: BitextMining dataset: name: MTEB Tatoeba (nld-eng) type: mteb/tatoeba-bitext-mining config: nld-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 94.69999999999999 - type: f1 value: 93.2 - type: precision value: 92.48333333333333 - type: recall value: 94.69999999999999 - task: type: BitextMining dataset: name: MTEB Tatoeba (ron-eng) type: mteb/tatoeba-bitext-mining config: ron-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 92.9 - type: f1 value: 91.26666666666667 - type: precision value: 90.59444444444445 - type: recall value: 92.9 - task: type: BitextMining dataset: name: MTEB Tatoeba (ang-eng) type: mteb/tatoeba-bitext-mining config: ang-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 34.32835820895522 - type: f1 value: 29.074180380150533 - type: precision value: 28.068207322920596 - type: recall value: 34.32835820895522 - task: type: BitextMining dataset: name: MTEB Tatoeba (ido-eng) type: mteb/tatoeba-bitext-mining config: ido-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 78.5 - type: f1 value: 74.3945115995116 - type: precision value: 72.82967843459222 - type: recall value: 78.5 - task: type: BitextMining dataset: name: MTEB Tatoeba (jav-eng) type: mteb/tatoeba-bitext-mining config: jav-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 66.34146341463415 - type: f1 value: 61.2469400518181 - type: precision value: 59.63977756660683 - type: recall value: 66.34146341463415 - task: type: BitextMining dataset: name: MTEB Tatoeba (isl-eng) type: mteb/tatoeba-bitext-mining config: isl-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 80.9 - type: f1 value: 76.90349206349207 - type: precision value: 75.32921568627451 - type: recall value: 80.9 - task: type: BitextMining dataset: name: MTEB Tatoeba (slv-eng) type: mteb/tatoeba-bitext-mining config: slv-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 84.93317132442284 - type: f1 value: 81.92519105034295 - type: precision value: 80.71283920615635 - type: recall value: 84.93317132442284 - task: type: BitextMining dataset: name: MTEB Tatoeba (cym-eng) type: mteb/tatoeba-bitext-mining config: cym-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 71.1304347826087 - type: f1 value: 65.22394755003451 - type: precision value: 62.912422360248435 - type: recall value: 71.1304347826087 - task: type: BitextMining dataset: name: MTEB Tatoeba (kaz-eng) type: mteb/tatoeba-bitext-mining config: kaz-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 79.82608695652173 - type: f1 value: 75.55693581780538 - type: precision value: 73.79420289855072 - type: recall value: 79.82608695652173 - task: type: BitextMining dataset: name: MTEB Tatoeba (est-eng) type: mteb/tatoeba-bitext-mining config: est-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 74 - type: f1 value: 70.51022222222223 - type: precision value: 69.29673599347512 - type: recall value: 74 - task: type: BitextMining dataset: name: MTEB Tatoeba (heb-eng) type: mteb/tatoeba-bitext-mining config: 
heb-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 78.7 - type: f1 value: 74.14238095238095 - type: precision value: 72.27214285714285 - type: recall value: 78.7 - task: type: BitextMining dataset: name: MTEB Tatoeba (gla-eng) type: mteb/tatoeba-bitext-mining config: gla-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 48.97466827503016 - type: f1 value: 43.080330405420874 - type: precision value: 41.36505499593557 - type: recall value: 48.97466827503016 - task: type: BitextMining dataset: name: MTEB Tatoeba (mar-eng) type: mteb/tatoeba-bitext-mining config: mar-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 89.60000000000001 - type: f1 value: 86.62333333333333 - type: precision value: 85.225 - type: recall value: 89.60000000000001 - task: type: BitextMining dataset: name: MTEB Tatoeba (lat-eng) type: mteb/tatoeba-bitext-mining config: lat-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 45.2 - type: f1 value: 39.5761253006253 - type: precision value: 37.991358436312 - type: recall value: 45.2 - task: type: BitextMining dataset: name: MTEB Tatoeba (bel-eng) type: mteb/tatoeba-bitext-mining config: bel-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 89.5 - type: f1 value: 86.70333333333333 - type: precision value: 85.53166666666667 - type: recall value: 89.5 - task: type: BitextMining dataset: name: MTEB Tatoeba (pms-eng) type: mteb/tatoeba-bitext-mining config: pms-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 50.095238095238095 - type: f1 value: 44.60650460650461 - type: precision value: 42.774116796477045 - type: recall value: 50.095238095238095 - task: type: BitextMining dataset: name: MTEB Tatoeba (gle-eng) type: mteb/tatoeba-bitext-mining config: gle-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 63.4 - type: f1 value: 58.35967261904762 - type: precision value: 56.54857142857143 - type: recall value: 63.4 - task: type: BitextMining dataset: name: MTEB Tatoeba (pes-eng) type: mteb/tatoeba-bitext-mining config: pes-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 89.2 - type: f1 value: 87.075 - type: precision value: 86.12095238095239 - type: recall value: 89.2 - task: type: BitextMining dataset: name: MTEB Tatoeba (nob-eng) type: mteb/tatoeba-bitext-mining config: nob-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 96.8 - type: f1 value: 95.90333333333334 - type: precision value: 95.50833333333333 - type: recall value: 96.8 - task: type: BitextMining dataset: name: MTEB Tatoeba (bul-eng) type: mteb/tatoeba-bitext-mining config: bul-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 90.9 - type: f1 value: 88.6288888888889 - type: precision value: 87.61607142857142 - type: recall value: 90.9 - task: type: BitextMining dataset: name: MTEB Tatoeba (cbk-eng) type: mteb/tatoeba-bitext-mining config: cbk-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 65.2 - type: f1 value: 60.54377630539395 - type: precision value: 58.89434482711381 - type: recall value: 65.2 - task: type: BitextMining dataset: name: MTEB Tatoeba (hun-eng) type: 
mteb/tatoeba-bitext-mining config: hun-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 87 - type: f1 value: 84.32412698412699 - type: precision value: 83.25527777777778 - type: recall value: 87 - task: type: BitextMining dataset: name: MTEB Tatoeba (uig-eng) type: mteb/tatoeba-bitext-mining config: uig-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 68.7 - type: f1 value: 63.07883541295306 - type: precision value: 61.06117424242426 - type: recall value: 68.7 - task: type: BitextMining dataset: name: MTEB Tatoeba (rus-eng) type: mteb/tatoeba-bitext-mining config: rus-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 93.7 - type: f1 value: 91.78333333333335 - type: precision value: 90.86666666666667 - type: recall value: 93.7 - task: type: BitextMining dataset: name: MTEB Tatoeba (spa-eng) type: mteb/tatoeba-bitext-mining config: spa-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 97.7 - type: f1 value: 96.96666666666667 - type: precision value: 96.61666666666667 - type: recall value: 97.7 - task: type: BitextMining dataset: name: MTEB Tatoeba (hye-eng) type: mteb/tatoeba-bitext-mining config: hye-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 88.27493261455525 - type: f1 value: 85.90745732255168 - type: precision value: 84.91389637616052 - type: recall value: 88.27493261455525 - task: type: BitextMining dataset: name: MTEB Tatoeba (tel-eng) type: mteb/tatoeba-bitext-mining config: tel-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 90.5982905982906 - type: f1 value: 88.4900284900285 - type: precision value: 87.57122507122507 - type: recall value: 90.5982905982906 - task: type: BitextMining dataset: name: MTEB Tatoeba (afr-eng) type: mteb/tatoeba-bitext-mining config: afr-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 89.5 - type: f1 value: 86.90769841269842 - type: precision value: 85.80178571428571 - type: recall value: 89.5 - task: type: BitextMining dataset: name: MTEB Tatoeba (mon-eng) type: mteb/tatoeba-bitext-mining config: mon-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 82.5 - type: f1 value: 78.36796536796538 - type: precision value: 76.82196969696969 - type: recall value: 82.5 - task: type: BitextMining dataset: name: MTEB Tatoeba (arz-eng) type: mteb/tatoeba-bitext-mining config: arz-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 71.48846960167715 - type: f1 value: 66.78771089148448 - type: precision value: 64.98302885095339 - type: recall value: 71.48846960167715 - task: type: BitextMining dataset: name: MTEB Tatoeba (hrv-eng) type: mteb/tatoeba-bitext-mining config: hrv-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 94.1 - type: f1 value: 92.50333333333333 - type: precision value: 91.77499999999999 - type: recall value: 94.1 - task: type: BitextMining dataset: name: MTEB Tatoeba (nov-eng) type: mteb/tatoeba-bitext-mining config: nov-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 71.20622568093385 - type: f1 value: 66.83278891450098 - type: precision value: 65.35065777283677 - type: recall value: 
71.20622568093385 - task: type: BitextMining dataset: name: MTEB Tatoeba (gsw-eng) type: mteb/tatoeba-bitext-mining config: gsw-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 48.717948717948715 - type: f1 value: 43.53146853146853 - type: precision value: 42.04721204721204 - type: recall value: 48.717948717948715 - task: type: BitextMining dataset: name: MTEB Tatoeba (nds-eng) type: mteb/tatoeba-bitext-mining config: nds-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 58.5 - type: f1 value: 53.8564991863928 - type: precision value: 52.40329436122275 - type: recall value: 58.5 - task: type: BitextMining dataset: name: MTEB Tatoeba (ukr-eng) type: mteb/tatoeba-bitext-mining config: ukr-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 90.8 - type: f1 value: 88.29 - type: precision value: 87.09166666666667 - type: recall value: 90.8 - task: type: BitextMining dataset: name: MTEB Tatoeba (uzb-eng) type: mteb/tatoeba-bitext-mining config: uzb-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 67.28971962616822 - type: f1 value: 62.63425307817832 - type: precision value: 60.98065939771546 - type: recall value: 67.28971962616822 - task: type: BitextMining dataset: name: MTEB Tatoeba (lit-eng) type: mteb/tatoeba-bitext-mining config: lit-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 78.7 - type: f1 value: 75.5264472455649 - type: precision value: 74.38205086580086 - type: recall value: 78.7 - task: type: BitextMining dataset: name: MTEB Tatoeba (ina-eng) type: mteb/tatoeba-bitext-mining config: ina-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 88.7 - type: f1 value: 86.10809523809525 - type: precision value: 85.07602564102565 - type: recall value: 88.7 - task: type: BitextMining dataset: name: MTEB Tatoeba (lfn-eng) type: mteb/tatoeba-bitext-mining config: lfn-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 56.99999999999999 - type: f1 value: 52.85487521402737 - type: precision value: 51.53985162713104 - type: recall value: 56.99999999999999 - task: type: BitextMining dataset: name: MTEB Tatoeba (zsm-eng) type: mteb/tatoeba-bitext-mining config: zsm-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 94 - type: f1 value: 92.45333333333333 - type: precision value: 91.79166666666667 - type: recall value: 94 - task: type: BitextMining dataset: name: MTEB Tatoeba (ita-eng) type: mteb/tatoeba-bitext-mining config: ita-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 92.30000000000001 - type: f1 value: 90.61333333333333 - type: precision value: 89.83333333333331 - type: recall value: 92.30000000000001 - task: type: BitextMining dataset: name: MTEB Tatoeba (cmn-eng) type: mteb/tatoeba-bitext-mining config: cmn-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 94.69999999999999 - type: f1 value: 93.34555555555555 - type: precision value: 92.75416666666668 - type: recall value: 94.69999999999999 - task: type: BitextMining dataset: name: MTEB Tatoeba (lvs-eng) type: mteb/tatoeba-bitext-mining config: lvs-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 
80.2 - type: f1 value: 76.6563035113035 - type: precision value: 75.3014652014652 - type: recall value: 80.2 - task: type: BitextMining dataset: name: MTEB Tatoeba (glg-eng) type: mteb/tatoeba-bitext-mining config: glg-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 84.7 - type: f1 value: 82.78689263765207 - type: precision value: 82.06705086580087 - type: recall value: 84.7 - task: type: BitextMining dataset: name: MTEB Tatoeba (ceb-eng) type: mteb/tatoeba-bitext-mining config: ceb-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 50.33333333333333 - type: f1 value: 45.461523661523664 - type: precision value: 43.93545574795575 - type: recall value: 50.33333333333333 - task: type: BitextMining dataset: name: MTEB Tatoeba (bre-eng) type: mteb/tatoeba-bitext-mining config: bre-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 6.6000000000000005 - type: f1 value: 5.442121400446441 - type: precision value: 5.146630385487529 - type: recall value: 6.6000000000000005 - task: type: BitextMining dataset: name: MTEB Tatoeba (ben-eng) type: mteb/tatoeba-bitext-mining config: ben-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 85 - type: f1 value: 81.04666666666667 - type: precision value: 79.25 - type: recall value: 85 - task: type: BitextMining dataset: name: MTEB Tatoeba (swg-eng) type: mteb/tatoeba-bitext-mining config: swg-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 47.32142857142857 - type: f1 value: 42.333333333333336 - type: precision value: 40.69196428571429 - type: recall value: 47.32142857142857 - task: type: BitextMining dataset: name: MTEB Tatoeba (arq-eng) type: mteb/tatoeba-bitext-mining config: arq-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 30.735455543358945 - type: f1 value: 26.73616790022338 - type: precision value: 25.397823220451283 - type: recall value: 30.735455543358945 - task: type: BitextMining dataset: name: MTEB Tatoeba (kab-eng) type: mteb/tatoeba-bitext-mining config: kab-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 25.1 - type: f1 value: 21.975989896371022 - type: precision value: 21.059885632257203 - type: recall value: 25.1 - task: type: BitextMining dataset: name: MTEB Tatoeba (fra-eng) type: mteb/tatoeba-bitext-mining config: fra-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 94.3 - type: f1 value: 92.75666666666666 - type: precision value: 92.06166666666665 - type: recall value: 94.3 - task: type: BitextMining dataset: name: MTEB Tatoeba (por-eng) type: mteb/tatoeba-bitext-mining config: por-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 94.1 - type: f1 value: 92.74 - type: precision value: 92.09166666666667 - type: recall value: 94.1 - task: type: BitextMining dataset: name: MTEB Tatoeba (tat-eng) type: mteb/tatoeba-bitext-mining config: tat-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 71.3 - type: f1 value: 66.922442002442 - type: precision value: 65.38249567099568 - type: recall value: 71.3 - task: type: BitextMining dataset: name: MTEB Tatoeba (oci-eng) type: mteb/tatoeba-bitext-mining config: oci-eng split: test revision: 
9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 40.300000000000004 - type: f1 value: 35.78682789299971 - type: precision value: 34.66425128716588 - type: recall value: 40.300000000000004 - task: type: BitextMining dataset: name: MTEB Tatoeba (pol-eng) type: mteb/tatoeba-bitext-mining config: pol-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 96 - type: f1 value: 94.82333333333334 - type: precision value: 94.27833333333334 - type: recall value: 96 - task: type: BitextMining dataset: name: MTEB Tatoeba (war-eng) type: mteb/tatoeba-bitext-mining config: war-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 51.1 - type: f1 value: 47.179074753133584 - type: precision value: 46.06461044702424 - type: recall value: 51.1 - task: type: BitextMining dataset: name: MTEB Tatoeba (aze-eng) type: mteb/tatoeba-bitext-mining config: aze-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 87.7 - type: f1 value: 84.71 - type: precision value: 83.46166666666667 - type: recall value: 87.7 - task: type: BitextMining dataset: name: MTEB Tatoeba (vie-eng) type: mteb/tatoeba-bitext-mining config: vie-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 95.8 - type: f1 value: 94.68333333333334 - type: precision value: 94.13333333333334 - type: recall value: 95.8 - task: type: BitextMining dataset: name: MTEB Tatoeba (nno-eng) type: mteb/tatoeba-bitext-mining config: nno-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 85.39999999999999 - type: f1 value: 82.5577380952381 - type: precision value: 81.36833333333334 - type: recall value: 85.39999999999999 - task: type: BitextMining dataset: name: MTEB Tatoeba (cha-eng) type: mteb/tatoeba-bitext-mining config: cha-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 21.16788321167883 - type: f1 value: 16.948865627297987 - type: precision value: 15.971932568647897 - type: recall value: 21.16788321167883 - task: type: BitextMining dataset: name: MTEB Tatoeba (mhr-eng) type: mteb/tatoeba-bitext-mining config: mhr-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 6.9 - type: f1 value: 5.515526831658907 - type: precision value: 5.141966366966367 - type: recall value: 6.9 - task: type: BitextMining dataset: name: MTEB Tatoeba (dan-eng) type: mteb/tatoeba-bitext-mining config: dan-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 93.2 - type: f1 value: 91.39666666666668 - type: precision value: 90.58666666666667 - type: recall value: 93.2 - task: type: BitextMining dataset: name: MTEB Tatoeba (ell-eng) type: mteb/tatoeba-bitext-mining config: ell-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 92.2 - type: f1 value: 89.95666666666666 - type: precision value: 88.92833333333333 - type: recall value: 92.2 - task: type: BitextMining dataset: name: MTEB Tatoeba (amh-eng) type: mteb/tatoeba-bitext-mining config: amh-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 79.76190476190477 - type: f1 value: 74.93386243386244 - type: precision value: 73.11011904761904 - type: recall value: 79.76190476190477 - task: type: BitextMining dataset: name: MTEB Tatoeba (pam-eng) 
type: mteb/tatoeba-bitext-mining config: pam-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 8.799999999999999 - type: f1 value: 6.921439712248537 - type: precision value: 6.489885109680683 - type: recall value: 8.799999999999999 - task: type: BitextMining dataset: name: MTEB Tatoeba (hsb-eng) type: mteb/tatoeba-bitext-mining config: hsb-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 45.75569358178054 - type: f1 value: 40.34699501312631 - type: precision value: 38.57886764719063 - type: recall value: 45.75569358178054 - task: type: BitextMining dataset: name: MTEB Tatoeba (srp-eng) type: mteb/tatoeba-bitext-mining config: srp-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 91.4 - type: f1 value: 89.08333333333333 - type: precision value: 88.01666666666668 - type: recall value: 91.4 - task: type: BitextMining dataset: name: MTEB Tatoeba (epo-eng) type: mteb/tatoeba-bitext-mining config: epo-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 93.60000000000001 - type: f1 value: 92.06690476190477 - type: precision value: 91.45095238095239 - type: recall value: 93.60000000000001 - task: type: BitextMining dataset: name: MTEB Tatoeba (kzj-eng) type: mteb/tatoeba-bitext-mining config: kzj-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 7.5 - type: f1 value: 6.200363129378736 - type: precision value: 5.89115314822466 - type: recall value: 7.5 - task: type: BitextMining dataset: name: MTEB Tatoeba (awa-eng) type: mteb/tatoeba-bitext-mining config: awa-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 73.59307359307358 - type: f1 value: 68.38933553219267 - type: precision value: 66.62698412698413 - type: recall value: 73.59307359307358 - task: type: BitextMining dataset: name: MTEB Tatoeba (fao-eng) type: mteb/tatoeba-bitext-mining config: fao-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 69.8473282442748 - type: f1 value: 64.72373682297346 - type: precision value: 62.82834214131924 - type: recall value: 69.8473282442748 - task: type: BitextMining dataset: name: MTEB Tatoeba (mal-eng) type: mteb/tatoeba-bitext-mining config: mal-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 97.5254730713246 - type: f1 value: 96.72489082969432 - type: precision value: 96.33672974284326 - type: recall value: 97.5254730713246 - task: type: BitextMining dataset: name: MTEB Tatoeba (ile-eng) type: mteb/tatoeba-bitext-mining config: ile-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 75.6 - type: f1 value: 72.42746031746033 - type: precision value: 71.14036630036631 - type: recall value: 75.6 - task: type: BitextMining dataset: name: MTEB Tatoeba (bos-eng) type: mteb/tatoeba-bitext-mining config: bos-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 91.24293785310734 - type: f1 value: 88.86064030131826 - type: precision value: 87.73540489642184 - type: recall value: 91.24293785310734 - task: type: BitextMining dataset: name: MTEB Tatoeba (cor-eng) type: mteb/tatoeba-bitext-mining config: cor-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 6.2 - type: f1 value: 
4.383083659794954 - type: precision value: 4.027861324289673 - type: recall value: 6.2 - task: type: BitextMining dataset: name: MTEB Tatoeba (cat-eng) type: mteb/tatoeba-bitext-mining config: cat-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 86.8 - type: f1 value: 84.09428571428572 - type: precision value: 83.00333333333333 - type: recall value: 86.8 - task: type: BitextMining dataset: name: MTEB Tatoeba (eus-eng) type: mteb/tatoeba-bitext-mining config: eus-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 60.699999999999996 - type: f1 value: 56.1584972394755 - type: precision value: 54.713456330903135 - type: recall value: 60.699999999999996 - task: type: BitextMining dataset: name: MTEB Tatoeba (yue-eng) type: mteb/tatoeba-bitext-mining config: yue-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 84.2 - type: f1 value: 80.66190476190475 - type: precision value: 79.19690476190476 - type: recall value: 84.2 - task: type: BitextMining dataset: name: MTEB Tatoeba (swe-eng) type: mteb/tatoeba-bitext-mining config: swe-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 93.2 - type: f1 value: 91.33 - type: precision value: 90.45 - type: recall value: 93.2 - task: type: BitextMining dataset: name: MTEB Tatoeba (dtp-eng) type: mteb/tatoeba-bitext-mining config: dtp-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 6.3 - type: f1 value: 5.126828976748276 - type: precision value: 4.853614328966668 - type: recall value: 6.3 - task: type: BitextMining dataset: name: MTEB Tatoeba (kat-eng) type: mteb/tatoeba-bitext-mining config: kat-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 81.76943699731903 - type: f1 value: 77.82873739308057 - type: precision value: 76.27622452019234 - type: recall value: 81.76943699731903 - task: type: BitextMining dataset: name: MTEB Tatoeba (jpn-eng) type: mteb/tatoeba-bitext-mining config: jpn-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 92.30000000000001 - type: f1 value: 90.29666666666665 - type: precision value: 89.40333333333334 - type: recall value: 92.30000000000001 - task: type: BitextMining dataset: name: MTEB Tatoeba (csb-eng) type: mteb/tatoeba-bitext-mining config: csb-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 29.249011857707508 - type: f1 value: 24.561866096392947 - type: precision value: 23.356583740215456 - type: recall value: 29.249011857707508 - task: type: BitextMining dataset: name: MTEB Tatoeba (xho-eng) type: mteb/tatoeba-bitext-mining config: xho-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 77.46478873239437 - type: f1 value: 73.23943661971832 - type: precision value: 71.66666666666667 - type: recall value: 77.46478873239437 - task: type: BitextMining dataset: name: MTEB Tatoeba (orv-eng) type: mteb/tatoeba-bitext-mining config: orv-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 20.35928143712575 - type: f1 value: 15.997867865075824 - type: precision value: 14.882104658301346 - type: recall value: 20.35928143712575 - task: type: BitextMining dataset: name: MTEB Tatoeba (ind-eng) type: mteb/tatoeba-bitext-mining config: ind-eng 
split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 92.2 - type: f1 value: 90.25999999999999 - type: precision value: 89.45333333333335 - type: recall value: 92.2 - task: type: BitextMining dataset: name: MTEB Tatoeba (tuk-eng) type: mteb/tatoeba-bitext-mining config: tuk-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 23.15270935960591 - type: f1 value: 19.65673625772148 - type: precision value: 18.793705293464992 - type: recall value: 23.15270935960591 - task: type: BitextMining dataset: name: MTEB Tatoeba (max-eng) type: mteb/tatoeba-bitext-mining config: max-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 59.154929577464785 - type: f1 value: 52.3868463305083 - type: precision value: 50.14938113529662 - type: recall value: 59.154929577464785 - task: type: BitextMining dataset: name: MTEB Tatoeba (swh-eng) type: mteb/tatoeba-bitext-mining config: swh-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 70.51282051282051 - type: f1 value: 66.8089133089133 - type: precision value: 65.37645687645687 - type: recall value: 70.51282051282051 - task: type: BitextMining dataset: name: MTEB Tatoeba (hin-eng) type: mteb/tatoeba-bitext-mining config: hin-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 94.6 - type: f1 value: 93 - type: precision value: 92.23333333333333 - type: recall value: 94.6 - task: type: BitextMining dataset: name: MTEB Tatoeba (dsb-eng) type: mteb/tatoeba-bitext-mining config: dsb-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 38.62212943632568 - type: f1 value: 34.3278276962583 - type: precision value: 33.07646935732408 - type: recall value: 38.62212943632568 - task: type: BitextMining dataset: name: MTEB Tatoeba (ber-eng) type: mteb/tatoeba-bitext-mining config: ber-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 28.1 - type: f1 value: 23.579609223054604 - type: precision value: 22.39622774921555 - type: recall value: 28.1 - task: type: BitextMining dataset: name: MTEB Tatoeba (tam-eng) type: mteb/tatoeba-bitext-mining config: tam-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 88.27361563517914 - type: f1 value: 85.12486427795874 - type: precision value: 83.71335504885994 - type: recall value: 88.27361563517914 - task: type: BitextMining dataset: name: MTEB Tatoeba (slk-eng) type: mteb/tatoeba-bitext-mining config: slk-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 88.6 - type: f1 value: 86.39928571428571 - type: precision value: 85.4947557997558 - type: recall value: 88.6 - task: type: BitextMining dataset: name: MTEB Tatoeba (tgl-eng) type: mteb/tatoeba-bitext-mining config: tgl-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 86.5 - type: f1 value: 83.77952380952381 - type: precision value: 82.67602564102565 - type: recall value: 86.5 - task: type: BitextMining dataset: name: MTEB Tatoeba (ast-eng) type: mteb/tatoeba-bitext-mining config: ast-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 79.52755905511812 - type: f1 value: 75.3055868016498 - type: precision value: 73.81889763779527 - type: recall value: 
79.52755905511812 - task: type: BitextMining dataset: name: MTEB Tatoeba (mkd-eng) type: mteb/tatoeba-bitext-mining config: mkd-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 77.9 - type: f1 value: 73.76261904761905 - type: precision value: 72.11670995670995 - type: recall value: 77.9 - task: type: BitextMining dataset: name: MTEB Tatoeba (khm-eng) type: mteb/tatoeba-bitext-mining config: khm-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 53.8781163434903 - type: f1 value: 47.25804051288816 - type: precision value: 45.0603482390186 - type: recall value: 53.8781163434903 - task: type: BitextMining dataset: name: MTEB Tatoeba (ces-eng) type: mteb/tatoeba-bitext-mining config: ces-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 91.10000000000001 - type: f1 value: 88.88 - type: precision value: 87.96333333333334 - type: recall value: 91.10000000000001 - task: type: BitextMining dataset: name: MTEB Tatoeba (tzl-eng) type: mteb/tatoeba-bitext-mining config: tzl-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 38.46153846153847 - type: f1 value: 34.43978243978244 - type: precision value: 33.429487179487175 - type: recall value: 38.46153846153847 - task: type: BitextMining dataset: name: MTEB Tatoeba (urd-eng) type: mteb/tatoeba-bitext-mining config: urd-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 88.9 - type: f1 value: 86.19888888888887 - type: precision value: 85.07440476190476 - type: recall value: 88.9 - task: type: BitextMining dataset: name: MTEB Tatoeba (ara-eng) type: mteb/tatoeba-bitext-mining config: ara-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 85.9 - type: f1 value: 82.58857142857143 - type: precision value: 81.15666666666667 - type: recall value: 85.9 - task: type: BitextMining dataset: name: MTEB Tatoeba (kor-eng) type: mteb/tatoeba-bitext-mining config: kor-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 86.8 - type: f1 value: 83.36999999999999 - type: precision value: 81.86833333333333 - type: recall value: 86.8 - task: type: BitextMining dataset: name: MTEB Tatoeba (yid-eng) type: mteb/tatoeba-bitext-mining config: yid-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 68.51415094339622 - type: f1 value: 63.195000099481234 - type: precision value: 61.394033442972116 - type: recall value: 68.51415094339622 - task: type: BitextMining dataset: name: MTEB Tatoeba (fin-eng) type: mteb/tatoeba-bitext-mining config: fin-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 88.5 - type: f1 value: 86.14603174603175 - type: precision value: 85.1162037037037 - type: recall value: 88.5 - task: type: BitextMining dataset: name: MTEB Tatoeba (tha-eng) type: mteb/tatoeba-bitext-mining config: tha-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 95.62043795620438 - type: f1 value: 94.40389294403892 - type: precision value: 93.7956204379562 - type: recall value: 95.62043795620438 - task: type: BitextMining dataset: name: MTEB Tatoeba (wuu-eng) type: mteb/tatoeba-bitext-mining config: wuu-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 
81.8 - type: f1 value: 78.6532178932179 - type: precision value: 77.46348795840176 - type: recall value: 81.8 - task: type: Retrieval dataset: name: MTEB Touche2020 type: webis-touche2020 config: default split: test revision: None metrics: - type: map_at_1 value: 2.603 - type: map_at_10 value: 8.5 - type: map_at_100 value: 12.985 - type: map_at_1000 value: 14.466999999999999 - type: map_at_3 value: 4.859999999999999 - type: map_at_5 value: 5.817 - type: mrr_at_1 value: 28.571 - type: mrr_at_10 value: 42.331 - type: mrr_at_100 value: 43.592999999999996 - type: mrr_at_1000 value: 43.592999999999996 - type: mrr_at_3 value: 38.435 - type: mrr_at_5 value: 39.966 - type: ndcg_at_1 value: 26.531 - type: ndcg_at_10 value: 21.353 - type: ndcg_at_100 value: 31.087999999999997 - type: ndcg_at_1000 value: 43.163000000000004 - type: ndcg_at_3 value: 22.999 - type: ndcg_at_5 value: 21.451 - type: precision_at_1 value: 28.571 - type: precision_at_10 value: 19.387999999999998 - type: precision_at_100 value: 6.265 - type: precision_at_1000 value: 1.4160000000000001 - type: precision_at_3 value: 24.490000000000002 - type: precision_at_5 value: 21.224 - type: recall_at_1 value: 2.603 - type: recall_at_10 value: 14.474 - type: recall_at_100 value: 40.287 - type: recall_at_1000 value: 76.606 - type: recall_at_3 value: 5.978 - type: recall_at_5 value: 7.819 - task: type: Classification dataset: name: MTEB ToxicConversationsClassification type: mteb/toxic_conversations_50k config: default split: test revision: d7c0de2777da35d6aae2200a62c6e0e5af397c4c metrics: - type: accuracy value: 69.7848 - type: ap value: 13.661023167088224 - type: f1 value: 53.61686134460943 - task: type: Classification dataset: name: MTEB TweetSentimentExtractionClassification type: mteb/tweet_sentiment_extraction config: default split: test revision: d604517c81ca91fe16a244d1248fc021f9ecee7a metrics: - type: accuracy value: 61.28183361629882 - type: f1 value: 61.55481034919965 - task: type: Clustering dataset: name: MTEB TwentyNewsgroupsClustering type: mteb/twentynewsgroups-clustering config: default split: test revision: 6125ec4e24fa026cec8a478383ee943acfbd5449 metrics: - type: v_measure value: 35.972128420092396 - task: type: PairClassification dataset: name: MTEB TwitterSemEval2015 type: mteb/twittersemeval2015-pairclassification config: default split: test revision: 70970daeab8776df92f5ea462b6173c0b46fd2d1 metrics: - type: cos_sim_accuracy value: 85.59933241938367 - type: cos_sim_ap value: 72.20760361208136 - type: cos_sim_f1 value: 66.4447731755424 - type: cos_sim_precision value: 62.35539102267469 - type: cos_sim_recall value: 71.10817941952506 - type: dot_accuracy value: 78.98313166835548 - type: dot_ap value: 44.492521645493795 - type: dot_f1 value: 45.814889336016094 - type: dot_precision value: 37.02439024390244 - type: dot_recall value: 60.07915567282321 - type: euclidean_accuracy value: 85.3907134767837 - type: euclidean_ap value: 71.53847289080343 - type: euclidean_f1 value: 65.95952206778834 - type: euclidean_precision value: 61.31006346328196 - type: euclidean_recall value: 71.37203166226914 - type: manhattan_accuracy value: 85.40859510043511 - type: manhattan_ap value: 71.49664104395515 - type: manhattan_f1 value: 65.98569969356485 - type: manhattan_precision value: 63.928748144482924 - type: manhattan_recall value: 68.17941952506597 - type: max_accuracy value: 85.59933241938367 - type: max_ap value: 72.20760361208136 - type: max_f1 value: 66.4447731755424 - task: type: PairClassification dataset: name: MTEB 
TwitterURLCorpus type: mteb/twitterurlcorpus-pairclassification config: default split: test revision: 8b6510b0b1fa4e4c4f879467980e9be563ec1cdf metrics: - type: cos_sim_accuracy value: 88.83261536073273 - type: cos_sim_ap value: 85.48178133644264 - type: cos_sim_f1 value: 77.87816307403935 - type: cos_sim_precision value: 75.88953021114926 - type: cos_sim_recall value: 79.97382198952879 - type: dot_accuracy value: 79.76287499514883 - type: dot_ap value: 59.17438838475084 - type: dot_f1 value: 56.34566667855996 - type: dot_precision value: 52.50349092359864 - type: dot_recall value: 60.794579611949494 - type: euclidean_accuracy value: 88.76857996662397 - type: euclidean_ap value: 85.22764834359887 - type: euclidean_f1 value: 77.65379751543554 - type: euclidean_precision value: 75.11152683839401 - type: euclidean_recall value: 80.37419156144134 - type: manhattan_accuracy value: 88.6987231730508 - type: manhattan_ap value: 85.18907981724007 - type: manhattan_f1 value: 77.51967028849757 - type: manhattan_precision value: 75.49992701795358 - type: manhattan_recall value: 79.65044656606098 - type: max_accuracy value: 88.83261536073273 - type: max_ap value: 85.48178133644264 - type: max_f1 value: 77.87816307403935 ---

## Multilingual-E5-base

[Multilingual E5 Text Embeddings: A Technical Report](https://arxiv.org/pdf/2402.05672). Liang Wang, Nan Yang, Xiaolong Huang, Linjun Yang, Rangan Majumder, Furu Wei, arXiv 2024

This model has 12 layers and the embedding size is 768.

## Usage

Below is an example to encode queries and passages from the MS-MARCO passage ranking dataset.

```python
import torch.nn.functional as F

from torch import Tensor
from transformers import AutoTokenizer, AutoModel


def average_pool(last_hidden_states: Tensor, attention_mask: Tensor) -> Tensor:
    last_hidden = last_hidden_states.masked_fill(~attention_mask[..., None].bool(), 0.0)
    return last_hidden.sum(dim=1) / attention_mask.sum(dim=1)[..., None]


# Each input text should start with "query: " or "passage: ", even for non-English texts.
# For tasks other than retrieval, you can simply use the "query: " prefix.
input_texts = [
    'query: how much protein should a female eat',
    'query: 南瓜的家常做法',
    "passage: As a general guideline, the CDC's average requirement of protein for women ages 19 to 70 is 46 grams per day. But, as you can see from this chart, you'll need to increase that if you're expecting or training for a marathon. Check out the chart below to see how much protein you should be eating each day.",
    "passage: 1.清炒南瓜丝 原料:嫩南瓜半个 调料:葱、盐、白糖、鸡精 做法: 1、南瓜用刀薄薄的削去表面一层皮,用勺子刮去瓤 2、擦成细丝(没有擦菜板就用刀慢慢切成细丝) 3、锅烧热放油,入葱花煸出香味 4、入南瓜丝快速翻炒一分钟左右,放盐、一点白糖和鸡精调味出锅 2.香葱炒南瓜 原料:南瓜1只 调料:香葱、蒜末、橄榄油、盐 做法: 1、将南瓜去皮,切成片 2、油锅8成热后,将蒜末放入爆香 3、爆香后,将南瓜片放入,翻炒 4、在翻炒的同时,可以不时地往锅里加水,但不要太多 5、放入盐,炒匀 6、南瓜差不多软和绵了之后,就可以关火 7、撒入香葱,即可出锅",
]

tokenizer = AutoTokenizer.from_pretrained('intfloat/multilingual-e5-base')
model = AutoModel.from_pretrained('intfloat/multilingual-e5-base')

# Tokenize the input texts
batch_dict = tokenizer(input_texts, max_length=512, padding=True, truncation=True, return_tensors='pt')

outputs = model(**batch_dict)
embeddings = average_pool(outputs.last_hidden_state, batch_dict['attention_mask'])

# normalize embeddings
embeddings = F.normalize(embeddings, p=2, dim=1)
scores = (embeddings[:2] @ embeddings[2:].T) * 100
print(scores.tolist())
```

## Supported Languages

This model is initialized from [xlm-roberta-base](https://huggingface.co/xlm-roberta-base) and continually trained on a mixture of multilingual datasets.
It supports the 100 languages of xlm-roberta, but low-resource languages may see performance degradation.

## Training Details

**Initialization**: [xlm-roberta-base](https://huggingface.co/xlm-roberta-base)

**First stage**: contrastive pre-training with weak supervision

| Dataset | Weak supervision | # of text pairs |
|---------|------------------|-----------------|
| Filtered [mC4](https://huggingface.co/datasets/mc4) | (title, page content) | 1B |
| [CC News](https://huggingface.co/datasets/intfloat/multilingual_cc_news) | (title, news content) | 400M |
| [NLLB](https://huggingface.co/datasets/allenai/nllb) | translation pairs | 2.4B |
| [Wikipedia](https://huggingface.co/datasets/intfloat/wikipedia) | (hierarchical section title, passage) | 150M |
| Filtered [Reddit](https://www.reddit.com/) | (comment, response) | 800M |
| [S2ORC](https://github.com/allenai/s2orc) | (title, abstract) and citation pairs | 100M |
| [Stackexchange](https://stackexchange.com/) | (question, answer) | 50M |
| [xP3](https://huggingface.co/datasets/bigscience/xP3) | (input prompt, response) | 80M |
| [Miscellaneous unsupervised SBERT data](https://huggingface.co/sentence-transformers/all-MiniLM-L6-v2) | - | 10M |

**Second stage**: supervised fine-tuning

| Dataset | Language | # of text pairs |
|---------|----------|-----------------|
| [MS MARCO](https://microsoft.github.io/msmarco/) | English | 500k |
| [NQ](https://github.com/facebookresearch/DPR) | English | 70k |
| [Trivia QA](https://github.com/facebookresearch/DPR) | English | 60k |
| [NLI from SimCSE](https://github.com/princeton-nlp/SimCSE) | English | <300k |
| [ELI5](https://huggingface.co/datasets/eli5) | English | 500k |
| [DuReader Retrieval](https://github.com/baidu/DuReader/tree/master/DuReader-Retrieval) | Chinese | 86k |
| [KILT Fever](https://huggingface.co/datasets/kilt_tasks) | English | 70k |
| [KILT HotpotQA](https://huggingface.co/datasets/kilt_tasks) | English | 70k |
| [SQuAD](https://huggingface.co/datasets/squad) | English | 87k |
| [Quora](https://huggingface.co/datasets/quora) | English | 150k |
| [Mr. TyDi](https://huggingface.co/datasets/castorini/mr-tydi) | 11 languages | 50k |
| [MIRACL](https://huggingface.co/datasets/miracl/miracl) | 16 languages | 40k |

For all labeled datasets, we use only their training sets for fine-tuning. For other training details, please refer to our paper at [https://arxiv.org/pdf/2402.05672](https://arxiv.org/pdf/2402.05672).
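For readers who want a concrete picture of the contrastive objective used in pre-training, here is a minimal, generic sketch of an InfoNCE loss with in-batch negatives and a temperature parameter (the FAQ below mentions a temperature of 0.01); the batch construction and function names are illustrative assumptions, not the exact training code.

```python
import torch
import torch.nn.functional as F


def info_nce_loss(query_emb: torch.Tensor, passage_emb: torch.Tensor,
                  temperature: float = 0.01) -> torch.Tensor:
    """Generic InfoNCE with in-batch negatives (illustrative sketch, not the official training code).

    query_emb, passage_emb: (batch, dim); row i of each forms a positive pair,
    and the other passages in the batch act as negatives.
    """
    q = F.normalize(query_emb, p=2, dim=1)
    p = F.normalize(passage_emb, p=2, dim=1)
    logits = q @ p.T / temperature                     # (batch, batch) scaled cosine similarities
    labels = torch.arange(q.size(0), device=q.device)  # positives sit on the diagonal
    return F.cross_entropy(logits, labels)


# Toy usage with random embeddings of the model's dimension (768).
loss = info_nce_loss(torch.randn(8, 768), torch.randn(8, 768))
print(loss.item())
```

The low temperature sharpens the softmax over in-batch candidates, which is also why the resulting cosine similarities concentrate in a narrow high range (see the FAQ below).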
## Benchmark Results on [Mr. TyDi](https://arxiv.org/abs/2108.08787)

| Model | Avg MRR@10 | | ar | bn | en | fi | id | ja | ko | ru | sw | te | th |
|-----------------------|------------|-------|------| --- | --- | --- | --- | --- | --- | --- |------| --- | --- |
| BM25 | 33.3 | | 36.7 | 41.3 | 15.1 | 28.8 | 38.2 | 21.7 | 28.1 | 32.9 | 39.6 | 42.4 | 41.7 |
| mDPR | 16.7 | | 26.0 | 25.8 | 16.2 | 11.3 | 14.6 | 18.1 | 21.9 | 18.5 | 7.3 | 10.6 | 13.5 |
| BM25 + mDPR | 41.7 | | 49.1 | 53.5 | 28.4 | 36.5 | 45.5 | 35.5 | 36.2 | 42.7 | 40.5 | 42.0 | 49.2 |
| | |
| multilingual-e5-small | 64.4 | | 71.5 | 66.3 | 54.5 | 57.7 | 63.2 | 55.4 | 54.3 | 60.8 | 65.4 | 89.1 | 70.1 |
| multilingual-e5-base | 65.9 | | 72.3 | 65.0 | 58.5 | 60.8 | 64.9 | 56.6 | 55.8 | 62.7 | 69.0 | 86.6 | 72.7 |
| multilingual-e5-large | **70.5** | | 77.5 | 73.2 | 60.8 | 66.8 | 68.5 | 62.5 | 61.6 | 65.8 | 72.7 | 90.2 | 76.2 |

## MTEB Benchmark Evaluation

Check out [unilm/e5](https://github.com/microsoft/unilm/tree/master/e5) to reproduce evaluation results on the [BEIR](https://arxiv.org/abs/2104.08663) and [MTEB](https://arxiv.org/abs/2210.07316) benchmarks.

## Support for Sentence Transformers

Below is an example of usage with sentence_transformers.

```python
from sentence_transformers import SentenceTransformer

model = SentenceTransformer('intfloat/multilingual-e5-base')

input_texts = [
    'query: how much protein should a female eat',
    'query: 南瓜的家常做法',
    "passage: As a general guideline, the CDC's average requirement of protein for women ages 19 to 70 is 46 grams per day. But, as you can see from this chart, you'll need to increase that if you're expecting or training for a marathon. Check out the chart below to see how much protein you should be eating each day.",
    "passage: 1.清炒南瓜丝 原料:嫩南瓜半个 调料:葱、盐、白糖、鸡精 做法: 1、南瓜用刀薄薄的削去表面一层皮,用勺子刮去瓤 2、擦成细丝(没有擦菜板就用刀慢慢切成细丝) 3、锅烧热放油,入葱花煸出香味 4、入南瓜丝快速翻炒一分钟左右,放盐、一点白糖和鸡精调味出锅 2.香葱炒南瓜 原料:南瓜1只 调料:香葱、蒜末、橄榄油、盐 做法: 1、将南瓜去皮,切成片 2、油锅8成热后,将蒜末放入爆香 3、爆香后,将南瓜片放入,翻炒 4、在翻炒的同时,可以不时地往锅里加水,但不要太多 5、放入盐,炒匀 6、南瓜差不多软和绵了之后,就可以关火 7、撒入香葱,即可出锅",
]

embeddings = model.encode(input_texts, normalize_embeddings=True)
```

Package requirements: `pip install sentence_transformers~=2.2.2`

Contributors: [michaelfeil](https://huggingface.co/michaelfeil)

## FAQ

**1. Do I need to add the prefix "query: " and "passage: " to input texts?**

Yes, this is how the model is trained; otherwise you will see a performance degradation.

Here are some rules of thumb:

- Use "query: " and "passage: " correspondingly for asymmetric tasks such as passage retrieval in open QA and ad-hoc information retrieval.
- Use the "query: " prefix for symmetric tasks such as semantic similarity, bitext mining, and paraphrase retrieval.
- Use the "query: " prefix if you want to use embeddings as features, such as for linear probing classification or clustering.

**2. Why are my reproduced results slightly different from those reported in the model card?**

Different versions of `transformers` and `pytorch` could cause negligible but non-zero performance differences.

**3. Why do the cosine similarity scores cluster between 0.7 and 1.0?**

This is known and expected behavior, as we use a low temperature of 0.01 for the InfoNCE contrastive loss. For text embedding tasks like text retrieval or semantic similarity, what matters is the relative order of the scores rather than their absolute values, so this should not be an issue.
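As a quick illustration of points 1 and 3 above, the sketch below (not part of the original card; the query and passages are made-up examples) ranks a handful of prefixed passages for one query and uses only the relative order of the cosine scores:

```python
from sentence_transformers import SentenceTransformer, util

model = SentenceTransformer('intfloat/multilingual-e5-base')

# Hypothetical texts for illustration; note the "query: " / "passage: " prefixes.
query = "query: how long does it take to hard-boil an egg"
passages = [
    "passage: Hard-boiled eggs usually take 9 to 12 minutes in boiling water.",
    "passage: The Eiffel Tower was completed in 1889 and is about 330 metres tall.",
    "passage: Soft-boiled eggs are typically ready after about 6 minutes.",
]

query_emb = model.encode([query], normalize_embeddings=True)
passage_embs = model.encode(passages, normalize_embeddings=True)

# Cosine similarities; the absolute values will sit in a narrow high range,
# but only the ranking matters for retrieval.
scores = util.cos_sim(query_emb, passage_embs)[0]

ranking = sorted(range(len(passages)), key=lambda i: float(scores[i]), reverse=True)
for rank, i in enumerate(ranking, start=1):
    print(f"{rank}. score={float(scores[i]):.3f}  {passages[i]}")
```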
## Citation

If you find our paper or models helpful, please consider citing them as follows:

```
@article{wang2024multilingual,
  title={Multilingual E5 Text Embeddings: A Technical Report},
  author={Wang, Liang and Yang, Nan and Huang, Xiaolong and Yang, Linjun and Majumder, Rangan and Wei, Furu},
  journal={arXiv preprint arXiv:2402.05672},
  year={2024}
}
```

## Limitations

Long texts will be truncated to at most 512 tokens.
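To make the truncation behaviour concrete, here is a small sketch (added for illustration; the long string is hypothetical) showing that inputs beyond 512 tokens are silently cut off when tokenized as in the usage examples above:

```python
from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained('intfloat/multilingual-e5-base')

# A hypothetical, overly long passage.
long_text = "passage: " + "model cards describe training data and intended use. " * 200

full_ids = tokenizer(long_text)["input_ids"]
print(len(full_ids))  # well above 512

# With truncation enabled (as in the usage examples above), only the first
# 512 tokens are kept; everything beyond that is dropped.
batch = tokenizer(long_text, max_length=512, padding=True, truncation=True, return_tensors="pt")
print(batch["input_ids"].shape)  # torch.Size([1, 512])
```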
[ "BIOSSES", "SCIFACT" ]
avsolatorio/GIST-Embedding-v0
avsolatorio
sentence-similarity
[ "sentence-transformers", "pytorch", "safetensors", "bert", "feature-extraction", "mteb", "sentence-similarity", "en", "arxiv:2402.16829", "arxiv:2212.09741", "license:mit", "model-index", "autotrain_compatible", "text-embeddings-inference", "endpoints_compatible", "region:us" ]
"2024-01-31T16:41:20Z"
2024-02-28T00:31:27+00:00
548,257
25
--- language: - en library_name: sentence-transformers license: mit pipeline_tag: sentence-similarity tags: - feature-extraction - mteb - sentence-similarity - sentence-transformers model-index: - name: GIST-Embedding-v0 results: - task: type: Classification dataset: name: MTEB AmazonCounterfactualClassification (en) type: mteb/amazon_counterfactual config: en split: test revision: e8379541af4e31359cca9fbcf4b00f2671dba205 metrics: - type: accuracy value: 75.95522388059702 - type: ap value: 38.940434354439276 - type: f1 value: 69.88686275888114 - task: type: Classification dataset: name: MTEB AmazonPolarityClassification type: mteb/amazon_polarity config: default split: test revision: e2d317d38cd51312af73b3d32a06d1a08b442046 metrics: - type: accuracy value: 93.51357499999999 - type: ap value: 90.30414241486682 - type: f1 value: 93.50552829047328 - task: type: Classification dataset: name: MTEB AmazonReviewsClassification (en) type: mteb/amazon_reviews_multi config: en split: test revision: 1399c76144fd37290681b995c656ef9b2e06e26d metrics: - type: accuracy value: 50.446000000000005 - type: f1 value: 49.76432659699279 - task: type: Retrieval dataset: name: MTEB ArguAna type: arguana config: default split: test revision: None metrics: - type: map_at_1 value: 38.265 - type: map_at_10 value: 54.236 - type: map_at_100 value: 54.81399999999999 - type: map_at_1000 value: 54.81700000000001 - type: map_at_3 value: 49.881 - type: map_at_5 value: 52.431000000000004 - type: mrr_at_1 value: 38.265 - type: mrr_at_10 value: 54.152 - type: mrr_at_100 value: 54.730000000000004 - type: mrr_at_1000 value: 54.733 - type: mrr_at_3 value: 49.644 - type: mrr_at_5 value: 52.32599999999999 - type: ndcg_at_1 value: 38.265 - type: ndcg_at_10 value: 62.62 - type: ndcg_at_100 value: 64.96600000000001 - type: ndcg_at_1000 value: 65.035 - type: ndcg_at_3 value: 53.691 - type: ndcg_at_5 value: 58.303000000000004 - type: precision_at_1 value: 38.265 - type: precision_at_10 value: 8.919 - type: precision_at_100 value: 0.991 - type: precision_at_1000 value: 0.1 - type: precision_at_3 value: 21.573999999999998 - type: precision_at_5 value: 15.192 - type: recall_at_1 value: 38.265 - type: recall_at_10 value: 89.189 - type: recall_at_100 value: 99.14699999999999 - type: recall_at_1000 value: 99.644 - type: recall_at_3 value: 64.723 - type: recall_at_5 value: 75.96000000000001 - task: type: Clustering dataset: name: MTEB ArxivClusteringP2P type: mteb/arxiv-clustering-p2p config: default split: test revision: a122ad7f3f0291bf49cc6f4d32aa80929df69d5d metrics: - type: v_measure value: 48.287087887491744 - task: type: Clustering dataset: name: MTEB ArxivClusteringS2S type: mteb/arxiv-clustering-s2s config: default split: test revision: f910caf1a6075f7329cdf8c1a6135696f37dbd53 metrics: - type: v_measure value: 42.74244928943812 - task: type: Reranking dataset: name: MTEB AskUbuntuDupQuestions type: mteb/askubuntudupquestions-reranking config: default split: test revision: 2000358ca161889fa9c082cb41daa8dcfb161a54 metrics: - type: map value: 62.68814324295771 - type: mrr value: 75.46266983247591 - task: type: STS dataset: name: MTEB BIOSSES type: mteb/biosses-sts config: default split: test revision: d3fb88f8f02e40887cd149695127462bbcf29b4a metrics: - type: cos_sim_pearson value: 90.45240209600391 - type: cos_sim_spearman value: 87.95079919934645 - type: euclidean_pearson value: 88.93438602492702 - type: euclidean_spearman value: 88.28152962682988 - type: manhattan_pearson value: 88.92193964325268 - type: manhattan_spearman value: 
88.21466063329498 - task: type: BitextMining dataset: name: MTEB BUCC (de-en) type: mteb/bucc-bitext-mining config: de-en split: test revision: d51519689f32196a32af33b075a01d0e7c51e252 metrics: - type: accuracy value: 15.605427974947808 - type: f1 value: 14.989877233698866 - type: precision value: 14.77906814441261 - type: recall value: 15.605427974947808 - task: type: BitextMining dataset: name: MTEB BUCC (fr-en) type: mteb/bucc-bitext-mining config: fr-en split: test revision: d51519689f32196a32af33b075a01d0e7c51e252 metrics: - type: accuracy value: 33.38102575390711 - type: f1 value: 32.41704114719127 - type: precision value: 32.057363829835964 - type: recall value: 33.38102575390711 - task: type: BitextMining dataset: name: MTEB BUCC (ru-en) type: mteb/bucc-bitext-mining config: ru-en split: test revision: d51519689f32196a32af33b075a01d0e7c51e252 metrics: - type: accuracy value: 0.1939729823346034 - type: f1 value: 0.17832215223820772 - type: precision value: 0.17639155671715423 - type: recall value: 0.1939729823346034 - task: type: BitextMining dataset: name: MTEB BUCC (zh-en) type: mteb/bucc-bitext-mining config: zh-en split: test revision: d51519689f32196a32af33b075a01d0e7c51e252 metrics: - type: accuracy value: 3.0542390731964195 - type: f1 value: 2.762857644374232 - type: precision value: 2.6505178163945935 - type: recall value: 3.0542390731964195 - task: type: Classification dataset: name: MTEB Banking77Classification type: mteb/banking77 config: default split: test revision: 0fd18e25b25c072e09e0d92ab615fda904d66300 metrics: - type: accuracy value: 87.29545454545453 - type: f1 value: 87.26415991342238 - task: type: Clustering dataset: name: MTEB BiorxivClusteringP2P type: mteb/biorxiv-clustering-p2p config: default split: test revision: 65b79d1d13f80053f67aca9498d9402c2d9f1f40 metrics: - type: v_measure value: 39.035319537839484 - task: type: Clustering dataset: name: MTEB BiorxivClusteringS2S type: mteb/biorxiv-clustering-s2s config: default split: test revision: 258694dd0231531bc1fd9de6ceb52a0853c6d908 metrics: - type: v_measure value: 36.667313307057285 - task: type: Retrieval dataset: name: MTEB CQADupstackAndroidRetrieval type: BeIR/cqadupstack config: default split: test revision: None metrics: - type: map_at_1 value: 33.979 - type: map_at_10 value: 46.275 - type: map_at_100 value: 47.975 - type: map_at_1000 value: 48.089 - type: map_at_3 value: 42.507 - type: map_at_5 value: 44.504 - type: mrr_at_1 value: 42.346000000000004 - type: mrr_at_10 value: 53.013 - type: mrr_at_100 value: 53.717000000000006 - type: mrr_at_1000 value: 53.749 - type: mrr_at_3 value: 50.405 - type: mrr_at_5 value: 51.915 - type: ndcg_at_1 value: 42.346000000000004 - type: ndcg_at_10 value: 53.179 - type: ndcg_at_100 value: 58.458 - type: ndcg_at_1000 value: 60.057 - type: ndcg_at_3 value: 48.076 - type: ndcg_at_5 value: 50.283 - type: precision_at_1 value: 42.346000000000004 - type: precision_at_10 value: 10.386 - type: precision_at_100 value: 1.635 - type: precision_at_1000 value: 0.20600000000000002 - type: precision_at_3 value: 23.413999999999998 - type: precision_at_5 value: 16.624 - type: recall_at_1 value: 33.979 - type: recall_at_10 value: 65.553 - type: recall_at_100 value: 87.18599999999999 - type: recall_at_1000 value: 97.25200000000001 - type: recall_at_3 value: 50.068999999999996 - type: recall_at_5 value: 56.882 - type: map_at_1 value: 31.529 - type: map_at_10 value: 42.219 - type: map_at_100 value: 43.408 - type: map_at_1000 value: 43.544 - type: map_at_3 value: 39.178000000000004 - 
type: map_at_5 value: 40.87 - type: mrr_at_1 value: 39.873 - type: mrr_at_10 value: 48.25 - type: mrr_at_100 value: 48.867 - type: mrr_at_1000 value: 48.908 - type: mrr_at_3 value: 46.03 - type: mrr_at_5 value: 47.355000000000004 - type: ndcg_at_1 value: 39.873 - type: ndcg_at_10 value: 47.933 - type: ndcg_at_100 value: 52.156000000000006 - type: ndcg_at_1000 value: 54.238 - type: ndcg_at_3 value: 43.791999999999994 - type: ndcg_at_5 value: 45.678999999999995 - type: precision_at_1 value: 39.873 - type: precision_at_10 value: 9.032 - type: precision_at_100 value: 1.419 - type: precision_at_1000 value: 0.192 - type: precision_at_3 value: 21.231 - type: precision_at_5 value: 14.981 - type: recall_at_1 value: 31.529 - type: recall_at_10 value: 57.925000000000004 - type: recall_at_100 value: 75.89 - type: recall_at_1000 value: 89.007 - type: recall_at_3 value: 45.363 - type: recall_at_5 value: 50.973 - type: map_at_1 value: 41.289 - type: map_at_10 value: 54.494 - type: map_at_100 value: 55.494 - type: map_at_1000 value: 55.545 - type: map_at_3 value: 51.20099999999999 - type: map_at_5 value: 53.147 - type: mrr_at_1 value: 47.335 - type: mrr_at_10 value: 57.772 - type: mrr_at_100 value: 58.428000000000004 - type: mrr_at_1000 value: 58.453 - type: mrr_at_3 value: 55.434000000000005 - type: mrr_at_5 value: 56.8 - type: ndcg_at_1 value: 47.335 - type: ndcg_at_10 value: 60.382999999999996 - type: ndcg_at_100 value: 64.294 - type: ndcg_at_1000 value: 65.211 - type: ndcg_at_3 value: 55.098 - type: ndcg_at_5 value: 57.776 - type: precision_at_1 value: 47.335 - type: precision_at_10 value: 9.724 - type: precision_at_100 value: 1.26 - type: precision_at_1000 value: 0.13699999999999998 - type: precision_at_3 value: 24.786 - type: precision_at_5 value: 16.977999999999998 - type: recall_at_1 value: 41.289 - type: recall_at_10 value: 74.36399999999999 - type: recall_at_100 value: 91.19800000000001 - type: recall_at_1000 value: 97.508 - type: recall_at_3 value: 60.285 - type: recall_at_5 value: 66.814 - type: map_at_1 value: 28.816999999999997 - type: map_at_10 value: 37.856 - type: map_at_100 value: 38.824 - type: map_at_1000 value: 38.902 - type: map_at_3 value: 34.982 - type: map_at_5 value: 36.831 - type: mrr_at_1 value: 31.073 - type: mrr_at_10 value: 39.985 - type: mrr_at_100 value: 40.802 - type: mrr_at_1000 value: 40.861999999999995 - type: mrr_at_3 value: 37.419999999999995 - type: mrr_at_5 value: 39.104 - type: ndcg_at_1 value: 31.073 - type: ndcg_at_10 value: 42.958 - type: ndcg_at_100 value: 47.671 - type: ndcg_at_1000 value: 49.633 - type: ndcg_at_3 value: 37.602000000000004 - type: ndcg_at_5 value: 40.688 - type: precision_at_1 value: 31.073 - type: precision_at_10 value: 6.531000000000001 - type: precision_at_100 value: 0.932 - type: precision_at_1000 value: 0.11399999999999999 - type: precision_at_3 value: 15.857 - type: precision_at_5 value: 11.209 - type: recall_at_1 value: 28.816999999999997 - type: recall_at_10 value: 56.538999999999994 - type: recall_at_100 value: 78.17699999999999 - type: recall_at_1000 value: 92.92200000000001 - type: recall_at_3 value: 42.294 - type: recall_at_5 value: 49.842999999999996 - type: map_at_1 value: 18.397 - type: map_at_10 value: 27.256999999999998 - type: map_at_100 value: 28.541 - type: map_at_1000 value: 28.658 - type: map_at_3 value: 24.565 - type: map_at_5 value: 26.211000000000002 - type: mrr_at_1 value: 22.761 - type: mrr_at_10 value: 32.248 - type: mrr_at_100 value: 33.171 - type: mrr_at_1000 value: 33.227000000000004 - type: mrr_at_3 value: 
29.498 - type: mrr_at_5 value: 31.246000000000002 - type: ndcg_at_1 value: 22.761 - type: ndcg_at_10 value: 32.879999999999995 - type: ndcg_at_100 value: 38.913 - type: ndcg_at_1000 value: 41.504999999999995 - type: ndcg_at_3 value: 27.988000000000003 - type: ndcg_at_5 value: 30.548 - type: precision_at_1 value: 22.761 - type: precision_at_10 value: 6.045 - type: precision_at_100 value: 1.044 - type: precision_at_1000 value: 0.13999999999999999 - type: precision_at_3 value: 13.433 - type: precision_at_5 value: 9.925 - type: recall_at_1 value: 18.397 - type: recall_at_10 value: 45.14 - type: recall_at_100 value: 71.758 - type: recall_at_1000 value: 89.854 - type: recall_at_3 value: 31.942999999999998 - type: recall_at_5 value: 38.249 - type: map_at_1 value: 30.604 - type: map_at_10 value: 42.132 - type: map_at_100 value: 43.419000000000004 - type: map_at_1000 value: 43.527 - type: map_at_3 value: 38.614 - type: map_at_5 value: 40.705000000000005 - type: mrr_at_1 value: 37.824999999999996 - type: mrr_at_10 value: 47.696 - type: mrr_at_100 value: 48.483 - type: mrr_at_1000 value: 48.53 - type: mrr_at_3 value: 45.123999999999995 - type: mrr_at_5 value: 46.635 - type: ndcg_at_1 value: 37.824999999999996 - type: ndcg_at_10 value: 48.421 - type: ndcg_at_100 value: 53.568000000000005 - type: ndcg_at_1000 value: 55.574999999999996 - type: ndcg_at_3 value: 42.89 - type: ndcg_at_5 value: 45.683 - type: precision_at_1 value: 37.824999999999996 - type: precision_at_10 value: 8.758000000000001 - type: precision_at_100 value: 1.319 - type: precision_at_1000 value: 0.168 - type: precision_at_3 value: 20.244 - type: precision_at_5 value: 14.533 - type: recall_at_1 value: 30.604 - type: recall_at_10 value: 61.605 - type: recall_at_100 value: 82.787 - type: recall_at_1000 value: 95.78 - type: recall_at_3 value: 46.303 - type: recall_at_5 value: 53.351000000000006 - type: map_at_1 value: 26.262999999999998 - type: map_at_10 value: 36.858999999999995 - type: map_at_100 value: 38.241 - type: map_at_1000 value: 38.346999999999994 - type: map_at_3 value: 33.171 - type: map_at_5 value: 35.371 - type: mrr_at_1 value: 32.42 - type: mrr_at_10 value: 42.361 - type: mrr_at_100 value: 43.219 - type: mrr_at_1000 value: 43.271 - type: mrr_at_3 value: 39.593 - type: mrr_at_5 value: 41.248000000000005 - type: ndcg_at_1 value: 32.42 - type: ndcg_at_10 value: 43.081 - type: ndcg_at_100 value: 48.837 - type: ndcg_at_1000 value: 50.954 - type: ndcg_at_3 value: 37.413000000000004 - type: ndcg_at_5 value: 40.239000000000004 - type: precision_at_1 value: 32.42 - type: precision_at_10 value: 8.071 - type: precision_at_100 value: 1.272 - type: precision_at_1000 value: 0.163 - type: precision_at_3 value: 17.922 - type: precision_at_5 value: 13.311 - type: recall_at_1 value: 26.262999999999998 - type: recall_at_10 value: 56.062999999999995 - type: recall_at_100 value: 80.636 - type: recall_at_1000 value: 94.707 - type: recall_at_3 value: 40.425 - type: recall_at_5 value: 47.663 - type: map_at_1 value: 27.86616666666667 - type: map_at_10 value: 37.584999999999994 - type: map_at_100 value: 38.80291666666667 - type: map_at_1000 value: 38.91358333333333 - type: map_at_3 value: 34.498 - type: map_at_5 value: 36.269999999999996 - type: mrr_at_1 value: 33.07566666666667 - type: mrr_at_10 value: 41.92366666666666 - type: mrr_at_100 value: 42.73516666666667 - type: mrr_at_1000 value: 42.785666666666664 - type: mrr_at_3 value: 39.39075 - type: mrr_at_5 value: 40.89133333333334 - type: ndcg_at_1 value: 33.07566666666667 - type: ndcg_at_10 value: 
43.19875 - type: ndcg_at_100 value: 48.32083333333334 - type: ndcg_at_1000 value: 50.418000000000006 - type: ndcg_at_3 value: 38.10308333333333 - type: ndcg_at_5 value: 40.5985 - type: precision_at_1 value: 33.07566666666667 - type: precision_at_10 value: 7.581916666666666 - type: precision_at_100 value: 1.1975 - type: precision_at_1000 value: 0.15699999999999997 - type: precision_at_3 value: 17.49075 - type: precision_at_5 value: 12.5135 - type: recall_at_1 value: 27.86616666666667 - type: recall_at_10 value: 55.449749999999995 - type: recall_at_100 value: 77.92516666666666 - type: recall_at_1000 value: 92.31358333333333 - type: recall_at_3 value: 41.324416666666664 - type: recall_at_5 value: 47.72533333333333 - type: map_at_1 value: 26.648 - type: map_at_10 value: 33.155 - type: map_at_100 value: 34.149 - type: map_at_1000 value: 34.239000000000004 - type: map_at_3 value: 30.959999999999997 - type: map_at_5 value: 32.172 - type: mrr_at_1 value: 30.061 - type: mrr_at_10 value: 36.229 - type: mrr_at_100 value: 37.088 - type: mrr_at_1000 value: 37.15 - type: mrr_at_3 value: 34.254 - type: mrr_at_5 value: 35.297 - type: ndcg_at_1 value: 30.061 - type: ndcg_at_10 value: 37.247 - type: ndcg_at_100 value: 42.093 - type: ndcg_at_1000 value: 44.45 - type: ndcg_at_3 value: 33.211 - type: ndcg_at_5 value: 35.083999999999996 - type: precision_at_1 value: 30.061 - type: precision_at_10 value: 5.7059999999999995 - type: precision_at_100 value: 0.8880000000000001 - type: precision_at_1000 value: 0.116 - type: precision_at_3 value: 13.957 - type: precision_at_5 value: 9.663 - type: recall_at_1 value: 26.648 - type: recall_at_10 value: 46.85 - type: recall_at_100 value: 68.87 - type: recall_at_1000 value: 86.508 - type: recall_at_3 value: 35.756 - type: recall_at_5 value: 40.376 - type: map_at_1 value: 19.058 - type: map_at_10 value: 26.722 - type: map_at_100 value: 27.863 - type: map_at_1000 value: 27.988000000000003 - type: map_at_3 value: 24.258 - type: map_at_5 value: 25.531 - type: mrr_at_1 value: 23.09 - type: mrr_at_10 value: 30.711 - type: mrr_at_100 value: 31.628 - type: mrr_at_1000 value: 31.702 - type: mrr_at_3 value: 28.418 - type: mrr_at_5 value: 29.685 - type: ndcg_at_1 value: 23.09 - type: ndcg_at_10 value: 31.643 - type: ndcg_at_100 value: 37.047999999999995 - type: ndcg_at_1000 value: 39.896 - type: ndcg_at_3 value: 27.189999999999998 - type: ndcg_at_5 value: 29.112 - type: precision_at_1 value: 23.09 - type: precision_at_10 value: 5.743 - type: precision_at_100 value: 1 - type: precision_at_1000 value: 0.14300000000000002 - type: precision_at_3 value: 12.790000000000001 - type: precision_at_5 value: 9.195 - type: recall_at_1 value: 19.058 - type: recall_at_10 value: 42.527 - type: recall_at_100 value: 66.833 - type: recall_at_1000 value: 87.008 - type: recall_at_3 value: 29.876 - type: recall_at_5 value: 34.922 - type: map_at_1 value: 28.066999999999997 - type: map_at_10 value: 37.543 - type: map_at_100 value: 38.725 - type: map_at_1000 value: 38.815 - type: map_at_3 value: 34.488 - type: map_at_5 value: 36.222 - type: mrr_at_1 value: 33.116 - type: mrr_at_10 value: 41.743 - type: mrr_at_100 value: 42.628 - type: mrr_at_1000 value: 42.675999999999995 - type: mrr_at_3 value: 39.241 - type: mrr_at_5 value: 40.622 - type: ndcg_at_1 value: 33.116 - type: ndcg_at_10 value: 43.089 - type: ndcg_at_100 value: 48.61 - type: ndcg_at_1000 value: 50.585 - type: ndcg_at_3 value: 37.816 - type: ndcg_at_5 value: 40.256 - type: precision_at_1 value: 33.116 - type: precision_at_10 value: 7.313 - type: 
precision_at_100 value: 1.1320000000000001 - type: precision_at_1000 value: 0.14200000000000002 - type: precision_at_3 value: 17.102 - type: precision_at_5 value: 12.09 - type: recall_at_1 value: 28.066999999999997 - type: recall_at_10 value: 55.684 - type: recall_at_100 value: 80.092 - type: recall_at_1000 value: 93.605 - type: recall_at_3 value: 41.277 - type: recall_at_5 value: 47.46 - type: map_at_1 value: 27.094 - type: map_at_10 value: 35.939 - type: map_at_100 value: 37.552 - type: map_at_1000 value: 37.771 - type: map_at_3 value: 32.414 - type: map_at_5 value: 34.505 - type: mrr_at_1 value: 32.609 - type: mrr_at_10 value: 40.521 - type: mrr_at_100 value: 41.479 - type: mrr_at_1000 value: 41.524 - type: mrr_at_3 value: 37.451 - type: mrr_at_5 value: 39.387 - type: ndcg_at_1 value: 32.609 - type: ndcg_at_10 value: 41.83 - type: ndcg_at_100 value: 47.763 - type: ndcg_at_1000 value: 50.102999999999994 - type: ndcg_at_3 value: 36.14 - type: ndcg_at_5 value: 39.153999999999996 - type: precision_at_1 value: 32.609 - type: precision_at_10 value: 7.925 - type: precision_at_100 value: 1.591 - type: precision_at_1000 value: 0.246 - type: precision_at_3 value: 16.337 - type: precision_at_5 value: 12.411 - type: recall_at_1 value: 27.094 - type: recall_at_10 value: 53.32900000000001 - type: recall_at_100 value: 79.52 - type: recall_at_1000 value: 93.958 - type: recall_at_3 value: 37.773 - type: recall_at_5 value: 45.321 - type: map_at_1 value: 22.649 - type: map_at_10 value: 30.569000000000003 - type: map_at_100 value: 31.444 - type: map_at_1000 value: 31.538 - type: map_at_3 value: 27.638 - type: map_at_5 value: 29.171000000000003 - type: mrr_at_1 value: 24.399 - type: mrr_at_10 value: 32.555 - type: mrr_at_100 value: 33.312000000000005 - type: mrr_at_1000 value: 33.376 - type: mrr_at_3 value: 29.820999999999998 - type: mrr_at_5 value: 31.402 - type: ndcg_at_1 value: 24.399 - type: ndcg_at_10 value: 35.741 - type: ndcg_at_100 value: 40.439 - type: ndcg_at_1000 value: 42.809000000000005 - type: ndcg_at_3 value: 30.020999999999997 - type: ndcg_at_5 value: 32.68 - type: precision_at_1 value: 24.399 - type: precision_at_10 value: 5.749 - type: precision_at_100 value: 0.878 - type: precision_at_1000 value: 0.117 - type: precision_at_3 value: 12.815999999999999 - type: precision_at_5 value: 9.242 - type: recall_at_1 value: 22.649 - type: recall_at_10 value: 49.818 - type: recall_at_100 value: 72.155 - type: recall_at_1000 value: 89.654 - type: recall_at_3 value: 34.528999999999996 - type: recall_at_5 value: 40.849999999999994 - task: type: Retrieval dataset: name: MTEB ClimateFEVER type: climate-fever config: default split: test revision: None metrics: - type: map_at_1 value: 13.587 - type: map_at_10 value: 23.021 - type: map_at_100 value: 25.095 - type: map_at_1000 value: 25.295 - type: map_at_3 value: 19.463 - type: map_at_5 value: 21.389 - type: mrr_at_1 value: 29.576999999999998 - type: mrr_at_10 value: 41.44 - type: mrr_at_100 value: 42.497 - type: mrr_at_1000 value: 42.529 - type: mrr_at_3 value: 38.284 - type: mrr_at_5 value: 40.249 - type: ndcg_at_1 value: 29.576999999999998 - type: ndcg_at_10 value: 31.491000000000003 - type: ndcg_at_100 value: 39.352 - type: ndcg_at_1000 value: 42.703 - type: ndcg_at_3 value: 26.284999999999997 - type: ndcg_at_5 value: 28.218 - type: precision_at_1 value: 29.576999999999998 - type: precision_at_10 value: 9.713 - type: precision_at_100 value: 1.8079999999999998 - type: precision_at_1000 value: 0.243 - type: precision_at_3 value: 19.608999999999998 - type: 
precision_at_5 value: 14.957999999999998 - type: recall_at_1 value: 13.587 - type: recall_at_10 value: 37.001 - type: recall_at_100 value: 63.617999999999995 - type: recall_at_1000 value: 82.207 - type: recall_at_3 value: 24.273 - type: recall_at_5 value: 29.813000000000002 - task: type: Retrieval dataset: name: MTEB DBPedia type: dbpedia-entity config: default split: test revision: None metrics: - type: map_at_1 value: 9.98 - type: map_at_10 value: 20.447000000000003 - type: map_at_100 value: 29.032999999999998 - type: map_at_1000 value: 30.8 - type: map_at_3 value: 15.126999999999999 - type: map_at_5 value: 17.327 - type: mrr_at_1 value: 71.25 - type: mrr_at_10 value: 78.014 - type: mrr_at_100 value: 78.303 - type: mrr_at_1000 value: 78.309 - type: mrr_at_3 value: 76.375 - type: mrr_at_5 value: 77.58699999999999 - type: ndcg_at_1 value: 57.99999999999999 - type: ndcg_at_10 value: 41.705 - type: ndcg_at_100 value: 47.466 - type: ndcg_at_1000 value: 55.186 - type: ndcg_at_3 value: 47.089999999999996 - type: ndcg_at_5 value: 43.974000000000004 - type: precision_at_1 value: 71.25 - type: precision_at_10 value: 32.65 - type: precision_at_100 value: 10.89 - type: precision_at_1000 value: 2.197 - type: precision_at_3 value: 50.5 - type: precision_at_5 value: 42.199999999999996 - type: recall_at_1 value: 9.98 - type: recall_at_10 value: 25.144 - type: recall_at_100 value: 53.754999999999995 - type: recall_at_1000 value: 78.56400000000001 - type: recall_at_3 value: 15.964 - type: recall_at_5 value: 19.186 - task: type: Classification dataset: name: MTEB EmotionClassification type: mteb/emotion config: default split: test revision: 4f58c6b202a23cf9a4da393831edf4f9183cad37 metrics: - type: accuracy value: 54.67999999999999 - type: f1 value: 49.48247525503583 - task: type: Retrieval dataset: name: MTEB FEVER type: fever config: default split: test revision: None metrics: - type: map_at_1 value: 74.798 - type: map_at_10 value: 82.933 - type: map_at_100 value: 83.157 - type: map_at_1000 value: 83.173 - type: map_at_3 value: 81.80199999999999 - type: map_at_5 value: 82.55 - type: mrr_at_1 value: 80.573 - type: mrr_at_10 value: 87.615 - type: mrr_at_100 value: 87.69 - type: mrr_at_1000 value: 87.69200000000001 - type: mrr_at_3 value: 86.86399999999999 - type: mrr_at_5 value: 87.386 - type: ndcg_at_1 value: 80.573 - type: ndcg_at_10 value: 86.64500000000001 - type: ndcg_at_100 value: 87.407 - type: ndcg_at_1000 value: 87.68299999999999 - type: ndcg_at_3 value: 84.879 - type: ndcg_at_5 value: 85.921 - type: precision_at_1 value: 80.573 - type: precision_at_10 value: 10.348 - type: precision_at_100 value: 1.093 - type: precision_at_1000 value: 0.11399999999999999 - type: precision_at_3 value: 32.268 - type: precision_at_5 value: 20.084 - type: recall_at_1 value: 74.798 - type: recall_at_10 value: 93.45400000000001 - type: recall_at_100 value: 96.42500000000001 - type: recall_at_1000 value: 98.158 - type: recall_at_3 value: 88.634 - type: recall_at_5 value: 91.295 - task: type: Retrieval dataset: name: MTEB FiQA2018 type: fiqa config: default split: test revision: None metrics: - type: map_at_1 value: 20.567 - type: map_at_10 value: 32.967999999999996 - type: map_at_100 value: 35.108 - type: map_at_1000 value: 35.272999999999996 - type: map_at_3 value: 28.701999999999998 - type: map_at_5 value: 31.114000000000004 - type: mrr_at_1 value: 40.432 - type: mrr_at_10 value: 48.956 - type: mrr_at_100 value: 49.832 - type: mrr_at_1000 value: 49.87 - type: mrr_at_3 value: 46.759 - type: mrr_at_5 value: 47.886 - 
type: ndcg_at_1 value: 40.432 - type: ndcg_at_10 value: 40.644000000000005 - type: ndcg_at_100 value: 48.252 - type: ndcg_at_1000 value: 51.099000000000004 - type: ndcg_at_3 value: 36.992000000000004 - type: ndcg_at_5 value: 38.077 - type: precision_at_1 value: 40.432 - type: precision_at_10 value: 11.296000000000001 - type: precision_at_100 value: 1.9009999999999998 - type: precision_at_1000 value: 0.241 - type: precision_at_3 value: 24.537 - type: precision_at_5 value: 17.963 - type: recall_at_1 value: 20.567 - type: recall_at_10 value: 47.052 - type: recall_at_100 value: 75.21600000000001 - type: recall_at_1000 value: 92.285 - type: recall_at_3 value: 33.488 - type: recall_at_5 value: 39.334 - task: type: Retrieval dataset: name: MTEB HotpotQA type: hotpotqa config: default split: test revision: None metrics: - type: map_at_1 value: 38.196999999999996 - type: map_at_10 value: 60.697 - type: map_at_100 value: 61.624 - type: map_at_1000 value: 61.692 - type: map_at_3 value: 57.421 - type: map_at_5 value: 59.455000000000005 - type: mrr_at_1 value: 76.39399999999999 - type: mrr_at_10 value: 82.504 - type: mrr_at_100 value: 82.71300000000001 - type: mrr_at_1000 value: 82.721 - type: mrr_at_3 value: 81.494 - type: mrr_at_5 value: 82.137 - type: ndcg_at_1 value: 76.39399999999999 - type: ndcg_at_10 value: 68.92200000000001 - type: ndcg_at_100 value: 72.13199999999999 - type: ndcg_at_1000 value: 73.392 - type: ndcg_at_3 value: 64.226 - type: ndcg_at_5 value: 66.815 - type: precision_at_1 value: 76.39399999999999 - type: precision_at_10 value: 14.442 - type: precision_at_100 value: 1.694 - type: precision_at_1000 value: 0.186 - type: precision_at_3 value: 41.211 - type: precision_at_5 value: 26.766000000000002 - type: recall_at_1 value: 38.196999999999996 - type: recall_at_10 value: 72.208 - type: recall_at_100 value: 84.71300000000001 - type: recall_at_1000 value: 92.971 - type: recall_at_3 value: 61.816 - type: recall_at_5 value: 66.914 - task: type: Classification dataset: name: MTEB ImdbClassification type: mteb/imdb config: default split: test revision: 3d86128a09e091d6018b6d26cad27f2739fc2db7 metrics: - type: accuracy value: 89.6556 - type: ap value: 85.27600392682054 - type: f1 value: 89.63353655386406 - task: type: Retrieval dataset: name: MTEB MSMARCO type: msmarco config: default split: dev revision: None metrics: - type: map_at_1 value: 21.482 - type: map_at_10 value: 33.701 - type: map_at_100 value: 34.861 - type: map_at_1000 value: 34.914 - type: map_at_3 value: 29.793999999999997 - type: map_at_5 value: 32.072 - type: mrr_at_1 value: 22.163 - type: mrr_at_10 value: 34.371 - type: mrr_at_100 value: 35.471000000000004 - type: mrr_at_1000 value: 35.518 - type: mrr_at_3 value: 30.554 - type: mrr_at_5 value: 32.799 - type: ndcg_at_1 value: 22.163 - type: ndcg_at_10 value: 40.643 - type: ndcg_at_100 value: 46.239999999999995 - type: ndcg_at_1000 value: 47.526 - type: ndcg_at_3 value: 32.714999999999996 - type: ndcg_at_5 value: 36.791000000000004 - type: precision_at_1 value: 22.163 - type: precision_at_10 value: 6.4799999999999995 - type: precision_at_100 value: 0.928 - type: precision_at_1000 value: 0.104 - type: precision_at_3 value: 14.002 - type: precision_at_5 value: 10.453 - type: recall_at_1 value: 21.482 - type: recall_at_10 value: 61.953 - type: recall_at_100 value: 87.86500000000001 - type: recall_at_1000 value: 97.636 - type: recall_at_3 value: 40.441 - type: recall_at_5 value: 50.27 - task: type: Classification dataset: name: MTEB MTOPDomainClassification (en) type: 
mteb/mtop_domain config: en split: test revision: d80d48c1eb48d3562165c59d59d0034df9fff0bf metrics: - type: accuracy value: 95.3032375740994 - type: f1 value: 95.01515022686607 - task: type: Classification dataset: name: MTEB MTOPIntentClassification (en) type: mteb/mtop_intent config: en split: test revision: ae001d0e6b1228650b7bd1c2c65fb50ad11a8aba metrics: - type: accuracy value: 78.10077519379846 - type: f1 value: 58.240739725625644 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (en) type: mteb/amazon_massive_intent config: en split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 76.0053799596503 - type: f1 value: 74.11733965804146 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (en) type: mteb/amazon_massive_scenario config: en split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 79.64021519838602 - type: f1 value: 79.8513960091438 - task: type: Clustering dataset: name: MTEB MedrxivClusteringP2P type: mteb/medrxiv-clustering-p2p config: default split: test revision: e7a26af6f3ae46b30dde8737f02c07b1505bcc73 metrics: - type: v_measure value: 33.92425767945184 - task: type: Clustering dataset: name: MTEB MedrxivClusteringS2S type: mteb/medrxiv-clustering-s2s config: default split: test revision: 35191c8c0dca72d8ff3efcd72aa802307d469663 metrics: - type: v_measure value: 32.249612382060754 - task: type: Reranking dataset: name: MTEB MindSmallReranking type: mteb/mind_small config: default split: test revision: 3bdac13927fdc888b903db93b2ffdbd90b295a69 metrics: - type: map value: 32.35584955492918 - type: mrr value: 33.545865224584674 - task: type: Retrieval dataset: name: MTEB NFCorpus type: nfcorpus config: default split: test revision: None metrics: - type: map_at_1 value: 6.978 - type: map_at_10 value: 14.749 - type: map_at_100 value: 19.192 - type: map_at_1000 value: 20.815 - type: map_at_3 value: 10.927000000000001 - type: map_at_5 value: 12.726 - type: mrr_at_1 value: 49.536 - type: mrr_at_10 value: 57.806999999999995 - type: mrr_at_100 value: 58.373 - type: mrr_at_1000 value: 58.407 - type: mrr_at_3 value: 55.779 - type: mrr_at_5 value: 57.095 - type: ndcg_at_1 value: 46.749 - type: ndcg_at_10 value: 37.644 - type: ndcg_at_100 value: 35.559000000000005 - type: ndcg_at_1000 value: 44.375 - type: ndcg_at_3 value: 43.354 - type: ndcg_at_5 value: 41.022999999999996 - type: precision_at_1 value: 48.607 - type: precision_at_10 value: 28.08 - type: precision_at_100 value: 9.155000000000001 - type: precision_at_1000 value: 2.2270000000000003 - type: precision_at_3 value: 40.764 - type: precision_at_5 value: 35.728 - type: recall_at_1 value: 6.978 - type: recall_at_10 value: 17.828 - type: recall_at_100 value: 36.010999999999996 - type: recall_at_1000 value: 68.34700000000001 - type: recall_at_3 value: 11.645999999999999 - type: recall_at_5 value: 14.427000000000001 - task: type: Retrieval dataset: name: MTEB NQ type: nq config: default split: test revision: None metrics: - type: map_at_1 value: 30.219 - type: map_at_10 value: 45.633 - type: map_at_100 value: 46.752 - type: map_at_1000 value: 46.778999999999996 - type: map_at_3 value: 41.392 - type: map_at_5 value: 43.778 - type: mrr_at_1 value: 34.327999999999996 - type: mrr_at_10 value: 48.256 - type: mrr_at_100 value: 49.076 - type: mrr_at_1000 value: 49.092999999999996 - type: mrr_at_3 value: 44.786 - type: mrr_at_5 value: 46.766000000000005 - type: ndcg_at_1 value: 34.299 - type: ndcg_at_10 
value: 53.434000000000005 - type: ndcg_at_100 value: 58.03 - type: ndcg_at_1000 value: 58.633 - type: ndcg_at_3 value: 45.433 - type: ndcg_at_5 value: 49.379 - type: precision_at_1 value: 34.299 - type: precision_at_10 value: 8.911 - type: precision_at_100 value: 1.145 - type: precision_at_1000 value: 0.12 - type: precision_at_3 value: 20.896 - type: precision_at_5 value: 14.832 - type: recall_at_1 value: 30.219 - type: recall_at_10 value: 74.59400000000001 - type: recall_at_100 value: 94.392 - type: recall_at_1000 value: 98.832 - type: recall_at_3 value: 53.754000000000005 - type: recall_at_5 value: 62.833000000000006 - task: type: Retrieval dataset: name: MTEB QuoraRetrieval type: quora config: default split: test revision: None metrics: - type: map_at_1 value: 71.139 - type: map_at_10 value: 85.141 - type: map_at_100 value: 85.78099999999999 - type: map_at_1000 value: 85.795 - type: map_at_3 value: 82.139 - type: map_at_5 value: 84.075 - type: mrr_at_1 value: 81.98 - type: mrr_at_10 value: 88.056 - type: mrr_at_100 value: 88.152 - type: mrr_at_1000 value: 88.152 - type: mrr_at_3 value: 87.117 - type: mrr_at_5 value: 87.78099999999999 - type: ndcg_at_1 value: 82.02000000000001 - type: ndcg_at_10 value: 88.807 - type: ndcg_at_100 value: 89.99000000000001 - type: ndcg_at_1000 value: 90.068 - type: ndcg_at_3 value: 85.989 - type: ndcg_at_5 value: 87.627 - type: precision_at_1 value: 82.02000000000001 - type: precision_at_10 value: 13.472999999999999 - type: precision_at_100 value: 1.534 - type: precision_at_1000 value: 0.157 - type: precision_at_3 value: 37.553 - type: precision_at_5 value: 24.788 - type: recall_at_1 value: 71.139 - type: recall_at_10 value: 95.707 - type: recall_at_100 value: 99.666 - type: recall_at_1000 value: 99.983 - type: recall_at_3 value: 87.64699999999999 - type: recall_at_5 value: 92.221 - task: type: Clustering dataset: name: MTEB RedditClustering type: mteb/reddit-clustering config: default split: test revision: 24640382cdbf8abc73003fb0fa6d111a705499eb metrics: - type: v_measure value: 59.11035509193503 - task: type: Clustering dataset: name: MTEB RedditClusteringP2P type: mteb/reddit-clustering-p2p config: default split: test revision: 282350215ef01743dc01b456c7f5241fa8937f16 metrics: - type: v_measure value: 62.44241881422526 - task: type: Retrieval dataset: name: MTEB SCIDOCS type: scidocs config: default split: test revision: None metrics: - type: map_at_1 value: 5.122999999999999 - type: map_at_10 value: 14.45 - type: map_at_100 value: 17.108999999999998 - type: map_at_1000 value: 17.517 - type: map_at_3 value: 10.213999999999999 - type: map_at_5 value: 12.278 - type: mrr_at_1 value: 25.3 - type: mrr_at_10 value: 37.791999999999994 - type: mrr_at_100 value: 39.086 - type: mrr_at_1000 value: 39.121 - type: mrr_at_3 value: 34.666999999999994 - type: mrr_at_5 value: 36.472 - type: ndcg_at_1 value: 25.3 - type: ndcg_at_10 value: 23.469 - type: ndcg_at_100 value: 33.324 - type: ndcg_at_1000 value: 39.357 - type: ndcg_at_3 value: 22.478 - type: ndcg_at_5 value: 19.539 - type: precision_at_1 value: 25.3 - type: precision_at_10 value: 12.3 - type: precision_at_100 value: 2.654 - type: precision_at_1000 value: 0.40800000000000003 - type: precision_at_3 value: 21.667 - type: precision_at_5 value: 17.5 - type: recall_at_1 value: 5.122999999999999 - type: recall_at_10 value: 24.937 - type: recall_at_100 value: 53.833 - type: recall_at_1000 value: 82.85 - type: recall_at_3 value: 13.178 - type: recall_at_5 value: 17.747 - task: type: STS dataset: name: MTEB SICK-R type: 
mteb/sickr-sts config: default split: test revision: a6ea5a8cab320b040a23452cc28066d9beae2cee metrics: - type: cos_sim_pearson value: 86.76549431206278 - type: cos_sim_spearman value: 81.28563534883214 - type: euclidean_pearson value: 84.17180713818567 - type: euclidean_spearman value: 81.1684082302606 - type: manhattan_pearson value: 84.12189753972959 - type: manhattan_spearman value: 81.1134998997958 - task: type: STS dataset: name: MTEB STS12 type: mteb/sts12-sts config: default split: test revision: a0d554a64d88156834ff5ae9920b964011b16384 metrics: - type: cos_sim_pearson value: 85.75137587182017 - type: cos_sim_spearman value: 76.155337187325 - type: euclidean_pearson value: 83.54551546726665 - type: euclidean_spearman value: 76.30324990565346 - type: manhattan_pearson value: 83.52192617483797 - type: manhattan_spearman value: 76.30017227216015 - task: type: STS dataset: name: MTEB STS13 type: mteb/sts13-sts config: default split: test revision: 7e90230a92c190f1bf69ae9002b8cea547a64cca metrics: - type: cos_sim_pearson value: 87.13890050398628 - type: cos_sim_spearman value: 87.84898360302155 - type: euclidean_pearson value: 86.89491809082031 - type: euclidean_spearman value: 87.99935689905651 - type: manhattan_pearson value: 86.86526424376366 - type: manhattan_spearman value: 87.96850732980495 - task: type: STS dataset: name: MTEB STS14 type: mteb/sts14-sts config: default split: test revision: 6031580fec1f6af667f0bd2da0a551cf4f0b2375 metrics: - type: cos_sim_pearson value: 86.01978753231558 - type: cos_sim_spearman value: 83.38989083933329 - type: euclidean_pearson value: 85.28405032045376 - type: euclidean_spearman value: 83.51703914276501 - type: manhattan_pearson value: 85.25775133078966 - type: manhattan_spearman value: 83.52815667821727 - task: type: STS dataset: name: MTEB STS15 type: mteb/sts15-sts config: default split: test revision: ae752c7c21bf194d8b67fd573edf7ae58183cbe3 metrics: - type: cos_sim_pearson value: 88.28482294437876 - type: cos_sim_spearman value: 89.42976214499576 - type: euclidean_pearson value: 88.72677957272468 - type: euclidean_spearman value: 89.30001736116229 - type: manhattan_pearson value: 88.64119331622562 - type: manhattan_spearman value: 89.21771022634893 - task: type: STS dataset: name: MTEB STS16 type: mteb/sts16-sts config: default split: test revision: 4d8694f8f0e0100860b497b999b3dbed754a0513 metrics: - type: cos_sim_pearson value: 83.79810159351987 - type: cos_sim_spearman value: 85.34918402034273 - type: euclidean_pearson value: 84.76058606229002 - type: euclidean_spearman value: 85.45159829941214 - type: manhattan_pearson value: 84.73926491888156 - type: manhattan_spearman value: 85.42568221985898 - task: type: STS dataset: name: MTEB STS17 (en-en) type: mteb/sts17-crosslingual-sts config: en-en split: test revision: af5e6fb845001ecf41f4c1e033ce921939a2a68d metrics: - type: cos_sim_pearson value: 88.92796712570272 - type: cos_sim_spearman value: 88.58925922945812 - type: euclidean_pearson value: 88.97231215531797 - type: euclidean_spearman value: 88.27036385068719 - type: manhattan_pearson value: 88.95761469412228 - type: manhattan_spearman value: 88.23980432487681 - task: type: STS dataset: name: MTEB STS22 (en) type: mteb/sts22-crosslingual-sts config: en split: test revision: 6d1ba47164174a496b7fa5d3569dae26a6813b80 metrics: - type: cos_sim_pearson value: 66.85679810182282 - type: cos_sim_spearman value: 67.80696709003128 - type: euclidean_pearson value: 68.77524185947989 - type: euclidean_spearman value: 68.032438075422 - type: 
manhattan_pearson value: 68.60489100404182 - type: manhattan_spearman value: 67.75418889226138 - task: type: STS dataset: name: MTEB STSBenchmark type: mteb/stsbenchmark-sts config: default split: test revision: b0fddb56ed78048fa8b90373c8a3cfc37b684831 metrics: - type: cos_sim_pearson value: 86.33287880999367 - type: cos_sim_spearman value: 87.32401087204754 - type: euclidean_pearson value: 87.27961069148029 - type: euclidean_spearman value: 87.3547683085868 - type: manhattan_pearson value: 87.24405442789622 - type: manhattan_spearman value: 87.32896271166672 - task: type: Reranking dataset: name: MTEB SciDocsRR type: mteb/scidocs-reranking config: default split: test revision: d3c5e1fc0b855ab6097bf1cda04dd73947d7caab metrics: - type: map value: 87.71553665286558 - type: mrr value: 96.42436176749902 - task: type: Retrieval dataset: name: MTEB SciFact type: scifact config: default split: test revision: None metrics: - type: map_at_1 value: 61.094 - type: map_at_10 value: 71.066 - type: map_at_100 value: 71.608 - type: map_at_1000 value: 71.629 - type: map_at_3 value: 68.356 - type: map_at_5 value: 70.15 - type: mrr_at_1 value: 64 - type: mrr_at_10 value: 71.82300000000001 - type: mrr_at_100 value: 72.251 - type: mrr_at_1000 value: 72.269 - type: mrr_at_3 value: 69.833 - type: mrr_at_5 value: 71.11699999999999 - type: ndcg_at_1 value: 64 - type: ndcg_at_10 value: 75.286 - type: ndcg_at_100 value: 77.40700000000001 - type: ndcg_at_1000 value: 77.806 - type: ndcg_at_3 value: 70.903 - type: ndcg_at_5 value: 73.36399999999999 - type: precision_at_1 value: 64 - type: precision_at_10 value: 9.9 - type: precision_at_100 value: 1.093 - type: precision_at_1000 value: 0.11199999999999999 - type: precision_at_3 value: 27.667 - type: precision_at_5 value: 18.333 - type: recall_at_1 value: 61.094 - type: recall_at_10 value: 87.256 - type: recall_at_100 value: 96.5 - type: recall_at_1000 value: 99.333 - type: recall_at_3 value: 75.6 - type: recall_at_5 value: 81.789 - task: type: PairClassification dataset: name: MTEB SprintDuplicateQuestions type: mteb/sprintduplicatequestions-pairclassification config: default split: test revision: d66bd1f72af766a5cc4b0ca5e00c162f89e8cc46 metrics: - type: cos_sim_accuracy value: 99.82871287128712 - type: cos_sim_ap value: 95.9325677692287 - type: cos_sim_f1 value: 91.13924050632912 - type: cos_sim_precision value: 92.3076923076923 - type: cos_sim_recall value: 90 - type: dot_accuracy value: 99.7980198019802 - type: dot_ap value: 94.56107207796 - type: dot_f1 value: 89.41908713692946 - type: dot_precision value: 92.88793103448276 - type: dot_recall value: 86.2 - type: euclidean_accuracy value: 99.82871287128712 - type: euclidean_ap value: 95.94390332507025 - type: euclidean_f1 value: 91.17797042325346 - type: euclidean_precision value: 93.02809573361083 - type: euclidean_recall value: 89.4 - type: manhattan_accuracy value: 99.82871287128712 - type: manhattan_ap value: 95.97587114452257 - type: manhattan_f1 value: 91.25821121778675 - type: manhattan_precision value: 92.23697650663942 - type: manhattan_recall value: 90.3 - type: max_accuracy value: 99.82871287128712 - type: max_ap value: 95.97587114452257 - type: max_f1 value: 91.25821121778675 - task: type: Clustering dataset: name: MTEB StackExchangeClustering type: mteb/stackexchange-clustering config: default split: test revision: 6cbc1f7b2bc0622f2e39d2c77fa502909748c259 metrics: - type: v_measure value: 66.13974351708839 - task: type: Clustering dataset: name: MTEB StackExchangeClusteringP2P type: 
mteb/stackexchange-clustering-p2p config: default split: test revision: 815ca46b2622cec33ccafc3735d572c266efdb44 metrics: - type: v_measure value: 35.594544722932234 - task: type: Reranking dataset: name: MTEB StackOverflowDupQuestions type: mteb/stackoverflowdupquestions-reranking config: default split: test revision: e185fbe320c72810689fc5848eb6114e1ef5ec69 metrics: - type: map value: 54.718738983377726 - type: mrr value: 55.61655154486037 - task: type: Summarization dataset: name: MTEB SummEval type: mteb/summeval config: default split: test revision: cda12ad7615edc362dbf25a00fdd61d3b1eaf93c metrics: - type: cos_sim_pearson value: 30.37028359646597 - type: cos_sim_spearman value: 30.866534307244443 - type: dot_pearson value: 29.89037691541816 - type: dot_spearman value: 29.941267567971718 - task: type: Retrieval dataset: name: MTEB TRECCOVID type: trec-covid config: default split: test revision: None metrics: - type: map_at_1 value: 0.20400000000000001 - type: map_at_10 value: 1.7340000000000002 - type: map_at_100 value: 9.966 - type: map_at_1000 value: 25.119000000000003 - type: map_at_3 value: 0.596 - type: map_at_5 value: 0.941 - type: mrr_at_1 value: 76 - type: mrr_at_10 value: 85.85199999999999 - type: mrr_at_100 value: 85.85199999999999 - type: mrr_at_1000 value: 85.85199999999999 - type: mrr_at_3 value: 84.667 - type: mrr_at_5 value: 85.56700000000001 - type: ndcg_at_1 value: 71 - type: ndcg_at_10 value: 69.60300000000001 - type: ndcg_at_100 value: 54.166000000000004 - type: ndcg_at_1000 value: 51.085 - type: ndcg_at_3 value: 71.95 - type: ndcg_at_5 value: 71.17599999999999 - type: precision_at_1 value: 76 - type: precision_at_10 value: 74.2 - type: precision_at_100 value: 55.96 - type: precision_at_1000 value: 22.584 - type: precision_at_3 value: 77.333 - type: precision_at_5 value: 75.6 - type: recall_at_1 value: 0.20400000000000001 - type: recall_at_10 value: 1.992 - type: recall_at_100 value: 13.706999999999999 - type: recall_at_1000 value: 48.732 - type: recall_at_3 value: 0.635 - type: recall_at_5 value: 1.034 - task: type: BitextMining dataset: name: MTEB Tatoeba (sqi-eng) type: mteb/tatoeba-bitext-mining config: sqi-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 8 - type: f1 value: 6.298401229470593 - type: precision value: 5.916991709050532 - type: recall value: 8 - task: type: BitextMining dataset: name: MTEB Tatoeba (fry-eng) type: mteb/tatoeba-bitext-mining config: fry-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 17.341040462427745 - type: f1 value: 14.621650026274303 - type: precision value: 13.9250609139035 - type: recall value: 17.341040462427745 - task: type: BitextMining dataset: name: MTEB Tatoeba (kur-eng) type: mteb/tatoeba-bitext-mining config: kur-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 8.536585365853659 - type: f1 value: 6.30972482801751 - type: precision value: 5.796517326875398 - type: recall value: 8.536585365853659 - task: type: BitextMining dataset: name: MTEB Tatoeba (tur-eng) type: mteb/tatoeba-bitext-mining config: tur-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 6.4 - type: f1 value: 4.221126743626743 - type: precision value: 3.822815143403898 - type: recall value: 6.4 - task: type: BitextMining dataset: name: MTEB Tatoeba (deu-eng) type: mteb/tatoeba-bitext-mining config: deu-eng split: test revision: 
9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 19.8 - type: f1 value: 18.13768093781855 - type: precision value: 17.54646004378763 - type: recall value: 19.8 - task: type: BitextMining dataset: name: MTEB Tatoeba (nld-eng) type: mteb/tatoeba-bitext-mining config: nld-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 13.700000000000001 - type: f1 value: 12.367662337662336 - type: precision value: 11.934237966189185 - type: recall value: 13.700000000000001 - task: type: BitextMining dataset: name: MTEB Tatoeba (ron-eng) type: mteb/tatoeba-bitext-mining config: ron-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 14.299999999999999 - type: f1 value: 10.942180289268338 - type: precision value: 10.153968847262192 - type: recall value: 14.299999999999999 - task: type: BitextMining dataset: name: MTEB Tatoeba (ang-eng) type: mteb/tatoeba-bitext-mining config: ang-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 22.388059701492537 - type: f1 value: 17.00157733660433 - type: precision value: 15.650551589876702 - type: recall value: 22.388059701492537 - task: type: BitextMining dataset: name: MTEB Tatoeba (ido-eng) type: mteb/tatoeba-bitext-mining config: ido-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 22 - type: f1 value: 17.4576947358322 - type: precision value: 16.261363669827777 - type: recall value: 22 - task: type: BitextMining dataset: name: MTEB Tatoeba (jav-eng) type: mteb/tatoeba-bitext-mining config: jav-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 8.292682926829269 - type: f1 value: 5.544048456005624 - type: precision value: 5.009506603002538 - type: recall value: 8.292682926829269 - task: type: BitextMining dataset: name: MTEB Tatoeba (isl-eng) type: mteb/tatoeba-bitext-mining config: isl-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 5.4 - type: f1 value: 4.148897174789229 - type: precision value: 3.862217259449564 - type: recall value: 5.4 - task: type: BitextMining dataset: name: MTEB Tatoeba (slv-eng) type: mteb/tatoeba-bitext-mining config: slv-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 5.5893074119076545 - type: f1 value: 4.375041810373159 - type: precision value: 4.181207113088141 - type: recall value: 5.5893074119076545 - task: type: BitextMining dataset: name: MTEB Tatoeba (cym-eng) type: mteb/tatoeba-bitext-mining config: cym-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 8.17391304347826 - type: f1 value: 6.448011891490153 - type: precision value: 5.9719116632160105 - type: recall value: 8.17391304347826 - task: type: BitextMining dataset: name: MTEB Tatoeba (kaz-eng) type: mteb/tatoeba-bitext-mining config: kaz-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 0.8695652173913043 - type: f1 value: 0.582815734989648 - type: precision value: 0.5580885233059146 - type: recall value: 0.8695652173913043 - task: type: BitextMining dataset: name: MTEB Tatoeba (est-eng) type: mteb/tatoeba-bitext-mining config: est-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 5.1 - type: f1 value: 3.5000615825615826 - type: precision value: 
3.2073523577994707 - type: recall value: 5.1 - task: type: BitextMining dataset: name: MTEB Tatoeba (heb-eng) type: mteb/tatoeba-bitext-mining config: heb-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 0.3 - type: f1 value: 0.10109884927372195 - type: precision value: 0.10055127118392897 - type: recall value: 0.3 - task: type: BitextMining dataset: name: MTEB Tatoeba (gla-eng) type: mteb/tatoeba-bitext-mining config: gla-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 3.8600723763570564 - type: f1 value: 2.8177402725050493 - type: precision value: 2.5662687819699213 - type: recall value: 3.8600723763570564 - task: type: BitextMining dataset: name: MTEB Tatoeba (mar-eng) type: mteb/tatoeba-bitext-mining config: mar-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 0 - type: f1 value: 0 - type: precision value: 0 - type: recall value: 0 - task: type: BitextMining dataset: name: MTEB Tatoeba (lat-eng) type: mteb/tatoeba-bitext-mining config: lat-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 15.299999999999999 - type: f1 value: 11.377964359824292 - type: precision value: 10.361140908892764 - type: recall value: 15.299999999999999 - task: type: BitextMining dataset: name: MTEB Tatoeba (bel-eng) type: mteb/tatoeba-bitext-mining config: bel-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 1.3 - type: f1 value: 0.9600820232399179 - type: precision value: 0.9151648856810397 - type: recall value: 1.3 - task: type: BitextMining dataset: name: MTEB Tatoeba (pms-eng) type: mteb/tatoeba-bitext-mining config: pms-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 14.095238095238095 - type: f1 value: 11.40081541819044 - type: precision value: 10.645867976820359 - type: recall value: 14.095238095238095 - task: type: BitextMining dataset: name: MTEB Tatoeba (gle-eng) type: mteb/tatoeba-bitext-mining config: gle-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 4 - type: f1 value: 2.3800704501963432 - type: precision value: 2.0919368034607455 - type: recall value: 4 - task: type: BitextMining dataset: name: MTEB Tatoeba (pes-eng) type: mteb/tatoeba-bitext-mining config: pes-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 0.3 - type: f1 value: 0.2002053388090349 - type: precision value: 0.2001027749229188 - type: recall value: 0.3 - task: type: BitextMining dataset: name: MTEB Tatoeba (nob-eng) type: mteb/tatoeba-bitext-mining config: nob-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 11.700000000000001 - type: f1 value: 10.29755634495992 - type: precision value: 9.876637220292393 - type: recall value: 11.700000000000001 - task: type: BitextMining dataset: name: MTEB Tatoeba (bul-eng) type: mteb/tatoeba-bitext-mining config: bul-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 1.7000000000000002 - type: f1 value: 0.985815849620051 - type: precision value: 0.8884689922480621 - type: recall value: 1.7000000000000002 - task: type: BitextMining dataset: name: MTEB Tatoeba (cbk-eng) type: mteb/tatoeba-bitext-mining config: cbk-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: 
accuracy value: 17.599999999999998 - type: f1 value: 14.086312656126182 - type: precision value: 13.192360560816125 - type: recall value: 17.599999999999998 - task: type: BitextMining dataset: name: MTEB Tatoeba (hun-eng) type: mteb/tatoeba-bitext-mining config: hun-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 6.1 - type: f1 value: 4.683795729173087 - type: precision value: 4.31687579027912 - type: recall value: 6.1 - task: type: BitextMining dataset: name: MTEB Tatoeba (uig-eng) type: mteb/tatoeba-bitext-mining config: uig-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 0.4 - type: f1 value: 0.20966666666666667 - type: precision value: 0.20500700280112047 - type: recall value: 0.4 - task: type: BitextMining dataset: name: MTEB Tatoeba (rus-eng) type: mteb/tatoeba-bitext-mining config: rus-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 0.6 - type: f1 value: 0.2454665118079752 - type: precision value: 0.2255125167991618 - type: recall value: 0.6 - task: type: BitextMining dataset: name: MTEB Tatoeba (spa-eng) type: mteb/tatoeba-bitext-mining config: spa-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 21 - type: f1 value: 18.965901242066018 - type: precision value: 18.381437375171 - type: recall value: 21 - task: type: BitextMining dataset: name: MTEB Tatoeba (hye-eng) type: mteb/tatoeba-bitext-mining config: hye-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 0.5390835579514826 - type: f1 value: 0.4048898457205192 - type: precision value: 0.4046018763809678 - type: recall value: 0.5390835579514826 - task: type: BitextMining dataset: name: MTEB Tatoeba (tel-eng) type: mteb/tatoeba-bitext-mining config: tel-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 1.282051282051282 - type: f1 value: 0.5098554872310529 - type: precision value: 0.4715099715099715 - type: recall value: 1.282051282051282 - task: type: BitextMining dataset: name: MTEB Tatoeba (afr-eng) type: mteb/tatoeba-bitext-mining config: afr-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 10.7 - type: f1 value: 8.045120643200706 - type: precision value: 7.387598023074453 - type: recall value: 10.7 - task: type: BitextMining dataset: name: MTEB Tatoeba (mon-eng) type: mteb/tatoeba-bitext-mining config: mon-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 2.272727272727273 - type: f1 value: 1.44184724004356 - type: precision value: 1.4082306862044767 - type: recall value: 2.272727272727273 - task: type: BitextMining dataset: name: MTEB Tatoeba (arz-eng) type: mteb/tatoeba-bitext-mining config: arz-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 0.20964360587002098 - type: f1 value: 0.001335309591528796 - type: precision value: 0.0006697878781789807 - type: recall value: 0.20964360587002098 - task: type: BitextMining dataset: name: MTEB Tatoeba (hrv-eng) type: mteb/tatoeba-bitext-mining config: hrv-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 7.1 - type: f1 value: 5.522254020507502 - type: precision value: 5.081849426723903 - type: recall value: 7.1 - task: type: BitextMining dataset: name: MTEB Tatoeba (nov-eng) type: 
mteb/tatoeba-bitext-mining config: nov-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 36.57587548638132 - type: f1 value: 30.325515383881147 - type: precision value: 28.59255854392041 - type: recall value: 36.57587548638132 - task: type: BitextMining dataset: name: MTEB Tatoeba (gsw-eng) type: mteb/tatoeba-bitext-mining config: gsw-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 16.23931623931624 - type: f1 value: 13.548783761549718 - type: precision value: 13.0472896359184 - type: recall value: 16.23931623931624 - task: type: BitextMining dataset: name: MTEB Tatoeba (nds-eng) type: mteb/tatoeba-bitext-mining config: nds-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 16.3 - type: f1 value: 13.3418584934734 - type: precision value: 12.506853047473756 - type: recall value: 16.3 - task: type: BitextMining dataset: name: MTEB Tatoeba (ukr-eng) type: mteb/tatoeba-bitext-mining config: ukr-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 1 - type: f1 value: 0.7764001197963462 - type: precision value: 0.7551049317943337 - type: recall value: 1 - task: type: BitextMining dataset: name: MTEB Tatoeba (uzb-eng) type: mteb/tatoeba-bitext-mining config: uzb-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 3.9719626168224296 - type: f1 value: 3.190729401654313 - type: precision value: 3.001159168296747 - type: recall value: 3.9719626168224296 - task: type: BitextMining dataset: name: MTEB Tatoeba (lit-eng) type: mteb/tatoeba-bitext-mining config: lit-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 3.4000000000000004 - type: f1 value: 2.4847456001574653 - type: precision value: 2.308739271803959 - type: recall value: 3.4000000000000004 - task: type: BitextMining dataset: name: MTEB Tatoeba (ina-eng) type: mteb/tatoeba-bitext-mining config: ina-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 36.9 - type: f1 value: 31.390407955063697 - type: precision value: 29.631294298308614 - type: recall value: 36.9 - task: type: BitextMining dataset: name: MTEB Tatoeba (lfn-eng) type: mteb/tatoeba-bitext-mining config: lfn-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 14.2 - type: f1 value: 12.551591810861895 - type: precision value: 12.100586917562724 - type: recall value: 14.2 - task: type: BitextMining dataset: name: MTEB Tatoeba (zsm-eng) type: mteb/tatoeba-bitext-mining config: zsm-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 9.2 - type: f1 value: 7.5561895648211435 - type: precision value: 7.177371101110253 - type: recall value: 9.2 - task: type: BitextMining dataset: name: MTEB Tatoeba (ita-eng) type: mteb/tatoeba-bitext-mining config: ita-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 21.2 - type: f1 value: 18.498268429117875 - type: precision value: 17.693915156965357 - type: recall value: 21.2 - task: type: BitextMining dataset: name: MTEB Tatoeba (cmn-eng) type: mteb/tatoeba-bitext-mining config: cmn-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 4.2 - type: f1 value: 2.886572782530936 - type: precision value: 2.5806792595351915 - type: 
recall value: 4.2 - task: type: BitextMining dataset: name: MTEB Tatoeba (lvs-eng) type: mteb/tatoeba-bitext-mining config: lvs-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 6.800000000000001 - type: f1 value: 4.881091920308238 - type: precision value: 4.436731163345769 - type: recall value: 6.800000000000001 - task: type: BitextMining dataset: name: MTEB Tatoeba (glg-eng) type: mteb/tatoeba-bitext-mining config: glg-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 22.1 - type: f1 value: 18.493832677140738 - type: precision value: 17.52055858924503 - type: recall value: 22.1 - task: type: BitextMining dataset: name: MTEB Tatoeba (ceb-eng) type: mteb/tatoeba-bitext-mining config: ceb-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 6 - type: f1 value: 4.58716840215435 - type: precision value: 4.303119297298687 - type: recall value: 6 - task: type: BitextMining dataset: name: MTEB Tatoeba (bre-eng) type: mteb/tatoeba-bitext-mining config: bre-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 5.5 - type: f1 value: 3.813678559437776 - type: precision value: 3.52375763382276 - type: recall value: 5.5 - task: type: BitextMining dataset: name: MTEB Tatoeba (ben-eng) type: mteb/tatoeba-bitext-mining config: ben-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 0.2 - type: f1 value: 0.06701509872241579 - type: precision value: 0.05017452006980803 - type: recall value: 0.2 - task: type: BitextMining dataset: name: MTEB Tatoeba (swg-eng) type: mteb/tatoeba-bitext-mining config: swg-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 12.5 - type: f1 value: 9.325396825396826 - type: precision value: 8.681972789115646 - type: recall value: 12.5 - task: type: BitextMining dataset: name: MTEB Tatoeba (arq-eng) type: mteb/tatoeba-bitext-mining config: arq-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 0.43907793633369924 - type: f1 value: 0.26369680618309754 - type: precision value: 0.24710650393580552 - type: recall value: 0.43907793633369924 - task: type: BitextMining dataset: name: MTEB Tatoeba (kab-eng) type: mteb/tatoeba-bitext-mining config: kab-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 1.7000000000000002 - type: f1 value: 1.0240727731562105 - type: precision value: 0.9379457073996874 - type: recall value: 1.7000000000000002 - task: type: BitextMining dataset: name: MTEB Tatoeba (fra-eng) type: mteb/tatoeba-bitext-mining config: fra-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 24.6 - type: f1 value: 21.527732683982684 - type: precision value: 20.460911398969852 - type: recall value: 24.6 - task: type: BitextMining dataset: name: MTEB Tatoeba (por-eng) type: mteb/tatoeba-bitext-mining config: por-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 23.400000000000002 - type: f1 value: 18.861948871033608 - type: precision value: 17.469730524988158 - type: recall value: 23.400000000000002 - task: type: BitextMining dataset: name: MTEB Tatoeba (tat-eng) type: mteb/tatoeba-bitext-mining config: tat-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 
1.3 - type: f1 value: 0.8081609699284277 - type: precision value: 0.8041232161030668 - type: recall value: 1.3 - task: type: BitextMining dataset: name: MTEB Tatoeba (oci-eng) type: mteb/tatoeba-bitext-mining config: oci-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 14.399999999999999 - type: f1 value: 11.982642360594898 - type: precision value: 11.423911681034546 - type: recall value: 14.399999999999999 - task: type: BitextMining dataset: name: MTEB Tatoeba (pol-eng) type: mteb/tatoeba-bitext-mining config: pol-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 8.7 - type: f1 value: 6.565099922088448 - type: precision value: 6.009960806394631 - type: recall value: 8.7 - task: type: BitextMining dataset: name: MTEB Tatoeba (war-eng) type: mteb/tatoeba-bitext-mining config: war-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 7.1 - type: f1 value: 5.483244116053285 - type: precision value: 5.08036675810842 - type: recall value: 7.1 - task: type: BitextMining dataset: name: MTEB Tatoeba (aze-eng) type: mteb/tatoeba-bitext-mining config: aze-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 4.3999999999999995 - type: f1 value: 3.2643948695904146 - type: precision value: 3.031506651474311 - type: recall value: 4.3999999999999995 - task: type: BitextMining dataset: name: MTEB Tatoeba (vie-eng) type: mteb/tatoeba-bitext-mining config: vie-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 7.1 - type: f1 value: 5.2787766765398345 - type: precision value: 4.883891459552525 - type: recall value: 7.1 - task: type: BitextMining dataset: name: MTEB Tatoeba (nno-eng) type: mteb/tatoeba-bitext-mining config: nno-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 8.5 - type: f1 value: 7.022436974789914 - type: precision value: 6.517919923571304 - type: recall value: 8.5 - task: type: BitextMining dataset: name: MTEB Tatoeba (cha-eng) type: mteb/tatoeba-bitext-mining config: cha-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 17.51824817518248 - type: f1 value: 14.159211038143834 - type: precision value: 13.419131771033424 - type: recall value: 17.51824817518248 - task: type: BitextMining dataset: name: MTEB Tatoeba (mhr-eng) type: mteb/tatoeba-bitext-mining config: mhr-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 0.3 - type: f1 value: 0.1008802791411487 - type: precision value: 0.10044111373948113 - type: recall value: 0.3 - task: type: BitextMining dataset: name: MTEB Tatoeba (dan-eng) type: mteb/tatoeba-bitext-mining config: dan-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 11.3 - type: f1 value: 10.0642631078894 - type: precision value: 9.714481189937882 - type: recall value: 11.3 - task: type: BitextMining dataset: name: MTEB Tatoeba (ell-eng) type: mteb/tatoeba-bitext-mining config: ell-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 0.7000000000000001 - type: f1 value: 0.5023625310859353 - type: precision value: 0.5011883541295307 - type: recall value: 0.7000000000000001 - task: type: BitextMining dataset: name: MTEB Tatoeba (amh-eng) type: mteb/tatoeba-bitext-mining config: amh-eng split: 
test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 1.7857142857142856 - type: f1 value: 0.6731500547238763 - type: precision value: 0.6364087301587301 - type: recall value: 1.7857142857142856 - task: type: BitextMining dataset: name: MTEB Tatoeba (pam-eng) type: mteb/tatoeba-bitext-mining config: pam-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 7.000000000000001 - type: f1 value: 4.850226809905071 - type: precision value: 4.3549672188068485 - type: recall value: 7.000000000000001 - task: type: BitextMining dataset: name: MTEB Tatoeba (hsb-eng) type: mteb/tatoeba-bitext-mining config: hsb-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 5.383022774327122 - type: f1 value: 4.080351427081423 - type: precision value: 3.7431771127423294 - type: recall value: 5.383022774327122 - task: type: BitextMining dataset: name: MTEB Tatoeba (srp-eng) type: mteb/tatoeba-bitext-mining config: srp-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 3.9 - type: f1 value: 2.975065835065835 - type: precision value: 2.7082951373488764 - type: recall value: 3.9 - task: type: BitextMining dataset: name: MTEB Tatoeba (epo-eng) type: mteb/tatoeba-bitext-mining config: epo-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 13.8 - type: f1 value: 10.976459812917616 - type: precision value: 10.214566903851944 - type: recall value: 13.8 - task: type: BitextMining dataset: name: MTEB Tatoeba (kzj-eng) type: mteb/tatoeba-bitext-mining config: kzj-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 4.9 - type: f1 value: 3.5998112099809334 - type: precision value: 3.391430386128988 - type: recall value: 4.9 - task: type: BitextMining dataset: name: MTEB Tatoeba (awa-eng) type: mteb/tatoeba-bitext-mining config: awa-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 2.1645021645021645 - type: f1 value: 0.28969205674033943 - type: precision value: 0.1648931376979724 - type: recall value: 2.1645021645021645 - task: type: BitextMining dataset: name: MTEB Tatoeba (fao-eng) type: mteb/tatoeba-bitext-mining config: fao-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 9.541984732824428 - type: f1 value: 8.129327179123026 - type: precision value: 7.860730567672363 - type: recall value: 9.541984732824428 - task: type: BitextMining dataset: name: MTEB Tatoeba (mal-eng) type: mteb/tatoeba-bitext-mining config: mal-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 0.5822416302765648 - type: f1 value: 0.3960292169899156 - type: precision value: 0.36794436357755134 - type: recall value: 0.5822416302765648 - task: type: BitextMining dataset: name: MTEB Tatoeba (ile-eng) type: mteb/tatoeba-bitext-mining config: ile-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 25.900000000000002 - type: f1 value: 20.98162273769728 - type: precision value: 19.591031936732236 - type: recall value: 25.900000000000002 - task: type: BitextMining dataset: name: MTEB Tatoeba (bos-eng) type: mteb/tatoeba-bitext-mining config: bos-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 9.322033898305085 - type: f1 value: 7.1764632211739166 
- type: precision value: 6.547619047619047 - type: recall value: 9.322033898305085 - task: type: BitextMining dataset: name: MTEB Tatoeba (cor-eng) type: mteb/tatoeba-bitext-mining config: cor-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 4.3999999999999995 - type: f1 value: 3.0484795026022216 - type: precision value: 2.8132647991077686 - type: recall value: 4.3999999999999995 - task: type: BitextMining dataset: name: MTEB Tatoeba (cat-eng) type: mteb/tatoeba-bitext-mining config: cat-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 18.8 - type: f1 value: 15.52276497119774 - type: precision value: 14.63296284434154 - type: recall value: 18.8 - task: type: BitextMining dataset: name: MTEB Tatoeba (eus-eng) type: mteb/tatoeba-bitext-mining config: eus-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 10 - type: f1 value: 7.351901305737391 - type: precision value: 6.759061952118555 - type: recall value: 10 - task: type: BitextMining dataset: name: MTEB Tatoeba (yue-eng) type: mteb/tatoeba-bitext-mining config: yue-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 3.1 - type: f1 value: 2.1527437641723353 - type: precision value: 2.0008336640383417 - type: recall value: 3.1 - task: type: BitextMining dataset: name: MTEB Tatoeba (swe-eng) type: mteb/tatoeba-bitext-mining config: swe-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 10.6 - type: f1 value: 8.471815215313617 - type: precision value: 7.942319409218233 - type: recall value: 10.6 - task: type: BitextMining dataset: name: MTEB Tatoeba (dtp-eng) type: mteb/tatoeba-bitext-mining config: dtp-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 4.3 - type: f1 value: 2.7338036427188244 - type: precision value: 2.5492261384839052 - type: recall value: 4.3 - task: type: BitextMining dataset: name: MTEB Tatoeba (kat-eng) type: mteb/tatoeba-bitext-mining config: kat-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 0.40214477211796246 - type: f1 value: 0.28150134048257375 - type: precision value: 0.2751516861859743 - type: recall value: 0.40214477211796246 - task: type: BitextMining dataset: name: MTEB Tatoeba (jpn-eng) type: mteb/tatoeba-bitext-mining config: jpn-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 3 - type: f1 value: 1.5834901411814404 - type: precision value: 1.3894010894944848 - type: recall value: 3 - task: type: BitextMining dataset: name: MTEB Tatoeba (csb-eng) type: mteb/tatoeba-bitext-mining config: csb-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 7.905138339920949 - type: f1 value: 6.6397047981096735 - type: precision value: 6.32664437012263 - type: recall value: 7.905138339920949 - task: type: BitextMining dataset: name: MTEB Tatoeba (xho-eng) type: mteb/tatoeba-bitext-mining config: xho-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 3.5211267605633805 - type: f1 value: 2.173419196807775 - type: precision value: 2.14388897487489 - type: recall value: 3.5211267605633805 - task: type: BitextMining dataset: name: MTEB Tatoeba (orv-eng) type: mteb/tatoeba-bitext-mining config: orv-eng split: test revision: 
9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 0.23952095808383234 - type: f1 value: 0.001262128032547595 - type: precision value: 0.0006327654461278806 - type: recall value: 0.23952095808383234 - task: type: BitextMining dataset: name: MTEB Tatoeba (ind-eng) type: mteb/tatoeba-bitext-mining config: ind-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 10.4 - type: f1 value: 8.370422351826372 - type: precision value: 7.943809523809523 - type: recall value: 10.4 - task: type: BitextMining dataset: name: MTEB Tatoeba (tuk-eng) type: mteb/tatoeba-bitext-mining config: tuk-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 5.41871921182266 - type: f1 value: 3.4763895108722696 - type: precision value: 3.1331846246882176 - type: recall value: 5.41871921182266 - task: type: BitextMining dataset: name: MTEB Tatoeba (max-eng) type: mteb/tatoeba-bitext-mining config: max-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 9.15492957746479 - type: f1 value: 7.267458920187794 - type: precision value: 6.893803787858966 - type: recall value: 9.15492957746479 - task: type: BitextMining dataset: name: MTEB Tatoeba (swh-eng) type: mteb/tatoeba-bitext-mining config: swh-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 9.487179487179487 - type: f1 value: 6.902767160316073 - type: precision value: 6.450346503818517 - type: recall value: 9.487179487179487 - task: type: BitextMining dataset: name: MTEB Tatoeba (hin-eng) type: mteb/tatoeba-bitext-mining config: hin-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 0.1 - type: f1 value: 0.0002042900919305414 - type: precision value: 0.00010224948875255625 - type: recall value: 0.1 - task: type: BitextMining dataset: name: MTEB Tatoeba (dsb-eng) type: mteb/tatoeba-bitext-mining config: dsb-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 5.010438413361169 - type: f1 value: 3.8116647214505277 - type: precision value: 3.5454644309619634 - type: recall value: 5.010438413361169 - task: type: BitextMining dataset: name: MTEB Tatoeba (ber-eng) type: mteb/tatoeba-bitext-mining config: ber-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 6.2 - type: f1 value: 5.213158915433869 - type: precision value: 5.080398110661268 - type: recall value: 6.2 - task: type: BitextMining dataset: name: MTEB Tatoeba (tam-eng) type: mteb/tatoeba-bitext-mining config: tam-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 0.9771986970684038 - type: f1 value: 0.5061388123277374 - type: precision value: 0.43431053203040165 - type: recall value: 0.9771986970684038 - task: type: BitextMining dataset: name: MTEB Tatoeba (slk-eng) type: mteb/tatoeba-bitext-mining config: slk-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 7.3 - type: f1 value: 5.6313180921027755 - type: precision value: 5.303887400540395 - type: recall value: 7.3 - task: type: BitextMining dataset: name: MTEB Tatoeba (tgl-eng) type: mteb/tatoeba-bitext-mining config: tgl-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 3.5999999999999996 - type: f1 value: 3.2180089485458607 - type: precision value: 
3.1006756756756753 - type: recall value: 3.5999999999999996 - task: type: BitextMining dataset: name: MTEB Tatoeba (ast-eng) type: mteb/tatoeba-bitext-mining config: ast-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 22.04724409448819 - type: f1 value: 17.92525934258218 - type: precision value: 16.48251629836593 - type: recall value: 22.04724409448819 - task: type: BitextMining dataset: name: MTEB Tatoeba (mkd-eng) type: mteb/tatoeba-bitext-mining config: mkd-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 0.5 - type: f1 value: 0.1543743186232414 - type: precision value: 0.13554933572174951 - type: recall value: 0.5 - task: type: BitextMining dataset: name: MTEB Tatoeba (khm-eng) type: mteb/tatoeba-bitext-mining config: khm-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 0.8310249307479225 - type: f1 value: 0.5102255597841558 - type: precision value: 0.4859595744731704 - type: recall value: 0.8310249307479225 - task: type: BitextMining dataset: name: MTEB Tatoeba (ces-eng) type: mteb/tatoeba-bitext-mining config: ces-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 6.9 - type: f1 value: 4.7258390633390635 - type: precision value: 4.288366570275279 - type: recall value: 6.9 - task: type: BitextMining dataset: name: MTEB Tatoeba (tzl-eng) type: mteb/tatoeba-bitext-mining config: tzl-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 17.307692307692307 - type: f1 value: 14.763313609467454 - type: precision value: 14.129273504273504 - type: recall value: 17.307692307692307 - task: type: BitextMining dataset: name: MTEB Tatoeba (urd-eng) type: mteb/tatoeba-bitext-mining config: urd-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 0.3 - type: f1 value: 0.0022196828248667185 - type: precision value: 0.0011148527298850575 - type: recall value: 0.3 - task: type: BitextMining dataset: name: MTEB Tatoeba (ara-eng) type: mteb/tatoeba-bitext-mining config: ara-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 0.3 - type: f1 value: 0.3 - type: precision value: 0.3 - type: recall value: 0.3 - task: type: BitextMining dataset: name: MTEB Tatoeba (kor-eng) type: mteb/tatoeba-bitext-mining config: kor-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 0.6 - type: f1 value: 0.500206611570248 - type: precision value: 0.5001034126163392 - type: recall value: 0.6 - task: type: BitextMining dataset: name: MTEB Tatoeba (yid-eng) type: mteb/tatoeba-bitext-mining config: yid-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 0.4716981132075472 - type: f1 value: 0.2953377695417789 - type: precision value: 0.2754210459668228 - type: recall value: 0.4716981132075472 - task: type: BitextMining dataset: name: MTEB Tatoeba (fin-eng) type: mteb/tatoeba-bitext-mining config: fin-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 4.3999999999999995 - type: f1 value: 3.6228414442700156 - type: precision value: 3.4318238993710692 - type: recall value: 4.3999999999999995 - task: type: BitextMining dataset: name: MTEB Tatoeba (tha-eng) type: mteb/tatoeba-bitext-mining config: tha-eng split: test revision: 
9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 1.2773722627737227 - type: f1 value: 1.0043318098096732 - type: precision value: 0.9735777358593729 - type: recall value: 1.2773722627737227 - task: type: BitextMining dataset: name: MTEB Tatoeba (wuu-eng) type: mteb/tatoeba-bitext-mining config: wuu-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 3.9 - type: f1 value: 2.6164533097276226 - type: precision value: 2.3558186153594085 - type: recall value: 3.9 - task: type: Retrieval dataset: name: MTEB Touche2020 type: webis-touche2020 config: default split: test revision: None metrics: - type: map_at_1 value: 1.5779999999999998 - type: map_at_10 value: 8.339 - type: map_at_100 value: 14.601 - type: map_at_1000 value: 16.104 - type: map_at_3 value: 4.06 - type: map_at_5 value: 6.049 - type: mrr_at_1 value: 18.367 - type: mrr_at_10 value: 35.178 - type: mrr_at_100 value: 36.464999999999996 - type: mrr_at_1000 value: 36.464999999999996 - type: mrr_at_3 value: 29.932 - type: mrr_at_5 value: 34.32 - type: ndcg_at_1 value: 16.326999999999998 - type: ndcg_at_10 value: 20.578 - type: ndcg_at_100 value: 34.285 - type: ndcg_at_1000 value: 45.853 - type: ndcg_at_3 value: 19.869999999999997 - type: ndcg_at_5 value: 22.081999999999997 - type: precision_at_1 value: 18.367 - type: precision_at_10 value: 19.796 - type: precision_at_100 value: 7.714 - type: precision_at_1000 value: 1.547 - type: precision_at_3 value: 23.128999999999998 - type: precision_at_5 value: 24.898 - type: recall_at_1 value: 1.5779999999999998 - type: recall_at_10 value: 14.801 - type: recall_at_100 value: 48.516999999999996 - type: recall_at_1000 value: 83.30300000000001 - type: recall_at_3 value: 5.267 - type: recall_at_5 value: 9.415999999999999 - task: type: Classification dataset: name: MTEB ToxicConversationsClassification type: mteb/toxic_conversations_50k config: default split: test revision: d7c0de2777da35d6aae2200a62c6e0e5af397c4c metrics: - type: accuracy value: 72.4186 - type: ap value: 14.536282543597242 - type: f1 value: 55.47661372005608 - task: type: Classification dataset: name: MTEB TweetSentimentExtractionClassification type: mteb/tweet_sentiment_extraction config: default split: test revision: d604517c81ca91fe16a244d1248fc021f9ecee7a metrics: - type: accuracy value: 59.318053197509904 - type: f1 value: 59.68272481532353 - task: type: Clustering dataset: name: MTEB TwentyNewsgroupsClustering type: mteb/twentynewsgroups-clustering config: default split: test revision: 6125ec4e24fa026cec8a478383ee943acfbd5449 metrics: - type: v_measure value: 52.155753554312 - task: type: PairClassification dataset: name: MTEB TwitterSemEval2015 type: mteb/twittersemeval2015-pairclassification config: default split: test revision: 70970daeab8776df92f5ea462b6173c0b46fd2d1 metrics: - type: cos_sim_accuracy value: 86.99409906419503 - type: cos_sim_ap value: 76.91824322304332 - type: cos_sim_f1 value: 70.97865694950546 - type: cos_sim_precision value: 70.03081664098613 - type: cos_sim_recall value: 71.95250659630607 - type: dot_accuracy value: 85.37879239434942 - type: dot_ap value: 71.86454698478344 - type: dot_f1 value: 66.48115355426259 - type: dot_precision value: 63.84839650145773 - type: dot_recall value: 69.34036939313984 - type: euclidean_accuracy value: 87.00005960541218 - type: euclidean_ap value: 76.9165913835565 - type: euclidean_f1 value: 71.23741557283039 - type: euclidean_precision value: 68.89327088982007 - type: euclidean_recall value: 
73.7467018469657 - type: manhattan_accuracy value: 87.06562555880075 - type: manhattan_ap value: 76.85445703747546 - type: manhattan_f1 value: 70.95560571858539 - type: manhattan_precision value: 67.61472275334609 - type: manhattan_recall value: 74.64379947229551 - type: max_accuracy value: 87.06562555880075 - type: max_ap value: 76.91824322304332 - type: max_f1 value: 71.23741557283039 - task: type: PairClassification dataset: name: MTEB TwitterURLCorpus type: mteb/twitterurlcorpus-pairclassification config: default split: test revision: 8b6510b0b1fa4e4c4f879467980e9be563ec1cdf metrics: - type: cos_sim_accuracy value: 88.93934101758063 - type: cos_sim_ap value: 86.1071528049007 - type: cos_sim_f1 value: 78.21588263552714 - type: cos_sim_precision value: 75.20073900376609 - type: cos_sim_recall value: 81.48290729904527 - type: dot_accuracy value: 88.2504754142896 - type: dot_ap value: 84.19709379723844 - type: dot_f1 value: 76.92307692307693 - type: dot_precision value: 71.81969949916528 - type: dot_recall value: 82.80720665229443 - type: euclidean_accuracy value: 88.97232894787906 - type: euclidean_ap value: 86.02763993294909 - type: euclidean_f1 value: 78.18372741427383 - type: euclidean_precision value: 73.79861918107868 - type: euclidean_recall value: 83.12288266091777 - type: manhattan_accuracy value: 88.86948422400745 - type: manhattan_ap value: 86.0009157821563 - type: manhattan_f1 value: 78.10668017659404 - type: manhattan_precision value: 73.68564795848695 - type: manhattan_recall value: 83.09208500153989 - type: max_accuracy value: 88.97232894787906 - type: max_ap value: 86.1071528049007 - type: max_f1 value: 78.21588263552714 --- <h1 align="center">GIST Embedding v0</h1> *GISTEmbed: Guided In-sample Selection of Training Negatives for Text Embedding Fine-tuning* The model is fine-tuned on top of the [BAAI/bge-base-en-v1.5](https://huggingface.co/BAAI/bge-base-en-v1.5) using the [MEDI dataset](https://github.com/xlang-ai/instructor-embedding.git) augmented with mined triplets from the [MTEB Classification](https://huggingface.co/mteb) training dataset (excluding data from the Amazon Polarity Classification task). The model does not require any instruction for generating embeddings. This means that queries for retrieval tasks can be directly encoded without crafting instructions. Technical paper: [GISTEmbed: Guided In-sample Selection of Training Negatives for Text Embedding Fine-tuning](https://arxiv.org/abs/2402.16829) # Data The dataset used is a compilation of the MEDI and MTEB Classification training datasets. Third-party datasets may be subject to additional terms and conditions under their associated licenses. A HuggingFace Dataset version of the compiled dataset, and the specific revision used to train the model, is available: - Dataset: [avsolatorio/medi-data-mteb_avs_triplets](https://huggingface.co/datasets/avsolatorio/medi-data-mteb_avs_triplets) - Revision: 238a0499b6e6b690cc64ea56fde8461daa8341bb The dataset contains a `task_type` key, which can be used to select only the mteb classification tasks (prefixed with `mteb_`). The **MEDI Dataset** is published in the following paper: [One Embedder, Any Task: Instruction-Finetuned Text Embeddings](https://arxiv.org/abs/2212.09741). The MTEB Benchmark results of the GIST embedding model, compared with the base model, suggest that the fine-tuning dataset has perturbed the model considerably, which resulted in significant improvements in certain tasks while adversely degrading performance in some. 
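As a concrete illustration of the `task_type` key mentioned in the Data section above, the sketch below loads the compiled triplet dataset at the pinned revision and keeps only the rows mined from the MTEB classification training sets. It is a minimal example: the `train` split name is an assumption about the dataset layout, not something stated on this card.

```Python
from datasets import load_dataset

# Load the compiled MEDI + MTEB-classification triplets at the revision used for training,
# so the selection stays reproducible.
dataset = load_dataset(
    "avsolatorio/medi-data-mteb_avs_triplets",
    revision="238a0499b6e6b690cc64ea56fde8461daa8341bb",
    split="train",  # assumed split name; adjust if the dataset exposes a different split
)

# MTEB classification tasks are identified by the `mteb_` prefix in the `task_type` column.
mteb_triplets = dataset.filter(lambda example: example["task_type"].startswith("mteb_"))

print(len(dataset), len(mteb_triplets))
```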
The retrieval performance for the TRECCOVID task is of note. The fine-tuning dataset does not contain significant knowledge about COVID-19, which could have caused the observed performance degradation. We found some evidence, detailed in the paper, that thematic coverage of the fine-tuning data can affect downstream performance.

# Usage

The model can be easily loaded using the Sentence Transformers library.

```Python
import torch.nn.functional as F
from sentence_transformers import SentenceTransformer

revision = None  # Replace with the specific revision to ensure reproducibility if the model is updated.
model = SentenceTransformer("avsolatorio/GIST-Embedding-v0", revision=revision)

texts = [
    "Illustration of the REaLTabFormer model. The left block shows the non-relational tabular data model using GPT-2 with a causal LM head. In contrast, the right block shows how a relational dataset's child table is modeled using a sequence-to-sequence (Seq2Seq) model. The Seq2Seq model uses the observations in the parent table to condition the generation of the observations in the child table. The trained GPT-2 model on the parent table, with weights frozen, is also used as the encoder in the Seq2Seq model.",
    "Predicting human mobility holds significant practical value, with applications ranging from enhancing disaster risk planning to simulating epidemic spread. In this paper, we present the GeoFormer, a decoder-only transformer model adapted from the GPT architecture to forecast human mobility.",
    "As the economies of Southeast Asia continue adopting digital technologies, policy makers increasingly ask how to prepare the workforce for emerging labor demands. However, little is known about the skills that workers need to adapt to these changes",
]

# Compute embeddings
embeddings = model.encode(texts, convert_to_tensor=True)

# Compute cosine similarity for each pair of sentences
scores = F.cosine_similarity(embeddings.unsqueeze(1), embeddings.unsqueeze(0), dim=-1)

print(scores.cpu().numpy())
```

# Training Parameters

Below are the training parameters used to fine-tune the model:

```
Epochs = 80
Warmup ratio = 0.1
Learning rate = 5e-6
Batch size = 32
Checkpoint step = 103500
Contrastive loss temperature = 0.01
```

An illustrative sketch of how the contrastive loss temperature enters a training objective of this kind is given at the end of this card.

# Evaluation

The model was evaluated using the [MTEB Evaluation](https://huggingface.co/mteb) suite.

# Citation

Please cite our work if you use GISTEmbed or the datasets we published in your projects or research. 🤗

```
@article{solatorio2024gistembed,
    title={GISTEmbed: Guided In-sample Selection of Training Negatives for Text Embedding Fine-tuning},
    author={Aivin V. Solatorio},
    journal={arXiv preprint arXiv:2402.16829},
    year={2024},
    url={https://arxiv.org/abs/2402.16829},
    eprint={2402.16829},
    archivePrefix={arXiv},
    primaryClass={cs.LG}
}
```

# Acknowledgements

This work is supported by the "KCP IV - Exploring Data Use in the Development Economics Literature using Large Language Models (AI and LLMs)" project funded by the [Knowledge for Change Program (KCP)](https://www.worldbank.org/en/programs/knowledge-for-change) of the World Bank - RA-P503405-RESE-TF0C3444.

The findings, interpretations, and conclusions expressed in this material are entirely those of the authors. They do not necessarily represent the views of the International Bank for Reconstruction and Development/World Bank and its affiliated organizations, or those of the Executive Directors of the World Bank or the governments they represent.
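# Appendix: Contrastive Loss Sketch

To connect the `Contrastive loss temperature = 0.01` setting in the Training Parameters section to an actual objective, the snippet below sketches a temperature-scaled, InfoNCE-style contrastive loss over (query, positive, mined negatives) triplets. This is a minimal illustration under simplifying assumptions, not the exact GISTEmbed training code: batch construction, the guided in-sample negative selection, and the encoder forward pass are all omitted, and the function name is purely illustrative.

```Python
import torch
import torch.nn.functional as F


def contrastive_loss(query_emb, pos_emb, neg_emb, temperature=0.01):
    """Temperature-scaled contrastive loss over embedding triplets.

    query_emb: (B, D) anchor embeddings
    pos_emb:   (B, D) embeddings of the matching positives
    neg_emb:   (B, N, D) embeddings of N mined negatives per anchor
    """
    # Cosine similarity is the dot product of L2-normalized embeddings.
    q = F.normalize(query_emb, dim=-1)
    p = F.normalize(pos_emb, dim=-1)
    n = F.normalize(neg_emb, dim=-1)

    pos_scores = (q * p).sum(dim=-1, keepdim=True)  # (B, 1)
    neg_scores = torch.einsum("bd,bnd->bn", q, n)   # (B, N)

    # Place the positive at index 0 and sharpen the distribution with the temperature.
    logits = torch.cat([pos_scores, neg_scores], dim=1) / temperature
    targets = torch.zeros(logits.size(0), dtype=torch.long, device=logits.device)

    # Cross-entropy pushes each anchor's positive above all of its negatives.
    return F.cross_entropy(logits, targets)


# Toy usage with random embeddings: batch of 4 triplets, 8 negatives each, 768-dim vectors.
B, N, D = 4, 8, 768
loss = contrastive_loss(torch.randn(B, D), torch.randn(B, D), torch.randn(B, N, D))
print(loss.item())
```

A small temperature such as 0.01 sharpens the softmax over similarities, so the loss concentrates on the hardest negatives relative to the positive.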
[ "BIOSSES", "SCIFACT" ]
Alibaba-NLP/gte-Qwen2-1.5B-instruct
Alibaba-NLP
sentence-similarity
[ "sentence-transformers", "safetensors", "qwen2", "text-generation", "mteb", "transformers", "Qwen2", "sentence-similarity", "custom_code", "arxiv:2308.03281", "license:apache-2.0", "model-index", "autotrain_compatible", "text-generation-inference", "text-embeddings-inference", "endpoints_compatible", "region:us" ]
"2024-06-29T08:02:40Z"
2025-01-11T07:09:40+00:00
533,442
190
--- license: apache-2.0 tags: - mteb - sentence-transformers - transformers - Qwen2 - sentence-similarity model-index: - name: gte-qwen2-7B-instruct results: - task: type: Classification dataset: name: MTEB AmazonCounterfactualClassification (en) type: mteb/amazon_counterfactual config: en split: test revision: e8379541af4e31359cca9fbcf4b00f2671dba205 metrics: - type: accuracy value: 83.98507462686567 - type: ap value: 50.93015252587014 - type: f1 value: 78.50416599051215 - task: type: Classification dataset: name: MTEB AmazonPolarityClassification type: mteb/amazon_polarity config: default split: test revision: e2d317d38cd51312af73b3d32a06d1a08b442046 metrics: - type: accuracy value: 96.61065 - type: ap value: 94.89174052954196 - type: f1 value: 96.60942596940565 - task: type: Classification dataset: name: MTEB AmazonReviewsClassification (en) type: mteb/amazon_reviews_multi config: en split: test revision: 1399c76144fd37290681b995c656ef9b2e06e26d metrics: - type: accuracy value: 55.614000000000004 - type: f1 value: 54.90553480294904 - task: type: Retrieval dataset: name: MTEB ArguAna type: mteb/arguana config: default split: test revision: c22ab2a51041ffd869aaddef7af8d8215647e41a metrics: - type: map_at_1 value: 45.164 - type: map_at_10 value: 61.519 - type: map_at_100 value: 61.769 - type: map_at_1000 value: 61.769 - type: map_at_3 value: 57.443999999999996 - type: map_at_5 value: 60.058 - type: mrr_at_1 value: 46.088 - type: mrr_at_10 value: 61.861 - type: mrr_at_100 value: 62.117999999999995 - type: mrr_at_1000 value: 62.117999999999995 - type: mrr_at_3 value: 57.729 - type: mrr_at_5 value: 60.392 - type: ndcg_at_1 value: 45.164 - type: ndcg_at_10 value: 69.72 - type: ndcg_at_100 value: 70.719 - type: ndcg_at_1000 value: 70.719 - type: ndcg_at_3 value: 61.517999999999994 - type: ndcg_at_5 value: 66.247 - type: precision_at_1 value: 45.164 - type: precision_at_10 value: 9.545 - type: precision_at_100 value: 0.996 - type: precision_at_1000 value: 0.1 - type: precision_at_3 value: 24.443 - type: precision_at_5 value: 16.97 - type: recall_at_1 value: 45.164 - type: recall_at_10 value: 95.448 - type: recall_at_100 value: 99.644 - type: recall_at_1000 value: 99.644 - type: recall_at_3 value: 73.329 - type: recall_at_5 value: 84.851 - task: type: Clustering dataset: name: MTEB ArxivClusteringP2P type: mteb/arxiv-clustering-p2p config: default split: test revision: a122ad7f3f0291bf49cc6f4d32aa80929df69d5d metrics: - type: v_measure value: 50.511868162026175 - task: type: Clustering dataset: name: MTEB ArxivClusteringS2S type: mteb/arxiv-clustering-s2s config: default split: test revision: f910caf1a6075f7329cdf8c1a6135696f37dbd53 metrics: - type: v_measure value: 45.007803189284004 - task: type: Reranking dataset: name: MTEB AskUbuntuDupQuestions type: mteb/askubuntudupquestions-reranking config: default split: test revision: 2000358ca161889fa9c082cb41daa8dcfb161a54 metrics: - type: map value: 64.55292107723382 - type: mrr value: 77.66158818097877 - task: type: STS dataset: name: MTEB BIOSSES type: mteb/biosses-sts config: default split: test revision: d3fb88f8f02e40887cd149695127462bbcf29b4a metrics: - type: cos_sim_pearson value: 85.65459047085452 - type: cos_sim_spearman value: 82.10729255710761 - type: euclidean_pearson value: 82.78079159312476 - type: euclidean_spearman value: 80.50002701880933 - type: manhattan_pearson value: 82.41372641383016 - type: manhattan_spearman value: 80.57412509272639 - task: type: Classification dataset: name: MTEB Banking77Classification type: mteb/banking77 
config: default split: test revision: 0fd18e25b25c072e09e0d92ab615fda904d66300 metrics: - type: accuracy value: 87.30844155844156 - type: f1 value: 87.25307322443255 - task: type: Clustering dataset: name: MTEB BiorxivClusteringP2P type: mteb/biorxiv-clustering-p2p config: default split: test revision: 65b79d1d13f80053f67aca9498d9402c2d9f1f40 metrics: - type: v_measure value: 43.20754608934859 - task: type: Clustering dataset: name: MTEB BiorxivClusteringS2S type: mteb/biorxiv-clustering-s2s config: default split: test revision: 258694dd0231531bc1fd9de6ceb52a0853c6d908 metrics: - type: v_measure value: 38.818037697335505 - task: type: Retrieval dataset: name: MTEB CQADupstackAndroidRetrieval type: BeIR/cqadupstack config: default split: test revision: f46a197baaae43b4f621051089b82a364682dfeb metrics: - type: map_at_1 value: 35.423 - type: map_at_10 value: 47.198 - type: map_at_100 value: 48.899 - type: map_at_1000 value: 49.004 - type: map_at_3 value: 43.114999999999995 - type: map_at_5 value: 45.491 - type: mrr_at_1 value: 42.918 - type: mrr_at_10 value: 53.299 - type: mrr_at_100 value: 54.032000000000004 - type: mrr_at_1000 value: 54.055 - type: mrr_at_3 value: 50.453 - type: mrr_at_5 value: 52.205999999999996 - type: ndcg_at_1 value: 42.918 - type: ndcg_at_10 value: 53.98 - type: ndcg_at_100 value: 59.57 - type: ndcg_at_1000 value: 60.879000000000005 - type: ndcg_at_3 value: 48.224000000000004 - type: ndcg_at_5 value: 50.998 - type: precision_at_1 value: 42.918 - type: precision_at_10 value: 10.299999999999999 - type: precision_at_100 value: 1.687 - type: precision_at_1000 value: 0.211 - type: precision_at_3 value: 22.842000000000002 - type: precision_at_5 value: 16.681 - type: recall_at_1 value: 35.423 - type: recall_at_10 value: 66.824 - type: recall_at_100 value: 89.564 - type: recall_at_1000 value: 97.501 - type: recall_at_3 value: 50.365 - type: recall_at_5 value: 57.921 - task: type: Retrieval dataset: name: MTEB CQADupstackEnglishRetrieval type: BeIR/cqadupstack config: default split: test revision: ad9991cb51e31e31e430383c75ffb2885547b5f0 metrics: - type: map_at_1 value: 33.205 - type: map_at_10 value: 44.859 - type: map_at_100 value: 46.135 - type: map_at_1000 value: 46.259 - type: map_at_3 value: 41.839 - type: map_at_5 value: 43.662 - type: mrr_at_1 value: 41.146 - type: mrr_at_10 value: 50.621 - type: mrr_at_100 value: 51.207 - type: mrr_at_1000 value: 51.246 - type: mrr_at_3 value: 48.535000000000004 - type: mrr_at_5 value: 49.818 - type: ndcg_at_1 value: 41.146 - type: ndcg_at_10 value: 50.683 - type: ndcg_at_100 value: 54.82 - type: ndcg_at_1000 value: 56.69 - type: ndcg_at_3 value: 46.611000000000004 - type: ndcg_at_5 value: 48.66 - type: precision_at_1 value: 41.146 - type: precision_at_10 value: 9.439 - type: precision_at_100 value: 1.465 - type: precision_at_1000 value: 0.194 - type: precision_at_3 value: 22.59 - type: precision_at_5 value: 15.86 - type: recall_at_1 value: 33.205 - type: recall_at_10 value: 61.028999999999996 - type: recall_at_100 value: 78.152 - type: recall_at_1000 value: 89.59700000000001 - type: recall_at_3 value: 49.05 - type: recall_at_5 value: 54.836 - task: type: Retrieval dataset: name: MTEB CQADupstackGamingRetrieval type: BeIR/cqadupstack config: default split: test revision: 4885aa143210c98657558c04aaf3dc47cfb54340 metrics: - type: map_at_1 value: 41.637 - type: map_at_10 value: 55.162 - type: map_at_100 value: 56.142 - type: map_at_1000 value: 56.188 - type: map_at_3 value: 51.564 - type: map_at_5 value: 53.696 - type: mrr_at_1 value: 
47.524 - type: mrr_at_10 value: 58.243 - type: mrr_at_100 value: 58.879999999999995 - type: mrr_at_1000 value: 58.9 - type: mrr_at_3 value: 55.69499999999999 - type: mrr_at_5 value: 57.284 - type: ndcg_at_1 value: 47.524 - type: ndcg_at_10 value: 61.305 - type: ndcg_at_100 value: 65.077 - type: ndcg_at_1000 value: 65.941 - type: ndcg_at_3 value: 55.422000000000004 - type: ndcg_at_5 value: 58.516 - type: precision_at_1 value: 47.524 - type: precision_at_10 value: 9.918000000000001 - type: precision_at_100 value: 1.276 - type: precision_at_1000 value: 0.13899999999999998 - type: precision_at_3 value: 24.765 - type: precision_at_5 value: 17.204 - type: recall_at_1 value: 41.637 - type: recall_at_10 value: 76.185 - type: recall_at_100 value: 92.149 - type: recall_at_1000 value: 98.199 - type: recall_at_3 value: 60.856 - type: recall_at_5 value: 68.25099999999999 - task: type: Retrieval dataset: name: MTEB CQADupstackGisRetrieval type: BeIR/cqadupstack config: default split: test revision: 5003b3064772da1887988e05400cf3806fe491f2 metrics: - type: map_at_1 value: 26.27 - type: map_at_10 value: 37.463 - type: map_at_100 value: 38.434000000000005 - type: map_at_1000 value: 38.509 - type: map_at_3 value: 34.226 - type: map_at_5 value: 36.161 - type: mrr_at_1 value: 28.588 - type: mrr_at_10 value: 39.383 - type: mrr_at_100 value: 40.23 - type: mrr_at_1000 value: 40.281 - type: mrr_at_3 value: 36.422 - type: mrr_at_5 value: 38.252 - type: ndcg_at_1 value: 28.588 - type: ndcg_at_10 value: 43.511 - type: ndcg_at_100 value: 48.274 - type: ndcg_at_1000 value: 49.975 - type: ndcg_at_3 value: 37.319 - type: ndcg_at_5 value: 40.568 - type: precision_at_1 value: 28.588 - type: precision_at_10 value: 6.893000000000001 - type: precision_at_100 value: 0.9900000000000001 - type: precision_at_1000 value: 0.117 - type: precision_at_3 value: 16.347 - type: precision_at_5 value: 11.661000000000001 - type: recall_at_1 value: 26.27 - type: recall_at_10 value: 60.284000000000006 - type: recall_at_100 value: 81.902 - type: recall_at_1000 value: 94.43 - type: recall_at_3 value: 43.537 - type: recall_at_5 value: 51.475 - task: type: Retrieval dataset: name: MTEB CQADupstackMathematicaRetrieval type: BeIR/cqadupstack config: default split: test revision: 90fceea13679c63fe563ded68f3b6f06e50061de metrics: - type: map_at_1 value: 18.168 - type: map_at_10 value: 28.410000000000004 - type: map_at_100 value: 29.78 - type: map_at_1000 value: 29.892999999999997 - type: map_at_3 value: 25.238 - type: map_at_5 value: 26.96 - type: mrr_at_1 value: 23.507 - type: mrr_at_10 value: 33.382 - type: mrr_at_100 value: 34.404 - type: mrr_at_1000 value: 34.467999999999996 - type: mrr_at_3 value: 30.637999999999998 - type: mrr_at_5 value: 32.199 - type: ndcg_at_1 value: 23.507 - type: ndcg_at_10 value: 34.571000000000005 - type: ndcg_at_100 value: 40.663 - type: ndcg_at_1000 value: 43.236000000000004 - type: ndcg_at_3 value: 29.053 - type: ndcg_at_5 value: 31.563999999999997 - type: precision_at_1 value: 23.507 - type: precision_at_10 value: 6.654 - type: precision_at_100 value: 1.113 - type: precision_at_1000 value: 0.146 - type: precision_at_3 value: 14.427999999999999 - type: precision_at_5 value: 10.498000000000001 - type: recall_at_1 value: 18.168 - type: recall_at_10 value: 48.443000000000005 - type: recall_at_100 value: 74.47 - type: recall_at_1000 value: 92.494 - type: recall_at_3 value: 33.379999999999995 - type: recall_at_5 value: 39.76 - task: type: Retrieval dataset: name: MTEB CQADupstackPhysicsRetrieval type: BeIR/cqadupstack 
config: default split: test revision: 79531abbd1fb92d06c6d6315a0cbbbf5bb247ea4 metrics: - type: map_at_1 value: 32.39 - type: map_at_10 value: 44.479 - type: map_at_100 value: 45.977000000000004 - type: map_at_1000 value: 46.087 - type: map_at_3 value: 40.976 - type: map_at_5 value: 43.038 - type: mrr_at_1 value: 40.135 - type: mrr_at_10 value: 50.160000000000004 - type: mrr_at_100 value: 51.052 - type: mrr_at_1000 value: 51.087 - type: mrr_at_3 value: 47.818 - type: mrr_at_5 value: 49.171 - type: ndcg_at_1 value: 40.135 - type: ndcg_at_10 value: 50.731 - type: ndcg_at_100 value: 56.452000000000005 - type: ndcg_at_1000 value: 58.123000000000005 - type: ndcg_at_3 value: 45.507 - type: ndcg_at_5 value: 48.11 - type: precision_at_1 value: 40.135 - type: precision_at_10 value: 9.192 - type: precision_at_100 value: 1.397 - type: precision_at_1000 value: 0.169 - type: precision_at_3 value: 21.816 - type: precision_at_5 value: 15.476 - type: recall_at_1 value: 32.39 - type: recall_at_10 value: 63.597 - type: recall_at_100 value: 86.737 - type: recall_at_1000 value: 97.039 - type: recall_at_3 value: 48.906 - type: recall_at_5 value: 55.659000000000006 - task: type: Retrieval dataset: name: MTEB CQADupstackProgrammersRetrieval type: BeIR/cqadupstack config: default split: test revision: 6184bc1440d2dbc7612be22b50686b8826d22b32 metrics: - type: map_at_1 value: 28.397 - type: map_at_10 value: 39.871 - type: map_at_100 value: 41.309000000000005 - type: map_at_1000 value: 41.409 - type: map_at_3 value: 36.047000000000004 - type: map_at_5 value: 38.104 - type: mrr_at_1 value: 34.703 - type: mrr_at_10 value: 44.773 - type: mrr_at_100 value: 45.64 - type: mrr_at_1000 value: 45.678999999999995 - type: mrr_at_3 value: 41.705 - type: mrr_at_5 value: 43.406 - type: ndcg_at_1 value: 34.703 - type: ndcg_at_10 value: 46.271 - type: ndcg_at_100 value: 52.037 - type: ndcg_at_1000 value: 53.81700000000001 - type: ndcg_at_3 value: 39.966 - type: ndcg_at_5 value: 42.801 - type: precision_at_1 value: 34.703 - type: precision_at_10 value: 8.744 - type: precision_at_100 value: 1.348 - type: precision_at_1000 value: 0.167 - type: precision_at_3 value: 19.102 - type: precision_at_5 value: 13.836 - type: recall_at_1 value: 28.397 - type: recall_at_10 value: 60.299 - type: recall_at_100 value: 84.595 - type: recall_at_1000 value: 96.155 - type: recall_at_3 value: 43.065 - type: recall_at_5 value: 50.371 - task: type: Retrieval dataset: name: MTEB CQADupstackRetrieval type: BeIR/cqadupstack config: default split: test revision: 4ffe81d471b1924886b33c7567bfb200e9eec5c4 metrics: - type: map_at_1 value: 28.044333333333338 - type: map_at_10 value: 38.78691666666666 - type: map_at_100 value: 40.113 - type: map_at_1000 value: 40.22125 - type: map_at_3 value: 35.52966666666667 - type: map_at_5 value: 37.372749999999996 - type: mrr_at_1 value: 33.159083333333335 - type: mrr_at_10 value: 42.913583333333335 - type: mrr_at_100 value: 43.7845 - type: mrr_at_1000 value: 43.830333333333336 - type: mrr_at_3 value: 40.29816666666667 - type: mrr_at_5 value: 41.81366666666667 - type: ndcg_at_1 value: 33.159083333333335 - type: ndcg_at_10 value: 44.75750000000001 - type: ndcg_at_100 value: 50.13658333333334 - type: ndcg_at_1000 value: 52.037 - type: ndcg_at_3 value: 39.34258333333334 - type: ndcg_at_5 value: 41.93708333333333 - type: precision_at_1 value: 33.159083333333335 - type: precision_at_10 value: 7.952416666666667 - type: precision_at_100 value: 1.2571666666666668 - type: precision_at_1000 value: 0.16099999999999998 - type: 
precision_at_3 value: 18.303833333333337 - type: precision_at_5 value: 13.057083333333333 - type: recall_at_1 value: 28.044333333333338 - type: recall_at_10 value: 58.237249999999996 - type: recall_at_100 value: 81.35391666666666 - type: recall_at_1000 value: 94.21283333333334 - type: recall_at_3 value: 43.32341666666667 - type: recall_at_5 value: 49.94908333333333 - type: map_at_1 value: 18.398 - type: map_at_10 value: 27.929 - type: map_at_100 value: 29.032999999999998 - type: map_at_1000 value: 29.126 - type: map_at_3 value: 25.070999999999998 - type: map_at_5 value: 26.583000000000002 - type: mrr_at_1 value: 19.963 - type: mrr_at_10 value: 29.997 - type: mrr_at_100 value: 30.9 - type: mrr_at_1000 value: 30.972 - type: mrr_at_3 value: 27.264 - type: mrr_at_5 value: 28.826 - type: ndcg_at_1 value: 19.963 - type: ndcg_at_10 value: 33.678999999999995 - type: ndcg_at_100 value: 38.931 - type: ndcg_at_1000 value: 41.379 - type: ndcg_at_3 value: 28.000000000000004 - type: ndcg_at_5 value: 30.637999999999998 - type: precision_at_1 value: 19.963 - type: precision_at_10 value: 5.7299999999999995 - type: precision_at_100 value: 0.902 - type: precision_at_1000 value: 0.122 - type: precision_at_3 value: 12.631 - type: precision_at_5 value: 9.057 - type: recall_at_1 value: 18.398 - type: recall_at_10 value: 49.254 - type: recall_at_100 value: 73.182 - type: recall_at_1000 value: 91.637 - type: recall_at_3 value: 34.06 - type: recall_at_5 value: 40.416000000000004 - task: type: Retrieval dataset: name: MTEB CQADupstackStatsRetrieval type: BeIR/cqadupstack config: default split: test revision: 65ac3a16b8e91f9cee4c9828cc7c335575432a2a metrics: - type: map_at_1 value: 27.838 - type: map_at_10 value: 36.04 - type: map_at_100 value: 37.113 - type: map_at_1000 value: 37.204 - type: map_at_3 value: 33.585 - type: map_at_5 value: 34.845 - type: mrr_at_1 value: 30.982 - type: mrr_at_10 value: 39.105000000000004 - type: mrr_at_100 value: 39.98 - type: mrr_at_1000 value: 40.042 - type: mrr_at_3 value: 36.912 - type: mrr_at_5 value: 38.062000000000005 - type: ndcg_at_1 value: 30.982 - type: ndcg_at_10 value: 40.982 - type: ndcg_at_100 value: 46.092 - type: ndcg_at_1000 value: 48.25 - type: ndcg_at_3 value: 36.41 - type: ndcg_at_5 value: 38.379999999999995 - type: precision_at_1 value: 30.982 - type: precision_at_10 value: 6.534 - type: precision_at_100 value: 0.9820000000000001 - type: precision_at_1000 value: 0.124 - type: precision_at_3 value: 15.745999999999999 - type: precision_at_5 value: 10.828 - type: recall_at_1 value: 27.838 - type: recall_at_10 value: 52.971000000000004 - type: recall_at_100 value: 76.357 - type: recall_at_1000 value: 91.973 - type: recall_at_3 value: 40.157 - type: recall_at_5 value: 45.147999999999996 - task: type: Retrieval dataset: name: MTEB CQADupstackTexRetrieval type: BeIR/cqadupstack config: default split: test revision: 46989137a86843e03a6195de44b09deda022eec7 metrics: - type: map_at_1 value: 19.059 - type: map_at_10 value: 27.454 - type: map_at_100 value: 28.736 - type: map_at_1000 value: 28.865000000000002 - type: map_at_3 value: 24.773999999999997 - type: map_at_5 value: 26.266000000000002 - type: mrr_at_1 value: 23.125 - type: mrr_at_10 value: 31.267 - type: mrr_at_100 value: 32.32 - type: mrr_at_1000 value: 32.394 - type: mrr_at_3 value: 28.894 - type: mrr_at_5 value: 30.281000000000002 - type: ndcg_at_1 value: 23.125 - type: ndcg_at_10 value: 32.588 - type: ndcg_at_100 value: 38.432 - type: ndcg_at_1000 value: 41.214 - type: ndcg_at_3 value: 27.938000000000002 - type: 
ndcg_at_5 value: 30.127 - type: precision_at_1 value: 23.125 - type: precision_at_10 value: 5.9639999999999995 - type: precision_at_100 value: 1.047 - type: precision_at_1000 value: 0.148 - type: precision_at_3 value: 13.294 - type: precision_at_5 value: 9.628 - type: recall_at_1 value: 19.059 - type: recall_at_10 value: 44.25 - type: recall_at_100 value: 69.948 - type: recall_at_1000 value: 89.35300000000001 - type: recall_at_3 value: 31.114000000000004 - type: recall_at_5 value: 36.846000000000004 - task: type: Retrieval dataset: name: MTEB CQADupstackUnixRetrieval type: BeIR/cqadupstack config: default split: test revision: 6c6430d3a6d36f8d2a829195bc5dc94d7e063e53 metrics: - type: map_at_1 value: 28.355999999999998 - type: map_at_10 value: 39.055 - type: map_at_100 value: 40.486 - type: map_at_1000 value: 40.571 - type: map_at_3 value: 35.69 - type: map_at_5 value: 37.605 - type: mrr_at_1 value: 33.302 - type: mrr_at_10 value: 42.986000000000004 - type: mrr_at_100 value: 43.957 - type: mrr_at_1000 value: 43.996 - type: mrr_at_3 value: 40.111999999999995 - type: mrr_at_5 value: 41.735 - type: ndcg_at_1 value: 33.302 - type: ndcg_at_10 value: 44.962999999999994 - type: ndcg_at_100 value: 50.917 - type: ndcg_at_1000 value: 52.622 - type: ndcg_at_3 value: 39.182 - type: ndcg_at_5 value: 41.939 - type: precision_at_1 value: 33.302 - type: precision_at_10 value: 7.779999999999999 - type: precision_at_100 value: 1.203 - type: precision_at_1000 value: 0.145 - type: precision_at_3 value: 18.035 - type: precision_at_5 value: 12.873000000000001 - type: recall_at_1 value: 28.355999999999998 - type: recall_at_10 value: 58.782000000000004 - type: recall_at_100 value: 84.02199999999999 - type: recall_at_1000 value: 95.511 - type: recall_at_3 value: 43.126999999999995 - type: recall_at_5 value: 50.14999999999999 - task: type: Retrieval dataset: name: MTEB CQADupstackWebmastersRetrieval type: BeIR/cqadupstack config: default split: test revision: 160c094312a0e1facb97e55eeddb698c0abe3571 metrics: - type: map_at_1 value: 27.391 - type: map_at_10 value: 37.523 - type: map_at_100 value: 39.312000000000005 - type: map_at_1000 value: 39.54 - type: map_at_3 value: 34.231 - type: map_at_5 value: 36.062 - type: mrr_at_1 value: 32.016 - type: mrr_at_10 value: 41.747 - type: mrr_at_100 value: 42.812 - type: mrr_at_1000 value: 42.844 - type: mrr_at_3 value: 39.129999999999995 - type: mrr_at_5 value: 40.524 - type: ndcg_at_1 value: 32.016 - type: ndcg_at_10 value: 43.826 - type: ndcg_at_100 value: 50.373999999999995 - type: ndcg_at_1000 value: 52.318 - type: ndcg_at_3 value: 38.479 - type: ndcg_at_5 value: 40.944 - type: precision_at_1 value: 32.016 - type: precision_at_10 value: 8.280999999999999 - type: precision_at_100 value: 1.6760000000000002 - type: precision_at_1000 value: 0.25 - type: precision_at_3 value: 18.05 - type: precision_at_5 value: 13.083 - type: recall_at_1 value: 27.391 - type: recall_at_10 value: 56.928999999999995 - type: recall_at_100 value: 85.169 - type: recall_at_1000 value: 96.665 - type: recall_at_3 value: 42.264 - type: recall_at_5 value: 48.556 - task: type: Retrieval dataset: name: MTEB ClimateFEVER type: mteb/climate-fever config: default split: test revision: 47f2ac6acb640fc46020b02a5b59fdda04d39380 metrics: - type: map_at_1 value: 19.681 - type: map_at_10 value: 32.741 - type: map_at_100 value: 34.811 - type: map_at_1000 value: 35.003 - type: map_at_3 value: 27.697 - type: map_at_5 value: 30.372 - type: mrr_at_1 value: 44.951 - type: mrr_at_10 value: 56.34400000000001 - type: 
mrr_at_100 value: 56.961 - type: mrr_at_1000 value: 56.987 - type: mrr_at_3 value: 53.681 - type: mrr_at_5 value: 55.407 - type: ndcg_at_1 value: 44.951 - type: ndcg_at_10 value: 42.905 - type: ndcg_at_100 value: 49.95 - type: ndcg_at_1000 value: 52.917 - type: ndcg_at_3 value: 36.815 - type: ndcg_at_5 value: 38.817 - type: precision_at_1 value: 44.951 - type: precision_at_10 value: 12.989999999999998 - type: precision_at_100 value: 2.068 - type: precision_at_1000 value: 0.263 - type: precision_at_3 value: 27.275 - type: precision_at_5 value: 20.365 - type: recall_at_1 value: 19.681 - type: recall_at_10 value: 48.272999999999996 - type: recall_at_100 value: 71.87400000000001 - type: recall_at_1000 value: 87.929 - type: recall_at_3 value: 32.653999999999996 - type: recall_at_5 value: 39.364 - task: type: Retrieval dataset: name: MTEB DBPedia type: mteb/dbpedia config: default split: test revision: c0f706b76e590d620bd6618b3ca8efdd34e2d659 metrics: - type: map_at_1 value: 10.231 - type: map_at_10 value: 22.338 - type: map_at_100 value: 31.927 - type: map_at_1000 value: 33.87 - type: map_at_3 value: 15.559999999999999 - type: map_at_5 value: 18.239 - type: mrr_at_1 value: 75.0 - type: mrr_at_10 value: 81.303 - type: mrr_at_100 value: 81.523 - type: mrr_at_1000 value: 81.53 - type: mrr_at_3 value: 80.083 - type: mrr_at_5 value: 80.758 - type: ndcg_at_1 value: 64.625 - type: ndcg_at_10 value: 48.687000000000005 - type: ndcg_at_100 value: 52.791 - type: ndcg_at_1000 value: 60.041999999999994 - type: ndcg_at_3 value: 53.757999999999996 - type: ndcg_at_5 value: 50.76500000000001 - type: precision_at_1 value: 75.0 - type: precision_at_10 value: 38.3 - type: precision_at_100 value: 12.025 - type: precision_at_1000 value: 2.3970000000000002 - type: precision_at_3 value: 55.417 - type: precision_at_5 value: 47.5 - type: recall_at_1 value: 10.231 - type: recall_at_10 value: 27.697 - type: recall_at_100 value: 57.409 - type: recall_at_1000 value: 80.547 - type: recall_at_3 value: 16.668 - type: recall_at_5 value: 20.552 - task: type: Classification dataset: name: MTEB EmotionClassification type: mteb/emotion config: default split: test revision: 4f58c6b202a23cf9a4da393831edf4f9183cad37 metrics: - type: accuracy value: 61.365 - type: f1 value: 56.7540827912991 - task: type: Retrieval dataset: name: MTEB FEVER type: mteb/fever config: default split: test revision: bea83ef9e8fb933d90a2f1d5515737465d613e12 metrics: - type: map_at_1 value: 83.479 - type: map_at_10 value: 88.898 - type: map_at_100 value: 89.11 - type: map_at_1000 value: 89.12400000000001 - type: map_at_3 value: 88.103 - type: map_at_5 value: 88.629 - type: mrr_at_1 value: 89.934 - type: mrr_at_10 value: 93.91000000000001 - type: mrr_at_100 value: 93.937 - type: mrr_at_1000 value: 93.938 - type: mrr_at_3 value: 93.62700000000001 - type: mrr_at_5 value: 93.84599999999999 - type: ndcg_at_1 value: 89.934 - type: ndcg_at_10 value: 91.574 - type: ndcg_at_100 value: 92.238 - type: ndcg_at_1000 value: 92.45 - type: ndcg_at_3 value: 90.586 - type: ndcg_at_5 value: 91.16300000000001 - type: precision_at_1 value: 89.934 - type: precision_at_10 value: 10.555 - type: precision_at_100 value: 1.1159999999999999 - type: precision_at_1000 value: 0.11499999999999999 - type: precision_at_3 value: 33.588 - type: precision_at_5 value: 20.642 - type: recall_at_1 value: 83.479 - type: recall_at_10 value: 94.971 - type: recall_at_100 value: 97.397 - type: recall_at_1000 value: 98.666 - type: recall_at_3 value: 92.24799999999999 - type: recall_at_5 value: 93.797 - 
task: type: Retrieval dataset: name: MTEB FiQA2018 type: mteb/fiqa config: default split: test revision: 27a168819829fe9bcd655c2df245fb19452e8e06 metrics: - type: map_at_1 value: 27.16 - type: map_at_10 value: 45.593 - type: map_at_100 value: 47.762 - type: map_at_1000 value: 47.899 - type: map_at_3 value: 39.237 - type: map_at_5 value: 42.970000000000006 - type: mrr_at_1 value: 52.623 - type: mrr_at_10 value: 62.637 - type: mrr_at_100 value: 63.169 - type: mrr_at_1000 value: 63.185 - type: mrr_at_3 value: 59.928000000000004 - type: mrr_at_5 value: 61.702999999999996 - type: ndcg_at_1 value: 52.623 - type: ndcg_at_10 value: 54.701 - type: ndcg_at_100 value: 61.263 - type: ndcg_at_1000 value: 63.134 - type: ndcg_at_3 value: 49.265 - type: ndcg_at_5 value: 51.665000000000006 - type: precision_at_1 value: 52.623 - type: precision_at_10 value: 15.185 - type: precision_at_100 value: 2.202 - type: precision_at_1000 value: 0.254 - type: precision_at_3 value: 32.767 - type: precision_at_5 value: 24.722 - type: recall_at_1 value: 27.16 - type: recall_at_10 value: 63.309000000000005 - type: recall_at_100 value: 86.722 - type: recall_at_1000 value: 97.505 - type: recall_at_3 value: 45.045 - type: recall_at_5 value: 54.02400000000001 - task: type: Retrieval dataset: name: MTEB HotpotQA type: mteb/hotpotqa config: default split: test revision: ab518f4d6fcca38d87c25209f94beba119d02014 metrics: - type: map_at_1 value: 42.573 - type: map_at_10 value: 59.373 - type: map_at_100 value: 60.292 - type: map_at_1000 value: 60.358999999999995 - type: map_at_3 value: 56.159000000000006 - type: map_at_5 value: 58.123999999999995 - type: mrr_at_1 value: 85.14500000000001 - type: mrr_at_10 value: 89.25999999999999 - type: mrr_at_100 value: 89.373 - type: mrr_at_1000 value: 89.377 - type: mrr_at_3 value: 88.618 - type: mrr_at_5 value: 89.036 - type: ndcg_at_1 value: 85.14500000000001 - type: ndcg_at_10 value: 68.95 - type: ndcg_at_100 value: 71.95 - type: ndcg_at_1000 value: 73.232 - type: ndcg_at_3 value: 64.546 - type: ndcg_at_5 value: 66.945 - type: precision_at_1 value: 85.14500000000001 - type: precision_at_10 value: 13.865 - type: precision_at_100 value: 1.619 - type: precision_at_1000 value: 0.179 - type: precision_at_3 value: 39.703 - type: precision_at_5 value: 25.718000000000004 - type: recall_at_1 value: 42.573 - type: recall_at_10 value: 69.325 - type: recall_at_100 value: 80.932 - type: recall_at_1000 value: 89.446 - type: recall_at_3 value: 59.553999999999995 - type: recall_at_5 value: 64.294 - task: type: Classification dataset: name: MTEB ImdbClassification type: mteb/imdb config: default split: test revision: 3d86128a09e091d6018b6d26cad27f2739fc2db7 metrics: - type: accuracy value: 95.8336 - type: ap value: 93.78862962194073 - type: f1 value: 95.83192650728371 - task: type: Retrieval dataset: name: MTEB MSMARCO type: mteb/msmarco config: default split: dev revision: c5a29a104738b98a9e76336939199e264163d4a0 metrics: - type: map_at_1 value: 23.075000000000003 - type: map_at_10 value: 36.102000000000004 - type: map_at_100 value: 37.257 - type: map_at_1000 value: 37.3 - type: map_at_3 value: 32.144 - type: map_at_5 value: 34.359 - type: mrr_at_1 value: 23.711 - type: mrr_at_10 value: 36.671 - type: mrr_at_100 value: 37.763999999999996 - type: mrr_at_1000 value: 37.801 - type: mrr_at_3 value: 32.775 - type: mrr_at_5 value: 34.977000000000004 - type: ndcg_at_1 value: 23.711 - type: ndcg_at_10 value: 43.361 - type: ndcg_at_100 value: 48.839 - type: ndcg_at_1000 value: 49.88 - type: ndcg_at_3 value: 35.269 - 
type: ndcg_at_5 value: 39.224 - type: precision_at_1 value: 23.711 - type: precision_at_10 value: 6.866999999999999 - type: precision_at_100 value: 0.96 - type: precision_at_1000 value: 0.105 - type: precision_at_3 value: 15.096000000000002 - type: precision_at_5 value: 11.083 - type: recall_at_1 value: 23.075000000000003 - type: recall_at_10 value: 65.756 - type: recall_at_100 value: 90.88199999999999 - type: recall_at_1000 value: 98.739 - type: recall_at_3 value: 43.691 - type: recall_at_5 value: 53.15800000000001 - task: type: Classification dataset: name: MTEB MTOPDomainClassification (en) type: mteb/mtop_domain config: en split: test revision: d80d48c1eb48d3562165c59d59d0034df9fff0bf metrics: - type: accuracy value: 97.69493844049248 - type: f1 value: 97.55048089616261 - task: type: Classification dataset: name: MTEB MTOPIntentClassification (en) type: mteb/mtop_intent config: en split: test revision: ae001d0e6b1228650b7bd1c2c65fb50ad11a8aba metrics: - type: accuracy value: 88.75968992248062 - type: f1 value: 72.26321223399123 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (en) type: mteb/amazon_massive_intent config: en split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 82.40080699394754 - type: f1 value: 79.62590029057968 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (en) type: mteb/amazon_massive_scenario config: en split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 84.49562878278414 - type: f1 value: 84.0040193313333 - task: type: Clustering dataset: name: MTEB MedrxivClusteringP2P type: mteb/medrxiv-clustering-p2p config: default split: test revision: e7a26af6f3ae46b30dde8737f02c07b1505bcc73 metrics: - type: v_measure value: 39.386760057101945 - task: type: Clustering dataset: name: MTEB MedrxivClusteringS2S type: mteb/medrxiv-clustering-s2s config: default split: test revision: 35191c8c0dca72d8ff3efcd72aa802307d469663 metrics: - type: v_measure value: 37.89687154075537 - task: type: Reranking dataset: name: MTEB MindSmallReranking type: mteb/mind_small config: default split: test revision: 3bdac13927fdc888b903db93b2ffdbd90b295a69 metrics: - type: map value: 33.94151656057482 - type: mrr value: 35.32684700746953 - task: type: Retrieval dataset: name: MTEB NFCorpus type: mteb/nfcorpus config: default split: test revision: ec0fa4fe99da2ff19ca1214b7966684033a58814 metrics: - type: map_at_1 value: 6.239999999999999 - type: map_at_10 value: 14.862 - type: map_at_100 value: 18.955 - type: map_at_1000 value: 20.694000000000003 - type: map_at_3 value: 10.683 - type: map_at_5 value: 12.674 - type: mrr_at_1 value: 50.15500000000001 - type: mrr_at_10 value: 59.697 - type: mrr_at_100 value: 60.095 - type: mrr_at_1000 value: 60.129999999999995 - type: mrr_at_3 value: 58.35900000000001 - type: mrr_at_5 value: 58.839 - type: ndcg_at_1 value: 48.452 - type: ndcg_at_10 value: 39.341 - type: ndcg_at_100 value: 35.866 - type: ndcg_at_1000 value: 45.111000000000004 - type: ndcg_at_3 value: 44.527 - type: ndcg_at_5 value: 42.946 - type: precision_at_1 value: 50.15500000000001 - type: precision_at_10 value: 29.536 - type: precision_at_100 value: 9.142 - type: precision_at_1000 value: 2.2849999999999997 - type: precision_at_3 value: 41.899 - type: precision_at_5 value: 37.647000000000006 - type: recall_at_1 value: 6.239999999999999 - type: recall_at_10 value: 19.278000000000002 - type: recall_at_100 value: 36.074 - type: recall_at_1000 value: 70.017 - 
type: recall_at_3 value: 12.066 - type: recall_at_5 value: 15.254000000000001 - task: type: Retrieval dataset: name: MTEB NQ type: mteb/nq config: default split: test revision: b774495ed302d8c44a3a7ea25c90dbce03968f31 metrics: - type: map_at_1 value: 39.75 - type: map_at_10 value: 56.443 - type: map_at_100 value: 57.233999999999995 - type: map_at_1000 value: 57.249 - type: map_at_3 value: 52.032999999999994 - type: map_at_5 value: 54.937999999999995 - type: mrr_at_1 value: 44.728 - type: mrr_at_10 value: 58.939 - type: mrr_at_100 value: 59.489000000000004 - type: mrr_at_1000 value: 59.499 - type: mrr_at_3 value: 55.711999999999996 - type: mrr_at_5 value: 57.89 - type: ndcg_at_1 value: 44.728 - type: ndcg_at_10 value: 63.998999999999995 - type: ndcg_at_100 value: 67.077 - type: ndcg_at_1000 value: 67.40899999999999 - type: ndcg_at_3 value: 56.266000000000005 - type: ndcg_at_5 value: 60.88 - type: precision_at_1 value: 44.728 - type: precision_at_10 value: 10.09 - type: precision_at_100 value: 1.1809999999999998 - type: precision_at_1000 value: 0.121 - type: precision_at_3 value: 25.145 - type: precision_at_5 value: 17.822 - type: recall_at_1 value: 39.75 - type: recall_at_10 value: 84.234 - type: recall_at_100 value: 97.055 - type: recall_at_1000 value: 99.517 - type: recall_at_3 value: 64.851 - type: recall_at_5 value: 75.343 - task: type: Retrieval dataset: name: MTEB QuoraRetrieval type: mteb/quora config: default split: test revision: None metrics: - type: map_at_1 value: 72.085 - type: map_at_10 value: 86.107 - type: map_at_100 value: 86.727 - type: map_at_1000 value: 86.74 - type: map_at_3 value: 83.21 - type: map_at_5 value: 85.06 - type: mrr_at_1 value: 82.94 - type: mrr_at_10 value: 88.845 - type: mrr_at_100 value: 88.926 - type: mrr_at_1000 value: 88.927 - type: mrr_at_3 value: 87.993 - type: mrr_at_5 value: 88.62299999999999 - type: ndcg_at_1 value: 82.97 - type: ndcg_at_10 value: 89.645 - type: ndcg_at_100 value: 90.717 - type: ndcg_at_1000 value: 90.78 - type: ndcg_at_3 value: 86.99900000000001 - type: ndcg_at_5 value: 88.52600000000001 - type: precision_at_1 value: 82.97 - type: precision_at_10 value: 13.569 - type: precision_at_100 value: 1.539 - type: precision_at_1000 value: 0.157 - type: precision_at_3 value: 38.043 - type: precision_at_5 value: 24.992 - type: recall_at_1 value: 72.085 - type: recall_at_10 value: 96.262 - type: recall_at_100 value: 99.77000000000001 - type: recall_at_1000 value: 99.997 - type: recall_at_3 value: 88.652 - type: recall_at_5 value: 93.01899999999999 - task: type: Clustering dataset: name: MTEB RedditClustering type: mteb/reddit-clustering config: default split: test revision: 24640382cdbf8abc73003fb0fa6d111a705499eb metrics: - type: v_measure value: 55.82153952668092 - task: type: Clustering dataset: name: MTEB RedditClusteringP2P type: mteb/reddit-clustering-p2p config: default split: test revision: 282350215ef01743dc01b456c7f5241fa8937f16 metrics: - type: v_measure value: 62.094465801879295 - task: type: Retrieval dataset: name: MTEB SCIDOCS type: mteb/scidocs config: default split: test revision: None metrics: - type: map_at_1 value: 5.688 - type: map_at_10 value: 15.201999999999998 - type: map_at_100 value: 18.096 - type: map_at_1000 value: 18.481 - type: map_at_3 value: 10.734 - type: map_at_5 value: 12.94 - type: mrr_at_1 value: 28.000000000000004 - type: mrr_at_10 value: 41.101 - type: mrr_at_100 value: 42.202 - type: mrr_at_1000 value: 42.228 - type: mrr_at_3 value: 37.683 - type: mrr_at_5 value: 39.708 - type: ndcg_at_1 value: 
28.000000000000004 - type: ndcg_at_10 value: 24.976000000000003 - type: ndcg_at_100 value: 35.129 - type: ndcg_at_1000 value: 40.77 - type: ndcg_at_3 value: 23.787 - type: ndcg_at_5 value: 20.816000000000003 - type: precision_at_1 value: 28.000000000000004 - type: precision_at_10 value: 13.04 - type: precision_at_100 value: 2.761 - type: precision_at_1000 value: 0.41000000000000003 - type: precision_at_3 value: 22.6 - type: precision_at_5 value: 18.52 - type: recall_at_1 value: 5.688 - type: recall_at_10 value: 26.43 - type: recall_at_100 value: 56.02 - type: recall_at_1000 value: 83.21 - type: recall_at_3 value: 13.752 - type: recall_at_5 value: 18.777 - task: type: STS dataset: name: MTEB SICK-R type: mteb/sickr-sts config: default split: test revision: a6ea5a8cab320b040a23452cc28066d9beae2cee metrics: - type: cos_sim_pearson value: 85.15084859283178 - type: cos_sim_spearman value: 80.49030614009419 - type: euclidean_pearson value: 81.84574978672468 - type: euclidean_spearman value: 79.89787150656818 - type: manhattan_pearson value: 81.63076538567131 - type: manhattan_spearman value: 79.69867352121841 - task: type: STS dataset: name: MTEB STS12 type: mteb/sts12-sts config: default split: test revision: a0d554a64d88156834ff5ae9920b964011b16384 metrics: - type: cos_sim_pearson value: 84.64097921490992 - type: cos_sim_spearman value: 77.25370084896514 - type: euclidean_pearson value: 82.71210826468788 - type: euclidean_spearman value: 78.50445584994826 - type: manhattan_pearson value: 82.92580164330298 - type: manhattan_spearman value: 78.69686891301019 - task: type: STS dataset: name: MTEB STS13 type: mteb/sts13-sts config: default split: test revision: 7e90230a92c190f1bf69ae9002b8cea547a64cca metrics: - type: cos_sim_pearson value: 87.24596417308994 - type: cos_sim_spearman value: 87.79454220555091 - type: euclidean_pearson value: 87.40242561671164 - type: euclidean_spearman value: 88.25955597373556 - type: manhattan_pearson value: 87.25160240485849 - type: manhattan_spearman value: 88.155794979818 - task: type: STS dataset: name: MTEB STS14 type: mteb/sts14-sts config: default split: test revision: 6031580fec1f6af667f0bd2da0a551cf4f0b2375 metrics: - type: cos_sim_pearson value: 84.44914233422564 - type: cos_sim_spearman value: 82.91015471820322 - type: euclidean_pearson value: 84.7206656630327 - type: euclidean_spearman value: 83.86408872059216 - type: manhattan_pearson value: 84.72816725158454 - type: manhattan_spearman value: 84.01603388572788 - task: type: STS dataset: name: MTEB STS15 type: mteb/sts15-sts config: default split: test revision: ae752c7c21bf194d8b67fd573edf7ae58183cbe3 metrics: - type: cos_sim_pearson value: 87.6168026237477 - type: cos_sim_spearman value: 88.45414278092397 - type: euclidean_pearson value: 88.57023240882022 - type: euclidean_spearman value: 89.04102190922094 - type: manhattan_pearson value: 88.66695535796354 - type: manhattan_spearman value: 89.19898476680969 - task: type: STS dataset: name: MTEB STS16 type: mteb/sts16-sts config: default split: test revision: 4d8694f8f0e0100860b497b999b3dbed754a0513 metrics: - type: cos_sim_pearson value: 84.27925826089424 - type: cos_sim_spearman value: 85.45291099550461 - type: euclidean_pearson value: 83.63853036580834 - type: euclidean_spearman value: 84.33468035821484 - type: manhattan_pearson value: 83.72778773251596 - type: manhattan_spearman value: 84.51583132445376 - task: type: STS dataset: name: MTEB STS17 (en-en) type: mteb/sts17-crosslingual-sts config: en-en split: test revision: 
af5e6fb845001ecf41f4c1e033ce921939a2a68d metrics: - type: cos_sim_pearson value: 89.67375185692552 - type: cos_sim_spearman value: 90.32542469203855 - type: euclidean_pearson value: 89.63513717951847 - type: euclidean_spearman value: 89.87760271003745 - type: manhattan_pearson value: 89.28381452982924 - type: manhattan_spearman value: 89.53568197785721 - task: type: STS dataset: name: MTEB STS22 (en) type: mteb/sts22-crosslingual-sts config: en split: test revision: eea2b4fe26a775864c896887d910b76a8098ad3f metrics: - type: cos_sim_pearson value: 66.24644693819846 - type: cos_sim_spearman value: 66.09889420525377 - type: euclidean_pearson value: 63.72551583520747 - type: euclidean_spearman value: 63.01385470780679 - type: manhattan_pearson value: 64.09258157214097 - type: manhattan_spearman value: 63.080517752822594 - task: type: STS dataset: name: MTEB STSBenchmark type: mteb/stsbenchmark-sts config: default split: test revision: b0fddb56ed78048fa8b90373c8a3cfc37b684831 metrics: - type: cos_sim_pearson value: 86.27321463839989 - type: cos_sim_spearman value: 86.37572865993327 - type: euclidean_pearson value: 86.36268020198149 - type: euclidean_spearman value: 86.31089339478922 - type: manhattan_pearson value: 86.4260445761947 - type: manhattan_spearman value: 86.45885895320457 - task: type: Reranking dataset: name: MTEB SciDocsRR type: mteb/scidocs-reranking config: default split: test revision: d3c5e1fc0b855ab6097bf1cda04dd73947d7caab metrics: - type: map value: 86.52456702387798 - type: mrr value: 96.34556529164372 - task: type: Retrieval dataset: name: MTEB SciFact type: mteb/scifact config: default split: test revision: 0228b52cf27578f30900b9e5271d331663a030d7 metrics: - type: map_at_1 value: 61.99400000000001 - type: map_at_10 value: 73.38799999999999 - type: map_at_100 value: 73.747 - type: map_at_1000 value: 73.75 - type: map_at_3 value: 70.04599999999999 - type: map_at_5 value: 72.095 - type: mrr_at_1 value: 65.0 - type: mrr_at_10 value: 74.42800000000001 - type: mrr_at_100 value: 74.722 - type: mrr_at_1000 value: 74.725 - type: mrr_at_3 value: 72.056 - type: mrr_at_5 value: 73.60600000000001 - type: ndcg_at_1 value: 65.0 - type: ndcg_at_10 value: 78.435 - type: ndcg_at_100 value: 79.922 - type: ndcg_at_1000 value: 80.00500000000001 - type: ndcg_at_3 value: 73.05199999999999 - type: ndcg_at_5 value: 75.98 - type: precision_at_1 value: 65.0 - type: precision_at_10 value: 10.5 - type: precision_at_100 value: 1.123 - type: precision_at_1000 value: 0.11299999999999999 - type: precision_at_3 value: 28.555999999999997 - type: precision_at_5 value: 19.0 - type: recall_at_1 value: 61.99400000000001 - type: recall_at_10 value: 92.72200000000001 - type: recall_at_100 value: 99.333 - type: recall_at_1000 value: 100.0 - type: recall_at_3 value: 78.739 - type: recall_at_5 value: 85.828 - task: type: PairClassification dataset: name: MTEB SprintDuplicateQuestions type: mteb/sprintduplicatequestions-pairclassification config: default split: test revision: d66bd1f72af766a5cc4b0ca5e00c162f89e8cc46 metrics: - type: cos_sim_accuracy value: 99.79009900990098 - type: cos_sim_ap value: 95.3203137438653 - type: cos_sim_f1 value: 89.12386706948641 - type: cos_sim_precision value: 89.75659229208925 - type: cos_sim_recall value: 88.5 - type: dot_accuracy value: 99.67821782178218 - type: dot_ap value: 89.94069840000675 - type: dot_f1 value: 83.45902463549521 - type: dot_precision value: 83.9231547017189 - type: dot_recall value: 83.0 - type: euclidean_accuracy value: 99.78613861386138 - type: euclidean_ap 
value: 95.10648259135526 - type: euclidean_f1 value: 88.77338877338877 - type: euclidean_precision value: 92.42424242424242 - type: euclidean_recall value: 85.39999999999999 - type: manhattan_accuracy value: 99.7950495049505 - type: manhattan_ap value: 95.29987661320946 - type: manhattan_f1 value: 89.21313183949972 - type: manhattan_precision value: 93.14472252448314 - type: manhattan_recall value: 85.6 - type: max_accuracy value: 99.7950495049505 - type: max_ap value: 95.3203137438653 - type: max_f1 value: 89.21313183949972 - task: type: Clustering dataset: name: MTEB StackExchangeClustering type: mteb/stackexchange-clustering config: default split: test revision: 6cbc1f7b2bc0622f2e39d2c77fa502909748c259 metrics: - type: v_measure value: 67.65446577183913 - task: type: Clustering dataset: name: MTEB StackExchangeClusteringP2P type: mteb/stackexchange-clustering-p2p config: default split: test revision: 815ca46b2622cec33ccafc3735d572c266efdb44 metrics: - type: v_measure value: 46.30749237193961 - task: type: Reranking dataset: name: MTEB StackOverflowDupQuestions type: mteb/stackoverflowdupquestions-reranking config: default split: test revision: e185fbe320c72810689fc5848eb6114e1ef5ec69 metrics: - type: map value: 54.91481849959949 - type: mrr value: 55.853506175197346 - task: type: Summarization dataset: name: MTEB SummEval type: mteb/summeval config: default split: test revision: cda12ad7615edc362dbf25a00fdd61d3b1eaf93c metrics: - type: cos_sim_pearson value: 30.08196549170419 - type: cos_sim_spearman value: 31.16661390597077 - type: dot_pearson value: 29.892258410943466 - type: dot_spearman value: 30.51328811965085 - task: type: Retrieval dataset: name: MTEB TRECCOVID type: mteb/trec-covid config: default split: test revision: None metrics: - type: map_at_1 value: 0.23900000000000002 - type: map_at_10 value: 2.173 - type: map_at_100 value: 14.24 - type: map_at_1000 value: 35.309000000000005 - type: map_at_3 value: 0.7100000000000001 - type: map_at_5 value: 1.163 - type: mrr_at_1 value: 92.0 - type: mrr_at_10 value: 96.0 - type: mrr_at_100 value: 96.0 - type: mrr_at_1000 value: 96.0 - type: mrr_at_3 value: 96.0 - type: mrr_at_5 value: 96.0 - type: ndcg_at_1 value: 90.0 - type: ndcg_at_10 value: 85.382 - type: ndcg_at_100 value: 68.03 - type: ndcg_at_1000 value: 61.021 - type: ndcg_at_3 value: 89.765 - type: ndcg_at_5 value: 88.444 - type: precision_at_1 value: 92.0 - type: precision_at_10 value: 88.0 - type: precision_at_100 value: 70.02000000000001 - type: precision_at_1000 value: 26.984 - type: precision_at_3 value: 94.0 - type: precision_at_5 value: 92.80000000000001 - type: recall_at_1 value: 0.23900000000000002 - type: recall_at_10 value: 2.313 - type: recall_at_100 value: 17.049 - type: recall_at_1000 value: 57.489999999999995 - type: recall_at_3 value: 0.737 - type: recall_at_5 value: 1.221 - task: type: Retrieval dataset: name: MTEB Touche2020 type: mteb/touche2020 config: default split: test revision: a34f9a33db75fa0cbb21bb5cfc3dae8dc8bec93f metrics: - type: map_at_1 value: 2.75 - type: map_at_10 value: 11.29 - type: map_at_100 value: 18.032999999999998 - type: map_at_1000 value: 19.746 - type: map_at_3 value: 6.555 - type: map_at_5 value: 8.706999999999999 - type: mrr_at_1 value: 34.694 - type: mrr_at_10 value: 50.55 - type: mrr_at_100 value: 51.659 - type: mrr_at_1000 value: 51.659 - type: mrr_at_3 value: 47.278999999999996 - type: mrr_at_5 value: 49.728 - type: ndcg_at_1 value: 32.653 - type: ndcg_at_10 value: 27.894000000000002 - type: ndcg_at_100 value: 39.769 - type: 
ndcg_at_1000 value: 51.495999999999995 - type: ndcg_at_3 value: 32.954 - type: ndcg_at_5 value: 31.502999999999997 - type: precision_at_1 value: 34.694 - type: precision_at_10 value: 23.265 - type: precision_at_100 value: 7.898 - type: precision_at_1000 value: 1.58 - type: precision_at_3 value: 34.694 - type: precision_at_5 value: 31.429000000000002 - type: recall_at_1 value: 2.75 - type: recall_at_10 value: 16.953 - type: recall_at_100 value: 48.68 - type: recall_at_1000 value: 85.18599999999999 - type: recall_at_3 value: 7.710999999999999 - type: recall_at_5 value: 11.484 - task: type: Classification dataset: name: MTEB ToxicConversationsClassification type: mteb/toxic_conversations_50k config: default split: test revision: d7c0de2777da35d6aae2200a62c6e0e5af397c4c metrics: - type: accuracy value: 82.66099999999999 - type: ap value: 25.555698090238337 - type: f1 value: 66.48402012461622 - task: type: Classification dataset: name: MTEB TweetSentimentExtractionClassification type: mteb/tweet_sentiment_extraction config: default split: test revision: d604517c81ca91fe16a244d1248fc021f9ecee7a metrics: - type: accuracy value: 72.94567062818335 - type: f1 value: 73.28139189595674 - task: type: Clustering dataset: name: MTEB TwentyNewsgroupsClustering type: mteb/twentynewsgroups-clustering config: default split: test revision: 6125ec4e24fa026cec8a478383ee943acfbd5449 metrics: - type: v_measure value: 49.581627240203474 - task: type: PairClassification dataset: name: MTEB TwitterSemEval2015 type: mteb/twittersemeval2015-pairclassification config: default split: test revision: 70970daeab8776df92f5ea462b6173c0b46fd2d1 metrics: - type: cos_sim_accuracy value: 87.78089050485785 - type: cos_sim_ap value: 79.64487116574168 - type: cos_sim_f1 value: 72.46563021970964 - type: cos_sim_precision value: 70.62359128474831 - type: cos_sim_recall value: 74.40633245382587 - type: dot_accuracy value: 86.2609524944865 - type: dot_ap value: 75.513046857613 - type: dot_f1 value: 68.58213616489695 - type: dot_precision value: 65.12455516014235 - type: dot_recall value: 72.42744063324538 - type: euclidean_accuracy value: 87.6080348095607 - type: euclidean_ap value: 79.00204933649795 - type: euclidean_f1 value: 72.14495342605589 - type: euclidean_precision value: 69.85421299728193 - type: euclidean_recall value: 74.5910290237467 - type: manhattan_accuracy value: 87.59611372712642 - type: manhattan_ap value: 78.78523756706264 - type: manhattan_f1 value: 71.86499137718648 - type: manhattan_precision value: 67.39833641404806 - type: manhattan_recall value: 76.96569920844327 - type: max_accuracy value: 87.78089050485785 - type: max_ap value: 79.64487116574168 - type: max_f1 value: 72.46563021970964 - task: type: PairClassification dataset: name: MTEB TwitterURLCorpus type: mteb/twitterurlcorpus-pairclassification config: default split: test revision: 8b6510b0b1fa4e4c4f879467980e9be563ec1cdf metrics: - type: cos_sim_accuracy value: 89.98719292117825 - type: cos_sim_ap value: 87.58146137353202 - type: cos_sim_f1 value: 80.28543232369239 - type: cos_sim_precision value: 79.1735289714029 - type: cos_sim_recall value: 81.42901139513397 - type: dot_accuracy value: 88.9199363526992 - type: dot_ap value: 84.98499998630417 - type: dot_f1 value: 78.21951400757969 - type: dot_precision value: 75.58523624874336 - type: dot_recall value: 81.04404065291038 - type: euclidean_accuracy value: 89.77374160748244 - type: euclidean_ap value: 87.35151562835209 - type: euclidean_f1 value: 79.92160922940393 - type: euclidean_precision value: 
76.88531587933979 - type: euclidean_recall value: 83.20757622420696 - type: manhattan_accuracy value: 89.72717041176699 - type: manhattan_ap value: 87.34065592142515 - type: manhattan_f1 value: 79.85603419187943 - type: manhattan_precision value: 77.82243332115455 - type: manhattan_recall value: 81.99876809362489 - type: max_accuracy value: 89.98719292117825 - type: max_ap value: 87.58146137353202 - type: max_f1 value: 80.28543232369239 - task: type: STS dataset: name: MTEB AFQMC type: C-MTEB/AFQMC config: default split: validation revision: b44c3b011063adb25877c13823db83bb193913c4 metrics: - type: cos_sim_pearson value: 53.45954203592337 - type: cos_sim_spearman value: 58.42154680418638 - type: euclidean_pearson value: 56.41543791722753 - type: euclidean_spearman value: 58.39328016640146 - type: manhattan_pearson value: 56.318510356833876 - type: manhattan_spearman value: 58.28423447818184 - task: type: STS dataset: name: MTEB ATEC type: C-MTEB/ATEC config: default split: test revision: 0f319b1142f28d00e055a6770f3f726ae9b7d865 metrics: - type: cos_sim_pearson value: 50.78356460675945 - type: cos_sim_spearman value: 55.6530411663269 - type: euclidean_pearson value: 56.50763660417816 - type: euclidean_spearman value: 55.733823335669065 - type: manhattan_pearson value: 56.45323093512866 - type: manhattan_spearman value: 55.63248619032702 - task: type: Classification dataset: name: MTEB AmazonReviewsClassification (zh) type: mteb/amazon_reviews_multi config: zh split: test revision: 1399c76144fd37290681b995c656ef9b2e06e26d metrics: - type: accuracy value: 47.209999999999994 - type: f1 value: 46.08892432018655 - task: type: STS dataset: name: MTEB BQ type: C-MTEB/BQ config: default split: test revision: e3dda5e115e487b39ec7e618c0c6a29137052a55 metrics: - type: cos_sim_pearson value: 70.25573992001478 - type: cos_sim_spearman value: 73.85247134951433 - type: euclidean_pearson value: 72.60033082168442 - type: euclidean_spearman value: 73.72445893756499 - type: manhattan_pearson value: 72.59932284620231 - type: manhattan_spearman value: 73.68002490614583 - task: type: Clustering dataset: name: MTEB CLSClusteringP2P type: C-MTEB/CLSClusteringP2P config: default split: test revision: 4b6227591c6c1a73bc76b1055f3b7f3588e72476 metrics: - type: v_measure value: 45.21317724305628 - task: type: Clustering dataset: name: MTEB CLSClusteringS2S type: C-MTEB/CLSClusteringS2S config: default split: test revision: e458b3f5414b62b7f9f83499ac1f5497ae2e869f metrics: - type: v_measure value: 42.49825170976724 - task: type: Reranking dataset: name: MTEB CMedQAv1 type: C-MTEB/CMedQAv1-reranking config: default split: test revision: 8d7f1e942507dac42dc58017c1a001c3717da7df metrics: - type: map value: 88.15661686810597 - type: mrr value: 90.11222222222223 - task: type: Reranking dataset: name: MTEB CMedQAv2 type: C-MTEB/CMedQAv2-reranking config: default split: test revision: 23d186750531a14a0357ca22cd92d712fd512ea0 metrics: - type: map value: 88.1204726064383 - type: mrr value: 90.20142857142858 - task: type: Retrieval dataset: name: MTEB CmedqaRetrieval type: C-MTEB/CmedqaRetrieval config: default split: dev revision: cd540c506dae1cf9e9a59c3e06f42030d54e7301 metrics: - type: map_at_1 value: 27.224999999999998 - type: map_at_10 value: 40.169 - type: map_at_100 value: 42.0 - type: map_at_1000 value: 42.109 - type: map_at_3 value: 35.76 - type: map_at_5 value: 38.221 - type: mrr_at_1 value: 40.56 - type: mrr_at_10 value: 49.118 - type: mrr_at_100 value: 50.092999999999996 - type: mrr_at_1000 value: 50.133 - type: 
mrr_at_3 value: 46.507 - type: mrr_at_5 value: 47.973 - type: ndcg_at_1 value: 40.56 - type: ndcg_at_10 value: 46.972 - type: ndcg_at_100 value: 54.04 - type: ndcg_at_1000 value: 55.862 - type: ndcg_at_3 value: 41.36 - type: ndcg_at_5 value: 43.704 - type: precision_at_1 value: 40.56 - type: precision_at_10 value: 10.302999999999999 - type: precision_at_100 value: 1.606 - type: precision_at_1000 value: 0.184 - type: precision_at_3 value: 23.064 - type: precision_at_5 value: 16.764000000000003 - type: recall_at_1 value: 27.224999999999998 - type: recall_at_10 value: 58.05200000000001 - type: recall_at_100 value: 87.092 - type: recall_at_1000 value: 99.099 - type: recall_at_3 value: 41.373 - type: recall_at_5 value: 48.453 - task: type: PairClassification dataset: name: MTEB Cmnli type: C-MTEB/CMNLI config: default split: validation revision: 41bc36f332156f7adc9e38f53777c959b2ae9766 metrics: - type: cos_sim_accuracy value: 77.40228502705953 - type: cos_sim_ap value: 86.22359172956327 - type: cos_sim_f1 value: 78.96328293736501 - type: cos_sim_precision value: 73.36945615091311 - type: cos_sim_recall value: 85.48047696983868 - type: dot_accuracy value: 75.53818400481059 - type: dot_ap value: 83.70164011305312 - type: dot_f1 value: 77.67298719348754 - type: dot_precision value: 67.49482401656314 - type: dot_recall value: 91.46598082768296 - type: euclidean_accuracy value: 77.94347564642213 - type: euclidean_ap value: 86.4652108728609 - type: euclidean_f1 value: 79.15555555555555 - type: euclidean_precision value: 75.41816641964853 - type: euclidean_recall value: 83.28267477203647 - type: manhattan_accuracy value: 77.45039085989175 - type: manhattan_ap value: 86.09986583900665 - type: manhattan_f1 value: 78.93669264438988 - type: manhattan_precision value: 72.63261296660117 - type: manhattan_recall value: 86.43909282207154 - type: max_accuracy value: 77.94347564642213 - type: max_ap value: 86.4652108728609 - type: max_f1 value: 79.15555555555555 - task: type: Retrieval dataset: name: MTEB CovidRetrieval type: C-MTEB/CovidRetrieval config: default split: dev revision: 1271c7809071a13532e05f25fb53511ffce77117 metrics: - type: map_at_1 value: 69.336 - type: map_at_10 value: 77.16 - type: map_at_100 value: 77.47500000000001 - type: map_at_1000 value: 77.482 - type: map_at_3 value: 75.42999999999999 - type: map_at_5 value: 76.468 - type: mrr_at_1 value: 69.44200000000001 - type: mrr_at_10 value: 77.132 - type: mrr_at_100 value: 77.43299999999999 - type: mrr_at_1000 value: 77.44 - type: mrr_at_3 value: 75.395 - type: mrr_at_5 value: 76.459 - type: ndcg_at_1 value: 69.547 - type: ndcg_at_10 value: 80.794 - type: ndcg_at_100 value: 82.245 - type: ndcg_at_1000 value: 82.40899999999999 - type: ndcg_at_3 value: 77.303 - type: ndcg_at_5 value: 79.168 - type: precision_at_1 value: 69.547 - type: precision_at_10 value: 9.305 - type: precision_at_100 value: 0.9979999999999999 - type: precision_at_1000 value: 0.101 - type: precision_at_3 value: 27.749000000000002 - type: precision_at_5 value: 17.576 - type: recall_at_1 value: 69.336 - type: recall_at_10 value: 92.097 - type: recall_at_100 value: 98.736 - type: recall_at_1000 value: 100.0 - type: recall_at_3 value: 82.64 - type: recall_at_5 value: 87.144 - task: type: Retrieval dataset: name: MTEB DuRetrieval type: C-MTEB/DuRetrieval config: default split: dev revision: a1a333e290fe30b10f3f56498e3a0d911a693ced metrics: - type: map_at_1 value: 26.817999999999998 - type: map_at_10 value: 82.67 - type: map_at_100 value: 85.304 - type: map_at_1000 value: 85.334 - 
type: map_at_3 value: 57.336 - type: map_at_5 value: 72.474 - type: mrr_at_1 value: 91.45 - type: mrr_at_10 value: 94.272 - type: mrr_at_100 value: 94.318 - type: mrr_at_1000 value: 94.32000000000001 - type: mrr_at_3 value: 94.0 - type: mrr_at_5 value: 94.17699999999999 - type: ndcg_at_1 value: 91.45 - type: ndcg_at_10 value: 89.404 - type: ndcg_at_100 value: 91.724 - type: ndcg_at_1000 value: 91.973 - type: ndcg_at_3 value: 88.104 - type: ndcg_at_5 value: 87.25699999999999 - type: precision_at_1 value: 91.45 - type: precision_at_10 value: 42.585 - type: precision_at_100 value: 4.838 - type: precision_at_1000 value: 0.49 - type: precision_at_3 value: 78.8 - type: precision_at_5 value: 66.66 - type: recall_at_1 value: 26.817999999999998 - type: recall_at_10 value: 90.67 - type: recall_at_100 value: 98.36200000000001 - type: recall_at_1000 value: 99.583 - type: recall_at_3 value: 59.614999999999995 - type: recall_at_5 value: 77.05199999999999 - task: type: Retrieval dataset: name: MTEB EcomRetrieval type: C-MTEB/EcomRetrieval config: default split: dev revision: 687de13dc7294d6fd9be10c6945f9e8fec8166b9 metrics: - type: map_at_1 value: 47.699999999999996 - type: map_at_10 value: 57.589999999999996 - type: map_at_100 value: 58.226 - type: map_at_1000 value: 58.251 - type: map_at_3 value: 55.233 - type: map_at_5 value: 56.633 - type: mrr_at_1 value: 47.699999999999996 - type: mrr_at_10 value: 57.589999999999996 - type: mrr_at_100 value: 58.226 - type: mrr_at_1000 value: 58.251 - type: mrr_at_3 value: 55.233 - type: mrr_at_5 value: 56.633 - type: ndcg_at_1 value: 47.699999999999996 - type: ndcg_at_10 value: 62.505 - type: ndcg_at_100 value: 65.517 - type: ndcg_at_1000 value: 66.19800000000001 - type: ndcg_at_3 value: 57.643 - type: ndcg_at_5 value: 60.181 - type: precision_at_1 value: 47.699999999999996 - type: precision_at_10 value: 7.8 - type: precision_at_100 value: 0.919 - type: precision_at_1000 value: 0.097 - type: precision_at_3 value: 21.532999999999998 - type: precision_at_5 value: 14.16 - type: recall_at_1 value: 47.699999999999996 - type: recall_at_10 value: 78.0 - type: recall_at_100 value: 91.9 - type: recall_at_1000 value: 97.3 - type: recall_at_3 value: 64.60000000000001 - type: recall_at_5 value: 70.8 - task: type: Classification dataset: name: MTEB IFlyTek type: C-MTEB/IFlyTek-classification config: default split: validation revision: 421605374b29664c5fc098418fe20ada9bd55f8a metrics: - type: accuracy value: 44.84801846864178 - type: f1 value: 37.47347897956339 - task: type: Classification dataset: name: MTEB JDReview type: C-MTEB/JDReview-classification config: default split: test revision: b7c64bd89eb87f8ded463478346f76731f07bf8b metrics: - type: accuracy value: 85.81613508442777 - type: ap value: 52.68244615477374 - type: f1 value: 80.0445640948843 - task: type: STS dataset: name: MTEB LCQMC type: C-MTEB/LCQMC config: default split: test revision: 17f9b096f80380fce5ed12a9be8be7784b337daf metrics: - type: cos_sim_pearson value: 69.57786502217138 - type: cos_sim_spearman value: 75.39106054489906 - type: euclidean_pearson value: 73.72082954602402 - type: euclidean_spearman value: 75.14421475913619 - type: manhattan_pearson value: 73.62463076633642 - type: manhattan_spearman value: 75.01301565104112 - task: type: Reranking dataset: name: MTEB MMarcoReranking type: C-MTEB/Mmarco-reranking config: default split: dev revision: None metrics: - type: map value: 29.143797057999134 - type: mrr value: 28.08174603174603 - task: type: Retrieval dataset: name: MTEB MMarcoRetrieval type: 
C-MTEB/MMarcoRetrieval config: default split: dev revision: 539bbde593d947e2a124ba72651aafc09eb33fc2 metrics: - type: map_at_1 value: 70.492 - type: map_at_10 value: 79.501 - type: map_at_100 value: 79.728 - type: map_at_1000 value: 79.735 - type: map_at_3 value: 77.77 - type: map_at_5 value: 78.851 - type: mrr_at_1 value: 72.822 - type: mrr_at_10 value: 80.001 - type: mrr_at_100 value: 80.19 - type: mrr_at_1000 value: 80.197 - type: mrr_at_3 value: 78.484 - type: mrr_at_5 value: 79.42099999999999 - type: ndcg_at_1 value: 72.822 - type: ndcg_at_10 value: 83.013 - type: ndcg_at_100 value: 84.013 - type: ndcg_at_1000 value: 84.20400000000001 - type: ndcg_at_3 value: 79.728 - type: ndcg_at_5 value: 81.542 - type: precision_at_1 value: 72.822 - type: precision_at_10 value: 9.917 - type: precision_at_100 value: 1.042 - type: precision_at_1000 value: 0.106 - type: precision_at_3 value: 29.847 - type: precision_at_5 value: 18.871 - type: recall_at_1 value: 70.492 - type: recall_at_10 value: 93.325 - type: recall_at_100 value: 97.822 - type: recall_at_1000 value: 99.319 - type: recall_at_3 value: 84.636 - type: recall_at_5 value: 88.93100000000001 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (zh-CN) type: mteb/amazon_massive_intent config: zh-CN split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 76.88298587760592 - type: f1 value: 73.89001762017176 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (zh-CN) type: mteb/amazon_massive_scenario config: zh-CN split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 80.76328177538669 - type: f1 value: 80.24718532423358 - task: type: Retrieval dataset: name: MTEB MedicalRetrieval type: C-MTEB/MedicalRetrieval config: default split: dev revision: 2039188fb5800a9803ba5048df7b76e6fb151fc6 metrics: - type: map_at_1 value: 49.6 - type: map_at_10 value: 55.620999999999995 - type: map_at_100 value: 56.204 - type: map_at_1000 value: 56.251 - type: map_at_3 value: 54.132999999999996 - type: map_at_5 value: 54.933 - type: mrr_at_1 value: 49.7 - type: mrr_at_10 value: 55.67100000000001 - type: mrr_at_100 value: 56.254000000000005 - type: mrr_at_1000 value: 56.301 - type: mrr_at_3 value: 54.18300000000001 - type: mrr_at_5 value: 54.983000000000004 - type: ndcg_at_1 value: 49.6 - type: ndcg_at_10 value: 58.645 - type: ndcg_at_100 value: 61.789 - type: ndcg_at_1000 value: 63.219 - type: ndcg_at_3 value: 55.567 - type: ndcg_at_5 value: 57.008 - type: precision_at_1 value: 49.6 - type: precision_at_10 value: 6.819999999999999 - type: precision_at_100 value: 0.836 - type: precision_at_1000 value: 0.095 - type: precision_at_3 value: 19.900000000000002 - type: precision_at_5 value: 12.64 - type: recall_at_1 value: 49.6 - type: recall_at_10 value: 68.2 - type: recall_at_100 value: 83.6 - type: recall_at_1000 value: 95.3 - type: recall_at_3 value: 59.699999999999996 - type: recall_at_5 value: 63.2 - task: type: Classification dataset: name: MTEB MultilingualSentiment type: C-MTEB/MultilingualSentiment-classification config: default split: validation revision: 46958b007a63fdbf239b7672c25d0bea67b5ea1a metrics: - type: accuracy value: 74.45666666666666 - type: f1 value: 74.32582402190089 - task: type: PairClassification dataset: name: MTEB Ocnli type: C-MTEB/OCNLI config: default split: validation revision: 66e76a618a34d6d565d5538088562851e6daa7ec metrics: - type: cos_sim_accuracy value: 80.67135896047645 - type: cos_sim_ap value: 
87.60421240712051 - type: cos_sim_f1 value: 82.1304131408661 - type: cos_sim_precision value: 77.68361581920904 - type: cos_sim_recall value: 87.11721224920802 - type: dot_accuracy value: 79.04710341093666 - type: dot_ap value: 85.6370059719336 - type: dot_f1 value: 80.763723150358 - type: dot_precision value: 73.69337979094077 - type: dot_recall value: 89.33474128827878 - type: euclidean_accuracy value: 81.05035192203573 - type: euclidean_ap value: 87.7880240053663 - type: euclidean_f1 value: 82.50244379276637 - type: euclidean_precision value: 76.7970882620564 - type: euclidean_recall value: 89.1235480464625 - type: manhattan_accuracy value: 80.61721710882512 - type: manhattan_ap value: 87.43568120591175 - type: manhattan_f1 value: 81.89526184538653 - type: manhattan_precision value: 77.5992438563327 - type: manhattan_recall value: 86.6948257655755 - type: max_accuracy value: 81.05035192203573 - type: max_ap value: 87.7880240053663 - type: max_f1 value: 82.50244379276637 - task: type: Classification dataset: name: MTEB OnlineShopping type: C-MTEB/OnlineShopping-classification config: default split: test revision: e610f2ebd179a8fda30ae534c3878750a96db120 metrics: - type: accuracy value: 93.5 - type: ap value: 91.31357903446782 - type: f1 value: 93.48088994006616 - task: type: STS dataset: name: MTEB PAWSX type: C-MTEB/PAWSX config: default split: test revision: 9c6a90e430ac22b5779fb019a23e820b11a8b5e1 metrics: - type: cos_sim_pearson value: 36.93293453538077 - type: cos_sim_spearman value: 42.45972506308574 - type: euclidean_pearson value: 42.34945133152159 - type: euclidean_spearman value: 42.331610303674644 - type: manhattan_pearson value: 42.31455070249498 - type: manhattan_spearman value: 42.19887982891834 - task: type: STS dataset: name: MTEB QBQTC type: C-MTEB/QBQTC config: default split: test revision: 790b0510dc52b1553e8c49f3d2afb48c0e5c48b7 metrics: - type: cos_sim_pearson value: 33.683290790043785 - type: cos_sim_spearman value: 35.149171171202994 - type: euclidean_pearson value: 32.33806561267862 - type: euclidean_spearman value: 34.483576387347966 - type: manhattan_pearson value: 32.47629754599608 - type: manhattan_spearman value: 34.66434471867615 - task: type: STS dataset: name: MTEB STS22 (zh) type: mteb/sts22-crosslingual-sts config: zh split: test revision: eea2b4fe26a775864c896887d910b76a8098ad3f metrics: - type: cos_sim_pearson value: 66.46322760516104 - type: cos_sim_spearman value: 67.398478319726 - type: euclidean_pearson value: 64.7223480293625 - type: euclidean_spearman value: 66.83118568812951 - type: manhattan_pearson value: 64.88440039828305 - type: manhattan_spearman value: 66.80429458952257 - task: type: STS dataset: name: MTEB STSB type: C-MTEB/STSB config: default split: test revision: 0cde68302b3541bb8b3c340dc0644b0b745b3dc0 metrics: - type: cos_sim_pearson value: 79.08991383232105 - type: cos_sim_spearman value: 79.39715677296854 - type: euclidean_pearson value: 78.63201279320496 - type: euclidean_spearman value: 79.40262660785731 - type: manhattan_pearson value: 78.98138363146906 - type: manhattan_spearman value: 79.79968413014194 - task: type: Reranking dataset: name: MTEB T2Reranking type: C-MTEB/T2Reranking config: default split: dev revision: 76631901a18387f85eaa53e5450019b87ad58ef9 metrics: - type: map value: 67.43289278789972 - type: mrr value: 77.53012460908535 - task: type: Retrieval dataset: name: MTEB T2Retrieval type: C-MTEB/T2Retrieval config: default split: dev revision: 8731a845f1bf500a4f111cf1070785c793d10e64 metrics: - type: map_at_1 
value: 27.733999999999998 - type: map_at_10 value: 78.24799999999999 - type: map_at_100 value: 81.765 - type: map_at_1000 value: 81.824 - type: map_at_3 value: 54.92 - type: map_at_5 value: 67.61399999999999 - type: mrr_at_1 value: 90.527 - type: mrr_at_10 value: 92.843 - type: mrr_at_100 value: 92.927 - type: mrr_at_1000 value: 92.93 - type: mrr_at_3 value: 92.45100000000001 - type: mrr_at_5 value: 92.693 - type: ndcg_at_1 value: 90.527 - type: ndcg_at_10 value: 85.466 - type: ndcg_at_100 value: 88.846 - type: ndcg_at_1000 value: 89.415 - type: ndcg_at_3 value: 86.768 - type: ndcg_at_5 value: 85.46000000000001 - type: precision_at_1 value: 90.527 - type: precision_at_10 value: 42.488 - type: precision_at_100 value: 5.024 - type: precision_at_1000 value: 0.516 - type: precision_at_3 value: 75.907 - type: precision_at_5 value: 63.727000000000004 - type: recall_at_1 value: 27.733999999999998 - type: recall_at_10 value: 84.346 - type: recall_at_100 value: 95.536 - type: recall_at_1000 value: 98.42999999999999 - type: recall_at_3 value: 56.455 - type: recall_at_5 value: 70.755 - task: type: Classification dataset: name: MTEB TNews type: C-MTEB/TNews-classification config: default split: validation revision: 317f262bf1e6126357bbe89e875451e4b0938fe4 metrics: - type: accuracy value: 49.952000000000005 - type: f1 value: 48.264617195258054 - task: type: Clustering dataset: name: MTEB ThuNewsClusteringP2P type: C-MTEB/ThuNewsClusteringP2P config: default split: test revision: 5798586b105c0434e4f0fe5e767abe619442cf93 metrics: - type: v_measure value: 68.23769904483508 - task: type: Clustering dataset: name: MTEB ThuNewsClusteringS2S type: C-MTEB/ThuNewsClusteringS2S config: default split: test revision: 8a8b2caeda43f39e13c4bc5bea0f8a667896e10d metrics: - type: v_measure value: 62.50294403136556 - task: type: Retrieval dataset: name: MTEB VideoRetrieval type: C-MTEB/VideoRetrieval config: default split: dev revision: 58c2597a5943a2ba48f4668c3b90d796283c5639 metrics: - type: map_at_1 value: 54.0 - type: map_at_10 value: 63.668 - type: map_at_100 value: 64.217 - type: map_at_1000 value: 64.23100000000001 - type: map_at_3 value: 61.7 - type: map_at_5 value: 62.870000000000005 - type: mrr_at_1 value: 54.0 - type: mrr_at_10 value: 63.668 - type: mrr_at_100 value: 64.217 - type: mrr_at_1000 value: 64.23100000000001 - type: mrr_at_3 value: 61.7 - type: mrr_at_5 value: 62.870000000000005 - type: ndcg_at_1 value: 54.0 - type: ndcg_at_10 value: 68.11399999999999 - type: ndcg_at_100 value: 70.723 - type: ndcg_at_1000 value: 71.123 - type: ndcg_at_3 value: 64.074 - type: ndcg_at_5 value: 66.178 - type: precision_at_1 value: 54.0 - type: precision_at_10 value: 8.200000000000001 - type: precision_at_100 value: 0.941 - type: precision_at_1000 value: 0.097 - type: precision_at_3 value: 23.633000000000003 - type: precision_at_5 value: 15.2 - type: recall_at_1 value: 54.0 - type: recall_at_10 value: 82.0 - type: recall_at_100 value: 94.1 - type: recall_at_1000 value: 97.3 - type: recall_at_3 value: 70.89999999999999 - type: recall_at_5 value: 76.0 - task: type: Classification dataset: name: MTEB Waimai type: C-MTEB/waimai-classification config: default split: test revision: 339287def212450dcaa9df8c22bf93e9980c7023 metrics: - type: accuracy value: 86.63000000000001 - type: ap value: 69.99457882599567 - type: f1 value: 85.07735617998541 - task: type: Clustering dataset: name: MTEB 8TagsClustering type: PL-MTEB/8tags-clustering config: default split: test revision: None metrics: - type: v_measure value: 44.594104491193555 
- task: type: Classification dataset: name: MTEB AllegroReviews type: PL-MTEB/allegro-reviews config: default split: test revision: None metrics: - type: accuracy value: 63.97614314115309 - type: f1 value: 52.15634261679283 - task: type: Retrieval dataset: name: MTEB ArguAna-PL type: clarin-knext/arguana-pl config: default split: test revision: 63fc86750af76253e8c760fc9e534bbf24d260a2 metrics: - type: map_at_1 value: 32.646 - type: map_at_10 value: 47.963 - type: map_at_100 value: 48.789 - type: map_at_1000 value: 48.797000000000004 - type: map_at_3 value: 43.196 - type: map_at_5 value: 46.016 - type: mrr_at_1 value: 33.073 - type: mrr_at_10 value: 48.126000000000005 - type: mrr_at_100 value: 48.946 - type: mrr_at_1000 value: 48.953 - type: mrr_at_3 value: 43.374 - type: mrr_at_5 value: 46.147 - type: ndcg_at_1 value: 32.646 - type: ndcg_at_10 value: 56.481 - type: ndcg_at_100 value: 59.922 - type: ndcg_at_1000 value: 60.07 - type: ndcg_at_3 value: 46.675 - type: ndcg_at_5 value: 51.76500000000001 - type: precision_at_1 value: 32.646 - type: precision_at_10 value: 8.371 - type: precision_at_100 value: 0.9860000000000001 - type: precision_at_1000 value: 0.1 - type: precision_at_3 value: 18.919 - type: precision_at_5 value: 13.825999999999999 - type: recall_at_1 value: 32.646 - type: recall_at_10 value: 83.71300000000001 - type: recall_at_100 value: 98.578 - type: recall_at_1000 value: 99.644 - type: recall_at_3 value: 56.757000000000005 - type: recall_at_5 value: 69.132 - task: type: Classification dataset: name: MTEB CBD type: PL-MTEB/cbd config: default split: test revision: None metrics: - type: accuracy value: 68.56 - type: ap value: 23.310493680488513 - type: f1 value: 58.85369533105693 - task: type: PairClassification dataset: name: MTEB CDSC-E type: PL-MTEB/cdsce-pairclassification config: default split: test revision: None metrics: - type: cos_sim_accuracy value: 88.5 - type: cos_sim_ap value: 72.42140924378361 - type: cos_sim_f1 value: 66.0919540229885 - type: cos_sim_precision value: 72.78481012658227 - type: cos_sim_recall value: 60.526315789473685 - type: dot_accuracy value: 88.5 - type: dot_ap value: 72.42140924378361 - type: dot_f1 value: 66.0919540229885 - type: dot_precision value: 72.78481012658227 - type: dot_recall value: 60.526315789473685 - type: euclidean_accuracy value: 88.5 - type: euclidean_ap value: 72.42140924378361 - type: euclidean_f1 value: 66.0919540229885 - type: euclidean_precision value: 72.78481012658227 - type: euclidean_recall value: 60.526315789473685 - type: manhattan_accuracy value: 88.5 - type: manhattan_ap value: 72.49745515311696 - type: manhattan_f1 value: 66.0968660968661 - type: manhattan_precision value: 72.04968944099379 - type: manhattan_recall value: 61.05263157894737 - type: max_accuracy value: 88.5 - type: max_ap value: 72.49745515311696 - type: max_f1 value: 66.0968660968661 - task: type: STS dataset: name: MTEB CDSC-R type: PL-MTEB/cdscr-sts config: default split: test revision: None metrics: - type: cos_sim_pearson value: 90.32269765590145 - type: cos_sim_spearman value: 89.73666311491672 - type: euclidean_pearson value: 88.2933868516544 - type: euclidean_spearman value: 89.73666311491672 - type: manhattan_pearson value: 88.33474590219448 - type: manhattan_spearman value: 89.8548364866583 - task: type: Retrieval dataset: name: MTEB DBPedia-PL type: clarin-knext/dbpedia-pl config: default split: test revision: 76afe41d9af165cc40999fcaa92312b8b012064a metrics: - type: map_at_1 value: 7.632999999999999 - type: map_at_10 value: 16.426 - 
type: map_at_100 value: 22.651 - type: map_at_1000 value: 24.372 - type: map_at_3 value: 11.706 - type: map_at_5 value: 13.529 - type: mrr_at_1 value: 60.75000000000001 - type: mrr_at_10 value: 68.613 - type: mrr_at_100 value: 69.001 - type: mrr_at_1000 value: 69.021 - type: mrr_at_3 value: 67.0 - type: mrr_at_5 value: 67.925 - type: ndcg_at_1 value: 49.875 - type: ndcg_at_10 value: 36.978 - type: ndcg_at_100 value: 40.031 - type: ndcg_at_1000 value: 47.566 - type: ndcg_at_3 value: 41.148 - type: ndcg_at_5 value: 38.702 - type: precision_at_1 value: 60.75000000000001 - type: precision_at_10 value: 29.7 - type: precision_at_100 value: 9.278 - type: precision_at_1000 value: 2.099 - type: precision_at_3 value: 44.0 - type: precision_at_5 value: 37.6 - type: recall_at_1 value: 7.632999999999999 - type: recall_at_10 value: 22.040000000000003 - type: recall_at_100 value: 44.024 - type: recall_at_1000 value: 67.848 - type: recall_at_3 value: 13.093 - type: recall_at_5 value: 15.973 - task: type: Retrieval dataset: name: MTEB FiQA-PL type: clarin-knext/fiqa-pl config: default split: test revision: 2e535829717f8bf9dc829b7f911cc5bbd4e6608e metrics: - type: map_at_1 value: 15.473 - type: map_at_10 value: 24.579 - type: map_at_100 value: 26.387 - type: map_at_1000 value: 26.57 - type: map_at_3 value: 21.278 - type: map_at_5 value: 23.179 - type: mrr_at_1 value: 30.709999999999997 - type: mrr_at_10 value: 38.994 - type: mrr_at_100 value: 39.993 - type: mrr_at_1000 value: 40.044999999999995 - type: mrr_at_3 value: 36.342999999999996 - type: mrr_at_5 value: 37.846999999999994 - type: ndcg_at_1 value: 30.709999999999997 - type: ndcg_at_10 value: 31.608999999999998 - type: ndcg_at_100 value: 38.807 - type: ndcg_at_1000 value: 42.208 - type: ndcg_at_3 value: 28.086 - type: ndcg_at_5 value: 29.323 - type: precision_at_1 value: 30.709999999999997 - type: precision_at_10 value: 8.688 - type: precision_at_100 value: 1.608 - type: precision_at_1000 value: 0.22100000000000003 - type: precision_at_3 value: 18.724 - type: precision_at_5 value: 13.950999999999999 - type: recall_at_1 value: 15.473 - type: recall_at_10 value: 38.361000000000004 - type: recall_at_100 value: 65.2 - type: recall_at_1000 value: 85.789 - type: recall_at_3 value: 25.401 - type: recall_at_5 value: 30.875999999999998 - task: type: Retrieval dataset: name: MTEB HotpotQA-PL type: clarin-knext/hotpotqa-pl config: default split: test revision: a0bd479ac97b4ccb5bd6ce320c415d0bb4beb907 metrics: - type: map_at_1 value: 38.096000000000004 - type: map_at_10 value: 51.44499999999999 - type: map_at_100 value: 52.325 - type: map_at_1000 value: 52.397000000000006 - type: map_at_3 value: 48.626999999999995 - type: map_at_5 value: 50.342 - type: mrr_at_1 value: 76.19200000000001 - type: mrr_at_10 value: 81.191 - type: mrr_at_100 value: 81.431 - type: mrr_at_1000 value: 81.443 - type: mrr_at_3 value: 80.30199999999999 - type: mrr_at_5 value: 80.85900000000001 - type: ndcg_at_1 value: 76.19200000000001 - type: ndcg_at_10 value: 60.9 - type: ndcg_at_100 value: 64.14699999999999 - type: ndcg_at_1000 value: 65.647 - type: ndcg_at_3 value: 56.818000000000005 - type: ndcg_at_5 value: 59.019999999999996 - type: precision_at_1 value: 76.19200000000001 - type: precision_at_10 value: 12.203 - type: precision_at_100 value: 1.478 - type: precision_at_1000 value: 0.168 - type: precision_at_3 value: 34.616 - type: precision_at_5 value: 22.515 - type: recall_at_1 value: 38.096000000000004 - type: recall_at_10 value: 61.013 - type: recall_at_100 value: 73.90299999999999 - 
type: recall_at_1000 value: 83.91 - type: recall_at_3 value: 51.92400000000001 - type: recall_at_5 value: 56.286 - task: type: Retrieval dataset: name: MTEB MSMARCO-PL type: clarin-knext/msmarco-pl config: default split: test revision: 8634c07806d5cce3a6138e260e59b81760a0a640 metrics: - type: map_at_1 value: 1.548 - type: map_at_10 value: 11.049000000000001 - type: map_at_100 value: 28.874 - type: map_at_1000 value: 34.931 - type: map_at_3 value: 4.162 - type: map_at_5 value: 6.396 - type: mrr_at_1 value: 90.69800000000001 - type: mrr_at_10 value: 92.093 - type: mrr_at_100 value: 92.345 - type: mrr_at_1000 value: 92.345 - type: mrr_at_3 value: 91.86 - type: mrr_at_5 value: 91.86 - type: ndcg_at_1 value: 74.031 - type: ndcg_at_10 value: 63.978 - type: ndcg_at_100 value: 53.101 - type: ndcg_at_1000 value: 60.675999999999995 - type: ndcg_at_3 value: 71.421 - type: ndcg_at_5 value: 68.098 - type: precision_at_1 value: 90.69800000000001 - type: precision_at_10 value: 71.86 - type: precision_at_100 value: 31.395 - type: precision_at_1000 value: 5.981 - type: precision_at_3 value: 84.49600000000001 - type: precision_at_5 value: 79.07 - type: recall_at_1 value: 1.548 - type: recall_at_10 value: 12.149000000000001 - type: recall_at_100 value: 40.794999999999995 - type: recall_at_1000 value: 67.974 - type: recall_at_3 value: 4.244 - type: recall_at_5 value: 6.608 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (pl) type: mteb/amazon_massive_intent config: pl split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 73.55413584398119 - type: f1 value: 69.65610882318181 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (pl) type: mteb/amazon_massive_scenario config: pl split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 76.37188971082716 - type: f1 value: 75.64847309941361 - task: type: Retrieval dataset: name: MTEB NFCorpus-PL type: clarin-knext/nfcorpus-pl config: default split: test revision: 9a6f9567fda928260afed2de480d79c98bf0bec0 metrics: - type: map_at_1 value: 4.919 - type: map_at_10 value: 10.834000000000001 - type: map_at_100 value: 13.38 - type: map_at_1000 value: 14.581 - type: map_at_3 value: 8.198 - type: map_at_5 value: 9.428 - type: mrr_at_1 value: 41.176 - type: mrr_at_10 value: 50.083 - type: mrr_at_100 value: 50.559 - type: mrr_at_1000 value: 50.604000000000006 - type: mrr_at_3 value: 47.936 - type: mrr_at_5 value: 49.407000000000004 - type: ndcg_at_1 value: 39.628 - type: ndcg_at_10 value: 30.098000000000003 - type: ndcg_at_100 value: 27.061 - type: ndcg_at_1000 value: 35.94 - type: ndcg_at_3 value: 35.135 - type: ndcg_at_5 value: 33.335 - type: precision_at_1 value: 41.176 - type: precision_at_10 value: 22.259999999999998 - type: precision_at_100 value: 6.712 - type: precision_at_1000 value: 1.9060000000000001 - type: precision_at_3 value: 33.23 - type: precision_at_5 value: 29.04 - type: recall_at_1 value: 4.919 - type: recall_at_10 value: 14.196 - type: recall_at_100 value: 26.948 - type: recall_at_1000 value: 59.211000000000006 - type: recall_at_3 value: 9.44 - type: recall_at_5 value: 11.569 - task: type: Retrieval dataset: name: MTEB NQ-PL type: clarin-knext/nq-pl config: default split: test revision: f171245712cf85dd4700b06bef18001578d0ca8d metrics: - type: map_at_1 value: 25.35 - type: map_at_10 value: 37.884 - type: map_at_100 value: 38.955 - type: map_at_1000 value: 39.007999999999996 - type: map_at_3 value: 34.239999999999995 - type: 
map_at_5 value: 36.398 - type: mrr_at_1 value: 28.737000000000002 - type: mrr_at_10 value: 39.973 - type: mrr_at_100 value: 40.844 - type: mrr_at_1000 value: 40.885 - type: mrr_at_3 value: 36.901 - type: mrr_at_5 value: 38.721 - type: ndcg_at_1 value: 28.708 - type: ndcg_at_10 value: 44.204 - type: ndcg_at_100 value: 48.978 - type: ndcg_at_1000 value: 50.33 - type: ndcg_at_3 value: 37.36 - type: ndcg_at_5 value: 40.912 - type: precision_at_1 value: 28.708 - type: precision_at_10 value: 7.367 - type: precision_at_100 value: 1.0030000000000001 - type: precision_at_1000 value: 0.11299999999999999 - type: precision_at_3 value: 17.034 - type: precision_at_5 value: 12.293999999999999 - type: recall_at_1 value: 25.35 - type: recall_at_10 value: 61.411 - type: recall_at_100 value: 82.599 - type: recall_at_1000 value: 92.903 - type: recall_at_3 value: 43.728 - type: recall_at_5 value: 51.854 - task: type: Classification dataset: name: MTEB PAC type: laugustyniak/abusive-clauses-pl config: default split: test revision: None metrics: - type: accuracy value: 69.04141326382856 - type: ap value: 77.49422763833996 - type: f1 value: 66.73472657783407 - task: type: PairClassification dataset: name: MTEB PPC type: PL-MTEB/ppc-pairclassification config: default split: test revision: None metrics: - type: cos_sim_accuracy value: 81.0 - type: cos_sim_ap value: 91.47194213011349 - type: cos_sim_f1 value: 84.73767885532592 - type: cos_sim_precision value: 81.49847094801224 - type: cos_sim_recall value: 88.24503311258279 - type: dot_accuracy value: 81.0 - type: dot_ap value: 91.47194213011349 - type: dot_f1 value: 84.73767885532592 - type: dot_precision value: 81.49847094801224 - type: dot_recall value: 88.24503311258279 - type: euclidean_accuracy value: 81.0 - type: euclidean_ap value: 91.47194213011349 - type: euclidean_f1 value: 84.73767885532592 - type: euclidean_precision value: 81.49847094801224 - type: euclidean_recall value: 88.24503311258279 - type: manhattan_accuracy value: 81.0 - type: manhattan_ap value: 91.46464475050571 - type: manhattan_f1 value: 84.48687350835321 - type: manhattan_precision value: 81.31699846860643 - type: manhattan_recall value: 87.91390728476821 - type: max_accuracy value: 81.0 - type: max_ap value: 91.47194213011349 - type: max_f1 value: 84.73767885532592 - task: type: PairClassification dataset: name: MTEB PSC type: PL-MTEB/psc-pairclassification config: default split: test revision: None metrics: - type: cos_sim_accuracy value: 97.6808905380334 - type: cos_sim_ap value: 99.27948611836348 - type: cos_sim_f1 value: 96.15975422427034 - type: cos_sim_precision value: 96.90402476780186 - type: cos_sim_recall value: 95.42682926829268 - type: dot_accuracy value: 97.6808905380334 - type: dot_ap value: 99.2794861183635 - type: dot_f1 value: 96.15975422427034 - type: dot_precision value: 96.90402476780186 - type: dot_recall value: 95.42682926829268 - type: euclidean_accuracy value: 97.6808905380334 - type: euclidean_ap value: 99.2794861183635 - type: euclidean_f1 value: 96.15975422427034 - type: euclidean_precision value: 96.90402476780186 - type: euclidean_recall value: 95.42682926829268 - type: manhattan_accuracy value: 97.6808905380334 - type: manhattan_ap value: 99.28715055268721 - type: manhattan_f1 value: 96.14791987673343 - type: manhattan_precision value: 97.19626168224299 - type: manhattan_recall value: 95.1219512195122 - type: max_accuracy value: 97.6808905380334 - type: max_ap value: 99.28715055268721 - type: max_f1 value: 96.15975422427034 - task: type: Classification 
dataset: name: MTEB PolEmo2.0-IN type: PL-MTEB/polemo2_in config: default split: test revision: None metrics: - type: accuracy value: 86.16343490304708 - type: f1 value: 83.3442579486744 - task: type: Classification dataset: name: MTEB PolEmo2.0-OUT type: PL-MTEB/polemo2_out config: default split: test revision: None metrics: - type: accuracy value: 68.40080971659918 - type: f1 value: 53.13720751142237 - task: type: Retrieval dataset: name: MTEB Quora-PL type: clarin-knext/quora-pl config: default split: test revision: 0be27e93455051e531182b85e85e425aba12e9d4 metrics: - type: map_at_1 value: 63.322 - type: map_at_10 value: 76.847 - type: map_at_100 value: 77.616 - type: map_at_1000 value: 77.644 - type: map_at_3 value: 73.624 - type: map_at_5 value: 75.603 - type: mrr_at_1 value: 72.88 - type: mrr_at_10 value: 80.376 - type: mrr_at_100 value: 80.604 - type: mrr_at_1000 value: 80.61 - type: mrr_at_3 value: 78.92 - type: mrr_at_5 value: 79.869 - type: ndcg_at_1 value: 72.89999999999999 - type: ndcg_at_10 value: 81.43 - type: ndcg_at_100 value: 83.394 - type: ndcg_at_1000 value: 83.685 - type: ndcg_at_3 value: 77.62599999999999 - type: ndcg_at_5 value: 79.656 - type: precision_at_1 value: 72.89999999999999 - type: precision_at_10 value: 12.548 - type: precision_at_100 value: 1.4869999999999999 - type: precision_at_1000 value: 0.155 - type: precision_at_3 value: 34.027 - type: precision_at_5 value: 22.654 - type: recall_at_1 value: 63.322 - type: recall_at_10 value: 90.664 - type: recall_at_100 value: 97.974 - type: recall_at_1000 value: 99.636 - type: recall_at_3 value: 80.067 - type: recall_at_5 value: 85.526 - task: type: Retrieval dataset: name: MTEB SCIDOCS-PL type: clarin-knext/scidocs-pl config: default split: test revision: 45452b03f05560207ef19149545f168e596c9337 metrics: - type: map_at_1 value: 3.95 - type: map_at_10 value: 9.658999999999999 - type: map_at_100 value: 11.384 - type: map_at_1000 value: 11.677 - type: map_at_3 value: 7.055 - type: map_at_5 value: 8.244 - type: mrr_at_1 value: 19.5 - type: mrr_at_10 value: 28.777 - type: mrr_at_100 value: 29.936 - type: mrr_at_1000 value: 30.009999999999998 - type: mrr_at_3 value: 25.55 - type: mrr_at_5 value: 27.284999999999997 - type: ndcg_at_1 value: 19.5 - type: ndcg_at_10 value: 16.589000000000002 - type: ndcg_at_100 value: 23.879 - type: ndcg_at_1000 value: 29.279 - type: ndcg_at_3 value: 15.719 - type: ndcg_at_5 value: 13.572000000000001 - type: precision_at_1 value: 19.5 - type: precision_at_10 value: 8.62 - type: precision_at_100 value: 1.924 - type: precision_at_1000 value: 0.322 - type: precision_at_3 value: 14.6 - type: precision_at_5 value: 11.78 - type: recall_at_1 value: 3.95 - type: recall_at_10 value: 17.477999999999998 - type: recall_at_100 value: 38.99 - type: recall_at_1000 value: 65.417 - type: recall_at_3 value: 8.883000000000001 - type: recall_at_5 value: 11.933 - task: type: PairClassification dataset: name: MTEB SICK-E-PL type: PL-MTEB/sicke-pl-pairclassification config: default split: test revision: None metrics: - type: cos_sim_accuracy value: 83.48960456583775 - type: cos_sim_ap value: 76.31522115825375 - type: cos_sim_f1 value: 70.35573122529645 - type: cos_sim_precision value: 70.9934735315446 - type: cos_sim_recall value: 69.72934472934473 - type: dot_accuracy value: 83.48960456583775 - type: dot_ap value: 76.31522115825373 - type: dot_f1 value: 70.35573122529645 - type: dot_precision value: 70.9934735315446 - type: dot_recall value: 69.72934472934473 - type: euclidean_accuracy value: 83.48960456583775 - 
type: euclidean_ap value: 76.31522115825373 - type: euclidean_f1 value: 70.35573122529645 - type: euclidean_precision value: 70.9934735315446 - type: euclidean_recall value: 69.72934472934473 - type: manhattan_accuracy value: 83.46922136159804 - type: manhattan_ap value: 76.18474601388084 - type: manhattan_f1 value: 70.34779490856937 - type: manhattan_precision value: 70.83032490974729 - type: manhattan_recall value: 69.87179487179486 - type: max_accuracy value: 83.48960456583775 - type: max_ap value: 76.31522115825375 - type: max_f1 value: 70.35573122529645 - task: type: STS dataset: name: MTEB SICK-R-PL type: PL-MTEB/sickr-pl-sts config: default split: test revision: None metrics: - type: cos_sim_pearson value: 77.95374883876302 - type: cos_sim_spearman value: 73.77630219171942 - type: euclidean_pearson value: 75.81927069594934 - type: euclidean_spearman value: 73.7763211303831 - type: manhattan_pearson value: 76.03126859057528 - type: manhattan_spearman value: 73.96528138013369 - task: type: STS dataset: name: MTEB STS22 (pl) type: mteb/sts22-crosslingual-sts config: pl split: test revision: eea2b4fe26a775864c896887d910b76a8098ad3f metrics: - type: cos_sim_pearson value: 37.388282764841826 - type: cos_sim_spearman value: 40.83477184710897 - type: euclidean_pearson value: 26.754737044177805 - type: euclidean_spearman value: 40.83477184710897 - type: manhattan_pearson value: 26.760453110872458 - type: manhattan_spearman value: 41.034477441383856 - task: type: Retrieval dataset: name: MTEB SciFact-PL type: clarin-knext/scifact-pl config: default split: test revision: 47932a35f045ef8ed01ba82bf9ff67f6e109207e metrics: - type: map_at_1 value: 49.15 - type: map_at_10 value: 61.690999999999995 - type: map_at_100 value: 62.348000000000006 - type: map_at_1000 value: 62.38 - type: map_at_3 value: 58.824 - type: map_at_5 value: 60.662000000000006 - type: mrr_at_1 value: 51.333 - type: mrr_at_10 value: 62.731 - type: mrr_at_100 value: 63.245 - type: mrr_at_1000 value: 63.275000000000006 - type: mrr_at_3 value: 60.667 - type: mrr_at_5 value: 61.93300000000001 - type: ndcg_at_1 value: 51.333 - type: ndcg_at_10 value: 67.168 - type: ndcg_at_100 value: 69.833 - type: ndcg_at_1000 value: 70.56700000000001 - type: ndcg_at_3 value: 62.40599999999999 - type: ndcg_at_5 value: 65.029 - type: precision_at_1 value: 51.333 - type: precision_at_10 value: 9.333 - type: precision_at_100 value: 1.0699999999999998 - type: precision_at_1000 value: 0.11299999999999999 - type: precision_at_3 value: 25.333 - type: precision_at_5 value: 17.067 - type: recall_at_1 value: 49.15 - type: recall_at_10 value: 82.533 - type: recall_at_100 value: 94.167 - type: recall_at_1000 value: 99.667 - type: recall_at_3 value: 69.917 - type: recall_at_5 value: 76.356 - task: type: Retrieval dataset: name: MTEB TRECCOVID-PL type: clarin-knext/trec-covid-pl config: default split: test revision: 81bcb408f33366c2a20ac54adafad1ae7e877fdd metrics: - type: map_at_1 value: 0.261 - type: map_at_10 value: 2.1260000000000003 - type: map_at_100 value: 12.171999999999999 - type: map_at_1000 value: 26.884999999999998 - type: map_at_3 value: 0.695 - type: map_at_5 value: 1.134 - type: mrr_at_1 value: 96.0 - type: mrr_at_10 value: 96.952 - type: mrr_at_100 value: 96.952 - type: mrr_at_1000 value: 96.952 - type: mrr_at_3 value: 96.667 - type: mrr_at_5 value: 96.667 - type: ndcg_at_1 value: 92.0 - type: ndcg_at_10 value: 81.193 - type: ndcg_at_100 value: 61.129 - type: ndcg_at_1000 value: 51.157 - type: ndcg_at_3 value: 85.693 - type: ndcg_at_5 value: 84.129 
- type: precision_at_1 value: 96.0 - type: precision_at_10 value: 85.39999999999999 - type: precision_at_100 value: 62.03999999999999 - type: precision_at_1000 value: 22.224 - type: precision_at_3 value: 88.0 - type: precision_at_5 value: 88.0 - type: recall_at_1 value: 0.261 - type: recall_at_10 value: 2.262 - type: recall_at_100 value: 14.981 - type: recall_at_1000 value: 46.837 - type: recall_at_3 value: 0.703 - type: recall_at_5 value: 1.172 - task: type: Clustering dataset: name: MTEB AlloProfClusteringP2P type: lyon-nlp/alloprof config: default split: test revision: 392ba3f5bcc8c51f578786c1fc3dae648662cb9b metrics: - type: v_measure value: 70.55290063940157 - type: v_measure value: 55.41500719337263 - task: type: Reranking dataset: name: MTEB AlloprofReranking type: lyon-nlp/mteb-fr-reranking-alloprof-s2p config: default split: test revision: 666fdacebe0291776e86f29345663dfaf80a0db9 metrics: - type: map value: 73.48697375332002 - type: mrr value: 75.01836585523822 - task: type: Retrieval dataset: name: MTEB AlloprofRetrieval type: lyon-nlp/alloprof config: default split: test revision: 392ba3f5bcc8c51f578786c1fc3dae648662cb9b metrics: - type: map_at_1 value: 38.454 - type: map_at_10 value: 51.605000000000004 - type: map_at_100 value: 52.653000000000006 - type: map_at_1000 value: 52.697 - type: map_at_3 value: 48.304 - type: map_at_5 value: 50.073 - type: mrr_at_1 value: 43.307 - type: mrr_at_10 value: 54.400000000000006 - type: mrr_at_100 value: 55.147999999999996 - type: mrr_at_1000 value: 55.174 - type: mrr_at_3 value: 51.77 - type: mrr_at_5 value: 53.166999999999994 - type: ndcg_at_1 value: 43.307 - type: ndcg_at_10 value: 57.891000000000005 - type: ndcg_at_100 value: 62.161 - type: ndcg_at_1000 value: 63.083 - type: ndcg_at_3 value: 51.851 - type: ndcg_at_5 value: 54.605000000000004 - type: precision_at_1 value: 43.307 - type: precision_at_10 value: 9.033 - type: precision_at_100 value: 1.172 - type: precision_at_1000 value: 0.127 - type: precision_at_3 value: 22.798 - type: precision_at_5 value: 15.492 - type: recall_at_1 value: 38.454 - type: recall_at_10 value: 74.166 - type: recall_at_100 value: 92.43599999999999 - type: recall_at_1000 value: 99.071 - type: recall_at_3 value: 58.087 - type: recall_at_5 value: 64.568 - task: type: Classification dataset: name: MTEB AmazonReviewsClassification (fr) type: mteb/amazon_reviews_multi config: fr split: test revision: 1399c76144fd37290681b995c656ef9b2e06e26d metrics: - type: accuracy value: 53.474 - type: f1 value: 50.38275392350236 - task: type: Retrieval dataset: name: MTEB BSARDRetrieval type: maastrichtlawtech/bsard config: default split: test revision: 5effa1b9b5fa3b0f9e12523e6e43e5f86a6e6d59 metrics: - type: map_at_1 value: 2.252 - type: map_at_10 value: 4.661 - type: map_at_100 value: 5.271 - type: map_at_1000 value: 5.3629999999999995 - type: map_at_3 value: 3.604 - type: map_at_5 value: 4.3020000000000005 - type: mrr_at_1 value: 2.252 - type: mrr_at_10 value: 4.661 - type: mrr_at_100 value: 5.271 - type: mrr_at_1000 value: 5.3629999999999995 - type: mrr_at_3 value: 3.604 - type: mrr_at_5 value: 4.3020000000000005 - type: ndcg_at_1 value: 2.252 - type: ndcg_at_10 value: 6.3020000000000005 - type: ndcg_at_100 value: 10.342 - type: ndcg_at_1000 value: 13.475999999999999 - type: ndcg_at_3 value: 4.0649999999999995 - type: ndcg_at_5 value: 5.344 - type: precision_at_1 value: 2.252 - type: precision_at_10 value: 1.171 - type: precision_at_100 value: 0.333 - type: precision_at_1000 value: 0.059000000000000004 - type: precision_at_3 
value: 1.802 - type: precision_at_5 value: 1.712 - type: recall_at_1 value: 2.252 - type: recall_at_10 value: 11.712 - type: recall_at_100 value: 33.333 - type: recall_at_1000 value: 59.458999999999996 - type: recall_at_3 value: 5.405 - type: recall_at_5 value: 8.559 - task: type: Clustering dataset: name: MTEB HALClusteringS2S type: lyon-nlp/clustering-hal-s2s config: default split: test revision: e06ebbbb123f8144bef1a5d18796f3dec9ae2915 metrics: - type: v_measure value: 28.301882091023288 - task: type: Clustering dataset: name: MTEB MLSUMClusteringP2P type: mlsum config: default split: test revision: b5d54f8f3b61ae17845046286940f03c6bc79bc7 metrics: - type: v_measure value: 45.26992995191701 - type: v_measure value: 42.773174876871145 - task: type: Classification dataset: name: MTEB MTOPDomainClassification (fr) type: mteb/mtop_domain config: fr split: test revision: d80d48c1eb48d3562165c59d59d0034df9fff0bf metrics: - type: accuracy value: 93.47635452552458 - type: f1 value: 93.19922617577213 - task: type: Classification dataset: name: MTEB MTOPIntentClassification (fr) type: mteb/mtop_intent config: fr split: test revision: ae001d0e6b1228650b7bd1c2c65fb50ad11a8aba metrics: - type: accuracy value: 80.2317569683683 - type: f1 value: 56.18060418621901 - task: type: Classification dataset: name: MTEB MasakhaNEWSClassification (fra) type: masakhane/masakhanews config: fra split: test revision: 8ccc72e69e65f40c70e117d8b3c08306bb788b60 metrics: - type: accuracy value: 85.18957345971565 - type: f1 value: 80.829981537394 - task: type: Clustering dataset: name: MTEB MasakhaNEWSClusteringP2P (fra) type: masakhane/masakhanews config: fra split: test revision: 8ccc72e69e65f40c70e117d8b3c08306bb788b60 metrics: - type: v_measure value: 71.04138999801822 - type: v_measure value: 71.7056263158008 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (fr) type: mteb/amazon_massive_intent config: fr split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 76.65097511768661 - type: f1 value: 73.82441070598712 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (fr) type: mteb/amazon_massive_scenario config: fr split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 79.09885675857431 - type: f1 value: 78.28407777434224 - task: type: Retrieval dataset: name: MTEB MintakaRetrieval (fr) type: jinaai/mintakaqa config: fr split: test revision: efa78cc2f74bbcd21eff2261f9e13aebe40b814e metrics: - type: map_at_1 value: 25.307000000000002 - type: map_at_10 value: 36.723 - type: map_at_100 value: 37.713 - type: map_at_1000 value: 37.769000000000005 - type: map_at_3 value: 33.77 - type: map_at_5 value: 35.463 - type: mrr_at_1 value: 25.307000000000002 - type: mrr_at_10 value: 36.723 - type: mrr_at_100 value: 37.713 - type: mrr_at_1000 value: 37.769000000000005 - type: mrr_at_3 value: 33.77 - type: mrr_at_5 value: 35.463 - type: ndcg_at_1 value: 25.307000000000002 - type: ndcg_at_10 value: 42.559999999999995 - type: ndcg_at_100 value: 47.457 - type: ndcg_at_1000 value: 49.162 - type: ndcg_at_3 value: 36.461 - type: ndcg_at_5 value: 39.504 - type: precision_at_1 value: 25.307000000000002 - type: precision_at_10 value: 6.106 - type: precision_at_100 value: 0.8420000000000001 - type: precision_at_1000 value: 0.098 - type: precision_at_3 value: 14.741999999999999 - type: precision_at_5 value: 10.319 - type: recall_at_1 value: 25.307000000000002 - type: recall_at_10 value: 61.056999999999995 - 
type: recall_at_100 value: 84.152 - type: recall_at_1000 value: 98.03399999999999 - type: recall_at_3 value: 44.226 - type: recall_at_5 value: 51.597 - task: type: PairClassification dataset: name: MTEB OpusparcusPC (fr) type: GEM/opusparcus config: fr split: test revision: 9e9b1f8ef51616073f47f306f7f47dd91663f86a metrics: - type: cos_sim_accuracy value: 99.90069513406156 - type: cos_sim_ap value: 100.0 - type: cos_sim_f1 value: 99.95032290114257 - type: cos_sim_precision value: 100.0 - type: cos_sim_recall value: 99.90069513406156 - type: dot_accuracy value: 99.90069513406156 - type: dot_ap value: 100.0 - type: dot_f1 value: 99.95032290114257 - type: dot_precision value: 100.0 - type: dot_recall value: 99.90069513406156 - type: euclidean_accuracy value: 99.90069513406156 - type: euclidean_ap value: 100.0 - type: euclidean_f1 value: 99.95032290114257 - type: euclidean_precision value: 100.0 - type: euclidean_recall value: 99.90069513406156 - type: manhattan_accuracy value: 99.90069513406156 - type: manhattan_ap value: 100.0 - type: manhattan_f1 value: 99.95032290114257 - type: manhattan_precision value: 100.0 - type: manhattan_recall value: 99.90069513406156 - type: max_accuracy value: 99.90069513406156 - type: max_ap value: 100.0 - type: max_f1 value: 99.95032290114257 - task: type: PairClassification dataset: name: MTEB PawsX (fr) type: paws-x config: fr split: test revision: 8a04d940a42cd40658986fdd8e3da561533a3646 metrics: - type: cos_sim_accuracy value: 70.8 - type: cos_sim_ap value: 73.7671529695957 - type: cos_sim_f1 value: 68.80964339527875 - type: cos_sim_precision value: 62.95955882352941 - type: cos_sim_recall value: 75.85825027685493 - type: dot_accuracy value: 70.8 - type: dot_ap value: 73.78345265366947 - type: dot_f1 value: 68.80964339527875 - type: dot_precision value: 62.95955882352941 - type: dot_recall value: 75.85825027685493 - type: euclidean_accuracy value: 70.8 - type: euclidean_ap value: 73.7671529695957 - type: euclidean_f1 value: 68.80964339527875 - type: euclidean_precision value: 62.95955882352941 - type: euclidean_recall value: 75.85825027685493 - type: manhattan_accuracy value: 70.75 - type: manhattan_ap value: 73.78996383615953 - type: manhattan_f1 value: 68.79432624113475 - type: manhattan_precision value: 63.39869281045751 - type: manhattan_recall value: 75.1937984496124 - type: max_accuracy value: 70.8 - type: max_ap value: 73.78996383615953 - type: max_f1 value: 68.80964339527875 - task: type: STS dataset: name: MTEB SICKFr type: Lajavaness/SICK-fr config: default split: test revision: e077ab4cf4774a1e36d86d593b150422fafd8e8a metrics: - type: cos_sim_pearson value: 84.03253762760392 - type: cos_sim_spearman value: 79.68280105762004 - type: euclidean_pearson value: 80.98265050044444 - type: euclidean_spearman value: 79.68233242682867 - type: manhattan_pearson value: 80.9678911810704 - type: manhattan_spearman value: 79.70264097683109 - task: type: STS dataset: name: MTEB STS22 (fr) type: mteb/sts22-crosslingual-sts config: fr split: test revision: eea2b4fe26a775864c896887d910b76a8098ad3f metrics: - type: cos_sim_pearson value: 80.56896987572884 - type: cos_sim_spearman value: 81.84352499523287 - type: euclidean_pearson value: 80.40831759421305 - type: euclidean_spearman value: 81.84352499523287 - type: manhattan_pearson value: 80.74333857561238 - type: manhattan_spearman value: 82.41503246733892 - task: type: STS dataset: name: MTEB STSBenchmarkMultilingualSTS (fr) type: stsb_multi_mt config: fr split: test revision: 
93d57ef91790589e3ce9c365164337a8a78b7632 metrics: - type: cos_sim_pearson value: 82.71826762276979 - type: cos_sim_spearman value: 82.25433354916042 - type: euclidean_pearson value: 81.87115571724316 - type: euclidean_spearman value: 82.25322342890107 - type: manhattan_pearson value: 82.11174867527224 - type: manhattan_spearman value: 82.55905365203084 - task: type: Summarization dataset: name: MTEB SummEvalFr type: lyon-nlp/summarization-summeval-fr-p2p config: default split: test revision: b385812de6a9577b6f4d0f88c6a6e35395a94054 metrics: - type: cos_sim_pearson value: 30.659441623392887 - type: cos_sim_spearman value: 30.501134097353315 - type: dot_pearson value: 30.659444768851056 - type: dot_spearman value: 30.501134097353315 - task: type: Reranking dataset: name: MTEB SyntecReranking type: lyon-nlp/mteb-fr-reranking-syntec-s2p config: default split: test revision: b205c5084a0934ce8af14338bf03feb19499c84d metrics: - type: map value: 94.03333333333333 - type: mrr value: 94.03333333333333 - task: type: Retrieval dataset: name: MTEB SyntecRetrieval type: lyon-nlp/mteb-fr-retrieval-syntec-s2p config: default split: test revision: 77f7e271bf4a92b24fce5119f3486b583ca016ff metrics: - type: map_at_1 value: 79.0 - type: map_at_10 value: 87.61 - type: map_at_100 value: 87.655 - type: map_at_1000 value: 87.655 - type: map_at_3 value: 87.167 - type: map_at_5 value: 87.36699999999999 - type: mrr_at_1 value: 79.0 - type: mrr_at_10 value: 87.61 - type: mrr_at_100 value: 87.655 - type: mrr_at_1000 value: 87.655 - type: mrr_at_3 value: 87.167 - type: mrr_at_5 value: 87.36699999999999 - type: ndcg_at_1 value: 79.0 - type: ndcg_at_10 value: 90.473 - type: ndcg_at_100 value: 90.694 - type: ndcg_at_1000 value: 90.694 - type: ndcg_at_3 value: 89.464 - type: ndcg_at_5 value: 89.851 - type: precision_at_1 value: 79.0 - type: precision_at_10 value: 9.9 - type: precision_at_100 value: 1.0 - type: precision_at_1000 value: 0.1 - type: precision_at_3 value: 32.0 - type: precision_at_5 value: 19.400000000000002 - type: recall_at_1 value: 79.0 - type: recall_at_10 value: 99.0 - type: recall_at_100 value: 100.0 - type: recall_at_1000 value: 100.0 - type: recall_at_3 value: 96.0 - type: recall_at_5 value: 97.0 - task: type: Retrieval dataset: name: MTEB XPQARetrieval (fr) type: jinaai/xpqa config: fr split: test revision: c99d599f0a6ab9b85b065da6f9d94f9cf731679f metrics: - type: map_at_1 value: 39.395 - type: map_at_10 value: 59.123999999999995 - type: map_at_100 value: 60.704 - type: map_at_1000 value: 60.760000000000005 - type: map_at_3 value: 53.187 - type: map_at_5 value: 56.863 - type: mrr_at_1 value: 62.083 - type: mrr_at_10 value: 68.87299999999999 - type: mrr_at_100 value: 69.46900000000001 - type: mrr_at_1000 value: 69.48299999999999 - type: mrr_at_3 value: 66.8 - type: mrr_at_5 value: 67.928 - type: ndcg_at_1 value: 62.083 - type: ndcg_at_10 value: 65.583 - type: ndcg_at_100 value: 70.918 - type: ndcg_at_1000 value: 71.72800000000001 - type: ndcg_at_3 value: 60.428000000000004 - type: ndcg_at_5 value: 61.853 - type: precision_at_1 value: 62.083 - type: precision_at_10 value: 15.033 - type: precision_at_100 value: 1.9529999999999998 - type: precision_at_1000 value: 0.207 - type: precision_at_3 value: 36.315 - type: precision_at_5 value: 25.955000000000002 - type: recall_at_1 value: 39.395 - type: recall_at_10 value: 74.332 - type: recall_at_100 value: 94.729 - type: recall_at_1000 value: 99.75500000000001 - type: recall_at_3 value: 57.679 - type: recall_at_5 value: 65.036 --- ## gte-Qwen2-1.5B-instruct 
**gte-Qwen2-1.5B-instruct** is the latest model in the gte (General Text Embedding) model family. The model is built on the [Qwen2-1.5B](https://huggingface.co/Qwen/Qwen2-1.5B) LLM and uses the same training data and strategies as the [gte-Qwen2-7B-instruct](https://huggingface.co/Alibaba-NLP/gte-Qwen2-7B-instruct) model.

The model incorporates several key advancements:

- Integration of bidirectional attention mechanisms, enriching its contextual understanding.
- Instruction tuning, applied solely on the query side for streamlined efficiency.
- Comprehensive training across a vast, multilingual text corpus spanning diverse domains and scenarios. This training leverages both weakly supervised and supervised data, ensuring the model's applicability across numerous languages and a wide array of downstream tasks.

## Model Information

- Model Size: 1.5B
- Embedding Dimension: 1536
- Max Input Tokens: 32k

## Requirements

```
transformers>=4.39.2
flash_attn>=2.5.6
```

## Usage

### Sentence Transformers

```python
from sentence_transformers import SentenceTransformer

model = SentenceTransformer("Alibaba-NLP/gte-Qwen2-1.5B-instruct", trust_remote_code=True)
# In case you want to reduce the maximum length:
model.max_seq_length = 8192

queries = [
    "how much protein should a female eat",
    "summit define",
]
documents = [
    "As a general guideline, the CDC's average requirement of protein for women ages 19 to 70 is 46 grams per day. But, as you can see from this chart, you'll need to increase that if you're expecting or training for a marathon. Check out the chart below to see how much protein you should be eating each day.",
    "Definition of summit for English Language Learners. : 1 the highest point of a mountain : the top of a mountain. : 2 the highest level. : 3 a meeting or series of meetings between the leaders of two or more governments.",
]

query_embeddings = model.encode(queries, prompt_name="query")
document_embeddings = model.encode(documents)

scores = (query_embeddings @ document_embeddings.T) * 100
print(scores.tolist())
```

See [config_sentence_transformers.json](config_sentence_transformers.json) for all pre-built prompt names. Otherwise, you can use `model.encode(queries, prompt="Instruct: ...\nQuery: ")` to use a custom prompt of your choice (a short sketch of this option is included at the end of this card).

### Transformers

```python
import torch
import torch.nn.functional as F
from torch import Tensor
from transformers import AutoTokenizer, AutoModel


def last_token_pool(last_hidden_states: Tensor,
                    attention_mask: Tensor) -> Tensor:
    # Select the hidden state of the final non-padding token of each sequence
    # (handles both left- and right-padded batches).
    left_padding = (attention_mask[:, -1].sum() == attention_mask.shape[0])
    if left_padding:
        return last_hidden_states[:, -1]
    else:
        sequence_lengths = attention_mask.sum(dim=1) - 1
        batch_size = last_hidden_states.shape[0]
        return last_hidden_states[torch.arange(batch_size, device=last_hidden_states.device), sequence_lengths]


def get_detailed_instruct(task_description: str, query: str) -> str:
    return f'Instruct: {task_description}\nQuery: {query}'


# Each query must come with a one-sentence instruction that describes the task
task = 'Given a web search query, retrieve relevant passages that answer the query'
queries = [
    get_detailed_instruct(task, 'how much protein should a female eat'),
    get_detailed_instruct(task, 'summit define')
]
# No need to add instruction for retrieval documents
documents = [
    "As a general guideline, the CDC's average requirement of protein for women ages 19 to 70 is 46 grams per day. But, as you can see from this chart, you'll need to increase that if you're expecting or training for a marathon. Check out the chart below to see how much protein you should be eating each day.",
    "Definition of summit for English Language Learners. : 1 the highest point of a mountain : the top of a mountain. : 2 the highest level. : 3 a meeting or series of meetings between the leaders of two or more governments."
]
input_texts = queries + documents

tokenizer = AutoTokenizer.from_pretrained('Alibaba-NLP/gte-Qwen2-1.5B-instruct', trust_remote_code=True)
model = AutoModel.from_pretrained('Alibaba-NLP/gte-Qwen2-1.5B-instruct', trust_remote_code=True)

max_length = 8192

# Tokenize the input texts
batch_dict = tokenizer(input_texts, max_length=max_length, padding=True, truncation=True, return_tensors='pt')
outputs = model(**batch_dict)
embeddings = last_token_pool(outputs.last_hidden_state, batch_dict['attention_mask'])

# normalize embeddings
embeddings = F.normalize(embeddings, p=2, dim=1)
scores = (embeddings[:2] @ embeddings[2:].T) * 100
print(scores.tolist())
```

### infinity_emb

Usage via [infinity, MIT Licensed](https://github.com/michaelfeil/infinity).

```bash
docker run \
  --gpus "0" -p "7997":"7997" \
  michaelf34/infinity:0.0.68-trt-onnx \
  v2 --model-id Alibaba-NLP/gte-Qwen2-1.5B-instruct --revision "refs/pr/20" --dtype bfloat16 --batch-size 16 --device cuda --engine torch --port 7997 --no-bettertransformer
```

## Evaluation

### MTEB & C-MTEB

You can use [scripts/eval_mteb.py](https://huggingface.co/Alibaba-NLP/gte-Qwen2-7B-instruct/blob/main/scripts/eval_mteb.py) to reproduce the following results of **gte-Qwen2-1.5B-instruct** on MTEB (English) / C-MTEB (Chinese):

| Model Name | MTEB(56) | C-MTEB(35) | MTEB-fr(26) | MTEB-pl(26) |
|:----:|:---------:|:----------:|:----------:|:----------:|
| [bge-base-en-1.5](https://huggingface.co/BAAI/bge-base-en-v1.5) | 64.23 | - | - | - |
| [bge-large-en-1.5](https://huggingface.co/BAAI/bge-large-en-v1.5) | 63.55 | - | - | - |
| [gte-large-en-v1.5](https://huggingface.co/Alibaba-NLP/gte-large-en-v1.5) | 65.39 | - | - | - |
| [gte-base-en-v1.5](https://huggingface.co/Alibaba-NLP/gte-base-en-v1.5) | 64.11 | - | - | - |
| [mxbai-embed-large-v1](https://huggingface.co/mixedbread-ai/mxbai-embed-large-v1) | 64.68 | - | - | - |
| [acge_text_embedding](https://huggingface.co/aspire/acge_text_embedding) | - | 69.07 | - | - |
| [stella-mrl-large-zh-v3.5-1792d](https://huggingface.co/infgrad/stella-mrl-large-zh-v3.5-1792d) | - | 68.55 | - | - |
| [gte-large-zh](https://huggingface.co/thenlper/gte-large-zh) | - | 66.72 | - | - |
| [multilingual-e5-base](https://huggingface.co/intfloat/multilingual-e5-base) | 59.45 | 56.21 | - | - |
| [multilingual-e5-large](https://huggingface.co/intfloat/multilingual-e5-large) | 61.50 | 58.81 | - | - |
| [e5-mistral-7b-instruct](https://huggingface.co/intfloat/e5-mistral-7b-instruct) | 66.63 | 60.81 | - | - |
| [gte-Qwen1.5-7B-instruct](https://huggingface.co/Alibaba-NLP/gte-Qwen1.5-7B-instruct) | 67.34 | 69.52 | - | - |
| [NV-Embed-v1](https://huggingface.co/nvidia/NV-Embed-v1) | 69.32 | - | - | - |
| [**gte-Qwen2-7B-instruct**](https://huggingface.co/Alibaba-NLP/gte-Qwen2-7B-instruct) | **70.24** | **72.05** | **68.25** | **67.86** |
| [**gte-Qwen2-1.5B-instruct**](https://huggingface.co/Alibaba-NLP/gte-Qwen2-1.5B-instruct) | **67.16** | **67.65** | **66.60** | **64.04** |

### GTE Models

The gte series has consistently released two types of models: encoder-only models (based on the BERT architecture) and decoder-only models (based on the LLM architecture).

| Models | Language | Max Sequence Length | Dimension | Model Size (Memory Usage, fp32) |
|:----:|:--------:|:-----:|:---------:|:-------------------------------:|
| [GTE-large-zh](https://huggingface.co/thenlper/gte-large-zh) | Chinese | 512 | 1024 | 1.25GB |
| [GTE-base-zh](https://huggingface.co/thenlper/gte-base-zh) | Chinese | 512 | 512 | 0.41GB |
| [GTE-small-zh](https://huggingface.co/thenlper/gte-small-zh) | Chinese | 512 | 512 | 0.12GB |
| [GTE-large](https://huggingface.co/thenlper/gte-large) | English | 512 | 1024 | 1.25GB |
| [GTE-base](https://huggingface.co/thenlper/gte-base) | English | 512 | 512 | 0.21GB |
| [GTE-small](https://huggingface.co/thenlper/gte-small) | English | 512 | 384 | 0.10GB |
| [GTE-large-en-v1.5](https://huggingface.co/Alibaba-NLP/gte-large-en-v1.5) | English | 8192 | 1024 | 1.74GB |
| [GTE-base-en-v1.5](https://huggingface.co/Alibaba-NLP/gte-base-en-v1.5) | English | 8192 | 768 | 0.51GB |
| [GTE-Qwen1.5-7B-instruct](https://huggingface.co/Alibaba-NLP/gte-Qwen1.5-7B-instruct) | Multilingual | 32000 | 4096 | 26.45GB |
| [GTE-Qwen2-7B-instruct](https://huggingface.co/Alibaba-NLP/gte-Qwen2-7B-instruct) | Multilingual | 32000 | 3584 | 26.45GB |
| [GTE-Qwen2-1.5B-instruct](https://huggingface.co/Alibaba-NLP/gte-Qwen2-1.5B-instruct) | Multilingual | 32000 | 1536 | 6.62GB |

## Cloud API Services

In addition to the open-source [GTE](https://huggingface.co/collections/Alibaba-NLP/gte-models-6680f0b13f885cb431e6d469) series models, the GTE series is also available as commercial API services on Alibaba Cloud.

- [Embedding Models](https://help.aliyun.com/zh/model-studio/developer-reference/general-text-embedding/): Three versions of the text embedding models are available: text-embedding-v1/v2/v3, with v3 being the latest API service.
- [ReRank Models](https://help.aliyun.com/zh/model-studio/developer-reference/general-text-sorting-model/): The gte-rerank model service is available.

Note that the models behind the commercial APIs are not entirely identical to the open-source models.

## Community support

### Fine-tuning

GTE models can be fine-tuned with the third-party framework SWIFT.

```shell
pip install ms-swift -U
```

```shell
# check: https://swift.readthedocs.io/en/latest/BestPractices/Embedding.html
nproc_per_node=8
NPROC_PER_NODE=$nproc_per_node \
USE_HF=1 \
swift sft \
    --model Alibaba-NLP/gte-Qwen2-1.5B-instruct \
    --train_type lora \
    --dataset 'sentence-transformers/stsb' \
    --torch_dtype bfloat16 \
    --num_train_epochs 10 \
    --per_device_train_batch_size 2 \
    --per_device_eval_batch_size 1 \
    --gradient_accumulation_steps $(expr 64 / $nproc_per_node) \
    --eval_steps 100 \
    --save_steps 100 \
    --eval_strategy steps \
    --use_chat_template false \
    --save_total_limit 5 \
    --logging_steps 5 \
    --output_dir output \
    --warmup_ratio 0.05 \
    --learning_rate 5e-6 \
    --deepspeed zero3 \
    --dataloader_num_workers 4 \
    --task_type embedding \
    --loss_type cosine_similarity \
    --dataloader_drop_last true
```

## Citation

If you find our paper or models helpful, please consider citing:

```
@article{li2023towards,
  title={Towards general text embeddings with multi-stage contrastive learning},
  author={Li, Zehan and Zhang, Xin and Zhang, Yanzhao and Long, Dingkun and Xie, Pengjun and Zhang, Meishan},
  journal={arXiv preprint arXiv:2308.03281},
  year={2023}
}
```
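As a complement to the Usage section above, here is a minimal sketch of the custom-prompt option mentioned there. It relies only on the `prompt` argument of `SentenceTransformer.encode` that the card already references; the instruction string (taken from the Transformers example) and the toy documents are illustrative placeholders, not part of the original card.

```python
from sentence_transformers import SentenceTransformer

# Minimal sketch of the custom-prompt option described in the Usage section.
# The toy documents below are illustrative placeholders only.
model = SentenceTransformer("Alibaba-NLP/gte-Qwen2-1.5B-instruct", trust_remote_code=True)

custom_prompt = (
    "Instruct: Given a web search query, retrieve relevant passages that answer the query\n"
    "Query: "
)

queries = ["how much protein should a female eat", "summit define"]
documents = [
    "Protein needs depend on age, body weight and activity level.",
    "A summit is the highest point of a mountain.",
]

# The prompt is prepended to each query; documents are encoded without a prompt.
query_embeddings = model.encode(queries, prompt=custom_prompt)
document_embeddings = model.encode(documents)

scores = (query_embeddings @ document_embeddings.T) * 100
print(scores.tolist())
```

This mirrors the built-in `prompt_name="query"` path shown earlier; the only difference is that the instruction string is supplied explicitly instead of being read from `config_sentence_transformers.json`.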
[ "BIOSSES", "SCIFACT" ]
BAAI/bge-large-en
BAAI
feature-extraction
[ "transformers", "pytorch", "safetensors", "bert", "feature-extraction", "mteb", "sentence-transfomres", "en", "arxiv:2310.07554", "arxiv:2309.07597", "license:mit", "model-index", "text-embeddings-inference", "endpoints_compatible", "region:us" ]
"2023-08-02T07:11:51Z"
2023-10-12T03:35:38+00:00
516,687
206
--- language: - en license: mit tags: - mteb - sentence-transfomres - transformers model-index: - name: bge-large-en results: - task: type: Classification dataset: name: MTEB AmazonCounterfactualClassification (en) type: mteb/amazon_counterfactual config: en split: test revision: e8379541af4e31359cca9fbcf4b00f2671dba205 metrics: - type: accuracy value: 76.94029850746269 - type: ap value: 40.00228964744091 - type: f1 value: 70.86088267934595 - task: type: Classification dataset: name: MTEB AmazonPolarityClassification type: mteb/amazon_polarity config: default split: test revision: e2d317d38cd51312af73b3d32a06d1a08b442046 metrics: - type: accuracy value: 91.93745 - type: ap value: 88.24758534667426 - type: f1 value: 91.91033034217591 - task: type: Classification dataset: name: MTEB AmazonReviewsClassification (en) type: mteb/amazon_reviews_multi config: en split: test revision: 1399c76144fd37290681b995c656ef9b2e06e26d metrics: - type: accuracy value: 46.158 - type: f1 value: 45.78935185074774 - task: type: Retrieval dataset: name: MTEB ArguAna type: arguana config: default split: test revision: None metrics: - type: map_at_1 value: 39.972 - type: map_at_10 value: 54.874 - type: map_at_100 value: 55.53399999999999 - type: map_at_1000 value: 55.539 - type: map_at_3 value: 51.031000000000006 - type: map_at_5 value: 53.342999999999996 - type: mrr_at_1 value: 40.541 - type: mrr_at_10 value: 55.096000000000004 - type: mrr_at_100 value: 55.75599999999999 - type: mrr_at_1000 value: 55.761 - type: mrr_at_3 value: 51.221000000000004 - type: mrr_at_5 value: 53.568000000000005 - type: ndcg_at_1 value: 39.972 - type: ndcg_at_10 value: 62.456999999999994 - type: ndcg_at_100 value: 65.262 - type: ndcg_at_1000 value: 65.389 - type: ndcg_at_3 value: 54.673 - type: ndcg_at_5 value: 58.80499999999999 - type: precision_at_1 value: 39.972 - type: precision_at_10 value: 8.634 - type: precision_at_100 value: 0.9860000000000001 - type: precision_at_1000 value: 0.1 - type: precision_at_3 value: 21.740000000000002 - type: precision_at_5 value: 15.036 - type: recall_at_1 value: 39.972 - type: recall_at_10 value: 86.344 - type: recall_at_100 value: 98.578 - type: recall_at_1000 value: 99.57300000000001 - type: recall_at_3 value: 65.22 - type: recall_at_5 value: 75.178 - task: type: Clustering dataset: name: MTEB ArxivClusteringP2P type: mteb/arxiv-clustering-p2p config: default split: test revision: a122ad7f3f0291bf49cc6f4d32aa80929df69d5d metrics: - type: v_measure value: 48.94652870403906 - task: type: Clustering dataset: name: MTEB ArxivClusteringS2S type: mteb/arxiv-clustering-s2s config: default split: test revision: f910caf1a6075f7329cdf8c1a6135696f37dbd53 metrics: - type: v_measure value: 43.17257160340209 - task: type: Reranking dataset: name: MTEB AskUbuntuDupQuestions type: mteb/askubuntudupquestions-reranking config: default split: test revision: 2000358ca161889fa9c082cb41daa8dcfb161a54 metrics: - type: map value: 63.97867370559182 - type: mrr value: 77.00820032537484 - task: type: STS dataset: name: MTEB BIOSSES type: mteb/biosses-sts config: default split: test revision: d3fb88f8f02e40887cd149695127462bbcf29b4a metrics: - type: cos_sim_pearson value: 80.00986015960616 - type: cos_sim_spearman value: 80.36387933827882 - type: euclidean_pearson value: 80.32305287257296 - type: euclidean_spearman value: 82.0524720308763 - type: manhattan_pearson value: 80.19847473906454 - type: manhattan_spearman value: 81.87957652506985 - task: type: Classification dataset: name: MTEB Banking77Classification type: 
mteb/banking77 config: default split: test revision: 0fd18e25b25c072e09e0d92ab615fda904d66300 metrics: - type: accuracy value: 88.00000000000001 - type: f1 value: 87.99039027511853 - task: type: Clustering dataset: name: MTEB BiorxivClusteringP2P type: mteb/biorxiv-clustering-p2p config: default split: test revision: 65b79d1d13f80053f67aca9498d9402c2d9f1f40 metrics: - type: v_measure value: 41.36932844640705 - task: type: Clustering dataset: name: MTEB BiorxivClusteringS2S type: mteb/biorxiv-clustering-s2s config: default split: test revision: 258694dd0231531bc1fd9de6ceb52a0853c6d908 metrics: - type: v_measure value: 38.34983239611985 - task: type: Retrieval dataset: name: MTEB CQADupstackAndroidRetrieval type: BeIR/cqadupstack config: default split: test revision: None metrics: - type: map_at_1 value: 32.257999999999996 - type: map_at_10 value: 42.937 - type: map_at_100 value: 44.406 - type: map_at_1000 value: 44.536 - type: map_at_3 value: 39.22 - type: map_at_5 value: 41.458 - type: mrr_at_1 value: 38.769999999999996 - type: mrr_at_10 value: 48.701 - type: mrr_at_100 value: 49.431000000000004 - type: mrr_at_1000 value: 49.476 - type: mrr_at_3 value: 45.875 - type: mrr_at_5 value: 47.67 - type: ndcg_at_1 value: 38.769999999999996 - type: ndcg_at_10 value: 49.35 - type: ndcg_at_100 value: 54.618 - type: ndcg_at_1000 value: 56.655 - type: ndcg_at_3 value: 43.826 - type: ndcg_at_5 value: 46.72 - type: precision_at_1 value: 38.769999999999996 - type: precision_at_10 value: 9.328 - type: precision_at_100 value: 1.484 - type: precision_at_1000 value: 0.196 - type: precision_at_3 value: 20.649 - type: precision_at_5 value: 15.25 - type: recall_at_1 value: 32.257999999999996 - type: recall_at_10 value: 61.849 - type: recall_at_100 value: 83.70400000000001 - type: recall_at_1000 value: 96.344 - type: recall_at_3 value: 46.037 - type: recall_at_5 value: 53.724000000000004 - type: map_at_1 value: 32.979 - type: map_at_10 value: 43.376999999999995 - type: map_at_100 value: 44.667 - type: map_at_1000 value: 44.794 - type: map_at_3 value: 40.461999999999996 - type: map_at_5 value: 42.138 - type: mrr_at_1 value: 41.146 - type: mrr_at_10 value: 49.575 - type: mrr_at_100 value: 50.187000000000005 - type: mrr_at_1000 value: 50.231 - type: mrr_at_3 value: 47.601 - type: mrr_at_5 value: 48.786 - type: ndcg_at_1 value: 41.146 - type: ndcg_at_10 value: 48.957 - type: ndcg_at_100 value: 53.296 - type: ndcg_at_1000 value: 55.254000000000005 - type: ndcg_at_3 value: 45.235 - type: ndcg_at_5 value: 47.014 - type: precision_at_1 value: 41.146 - type: precision_at_10 value: 9.107999999999999 - type: precision_at_100 value: 1.481 - type: precision_at_1000 value: 0.193 - type: precision_at_3 value: 21.783 - type: precision_at_5 value: 15.274 - type: recall_at_1 value: 32.979 - type: recall_at_10 value: 58.167 - type: recall_at_100 value: 76.374 - type: recall_at_1000 value: 88.836 - type: recall_at_3 value: 46.838 - type: recall_at_5 value: 52.006 - type: map_at_1 value: 40.326 - type: map_at_10 value: 53.468 - type: map_at_100 value: 54.454 - type: map_at_1000 value: 54.508 - type: map_at_3 value: 50.12799999999999 - type: map_at_5 value: 51.991 - type: mrr_at_1 value: 46.394999999999996 - type: mrr_at_10 value: 57.016999999999996 - type: mrr_at_100 value: 57.67099999999999 - type: mrr_at_1000 value: 57.699999999999996 - type: mrr_at_3 value: 54.65 - type: mrr_at_5 value: 56.101 - type: ndcg_at_1 value: 46.394999999999996 - type: ndcg_at_10 value: 59.507 - type: ndcg_at_100 value: 63.31099999999999 - type: 
ndcg_at_1000 value: 64.388 - type: ndcg_at_3 value: 54.04600000000001 - type: ndcg_at_5 value: 56.723 - type: precision_at_1 value: 46.394999999999996 - type: precision_at_10 value: 9.567 - type: precision_at_100 value: 1.234 - type: precision_at_1000 value: 0.13699999999999998 - type: precision_at_3 value: 24.117 - type: precision_at_5 value: 16.426 - type: recall_at_1 value: 40.326 - type: recall_at_10 value: 73.763 - type: recall_at_100 value: 89.927 - type: recall_at_1000 value: 97.509 - type: recall_at_3 value: 59.34 - type: recall_at_5 value: 65.915 - type: map_at_1 value: 26.661 - type: map_at_10 value: 35.522 - type: map_at_100 value: 36.619 - type: map_at_1000 value: 36.693999999999996 - type: map_at_3 value: 33.154 - type: map_at_5 value: 34.353 - type: mrr_at_1 value: 28.362 - type: mrr_at_10 value: 37.403999999999996 - type: mrr_at_100 value: 38.374 - type: mrr_at_1000 value: 38.428000000000004 - type: mrr_at_3 value: 35.235 - type: mrr_at_5 value: 36.269 - type: ndcg_at_1 value: 28.362 - type: ndcg_at_10 value: 40.431 - type: ndcg_at_100 value: 45.745999999999995 - type: ndcg_at_1000 value: 47.493 - type: ndcg_at_3 value: 35.733 - type: ndcg_at_5 value: 37.722 - type: precision_at_1 value: 28.362 - type: precision_at_10 value: 6.101999999999999 - type: precision_at_100 value: 0.922 - type: precision_at_1000 value: 0.11100000000000002 - type: precision_at_3 value: 15.140999999999998 - type: precision_at_5 value: 10.305 - type: recall_at_1 value: 26.661 - type: recall_at_10 value: 53.675 - type: recall_at_100 value: 77.891 - type: recall_at_1000 value: 90.72 - type: recall_at_3 value: 40.751 - type: recall_at_5 value: 45.517 - type: map_at_1 value: 18.886 - type: map_at_10 value: 27.288 - type: map_at_100 value: 28.327999999999996 - type: map_at_1000 value: 28.438999999999997 - type: map_at_3 value: 24.453 - type: map_at_5 value: 25.959 - type: mrr_at_1 value: 23.134 - type: mrr_at_10 value: 32.004 - type: mrr_at_100 value: 32.789 - type: mrr_at_1000 value: 32.857 - type: mrr_at_3 value: 29.084 - type: mrr_at_5 value: 30.614 - type: ndcg_at_1 value: 23.134 - type: ndcg_at_10 value: 32.852 - type: ndcg_at_100 value: 37.972 - type: ndcg_at_1000 value: 40.656 - type: ndcg_at_3 value: 27.435 - type: ndcg_at_5 value: 29.823 - type: precision_at_1 value: 23.134 - type: precision_at_10 value: 6.032 - type: precision_at_100 value: 0.9950000000000001 - type: precision_at_1000 value: 0.136 - type: precision_at_3 value: 13.017999999999999 - type: precision_at_5 value: 9.501999999999999 - type: recall_at_1 value: 18.886 - type: recall_at_10 value: 45.34 - type: recall_at_100 value: 67.947 - type: recall_at_1000 value: 86.924 - type: recall_at_3 value: 30.535 - type: recall_at_5 value: 36.451 - type: map_at_1 value: 28.994999999999997 - type: map_at_10 value: 40.04 - type: map_at_100 value: 41.435 - type: map_at_1000 value: 41.537 - type: map_at_3 value: 37.091 - type: map_at_5 value: 38.802 - type: mrr_at_1 value: 35.034 - type: mrr_at_10 value: 45.411 - type: mrr_at_100 value: 46.226 - type: mrr_at_1000 value: 46.27 - type: mrr_at_3 value: 43.086 - type: mrr_at_5 value: 44.452999999999996 - type: ndcg_at_1 value: 35.034 - type: ndcg_at_10 value: 46.076 - type: ndcg_at_100 value: 51.483000000000004 - type: ndcg_at_1000 value: 53.433 - type: ndcg_at_3 value: 41.304 - type: ndcg_at_5 value: 43.641999999999996 - type: precision_at_1 value: 35.034 - type: precision_at_10 value: 8.258000000000001 - type: precision_at_100 value: 1.268 - type: precision_at_1000 value: 0.161 - type: precision_at_3 
value: 19.57 - type: precision_at_5 value: 13.782 - type: recall_at_1 value: 28.994999999999997 - type: recall_at_10 value: 58.538000000000004 - type: recall_at_100 value: 80.72399999999999 - type: recall_at_1000 value: 93.462 - type: recall_at_3 value: 45.199 - type: recall_at_5 value: 51.237 - type: map_at_1 value: 24.795 - type: map_at_10 value: 34.935 - type: map_at_100 value: 36.306 - type: map_at_1000 value: 36.417 - type: map_at_3 value: 31.831 - type: map_at_5 value: 33.626 - type: mrr_at_1 value: 30.479 - type: mrr_at_10 value: 40.225 - type: mrr_at_100 value: 41.055 - type: mrr_at_1000 value: 41.114 - type: mrr_at_3 value: 37.538 - type: mrr_at_5 value: 39.073 - type: ndcg_at_1 value: 30.479 - type: ndcg_at_10 value: 40.949999999999996 - type: ndcg_at_100 value: 46.525 - type: ndcg_at_1000 value: 48.892 - type: ndcg_at_3 value: 35.79 - type: ndcg_at_5 value: 38.237 - type: precision_at_1 value: 30.479 - type: precision_at_10 value: 7.6259999999999994 - type: precision_at_100 value: 1.203 - type: precision_at_1000 value: 0.157 - type: precision_at_3 value: 17.199 - type: precision_at_5 value: 12.466000000000001 - type: recall_at_1 value: 24.795 - type: recall_at_10 value: 53.421 - type: recall_at_100 value: 77.189 - type: recall_at_1000 value: 93.407 - type: recall_at_3 value: 39.051 - type: recall_at_5 value: 45.462 - type: map_at_1 value: 26.853499999999997 - type: map_at_10 value: 36.20433333333333 - type: map_at_100 value: 37.40391666666667 - type: map_at_1000 value: 37.515 - type: map_at_3 value: 33.39975 - type: map_at_5 value: 34.9665 - type: mrr_at_1 value: 31.62666666666667 - type: mrr_at_10 value: 40.436749999999996 - type: mrr_at_100 value: 41.260333333333335 - type: mrr_at_1000 value: 41.31525 - type: mrr_at_3 value: 38.06733333333332 - type: mrr_at_5 value: 39.41541666666667 - type: ndcg_at_1 value: 31.62666666666667 - type: ndcg_at_10 value: 41.63341666666667 - type: ndcg_at_100 value: 46.704166666666666 - type: ndcg_at_1000 value: 48.88483333333335 - type: ndcg_at_3 value: 36.896 - type: ndcg_at_5 value: 39.11891666666667 - type: precision_at_1 value: 31.62666666666667 - type: precision_at_10 value: 7.241083333333333 - type: precision_at_100 value: 1.1488333333333334 - type: precision_at_1000 value: 0.15250000000000002 - type: precision_at_3 value: 16.908333333333335 - type: precision_at_5 value: 11.942833333333333 - type: recall_at_1 value: 26.853499999999997 - type: recall_at_10 value: 53.461333333333336 - type: recall_at_100 value: 75.63633333333333 - type: recall_at_1000 value: 90.67016666666666 - type: recall_at_3 value: 40.24241666666667 - type: recall_at_5 value: 45.98608333333333 - type: map_at_1 value: 25.241999999999997 - type: map_at_10 value: 31.863999999999997 - type: map_at_100 value: 32.835 - type: map_at_1000 value: 32.928000000000004 - type: map_at_3 value: 29.694 - type: map_at_5 value: 30.978 - type: mrr_at_1 value: 28.374 - type: mrr_at_10 value: 34.814 - type: mrr_at_100 value: 35.596 - type: mrr_at_1000 value: 35.666 - type: mrr_at_3 value: 32.745000000000005 - type: mrr_at_5 value: 34.049 - type: ndcg_at_1 value: 28.374 - type: ndcg_at_10 value: 35.969 - type: ndcg_at_100 value: 40.708 - type: ndcg_at_1000 value: 43.08 - type: ndcg_at_3 value: 31.968999999999998 - type: ndcg_at_5 value: 34.069 - type: precision_at_1 value: 28.374 - type: precision_at_10 value: 5.583 - type: precision_at_100 value: 0.8630000000000001 - type: precision_at_1000 value: 0.11299999999999999 - type: precision_at_3 value: 13.547999999999998 - type: precision_at_5 
value: 9.447999999999999 - type: recall_at_1 value: 25.241999999999997 - type: recall_at_10 value: 45.711 - type: recall_at_100 value: 67.482 - type: recall_at_1000 value: 85.13300000000001 - type: recall_at_3 value: 34.622 - type: recall_at_5 value: 40.043 - type: map_at_1 value: 17.488999999999997 - type: map_at_10 value: 25.142999999999997 - type: map_at_100 value: 26.244 - type: map_at_1000 value: 26.363999999999997 - type: map_at_3 value: 22.654 - type: map_at_5 value: 24.017 - type: mrr_at_1 value: 21.198 - type: mrr_at_10 value: 28.903000000000002 - type: mrr_at_100 value: 29.860999999999997 - type: mrr_at_1000 value: 29.934 - type: mrr_at_3 value: 26.634999999999998 - type: mrr_at_5 value: 27.903 - type: ndcg_at_1 value: 21.198 - type: ndcg_at_10 value: 29.982999999999997 - type: ndcg_at_100 value: 35.275 - type: ndcg_at_1000 value: 38.074000000000005 - type: ndcg_at_3 value: 25.502999999999997 - type: ndcg_at_5 value: 27.557 - type: precision_at_1 value: 21.198 - type: precision_at_10 value: 5.502 - type: precision_at_100 value: 0.942 - type: precision_at_1000 value: 0.136 - type: precision_at_3 value: 12.044 - type: precision_at_5 value: 8.782 - type: recall_at_1 value: 17.488999999999997 - type: recall_at_10 value: 40.821000000000005 - type: recall_at_100 value: 64.567 - type: recall_at_1000 value: 84.452 - type: recall_at_3 value: 28.351 - type: recall_at_5 value: 33.645 - type: map_at_1 value: 27.066000000000003 - type: map_at_10 value: 36.134 - type: map_at_100 value: 37.285000000000004 - type: map_at_1000 value: 37.389 - type: map_at_3 value: 33.522999999999996 - type: map_at_5 value: 34.905 - type: mrr_at_1 value: 31.436999999999998 - type: mrr_at_10 value: 40.225 - type: mrr_at_100 value: 41.079 - type: mrr_at_1000 value: 41.138000000000005 - type: mrr_at_3 value: 38.074999999999996 - type: mrr_at_5 value: 39.190000000000005 - type: ndcg_at_1 value: 31.436999999999998 - type: ndcg_at_10 value: 41.494 - type: ndcg_at_100 value: 46.678999999999995 - type: ndcg_at_1000 value: 48.964 - type: ndcg_at_3 value: 36.828 - type: ndcg_at_5 value: 38.789 - type: precision_at_1 value: 31.436999999999998 - type: precision_at_10 value: 6.931 - type: precision_at_100 value: 1.072 - type: precision_at_1000 value: 0.13799999999999998 - type: precision_at_3 value: 16.729 - type: precision_at_5 value: 11.567 - type: recall_at_1 value: 27.066000000000003 - type: recall_at_10 value: 53.705000000000005 - type: recall_at_100 value: 75.968 - type: recall_at_1000 value: 91.937 - type: recall_at_3 value: 40.865 - type: recall_at_5 value: 45.739999999999995 - type: map_at_1 value: 24.979000000000003 - type: map_at_10 value: 32.799 - type: map_at_100 value: 34.508 - type: map_at_1000 value: 34.719 - type: map_at_3 value: 29.947000000000003 - type: map_at_5 value: 31.584 - type: mrr_at_1 value: 30.237000000000002 - type: mrr_at_10 value: 37.651 - type: mrr_at_100 value: 38.805 - type: mrr_at_1000 value: 38.851 - type: mrr_at_3 value: 35.046 - type: mrr_at_5 value: 36.548 - type: ndcg_at_1 value: 30.237000000000002 - type: ndcg_at_10 value: 38.356 - type: ndcg_at_100 value: 44.906 - type: ndcg_at_1000 value: 47.299 - type: ndcg_at_3 value: 33.717999999999996 - type: ndcg_at_5 value: 35.946 - type: precision_at_1 value: 30.237000000000002 - type: precision_at_10 value: 7.292 - type: precision_at_100 value: 1.496 - type: precision_at_1000 value: 0.23600000000000002 - type: precision_at_3 value: 15.547 - type: precision_at_5 value: 11.344 - type: recall_at_1 value: 24.979000000000003 - type: recall_at_10 
value: 48.624 - type: recall_at_100 value: 77.932 - type: recall_at_1000 value: 92.66499999999999 - type: recall_at_3 value: 35.217 - type: recall_at_5 value: 41.394 - type: map_at_1 value: 22.566 - type: map_at_10 value: 30.945 - type: map_at_100 value: 31.759999999999998 - type: map_at_1000 value: 31.855 - type: map_at_3 value: 28.64 - type: map_at_5 value: 29.787000000000003 - type: mrr_at_1 value: 24.954 - type: mrr_at_10 value: 33.311 - type: mrr_at_100 value: 34.050000000000004 - type: mrr_at_1000 value: 34.117999999999995 - type: mrr_at_3 value: 31.238 - type: mrr_at_5 value: 32.329 - type: ndcg_at_1 value: 24.954 - type: ndcg_at_10 value: 35.676 - type: ndcg_at_100 value: 39.931 - type: ndcg_at_1000 value: 42.43 - type: ndcg_at_3 value: 31.365 - type: ndcg_at_5 value: 33.184999999999995 - type: precision_at_1 value: 24.954 - type: precision_at_10 value: 5.564 - type: precision_at_100 value: 0.826 - type: precision_at_1000 value: 0.116 - type: precision_at_3 value: 13.555 - type: precision_at_5 value: 9.168 - type: recall_at_1 value: 22.566 - type: recall_at_10 value: 47.922 - type: recall_at_100 value: 67.931 - type: recall_at_1000 value: 86.653 - type: recall_at_3 value: 36.103 - type: recall_at_5 value: 40.699000000000005 - task: type: Retrieval dataset: name: MTEB ClimateFEVER type: climate-fever config: default split: test revision: None metrics: - type: map_at_1 value: 16.950000000000003 - type: map_at_10 value: 28.612 - type: map_at_100 value: 30.476999999999997 - type: map_at_1000 value: 30.674 - type: map_at_3 value: 24.262 - type: map_at_5 value: 26.554 - type: mrr_at_1 value: 38.241 - type: mrr_at_10 value: 50.43 - type: mrr_at_100 value: 51.059 - type: mrr_at_1000 value: 51.090999999999994 - type: mrr_at_3 value: 47.514 - type: mrr_at_5 value: 49.246 - type: ndcg_at_1 value: 38.241 - type: ndcg_at_10 value: 38.218 - type: ndcg_at_100 value: 45.003 - type: ndcg_at_1000 value: 48.269 - type: ndcg_at_3 value: 32.568000000000005 - type: ndcg_at_5 value: 34.400999999999996 - type: precision_at_1 value: 38.241 - type: precision_at_10 value: 11.674 - type: precision_at_100 value: 1.913 - type: precision_at_1000 value: 0.252 - type: precision_at_3 value: 24.387 - type: precision_at_5 value: 18.163 - type: recall_at_1 value: 16.950000000000003 - type: recall_at_10 value: 43.769000000000005 - type: recall_at_100 value: 66.875 - type: recall_at_1000 value: 84.92699999999999 - type: recall_at_3 value: 29.353 - type: recall_at_5 value: 35.467 - task: type: Retrieval dataset: name: MTEB DBPedia type: dbpedia-entity config: default split: test revision: None metrics: - type: map_at_1 value: 9.276 - type: map_at_10 value: 20.848 - type: map_at_100 value: 29.804000000000002 - type: map_at_1000 value: 31.398 - type: map_at_3 value: 14.886 - type: map_at_5 value: 17.516000000000002 - type: mrr_at_1 value: 71 - type: mrr_at_10 value: 78.724 - type: mrr_at_100 value: 78.976 - type: mrr_at_1000 value: 78.986 - type: mrr_at_3 value: 77.333 - type: mrr_at_5 value: 78.021 - type: ndcg_at_1 value: 57.875 - type: ndcg_at_10 value: 43.855 - type: ndcg_at_100 value: 48.99 - type: ndcg_at_1000 value: 56.141 - type: ndcg_at_3 value: 48.914 - type: ndcg_at_5 value: 45.961 - type: precision_at_1 value: 71 - type: precision_at_10 value: 34.575 - type: precision_at_100 value: 11.182 - type: precision_at_1000 value: 2.044 - type: precision_at_3 value: 52.5 - type: precision_at_5 value: 44.2 - type: recall_at_1 value: 9.276 - type: recall_at_10 value: 26.501 - type: recall_at_100 value: 55.72899999999999 - 
type: recall_at_1000 value: 78.532 - type: recall_at_3 value: 16.365 - type: recall_at_5 value: 20.154 - task: type: Classification dataset: name: MTEB EmotionClassification type: mteb/emotion config: default split: test revision: 4f58c6b202a23cf9a4da393831edf4f9183cad37 metrics: - type: accuracy value: 52.71 - type: f1 value: 47.74801556489574 - task: type: Retrieval dataset: name: MTEB FEVER type: fever config: default split: test revision: None metrics: - type: map_at_1 value: 73.405 - type: map_at_10 value: 82.822 - type: map_at_100 value: 83.042 - type: map_at_1000 value: 83.055 - type: map_at_3 value: 81.65299999999999 - type: map_at_5 value: 82.431 - type: mrr_at_1 value: 79.178 - type: mrr_at_10 value: 87.02 - type: mrr_at_100 value: 87.095 - type: mrr_at_1000 value: 87.09700000000001 - type: mrr_at_3 value: 86.309 - type: mrr_at_5 value: 86.824 - type: ndcg_at_1 value: 79.178 - type: ndcg_at_10 value: 86.72 - type: ndcg_at_100 value: 87.457 - type: ndcg_at_1000 value: 87.691 - type: ndcg_at_3 value: 84.974 - type: ndcg_at_5 value: 86.032 - type: precision_at_1 value: 79.178 - type: precision_at_10 value: 10.548 - type: precision_at_100 value: 1.113 - type: precision_at_1000 value: 0.11499999999999999 - type: precision_at_3 value: 32.848 - type: precision_at_5 value: 20.45 - type: recall_at_1 value: 73.405 - type: recall_at_10 value: 94.39699999999999 - type: recall_at_100 value: 97.219 - type: recall_at_1000 value: 98.675 - type: recall_at_3 value: 89.679 - type: recall_at_5 value: 92.392 - task: type: Retrieval dataset: name: MTEB FiQA2018 type: fiqa config: default split: test revision: None metrics: - type: map_at_1 value: 22.651 - type: map_at_10 value: 36.886 - type: map_at_100 value: 38.811 - type: map_at_1000 value: 38.981 - type: map_at_3 value: 32.538 - type: map_at_5 value: 34.763 - type: mrr_at_1 value: 44.444 - type: mrr_at_10 value: 53.168000000000006 - type: mrr_at_100 value: 53.839000000000006 - type: mrr_at_1000 value: 53.869 - type: mrr_at_3 value: 50.54 - type: mrr_at_5 value: 52.068000000000005 - type: ndcg_at_1 value: 44.444 - type: ndcg_at_10 value: 44.994 - type: ndcg_at_100 value: 51.599 - type: ndcg_at_1000 value: 54.339999999999996 - type: ndcg_at_3 value: 41.372 - type: ndcg_at_5 value: 42.149 - type: precision_at_1 value: 44.444 - type: precision_at_10 value: 12.407 - type: precision_at_100 value: 1.9269999999999998 - type: precision_at_1000 value: 0.242 - type: precision_at_3 value: 27.726 - type: precision_at_5 value: 19.814999999999998 - type: recall_at_1 value: 22.651 - type: recall_at_10 value: 52.075 - type: recall_at_100 value: 76.51400000000001 - type: recall_at_1000 value: 92.852 - type: recall_at_3 value: 37.236000000000004 - type: recall_at_5 value: 43.175999999999995 - task: type: Retrieval dataset: name: MTEB HotpotQA type: hotpotqa config: default split: test revision: None metrics: - type: map_at_1 value: 40.777 - type: map_at_10 value: 66.79899999999999 - type: map_at_100 value: 67.65299999999999 - type: map_at_1000 value: 67.706 - type: map_at_3 value: 63.352 - type: map_at_5 value: 65.52900000000001 - type: mrr_at_1 value: 81.553 - type: mrr_at_10 value: 86.983 - type: mrr_at_100 value: 87.132 - type: mrr_at_1000 value: 87.136 - type: mrr_at_3 value: 86.156 - type: mrr_at_5 value: 86.726 - type: ndcg_at_1 value: 81.553 - type: ndcg_at_10 value: 74.64 - type: ndcg_at_100 value: 77.459 - type: ndcg_at_1000 value: 78.43 - type: ndcg_at_3 value: 69.878 - type: ndcg_at_5 value: 72.59400000000001 - type: precision_at_1 value: 81.553 - type: 
precision_at_10 value: 15.654000000000002 - type: precision_at_100 value: 1.783 - type: precision_at_1000 value: 0.191 - type: precision_at_3 value: 45.199 - type: precision_at_5 value: 29.267 - type: recall_at_1 value: 40.777 - type: recall_at_10 value: 78.271 - type: recall_at_100 value: 89.129 - type: recall_at_1000 value: 95.49 - type: recall_at_3 value: 67.79899999999999 - type: recall_at_5 value: 73.167 - task: type: Classification dataset: name: MTEB ImdbClassification type: mteb/imdb config: default split: test revision: 3d86128a09e091d6018b6d26cad27f2739fc2db7 metrics: - type: accuracy value: 93.5064 - type: ap value: 90.25495114444111 - type: f1 value: 93.5012434973381 - task: type: Retrieval dataset: name: MTEB MSMARCO type: msmarco config: default split: dev revision: None metrics: - type: map_at_1 value: 23.301 - type: map_at_10 value: 35.657 - type: map_at_100 value: 36.797000000000004 - type: map_at_1000 value: 36.844 - type: map_at_3 value: 31.743 - type: map_at_5 value: 34.003 - type: mrr_at_1 value: 23.854 - type: mrr_at_10 value: 36.242999999999995 - type: mrr_at_100 value: 37.32 - type: mrr_at_1000 value: 37.361 - type: mrr_at_3 value: 32.4 - type: mrr_at_5 value: 34.634 - type: ndcg_at_1 value: 23.868000000000002 - type: ndcg_at_10 value: 42.589 - type: ndcg_at_100 value: 48.031 - type: ndcg_at_1000 value: 49.189 - type: ndcg_at_3 value: 34.649 - type: ndcg_at_5 value: 38.676 - type: precision_at_1 value: 23.868000000000002 - type: precision_at_10 value: 6.6850000000000005 - type: precision_at_100 value: 0.9400000000000001 - type: precision_at_1000 value: 0.104 - type: precision_at_3 value: 14.651 - type: precision_at_5 value: 10.834000000000001 - type: recall_at_1 value: 23.301 - type: recall_at_10 value: 63.88700000000001 - type: recall_at_100 value: 88.947 - type: recall_at_1000 value: 97.783 - type: recall_at_3 value: 42.393 - type: recall_at_5 value: 52.036 - task: type: Classification dataset: name: MTEB MTOPDomainClassification (en) type: mteb/mtop_domain config: en split: test revision: d80d48c1eb48d3562165c59d59d0034df9fff0bf metrics: - type: accuracy value: 94.64888280893753 - type: f1 value: 94.41310774203512 - task: type: Classification dataset: name: MTEB MTOPIntentClassification (en) type: mteb/mtop_intent config: en split: test revision: ae001d0e6b1228650b7bd1c2c65fb50ad11a8aba metrics: - type: accuracy value: 79.72184222526221 - type: f1 value: 61.522034067350106 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (en) type: mteb/amazon_massive_intent config: en split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 79.60659045057163 - type: f1 value: 77.268649687049 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (en) type: mteb/amazon_massive_scenario config: en split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 81.83254875588432 - type: f1 value: 81.61520635919082 - task: type: Clustering dataset: name: MTEB MedrxivClusteringP2P type: mteb/medrxiv-clustering-p2p config: default split: test revision: e7a26af6f3ae46b30dde8737f02c07b1505bcc73 metrics: - type: v_measure value: 36.31529875009507 - task: type: Clustering dataset: name: MTEB MedrxivClusteringS2S type: mteb/medrxiv-clustering-s2s config: default split: test revision: 35191c8c0dca72d8ff3efcd72aa802307d469663 metrics: - type: v_measure value: 31.734233714415073 - task: type: Reranking dataset: name: MTEB MindSmallReranking type: mteb/mind_small config: 
default split: test revision: 3bdac13927fdc888b903db93b2ffdbd90b295a69 metrics: - type: map value: 30.994501713009452 - type: mrr value: 32.13512850703073 - task: type: Retrieval dataset: name: MTEB NFCorpus type: nfcorpus config: default split: test revision: None metrics: - type: map_at_1 value: 6.603000000000001 - type: map_at_10 value: 13.767999999999999 - type: map_at_100 value: 17.197000000000003 - type: map_at_1000 value: 18.615000000000002 - type: map_at_3 value: 10.567 - type: map_at_5 value: 12.078999999999999 - type: mrr_at_1 value: 44.891999999999996 - type: mrr_at_10 value: 53.75299999999999 - type: mrr_at_100 value: 54.35 - type: mrr_at_1000 value: 54.388000000000005 - type: mrr_at_3 value: 51.495999999999995 - type: mrr_at_5 value: 52.688 - type: ndcg_at_1 value: 43.189 - type: ndcg_at_10 value: 34.567 - type: ndcg_at_100 value: 32.273 - type: ndcg_at_1000 value: 41.321999999999996 - type: ndcg_at_3 value: 40.171 - type: ndcg_at_5 value: 37.502 - type: precision_at_1 value: 44.582 - type: precision_at_10 value: 25.139 - type: precision_at_100 value: 7.739999999999999 - type: precision_at_1000 value: 2.054 - type: precision_at_3 value: 37.152 - type: precision_at_5 value: 31.826999999999998 - type: recall_at_1 value: 6.603000000000001 - type: recall_at_10 value: 17.023 - type: recall_at_100 value: 32.914 - type: recall_at_1000 value: 64.44800000000001 - type: recall_at_3 value: 11.457 - type: recall_at_5 value: 13.816 - task: type: Retrieval dataset: name: MTEB NQ type: nq config: default split: test revision: None metrics: - type: map_at_1 value: 30.026000000000003 - type: map_at_10 value: 45.429 - type: map_at_100 value: 46.45 - type: map_at_1000 value: 46.478 - type: map_at_3 value: 41.147 - type: map_at_5 value: 43.627 - type: mrr_at_1 value: 33.951 - type: mrr_at_10 value: 47.953 - type: mrr_at_100 value: 48.731 - type: mrr_at_1000 value: 48.751 - type: mrr_at_3 value: 44.39 - type: mrr_at_5 value: 46.533 - type: ndcg_at_1 value: 33.951 - type: ndcg_at_10 value: 53.24100000000001 - type: ndcg_at_100 value: 57.599999999999994 - type: ndcg_at_1000 value: 58.270999999999994 - type: ndcg_at_3 value: 45.190999999999995 - type: ndcg_at_5 value: 49.339 - type: precision_at_1 value: 33.951 - type: precision_at_10 value: 8.856 - type: precision_at_100 value: 1.133 - type: precision_at_1000 value: 0.12 - type: precision_at_3 value: 20.713 - type: precision_at_5 value: 14.838000000000001 - type: recall_at_1 value: 30.026000000000003 - type: recall_at_10 value: 74.512 - type: recall_at_100 value: 93.395 - type: recall_at_1000 value: 98.402 - type: recall_at_3 value: 53.677 - type: recall_at_5 value: 63.198 - task: type: Retrieval dataset: name: MTEB QuoraRetrieval type: quora config: default split: test revision: None metrics: - type: map_at_1 value: 71.41300000000001 - type: map_at_10 value: 85.387 - type: map_at_100 value: 86.027 - type: map_at_1000 value: 86.041 - type: map_at_3 value: 82.543 - type: map_at_5 value: 84.304 - type: mrr_at_1 value: 82.35 - type: mrr_at_10 value: 88.248 - type: mrr_at_100 value: 88.348 - type: mrr_at_1000 value: 88.349 - type: mrr_at_3 value: 87.348 - type: mrr_at_5 value: 87.96300000000001 - type: ndcg_at_1 value: 82.37 - type: ndcg_at_10 value: 88.98 - type: ndcg_at_100 value: 90.16499999999999 - type: ndcg_at_1000 value: 90.239 - type: ndcg_at_3 value: 86.34100000000001 - type: ndcg_at_5 value: 87.761 - type: precision_at_1 value: 82.37 - type: precision_at_10 value: 13.471 - type: precision_at_100 value: 1.534 - type: precision_at_1000 value: 
0.157 - type: precision_at_3 value: 37.827 - type: precision_at_5 value: 24.773999999999997 - type: recall_at_1 value: 71.41300000000001 - type: recall_at_10 value: 95.748 - type: recall_at_100 value: 99.69200000000001 - type: recall_at_1000 value: 99.98 - type: recall_at_3 value: 87.996 - type: recall_at_5 value: 92.142 - task: type: Clustering dataset: name: MTEB RedditClustering type: mteb/reddit-clustering config: default split: test revision: 24640382cdbf8abc73003fb0fa6d111a705499eb metrics: - type: v_measure value: 56.96878497780007 - task: type: Clustering dataset: name: MTEB RedditClusteringP2P type: mteb/reddit-clustering-p2p config: default split: test revision: 282350215ef01743dc01b456c7f5241fa8937f16 metrics: - type: v_measure value: 65.31371347128074 - task: type: Retrieval dataset: name: MTEB SCIDOCS type: scidocs config: default split: test revision: None metrics: - type: map_at_1 value: 5.287 - type: map_at_10 value: 13.530000000000001 - type: map_at_100 value: 15.891 - type: map_at_1000 value: 16.245 - type: map_at_3 value: 9.612 - type: map_at_5 value: 11.672 - type: mrr_at_1 value: 26 - type: mrr_at_10 value: 37.335 - type: mrr_at_100 value: 38.443 - type: mrr_at_1000 value: 38.486 - type: mrr_at_3 value: 33.783 - type: mrr_at_5 value: 36.028 - type: ndcg_at_1 value: 26 - type: ndcg_at_10 value: 22.215 - type: ndcg_at_100 value: 31.101 - type: ndcg_at_1000 value: 36.809 - type: ndcg_at_3 value: 21.104 - type: ndcg_at_5 value: 18.759999999999998 - type: precision_at_1 value: 26 - type: precision_at_10 value: 11.43 - type: precision_at_100 value: 2.424 - type: precision_at_1000 value: 0.379 - type: precision_at_3 value: 19.7 - type: precision_at_5 value: 16.619999999999997 - type: recall_at_1 value: 5.287 - type: recall_at_10 value: 23.18 - type: recall_at_100 value: 49.208 - type: recall_at_1000 value: 76.85300000000001 - type: recall_at_3 value: 11.991999999999999 - type: recall_at_5 value: 16.85 - task: type: STS dataset: name: MTEB SICK-R type: mteb/sickr-sts config: default split: test revision: a6ea5a8cab320b040a23452cc28066d9beae2cee metrics: - type: cos_sim_pearson value: 83.87834913790886 - type: cos_sim_spearman value: 81.04583513112122 - type: euclidean_pearson value: 81.20484174558065 - type: euclidean_spearman value: 80.76430832561769 - type: manhattan_pearson value: 81.21416730978615 - type: manhattan_spearman value: 80.7797637394211 - task: type: STS dataset: name: MTEB STS12 type: mteb/sts12-sts config: default split: test revision: a0d554a64d88156834ff5ae9920b964011b16384 metrics: - type: cos_sim_pearson value: 86.56143998865157 - type: cos_sim_spearman value: 79.75387012744471 - type: euclidean_pearson value: 83.7877519997019 - type: euclidean_spearman value: 79.90489748003296 - type: manhattan_pearson value: 83.7540590666095 - type: manhattan_spearman value: 79.86434577931573 - task: type: STS dataset: name: MTEB STS13 type: mteb/sts13-sts config: default split: test revision: 7e90230a92c190f1bf69ae9002b8cea547a64cca metrics: - type: cos_sim_pearson value: 83.92102564177941 - type: cos_sim_spearman value: 84.98234585939103 - type: euclidean_pearson value: 84.47729567593696 - type: euclidean_spearman value: 85.09490696194469 - type: manhattan_pearson value: 84.38622951588229 - type: manhattan_spearman value: 85.02507171545574 - task: type: STS dataset: name: MTEB STS14 type: mteb/sts14-sts config: default split: test revision: 6031580fec1f6af667f0bd2da0a551cf4f0b2375 metrics: - type: cos_sim_pearson value: 80.1891164763377 - type: cos_sim_spearman value: 
80.7997969966883 - type: euclidean_pearson value: 80.48572256162396 - type: euclidean_spearman value: 80.57851903536378 - type: manhattan_pearson value: 80.4324819433651 - type: manhattan_spearman value: 80.5074526239062 - task: type: STS dataset: name: MTEB STS15 type: mteb/sts15-sts config: default split: test revision: ae752c7c21bf194d8b67fd573edf7ae58183cbe3 metrics: - type: cos_sim_pearson value: 82.64319975116025 - type: cos_sim_spearman value: 84.88671197763652 - type: euclidean_pearson value: 84.74692193293231 - type: euclidean_spearman value: 85.27151722073653 - type: manhattan_pearson value: 84.72460516785438 - type: manhattan_spearman value: 85.26518899786687 - task: type: STS dataset: name: MTEB STS16 type: mteb/sts16-sts config: default split: test revision: 4d8694f8f0e0100860b497b999b3dbed754a0513 metrics: - type: cos_sim_pearson value: 83.24687565822381 - type: cos_sim_spearman value: 85.60418454111263 - type: euclidean_pearson value: 84.85829740169851 - type: euclidean_spearman value: 85.66378014138306 - type: manhattan_pearson value: 84.84672408808835 - type: manhattan_spearman value: 85.63331924364891 - task: type: STS dataset: name: MTEB STS17 (en-en) type: mteb/sts17-crosslingual-sts config: en-en split: test revision: af5e6fb845001ecf41f4c1e033ce921939a2a68d metrics: - type: cos_sim_pearson value: 84.87758895415485 - type: cos_sim_spearman value: 85.8193745617297 - type: euclidean_pearson value: 85.78719118848134 - type: euclidean_spearman value: 84.35797575385688 - type: manhattan_pearson value: 85.97919844815692 - type: manhattan_spearman value: 84.58334745175151 - task: type: STS dataset: name: MTEB STS22 (en) type: mteb/sts22-crosslingual-sts config: en split: test revision: 6d1ba47164174a496b7fa5d3569dae26a6813b80 metrics: - type: cos_sim_pearson value: 67.27076035963599 - type: cos_sim_spearman value: 67.21433656439973 - type: euclidean_pearson value: 68.07434078679324 - type: euclidean_spearman value: 66.0249731719049 - type: manhattan_pearson value: 67.95495198947476 - type: manhattan_spearman value: 65.99893908331886 - task: type: STS dataset: name: MTEB STSBenchmark type: mteb/stsbenchmark-sts config: default split: test revision: b0fddb56ed78048fa8b90373c8a3cfc37b684831 metrics: - type: cos_sim_pearson value: 82.22437747056817 - type: cos_sim_spearman value: 85.0995685206174 - type: euclidean_pearson value: 84.08616925603394 - type: euclidean_spearman value: 84.89633925691658 - type: manhattan_pearson value: 84.08332675923133 - type: manhattan_spearman value: 84.8858228112915 - task: type: Reranking dataset: name: MTEB SciDocsRR type: mteb/scidocs-reranking config: default split: test revision: d3c5e1fc0b855ab6097bf1cda04dd73947d7caab metrics: - type: map value: 87.6909022589666 - type: mrr value: 96.43341952165481 - task: type: Retrieval dataset: name: MTEB SciFact type: scifact config: default split: test revision: None metrics: - type: map_at_1 value: 57.660999999999994 - type: map_at_10 value: 67.625 - type: map_at_100 value: 68.07600000000001 - type: map_at_1000 value: 68.10199999999999 - type: map_at_3 value: 64.50399999999999 - type: map_at_5 value: 66.281 - type: mrr_at_1 value: 61 - type: mrr_at_10 value: 68.953 - type: mrr_at_100 value: 69.327 - type: mrr_at_1000 value: 69.352 - type: mrr_at_3 value: 66.833 - type: mrr_at_5 value: 68.05 - type: ndcg_at_1 value: 61 - type: ndcg_at_10 value: 72.369 - type: ndcg_at_100 value: 74.237 - type: ndcg_at_1000 value: 74.939 - type: ndcg_at_3 value: 67.284 - type: ndcg_at_5 value: 69.72500000000001 - type: 
precision_at_1 value: 61 - type: precision_at_10 value: 9.733 - type: precision_at_100 value: 1.0670000000000002 - type: precision_at_1000 value: 0.11199999999999999 - type: precision_at_3 value: 26.222 - type: precision_at_5 value: 17.4 - type: recall_at_1 value: 57.660999999999994 - type: recall_at_10 value: 85.656 - type: recall_at_100 value: 93.833 - type: recall_at_1000 value: 99.333 - type: recall_at_3 value: 71.961 - type: recall_at_5 value: 78.094 - task: type: PairClassification dataset: name: MTEB SprintDuplicateQuestions type: mteb/sprintduplicatequestions-pairclassification config: default split: test revision: d66bd1f72af766a5cc4b0ca5e00c162f89e8cc46 metrics: - type: cos_sim_accuracy value: 99.86930693069307 - type: cos_sim_ap value: 96.76685487950894 - type: cos_sim_f1 value: 93.44587884806354 - type: cos_sim_precision value: 92.80078895463511 - type: cos_sim_recall value: 94.1 - type: dot_accuracy value: 99.54356435643564 - type: dot_ap value: 81.18659960405607 - type: dot_f1 value: 75.78008915304605 - type: dot_precision value: 75.07360157016683 - type: dot_recall value: 76.5 - type: euclidean_accuracy value: 99.87326732673267 - type: euclidean_ap value: 96.8102411908941 - type: euclidean_f1 value: 93.6127744510978 - type: euclidean_precision value: 93.42629482071713 - type: euclidean_recall value: 93.8 - type: manhattan_accuracy value: 99.87425742574257 - type: manhattan_ap value: 96.82857341435529 - type: manhattan_f1 value: 93.62129583124059 - type: manhattan_precision value: 94.04641775983855 - type: manhattan_recall value: 93.2 - type: max_accuracy value: 99.87425742574257 - type: max_ap value: 96.82857341435529 - type: max_f1 value: 93.62129583124059 - task: type: Clustering dataset: name: MTEB StackExchangeClustering type: mteb/stackexchange-clustering config: default split: test revision: 6cbc1f7b2bc0622f2e39d2c77fa502909748c259 metrics: - type: v_measure value: 65.92560972698926 - task: type: Clustering dataset: name: MTEB StackExchangeClusteringP2P type: mteb/stackexchange-clustering-p2p config: default split: test revision: 815ca46b2622cec33ccafc3735d572c266efdb44 metrics: - type: v_measure value: 34.92797240259008 - task: type: Reranking dataset: name: MTEB StackOverflowDupQuestions type: mteb/stackoverflowdupquestions-reranking config: default split: test revision: e185fbe320c72810689fc5848eb6114e1ef5ec69 metrics: - type: map value: 55.244624045597654 - type: mrr value: 56.185303666921314 - task: type: Summarization dataset: name: MTEB SummEval type: mteb/summeval config: default split: test revision: cda12ad7615edc362dbf25a00fdd61d3b1eaf93c metrics: - type: cos_sim_pearson value: 31.02491987312937 - type: cos_sim_spearman value: 32.055592206679734 - type: dot_pearson value: 24.731627575422557 - type: dot_spearman value: 24.308029077069733 - task: type: Retrieval dataset: name: MTEB TRECCOVID type: trec-covid config: default split: test revision: None metrics: - type: map_at_1 value: 0.231 - type: map_at_10 value: 1.899 - type: map_at_100 value: 9.498 - type: map_at_1000 value: 20.979999999999997 - type: map_at_3 value: 0.652 - type: map_at_5 value: 1.069 - type: mrr_at_1 value: 88 - type: mrr_at_10 value: 93.4 - type: mrr_at_100 value: 93.4 - type: mrr_at_1000 value: 93.4 - type: mrr_at_3 value: 93 - type: mrr_at_5 value: 93.4 - type: ndcg_at_1 value: 86 - type: ndcg_at_10 value: 75.375 - type: ndcg_at_100 value: 52.891999999999996 - type: ndcg_at_1000 value: 44.952999999999996 - type: ndcg_at_3 value: 81.05 - type: ndcg_at_5 value: 80.175 - type: 
precision_at_1 value: 88 - type: precision_at_10 value: 79 - type: precision_at_100 value: 53.16 - type: precision_at_1000 value: 19.408 - type: precision_at_3 value: 85.333 - type: precision_at_5 value: 84 - type: recall_at_1 value: 0.231 - type: recall_at_10 value: 2.078 - type: recall_at_100 value: 12.601 - type: recall_at_1000 value: 41.296 - type: recall_at_3 value: 0.6779999999999999 - type: recall_at_5 value: 1.1360000000000001 - task: type: Retrieval dataset: name: MTEB Touche2020 type: webis-touche2020 config: default split: test revision: None metrics: - type: map_at_1 value: 2.782 - type: map_at_10 value: 10.204 - type: map_at_100 value: 16.176 - type: map_at_1000 value: 17.456 - type: map_at_3 value: 5.354 - type: map_at_5 value: 7.503 - type: mrr_at_1 value: 40.816 - type: mrr_at_10 value: 54.010000000000005 - type: mrr_at_100 value: 54.49 - type: mrr_at_1000 value: 54.49 - type: mrr_at_3 value: 48.980000000000004 - type: mrr_at_5 value: 51.735 - type: ndcg_at_1 value: 36.735 - type: ndcg_at_10 value: 26.61 - type: ndcg_at_100 value: 36.967 - type: ndcg_at_1000 value: 47.274 - type: ndcg_at_3 value: 30.363 - type: ndcg_at_5 value: 29.448999999999998 - type: precision_at_1 value: 40.816 - type: precision_at_10 value: 23.878 - type: precision_at_100 value: 7.693999999999999 - type: precision_at_1000 value: 1.4489999999999998 - type: precision_at_3 value: 31.293 - type: precision_at_5 value: 29.796 - type: recall_at_1 value: 2.782 - type: recall_at_10 value: 16.485 - type: recall_at_100 value: 46.924 - type: recall_at_1000 value: 79.365 - type: recall_at_3 value: 6.52 - type: recall_at_5 value: 10.48 - task: type: Classification dataset: name: MTEB ToxicConversationsClassification type: mteb/toxic_conversations_50k config: default split: test revision: d7c0de2777da35d6aae2200a62c6e0e5af397c4c metrics: - type: accuracy value: 70.08300000000001 - type: ap value: 13.91559884590195 - type: f1 value: 53.956838444291364 - task: type: Classification dataset: name: MTEB TweetSentimentExtractionClassification type: mteb/tweet_sentiment_extraction config: default split: test revision: d604517c81ca91fe16a244d1248fc021f9ecee7a metrics: - type: accuracy value: 59.34069043576683 - type: f1 value: 59.662041994618406 - task: type: Clustering dataset: name: MTEB TwentyNewsgroupsClustering type: mteb/twentynewsgroups-clustering config: default split: test revision: 6125ec4e24fa026cec8a478383ee943acfbd5449 metrics: - type: v_measure value: 53.70780611078653 - task: type: PairClassification dataset: name: MTEB TwitterSemEval2015 type: mteb/twittersemeval2015-pairclassification config: default split: test revision: 70970daeab8776df92f5ea462b6173c0b46fd2d1 metrics: - type: cos_sim_accuracy value: 87.10734934732073 - type: cos_sim_ap value: 77.58349999516054 - type: cos_sim_f1 value: 70.25391395868965 - type: cos_sim_precision value: 70.06035161374967 - type: cos_sim_recall value: 70.44854881266491 - type: dot_accuracy value: 80.60439887941826 - type: dot_ap value: 54.52935200483575 - type: dot_f1 value: 54.170444242973716 - type: dot_precision value: 47.47715534366309 - type: dot_recall value: 63.06068601583114 - type: euclidean_accuracy value: 87.26828396018358 - type: euclidean_ap value: 78.00158454104036 - type: euclidean_f1 value: 70.70292457670601 - type: euclidean_precision value: 68.79680479281079 - type: euclidean_recall value: 72.71767810026385 - type: manhattan_accuracy value: 87.11330988853788 - type: manhattan_ap value: 77.92527099601855 - type: manhattan_f1 value: 70.76488706365502 - 
type: manhattan_precision value: 68.89055472263868 - type: manhattan_recall value: 72.74406332453826 - type: max_accuracy value: 87.26828396018358 - type: max_ap value: 78.00158454104036 - type: max_f1 value: 70.76488706365502 - task: type: PairClassification dataset: name: MTEB TwitterURLCorpus type: mteb/twitterurlcorpus-pairclassification config: default split: test revision: 8b6510b0b1fa4e4c4f879467980e9be563ec1cdf metrics: - type: cos_sim_accuracy value: 87.80804905499282 - type: cos_sim_ap value: 83.06187782630936 - type: cos_sim_f1 value: 74.99716435403985 - type: cos_sim_precision value: 73.67951860931579 - type: cos_sim_recall value: 76.36279642747151 - type: dot_accuracy value: 81.83141227151008 - type: dot_ap value: 67.18241090841795 - type: dot_f1 value: 62.216037571751606 - type: dot_precision value: 56.749381227391005 - type: dot_recall value: 68.84816753926701 - type: euclidean_accuracy value: 87.91671517832887 - type: euclidean_ap value: 83.56538942001427 - type: euclidean_f1 value: 75.7327253337256 - type: euclidean_precision value: 72.48856036606828 - type: euclidean_recall value: 79.28087465352634 - type: manhattan_accuracy value: 87.86626304963713 - type: manhattan_ap value: 83.52939841172832 - type: manhattan_f1 value: 75.73635656329888 - type: manhattan_precision value: 72.99150182103836 - type: manhattan_recall value: 78.69571912534647 - type: max_accuracy value: 87.91671517832887 - type: max_ap value: 83.56538942001427 - type: max_f1 value: 75.73635656329888
---

**We recommend switching to the newest [BAAI/bge-large-en-v1.5](https://huggingface.co/BAAI/bge-large-en-v1.5), which has a more reasonable similarity distribution and the same usage.**

<h1 align="center">FlagEmbedding</h1>

<h4 align="center">
<p>
<a href=#model-list>Model List</a> |
<a href=#frequently-asked-questions>FAQ</a> |
<a href=#usage>Usage</a> |
<a href="#evaluation">Evaluation</a> |
<a href="#train">Train</a> |
<a href="#contact">Contact</a> |
<a href="#citation">Citation</a> |
<a href="#license">License</a>
<p>
</h4>

For more details, please refer to our GitHub repository: [FlagEmbedding](https://github.com/FlagOpen/FlagEmbedding).

[English](README.md) | [中文](https://github.com/FlagOpen/FlagEmbedding/blob/master/README_zh.md)

FlagEmbedding can map any text to a low-dimensional dense vector, which can be used for tasks like retrieval, classification, clustering, or semantic search. It can also be used in vector databases for LLMs.

************* 🌟**Updates**🌟 *************
- 10/12/2023: Release [LLM-Embedder](./FlagEmbedding/llm_embedder/README.md), a unified embedding model that supports the diverse retrieval augmentation needs of LLMs. [Paper](https://arxiv.org/pdf/2310.07554.pdf) :fire:
- 09/15/2023: The [technical report](https://arxiv.org/pdf/2309.07597.pdf) of BGE has been released.
- 09/15/2023: The [massive training data](https://data.baai.ac.cn/details/BAAI-MTP) of BGE has been released.
- 09/12/2023: New models:
    - **New reranker models**: release the cross-encoder models `BAAI/bge-reranker-base` and `BAAI/bge-reranker-large`, which are more powerful than the embedding models. We recommend using or fine-tuning them to re-rank the top-k documents returned by embedding models.
    - **Updated embedding models**: release the `bge-*-v1.5` embedding models to alleviate the issue of the similarity distribution and to enhance retrieval ability without instructions.
<details>
<summary>More</summary>
<!-- ### More -->
- 09/07/2023: Update the [fine-tune code](https://github.com/FlagOpen/FlagEmbedding/blob/master/FlagEmbedding/baai_general_embedding/README.md): add a script to mine hard negatives and support adding an instruction during fine-tuning.
- 08/09/2023: BGE models are integrated into **Langchain**; you can use them like [this](#using-langchain). The C-MTEB **leaderboard** is [available](https://huggingface.co/spaces/mteb/leaderboard).
- 08/05/2023: Release base-scale and small-scale models, with the **best performance among models of the same size 🤗**
- 08/02/2023: Release the `bge-large-*` (short for BAAI General Embedding) models, which **rank 1st on the MTEB and C-MTEB benchmarks!** :tada: :tada:
- 08/01/2023: We release the [Chinese Massive Text Embedding Benchmark](https://github.com/FlagOpen/FlagEmbedding/blob/master/C_MTEB) (**C-MTEB**), consisting of 31 test datasets.
</details>

## Model List

`bge` is short for `BAAI general embedding`.

| Model | Language | | Description | query instruction for retrieval [1] |
|:-------------------------------|:--------:| :--------:| :--------:|:--------:|
| [BAAI/llm-embedder](https://huggingface.co/BAAI/llm-embedder) | English | [Inference](./FlagEmbedding/llm_embedder/README.md) [Fine-tune](./FlagEmbedding/llm_embedder/README.md) | a unified embedding model to support diverse retrieval augmentation needs for LLMs | See [README](./FlagEmbedding/llm_embedder/README.md) |
| [BAAI/bge-reranker-large](https://huggingface.co/BAAI/bge-reranker-large) | Chinese and English | [Inference](#usage-for-reranker) [Fine-tune](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/reranker) | a cross-encoder model which is more accurate but less efficient [2] | |
| [BAAI/bge-reranker-base](https://huggingface.co/BAAI/bge-reranker-base) | Chinese and English | [Inference](#usage-for-reranker) [Fine-tune](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/reranker) | a cross-encoder model which is more accurate but less efficient [2] | |
| [BAAI/bge-large-en-v1.5](https://huggingface.co/BAAI/bge-large-en-v1.5) | English | [Inference](#usage-for-embedding-model) [Fine-tune](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/finetune) | version 1.5 with more reasonable similarity distribution | `Represent this sentence for searching relevant passages: ` |
| [BAAI/bge-base-en-v1.5](https://huggingface.co/BAAI/bge-base-en-v1.5) | English | [Inference](#usage-for-embedding-model) [Fine-tune](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/finetune) | version 1.5 with more reasonable similarity distribution | `Represent this sentence for searching relevant passages: ` |
| [BAAI/bge-small-en-v1.5](https://huggingface.co/BAAI/bge-small-en-v1.5) | English | [Inference](#usage-for-embedding-model) [Fine-tune](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/finetune) | version 1.5 with more reasonable similarity distribution | `Represent this sentence for searching relevant passages: ` |
| [BAAI/bge-large-zh-v1.5](https://huggingface.co/BAAI/bge-large-zh-v1.5) | Chinese | [Inference](#usage-for-embedding-model) [Fine-tune](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/finetune) | version 1.5 with more reasonable similarity distribution | `为这个句子生成表示以用于检索相关文章:` |
| [BAAI/bge-base-zh-v1.5](https://huggingface.co/BAAI/bge-base-zh-v1.5) | Chinese | [Inference](#usage-for-embedding-model) [Fine-tune](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/finetune) | version 1.5 with more reasonable similarity distribution | `为这个句子生成表示以用于检索相关文章:` |
| [BAAI/bge-small-zh-v1.5](https://huggingface.co/BAAI/bge-small-zh-v1.5) | Chinese | [Inference](#usage-for-embedding-model) [Fine-tune](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/finetune) | version 1.5 with more reasonable similarity distribution | `为这个句子生成表示以用于检索相关文章:` |
| [BAAI/bge-large-en](https://huggingface.co/BAAI/bge-large-en) | English | [Inference](#usage-for-embedding-model) [Fine-tune](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/finetune) | :trophy: rank **1st** in [MTEB](https://huggingface.co/spaces/mteb/leaderboard) leaderboard | `Represent this sentence for searching relevant passages: ` |
| [BAAI/bge-base-en](https://huggingface.co/BAAI/bge-base-en) | English | [Inference](#usage-for-embedding-model) [Fine-tune](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/finetune) | a base-scale model but with similar ability to `bge-large-en` | `Represent this sentence for searching relevant passages: ` |
| [BAAI/bge-small-en](https://huggingface.co/BAAI/bge-small-en) | English | [Inference](#usage-for-embedding-model) [Fine-tune](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/finetune) | a small-scale model but with competitive performance | `Represent this sentence for searching relevant passages: ` |
| [BAAI/bge-large-zh](https://huggingface.co/BAAI/bge-large-zh) | Chinese | [Inference](#usage-for-embedding-model) [Fine-tune](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/finetune) | :trophy: rank **1st** in [C-MTEB](https://github.com/FlagOpen/FlagEmbedding/tree/master/C_MTEB) benchmark | `为这个句子生成表示以用于检索相关文章:` |
| [BAAI/bge-base-zh](https://huggingface.co/BAAI/bge-base-zh) | Chinese | [Inference](#usage-for-embedding-model) [Fine-tune](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/finetune) | a base-scale model but with similar ability to `bge-large-zh` | `为这个句子生成表示以用于检索相关文章:` |
| [BAAI/bge-small-zh](https://huggingface.co/BAAI/bge-small-zh) | Chinese | [Inference](#usage-for-embedding-model) [Fine-tune](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/finetune) | a small-scale model but with competitive performance | `为这个句子生成表示以用于检索相关文章:` |

[1\]: If you need to search for passages relevant to a query, we suggest adding the instruction to the query; in other cases, no instruction is needed, just use the original query directly. In all cases, **no instruction** needs to be added to passages.

[2\]: Unlike the embedding models, the reranker takes a question and a document as input and directly outputs a similarity score instead of an embedding. To balance accuracy and time cost, a cross-encoder is widely used to re-rank the top-k documents retrieved by simpler models. For example, use a bge embedding model to retrieve the top 100 relevant documents, and then use the bge reranker to re-rank those 100 documents to get the final top-3 results.

All models have been uploaded to the Huggingface Hub; you can find them at https://huggingface.co/BAAI. If you cannot access the Huggingface Hub, you can also download the models at https://model.baai.ac.cn/models .

## Frequently asked questions

<details>
<summary>1. How to fine-tune bge embedding model?</summary>
<!-- ### How to fine-tune bge embedding model? -->
Follow this [example](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/finetune) to prepare data and fine-tune your model.
Some suggestions:
- Mine hard negatives following this [example](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/finetune#hard-negatives), which can improve retrieval performance.
- If you pre-train bge on your own data, the pre-trained model cannot be used to calculate similarity directly; it must be fine-tuned with contrastive learning before computing similarity.
- If the accuracy of the fine-tuned model is still not high enough, consider using or fine-tuning the cross-encoder model (bge-reranker) to re-rank the top-k results. Hard negatives are also needed to fine-tune the reranker.
</details>

<details>
<summary>2. The similarity score between two dissimilar sentences is higher than 0.5</summary>
<!-- ### The similarity score between two dissimilar sentences is higher than 0.5 -->
**We suggest using bge v1.5, which alleviates the issue of the similarity distribution.**
Since we fine-tune the models with contrastive learning using a temperature of 0.01, the similarity distribution of the current BGE models lies roughly in the interval \[0.6, 1\]. So a similarity score greater than 0.5 does not indicate that the two sentences are similar.
For downstream tasks such as passage retrieval or semantic similarity, **what matters is the relative order of the scores, not their absolute values.** If you need to filter similar sentences based on a similarity threshold, please select an appropriate threshold based on the similarity distribution of your own data (such as 0.8, 0.85, or even 0.9); a short filtering sketch appears in the Usage section below, under Using FlagEmbedding.
</details>

<details>
<summary>3. When does the query instruction need to be used</summary>
<!-- ### When does the query instruction need to be used -->
For `bge-*-v1.5`, we improved its retrieval ability when no instruction is used; omitting the instruction causes only a slight degradation in retrieval performance compared with using it. So, for convenience, you can generate embeddings without an instruction in all cases.
For a retrieval task that uses short queries to find long related documents, it is recommended to add instructions to these short queries. **The best way to decide whether to add instructions to queries is to choose the setting that achieves better performance on your task.** In all cases, no instruction needs to be added to the documents/passages.
</details>

## Usage

### Usage for Embedding Model

Here are some examples of using `bge` models with [FlagEmbedding](#using-flagembedding), [Sentence-Transformers](#using-sentence-transformers), [Langchain](#using-langchain), or [Huggingface Transformers](#using-huggingface-transformers).

#### Using FlagEmbedding
```
pip install -U FlagEmbedding
```
If it doesn't work for you, see [FlagEmbedding](https://github.com/FlagOpen/FlagEmbedding/blob/master/FlagEmbedding/baai_general_embedding/README.md) for more ways to install FlagEmbedding.
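As a quick illustration of the threshold-based filtering discussed in FAQ 2, here is a minimal, hedged sketch before the full example below. The sentence pairs and the 0.85 cut-off are placeholder values only, not recommendations; choose a threshold from the score distribution of your own data.

```python
import numpy as np
from FlagEmbedding import FlagModel

# Placeholder sentence pairs; in practice these come from your own data.
pairs = [("今天天气很好", "今天天气不错"), ("今天天气很好", "我喜欢吃苹果")]

model = FlagModel('BAAI/bge-large-zh-v1.5', use_fp16=True)
emb_a = model.encode([a for a, _ in pairs])
emb_b = model.encode([b for _, b in pairs])

# encode() returns normalized vectors in these examples, so the row-wise dot
# product is the cosine similarity of each pair.
scores = np.sum(emb_a * emb_b, axis=1)
threshold = 0.85  # illustrative value only; tune it on your data (see FAQ 2)
for (a, b), s in zip(pairs, scores):
    print(f"{s:.3f} {'similar' if s >= threshold else 'dissimilar'}: {a} | {b}")
```

The fuller usage example with query instructions follows.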
```python
from FlagEmbedding import FlagModel

sentences_1 = ["样例数据-1", "样例数据-2"]
sentences_2 = ["样例数据-3", "样例数据-4"]
model = FlagModel('BAAI/bge-large-zh-v1.5',
                  query_instruction_for_retrieval="为这个句子生成表示以用于检索相关文章:",
                  use_fp16=True)  # Setting use_fp16 to True speeds up computation with a slight performance degradation
embeddings_1 = model.encode(sentences_1)
embeddings_2 = model.encode(sentences_2)
similarity = embeddings_1 @ embeddings_2.T
print(similarity)

# For an s2p (short query to long passage) retrieval task, we suggest using encode_queries(),
# which will automatically add the instruction to each query.
# The corpus in a retrieval task can still use encode() or encode_corpus(), since passages do not need the instruction.
queries = ['query_1', 'query_2']
passages = ["样例文档-1", "样例文档-2"]
q_embeddings = model.encode_queries(queries)
p_embeddings = model.encode(passages)
scores = q_embeddings @ p_embeddings.T
```
For the value of the argument `query_instruction_for_retrieval`, see the [Model List](https://github.com/FlagOpen/FlagEmbedding/tree/master#model-list).

By default, FlagModel will use all available GPUs when encoding. Please set `os.environ["CUDA_VISIBLE_DEVICES"]` to select specific GPUs. You can also set `os.environ["CUDA_VISIBLE_DEVICES"]=""` to make all GPUs unavailable.

#### Using Sentence-Transformers

You can also use the `bge` models with [sentence-transformers](https://www.SBERT.net):
```
pip install -U sentence-transformers
```
```python
from sentence_transformers import SentenceTransformer

sentences_1 = ["样例数据-1", "样例数据-2"]
sentences_2 = ["样例数据-3", "样例数据-4"]
model = SentenceTransformer('BAAI/bge-large-zh-v1.5')
embeddings_1 = model.encode(sentences_1, normalize_embeddings=True)
embeddings_2 = model.encode(sentences_2, normalize_embeddings=True)
similarity = embeddings_1 @ embeddings_2.T
print(similarity)
```
For an s2p (short query to long passage) retrieval task, each short query should start with an instruction (see the [Model List](https://github.com/FlagOpen/FlagEmbedding/tree/master#model-list) for the instructions). The instruction is not needed for passages.
```python
from sentence_transformers import SentenceTransformer

queries = ['query_1', 'query_2']
passages = ["样例文档-1", "样例文档-2"]
instruction = "为这个句子生成表示以用于检索相关文章:"

model = SentenceTransformer('BAAI/bge-large-zh-v1.5')
q_embeddings = model.encode([instruction + q for q in queries], normalize_embeddings=True)
p_embeddings = model.encode(passages, normalize_embeddings=True)
scores = q_embeddings @ p_embeddings.T
```

#### Using Langchain

You can use `bge` in Langchain like this:
```python
from langchain.embeddings import HuggingFaceBgeEmbeddings

model_name = "BAAI/bge-large-en-v1.5"
model_kwargs = {'device': 'cuda'}
encode_kwargs = {'normalize_embeddings': True}  # set True to compute cosine similarity
model = HuggingFaceBgeEmbeddings(
    model_name=model_name,
    model_kwargs=model_kwargs,
    encode_kwargs=encode_kwargs,
    query_instruction="为这个句子生成表示以用于检索相关文章:"
)
model.query_instruction = "为这个句子生成表示以用于检索相关文章:"
```

#### Using HuggingFace Transformers

With the transformers package, you can use the model like this: first, pass your input through the transformer model, then select the last hidden state of the first token (i.e., [CLS]) as the sentence embedding.

```python
from transformers import AutoTokenizer, AutoModel
import torch

# Sentences we want sentence embeddings for
sentences = ["样例数据-1", "样例数据-2"]

# Load model from HuggingFace Hub
tokenizer = AutoTokenizer.from_pretrained('BAAI/bge-large-zh-v1.5')
model = AutoModel.from_pretrained('BAAI/bge-large-zh-v1.5')
model.eval()

# Tokenize sentences
encoded_input = tokenizer(sentences, padding=True, truncation=True, return_tensors='pt')
# For an s2p (short query to long passage) retrieval task, add an instruction to each query (no instruction is added to passages):
# encoded_input = tokenizer([instruction + q for q in queries], padding=True, truncation=True, return_tensors='pt')

# Compute token embeddings
with torch.no_grad():
    model_output = model(**encoded_input)
    # Perform pooling. In this case, cls pooling.
    sentence_embeddings = model_output[0][:, 0]
# Normalize embeddings
sentence_embeddings = torch.nn.functional.normalize(sentence_embeddings, p=2, dim=1)
print("Sentence embeddings:", sentence_embeddings)
```

### Usage for Reranker

Unlike the embedding models, the reranker takes a question and a document as input and directly outputs a similarity score instead of an embedding. You can get a relevance score by feeding a query and a passage to the reranker. The reranker is optimized with cross-entropy loss, so the relevance score is not bounded to a specific range.

#### Using FlagEmbedding
```
pip install -U FlagEmbedding
```

Get relevance scores (higher scores indicate more relevance):
```python
from FlagEmbedding import FlagReranker

reranker = FlagReranker('BAAI/bge-reranker-large', use_fp16=True)  # Setting use_fp16 to True speeds up computation with a slight performance degradation

score = reranker.compute_score(['query', 'passage'])
print(score)

scores = reranker.compute_score([['what is panda?', 'hi'], ['what is panda?', 'The giant panda (Ailuropoda melanoleuca), sometimes called a panda bear or simply panda, is a bear species endemic to China.']])
print(scores)
```

#### Using Huggingface transformers
```python
import torch
from transformers import AutoModelForSequenceClassification, AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained('BAAI/bge-reranker-large')
model = AutoModelForSequenceClassification.from_pretrained('BAAI/bge-reranker-large')
model.eval()

pairs = [['what is panda?', 'hi'], ['what is panda?', 'The giant panda (Ailuropoda melanoleuca), sometimes called a panda bear or simply panda, is a bear species endemic to China.']]
with torch.no_grad():
    inputs = tokenizer(pairs, padding=True, truncation=True, return_tensors='pt', max_length=512)
    scores = model(**inputs, return_dict=True).logits.view(-1, ).float()
    print(scores)
```

## Evaluation

`baai-general-embedding` models achieve **state-of-the-art performance on both the MTEB and C-MTEB leaderboards!**
For more details and evaluation tools, see our [scripts](https://github.com/FlagOpen/FlagEmbedding/blob/master/C_MTEB/README.md).
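If you want to spot-check a single score yourself, here is a minimal sketch using the open-source `mteb` package together with Sentence-Transformers. This is not the official evaluation script linked above, and the task chosen here is only an example; for retrieval tasks, remember to prepend the query instruction from the Model List to each query.

```python
# Minimal MTEB spot-check; assumes `pip install mteb sentence-transformers`.
from mteb import MTEB
from sentence_transformers import SentenceTransformer

model = SentenceTransformer("BAAI/bge-large-en-v1.5")

# Run a single English task and write the scores to a results folder.
evaluation = MTEB(tasks=["Banking77Classification"])
evaluation.run(model, output_folder="results/bge-large-en-v1.5")
```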
- **MTEB**:

| Model Name | Dimension | Sequence Length | Average (56) | Retrieval (15) | Clustering (11) | Pair Classification (3) | Reranking (4) | STS (10) | Summarization (1) | Classification (12) |
|:----:|:---:|:---:|:---:|:---:|:---:|:---:|:---:|:---:|:---:|:---:|
| [BAAI/bge-large-en-v1.5](https://huggingface.co/BAAI/bge-large-en-v1.5) | 1024 | 512 | **64.23** | **54.29** | 46.08 | 87.12 | 60.03 | 83.11 | 31.61 | 75.97 |
| [BAAI/bge-base-en-v1.5](https://huggingface.co/BAAI/bge-base-en-v1.5) | 768 | 512 | 63.55 | 53.25 | 45.77 | 86.55 | 58.86 | 82.4 | 31.07 | 75.53 |
| [BAAI/bge-small-en-v1.5](https://huggingface.co/BAAI/bge-small-en-v1.5) | 384 | 512 | 62.17 | 51.68 | 43.82 | 84.92 | 58.36 | 81.59 | 30.12 | 74.14 |
| [bge-large-en](https://huggingface.co/BAAI/bge-large-en) | 1024 | 512 | 63.98 | 53.9 | 46.98 | 85.8 | 59.48 | 81.56 | 32.06 | 76.21 |
| [bge-base-en](https://huggingface.co/BAAI/bge-base-en) | 768 | 512 | 63.36 | 53.0 | 46.32 | 85.86 | 58.7 | 81.84 | 29.27 | 75.27 |
| [gte-large](https://huggingface.co/thenlper/gte-large) | 1024 | 512 | 63.13 | 52.22 | 46.84 | 85.00 | 59.13 | 83.35 | 31.66 | 73.33 |
| [gte-base](https://huggingface.co/thenlper/gte-base) | 768 | 512 | 62.39 | 51.14 | 46.2 | 84.57 | 58.61 | 82.3 | 31.17 | 73.01 |
| [e5-large-v2](https://huggingface.co/intfloat/e5-large-v2) | 1024 | 512 | 62.25 | 50.56 | 44.49 | 86.03 | 56.61 | 82.05 | 30.19 | 75.24 |
| [bge-small-en](https://huggingface.co/BAAI/bge-small-en) | 384 | 512 | 62.11 | 51.82 | 44.31 | 83.78 | 57.97 | 80.72 | 30.53 | 74.37 |
| [instructor-xl](https://huggingface.co/hkunlp/instructor-xl) | 768 | 512 | 61.79 | 49.26 | 44.74 | 86.62 | 57.29 | 83.06 | 32.32 | 61.79 |
| [e5-base-v2](https://huggingface.co/intfloat/e5-base-v2) | 768 | 512 | 61.5 | 50.29 | 43.80 | 85.73 | 55.91 | 81.05 | 30.28 | 73.84 |
| [gte-small](https://huggingface.co/thenlper/gte-small) | 384 | 512 | 61.36 | 49.46 | 44.89 | 83.54 | 57.7 | 82.07 | 30.42 | 72.31 |
| [text-embedding-ada-002](https://platform.openai.com/docs/guides/embeddings) | 1536 | 8192 | 60.99 | 49.25 | 45.9 | 84.89 | 56.32 | 80.97 | 30.8 | 70.93 |
| [e5-small-v2](https://huggingface.co/intfloat/e5-base-v2) | 384 | 512 | 59.93 | 49.04 | 39.92 | 84.67 | 54.32 | 80.39 | 31.16 | 72.94 |
| [sentence-t5-xxl](https://huggingface.co/sentence-transformers/sentence-t5-xxl) | 768 | 512 | 59.51 | 42.24 | 43.72 | 85.06 | 56.42 | 82.63 | 30.08 | 73.42 |
| [all-mpnet-base-v2](https://huggingface.co/sentence-transformers/all-mpnet-base-v2) | 768 | 514 | 57.78 | 43.81 | 43.69 | 83.04 | 59.36 | 80.28 | 27.49 | 65.07 |
| [sgpt-bloom-7b1-msmarco](https://huggingface.co/bigscience/sgpt-bloom-7b1-msmarco) | 4096 | 2048 | 57.59 | 48.22 | 38.93 | 81.9 | 55.65 | 77.74 | 33.6 | 66.19 |

- **C-MTEB**: We create the benchmark C-MTEB for Chinese text embedding which consists of 31 datasets from 6 tasks. Please refer to [C_MTEB](https://github.com/FlagOpen/FlagEmbedding/blob/master/C_MTEB/README.md) for a detailed introduction.

| Model | Embedding dimension | Avg | Retrieval | STS | PairClassification | Classification | Reranking | Clustering |
|:-------------------------------|:--------:|:--------:|:--------:|:--------:|:--------:|:--------:|:--------:|:--------:|
| [**BAAI/bge-large-zh-v1.5**](https://huggingface.co/BAAI/bge-large-zh-v1.5) | 1024 | **64.53** | 70.46 | 56.25 | 81.6 | 69.13 | 65.84 | 48.99 |
| [BAAI/bge-base-zh-v1.5](https://huggingface.co/BAAI/bge-base-zh-v1.5) | 768 | 63.13 | 69.49 | 53.72 | 79.75 | 68.07 | 65.39 | 47.53 |
| [BAAI/bge-small-zh-v1.5](https://huggingface.co/BAAI/bge-small-zh-v1.5) | 512 | 57.82 | 61.77 | 49.11 | 70.41 | 63.96 | 60.92 | 44.18 |
| [BAAI/bge-large-zh](https://huggingface.co/BAAI/bge-large-zh) | 1024 | 64.20 | 71.53 | 54.98 | 78.94 | 68.32 | 65.11 | 48.39 |
| [bge-large-zh-noinstruct](https://huggingface.co/BAAI/bge-large-zh-noinstruct) | 1024 | 63.53 | 70.55 | 53 | 76.77 | 68.58 | 64.91 | 50.01 |
| [BAAI/bge-base-zh](https://huggingface.co/BAAI/bge-base-zh) | 768 | 62.96 | 69.53 | 54.12 | 77.5 | 67.07 | 64.91 | 47.63 |
| [multilingual-e5-large](https://huggingface.co/intfloat/multilingual-e5-large) | 1024 | 58.79 | 63.66 | 48.44 | 69.89 | 67.34 | 56.00 | 48.23 |
| [BAAI/bge-small-zh](https://huggingface.co/BAAI/bge-small-zh) | 512 | 58.27 | 63.07 | 49.45 | 70.35 | 63.64 | 61.48 | 45.09 |
| [m3e-base](https://huggingface.co/moka-ai/m3e-base) | 768 | 57.10 | 56.91 | 50.47 | 63.99 | 67.52 | 59.34 | 47.68 |
| [m3e-large](https://huggingface.co/moka-ai/m3e-large) | 1024 | 57.05 | 54.75 | 50.42 | 64.3 | 68.2 | 59.66 | 48.88 |
| [multilingual-e5-base](https://huggingface.co/intfloat/multilingual-e5-base) | 768 | 55.48 | 61.63 | 46.49 | 67.07 | 65.35 | 54.35 | 40.68 |
| [multilingual-e5-small](https://huggingface.co/intfloat/multilingual-e5-small) | 384 | 55.38 | 59.95 | 45.27 | 66.45 | 65.85 | 53.86 | 45.26 |
| [text-embedding-ada-002(OpenAI)](https://platform.openai.com/docs/guides/embeddings/what-are-embeddings) | 1536 | 53.02 | 52.0 | 43.35 | 69.56 | 64.31 | 54.28 | 45.68 |
| [luotuo](https://huggingface.co/silk-road/luotuo-bert-medium) | 1024 | 49.37 | 44.4 | 42.78 | 66.62 | 61 | 49.25 | 44.39 |
| [text2vec-base](https://huggingface.co/shibing624/text2vec-base-chinese) | 768 | 47.63 | 38.79 | 43.41 | 67.41 | 62.19 | 49.45 | 37.66 |
| [text2vec-large](https://huggingface.co/GanymedeNil/text2vec-large-chinese) | 1024 | 47.36 | 41.94 | 44.97 | 70.86 | 60.66 | 49.16 | 30.02 |

- **Reranking**: See [C_MTEB](https://github.com/FlagOpen/FlagEmbedding/blob/master/C_MTEB/) for evaluation script.
| Model | T2Reranking | T2RerankingZh2En\* | T2RerankingEn2Zh\* | MMarcoReranking | CMedQAv1 | CMedQAv2 | Avg |
|:-------------------------------|:--------:|:--------:|:--------:|:--------:|:--------:|:--------:|:--------:|
| text2vec-base-multilingual | 64.66 | 62.94 | 62.51 | 14.37 | 48.46 | 48.6 | 50.26 |
| multilingual-e5-small | 65.62 | 60.94 | 56.41 | 29.91 | 67.26 | 66.54 | 57.78 |
| multilingual-e5-large | 64.55 | 61.61 | 54.28 | 28.6 | 67.42 | 67.92 | 57.4 |
| multilingual-e5-base | 64.21 | 62.13 | 54.68 | 29.5 | 66.23 | 66.98 | 57.29 |
| m3e-base | 66.03 | 62.74 | 56.07 | 17.51 | 77.05 | 76.76 | 59.36 |
| m3e-large | 66.13 | 62.72 | 56.1 | 16.46 | 77.76 | 78.27 | 59.57 |
| bge-base-zh-v1.5 | 66.49 | 63.25 | 57.02 | 29.74 | 80.47 | 84.88 | 63.64 |
| bge-large-zh-v1.5 | 65.74 | 63.39 | 57.03 | 28.74 | 83.45 | 85.44 | 63.97 |
| [BAAI/bge-reranker-base](https://huggingface.co/BAAI/bge-reranker-base) | 67.28 | 63.95 | 60.45 | 35.46 | 81.26 | 84.1 | 65.42 |
| [BAAI/bge-reranker-large](https://huggingface.co/BAAI/bge-reranker-large) | 67.6 | 64.03 | 61.44 | 37.16 | 82.15 | 84.18 | 66.09 |

\*: T2RerankingZh2En and T2RerankingEn2Zh are cross-language retrieval tasks.

## Train

### BAAI Embedding

We pre-train the models with [RetroMAE](https://github.com/staoxiao/RetroMAE) and train them on large-scale pair data using contrastive learning.
**You can fine-tune the embedding model on your data following our [examples](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/finetune).**
We also provide a [pre-train example](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/pretrain).
Note that the goal of pre-training is to reconstruct the text; the pre-trained model cannot be used for similarity calculation directly and needs to be fine-tuned.
For more training details for bge, see [baai_general_embedding](https://github.com/FlagOpen/FlagEmbedding/blob/master/FlagEmbedding/baai_general_embedding/README.md).

### BGE Reranker

A cross-encoder performs full attention over the input pair, which is more accurate than an embedding model (i.e., a bi-encoder) but more time-consuming. Therefore, it can be used to re-rank the top-k documents returned by an embedding model.
We train the cross-encoder on multilingual pair data. The data format is the same as for the embedding model, so you can fine-tune it easily following our [example](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/reranker).
For more details, please refer to [./FlagEmbedding/reranker/README.md](https://github.com/FlagOpen/FlagEmbedding/tree/master/FlagEmbedding/reranker).

## Contact

If you have any questions or suggestions related to this project, feel free to open an issue or pull request.
You can also email Shitao Xiao ([email protected]) and Zheng Liu ([email protected]).

## Citation

If you find this repository useful, please consider giving it a star :star: and a citation:

```
@misc{bge_embedding,
  title={C-Pack: Packaged Resources To Advance General Chinese Embedding},
  author={Shitao Xiao and Zheng Liu and Peitian Zhang and Niklas Muennighoff},
  year={2023},
  eprint={2309.07597},
  archivePrefix={arXiv},
  primaryClass={cs.CL}
}
```

## License

FlagEmbedding is licensed under the [MIT License](https://github.com/FlagOpen/FlagEmbedding/blob/master/LICENSE). The released models can be used for commercial purposes free of charge.
[ "BEAR", "BIOSSES", "SCIFACT" ]
thenlper/gte-large
thenlper
sentence-similarity
[ "sentence-transformers", "pytorch", "onnx", "safetensors", "openvino", "bert", "mteb", "sentence-similarity", "Sentence Transformers", "en", "arxiv:2308.03281", "license:mit", "model-index", "autotrain_compatible", "text-embeddings-inference", "endpoints_compatible", "region:us" ]
"2023-07-27T09:55:39Z"
2024-11-15T14:10:31+00:00
460,453
272
--- language: - en license: mit tags: - mteb - sentence-similarity - sentence-transformers - Sentence Transformers model-index: - name: gte-large results: - task: type: Classification dataset: name: MTEB AmazonCounterfactualClassification (en) type: mteb/amazon_counterfactual config: en split: test revision: e8379541af4e31359cca9fbcf4b00f2671dba205 metrics: - type: accuracy value: 72.62686567164178 - type: ap value: 34.46944126809772 - type: f1 value: 66.23684353950857 - task: type: Classification dataset: name: MTEB AmazonPolarityClassification type: mteb/amazon_polarity config: default split: test revision: e2d317d38cd51312af73b3d32a06d1a08b442046 metrics: - type: accuracy value: 92.51805 - type: ap value: 89.49842783330848 - type: f1 value: 92.51112169431808 - task: type: Classification dataset: name: MTEB AmazonReviewsClassification (en) type: mteb/amazon_reviews_multi config: en split: test revision: 1399c76144fd37290681b995c656ef9b2e06e26d metrics: - type: accuracy value: 49.074 - type: f1 value: 48.44785682572955 - task: type: Retrieval dataset: name: MTEB ArguAna type: arguana config: default split: test revision: None metrics: - type: map_at_1 value: 32.077 - type: map_at_10 value: 48.153 - type: map_at_100 value: 48.963 - type: map_at_1000 value: 48.966 - type: map_at_3 value: 43.184 - type: map_at_5 value: 46.072 - type: mrr_at_1 value: 33.073 - type: mrr_at_10 value: 48.54 - type: mrr_at_100 value: 49.335 - type: mrr_at_1000 value: 49.338 - type: mrr_at_3 value: 43.563 - type: mrr_at_5 value: 46.383 - type: ndcg_at_1 value: 32.077 - type: ndcg_at_10 value: 57.158 - type: ndcg_at_100 value: 60.324999999999996 - type: ndcg_at_1000 value: 60.402 - type: ndcg_at_3 value: 46.934 - type: ndcg_at_5 value: 52.158 - type: precision_at_1 value: 32.077 - type: precision_at_10 value: 8.591999999999999 - type: precision_at_100 value: 0.991 - type: precision_at_1000 value: 0.1 - type: precision_at_3 value: 19.275000000000002 - type: precision_at_5 value: 14.111 - type: recall_at_1 value: 32.077 - type: recall_at_10 value: 85.917 - type: recall_at_100 value: 99.075 - type: recall_at_1000 value: 99.644 - type: recall_at_3 value: 57.824 - type: recall_at_5 value: 70.555 - task: type: Clustering dataset: name: MTEB ArxivClusteringP2P type: mteb/arxiv-clustering-p2p config: default split: test revision: a122ad7f3f0291bf49cc6f4d32aa80929df69d5d metrics: - type: v_measure value: 48.619246083417295 - task: type: Clustering dataset: name: MTEB ArxivClusteringS2S type: mteb/arxiv-clustering-s2s config: default split: test revision: f910caf1a6075f7329cdf8c1a6135696f37dbd53 metrics: - type: v_measure value: 43.3574067664688 - task: type: Reranking dataset: name: MTEB AskUbuntuDupQuestions type: mteb/askubuntudupquestions-reranking config: default split: test revision: 2000358ca161889fa9c082cb41daa8dcfb161a54 metrics: - type: map value: 63.06359661829253 - type: mrr value: 76.15596007562766 - task: type: STS dataset: name: MTEB BIOSSES type: mteb/biosses-sts config: default split: test revision: d3fb88f8f02e40887cd149695127462bbcf29b4a metrics: - type: cos_sim_pearson value: 90.25407547368691 - type: cos_sim_spearman value: 88.65081514968477 - type: euclidean_pearson value: 88.14857116664494 - type: euclidean_spearman value: 88.50683596540692 - type: manhattan_pearson value: 87.9654797992225 - type: manhattan_spearman value: 88.21164851646908 - task: type: Classification dataset: name: MTEB Banking77Classification type: mteb/banking77 config: default split: test revision: 
0fd18e25b25c072e09e0d92ab615fda904d66300 metrics: - type: accuracy value: 86.05844155844157 - type: f1 value: 86.01555597681825 - task: type: Clustering dataset: name: MTEB BiorxivClusteringP2P type: mteb/biorxiv-clustering-p2p config: default split: test revision: 65b79d1d13f80053f67aca9498d9402c2d9f1f40 metrics: - type: v_measure value: 39.10510519739522 - task: type: Clustering dataset: name: MTEB BiorxivClusteringS2S type: mteb/biorxiv-clustering-s2s config: default split: test revision: 258694dd0231531bc1fd9de6ceb52a0853c6d908 metrics: - type: v_measure value: 36.84689960264385 - task: type: Retrieval dataset: name: MTEB CQADupstackAndroidRetrieval type: BeIR/cqadupstack config: default split: test revision: None metrics: - type: map_at_1 value: 32.800000000000004 - type: map_at_10 value: 44.857 - type: map_at_100 value: 46.512 - type: map_at_1000 value: 46.635 - type: map_at_3 value: 41.062 - type: map_at_5 value: 43.126 - type: mrr_at_1 value: 39.628 - type: mrr_at_10 value: 50.879 - type: mrr_at_100 value: 51.605000000000004 - type: mrr_at_1000 value: 51.641000000000005 - type: mrr_at_3 value: 48.14 - type: mrr_at_5 value: 49.835 - type: ndcg_at_1 value: 39.628 - type: ndcg_at_10 value: 51.819 - type: ndcg_at_100 value: 57.318999999999996 - type: ndcg_at_1000 value: 58.955999999999996 - type: ndcg_at_3 value: 46.409 - type: ndcg_at_5 value: 48.825 - type: precision_at_1 value: 39.628 - type: precision_at_10 value: 10.072000000000001 - type: precision_at_100 value: 1.625 - type: precision_at_1000 value: 0.21 - type: precision_at_3 value: 22.556 - type: precision_at_5 value: 16.309 - type: recall_at_1 value: 32.800000000000004 - type: recall_at_10 value: 65.078 - type: recall_at_100 value: 87.491 - type: recall_at_1000 value: 97.514 - type: recall_at_3 value: 49.561 - type: recall_at_5 value: 56.135999999999996 - type: map_at_1 value: 32.614 - type: map_at_10 value: 43.578 - type: map_at_100 value: 44.897 - type: map_at_1000 value: 45.023 - type: map_at_3 value: 40.282000000000004 - type: map_at_5 value: 42.117 - type: mrr_at_1 value: 40.510000000000005 - type: mrr_at_10 value: 49.428 - type: mrr_at_100 value: 50.068999999999996 - type: mrr_at_1000 value: 50.111000000000004 - type: mrr_at_3 value: 47.176 - type: mrr_at_5 value: 48.583999999999996 - type: ndcg_at_1 value: 40.510000000000005 - type: ndcg_at_10 value: 49.478 - type: ndcg_at_100 value: 53.852 - type: ndcg_at_1000 value: 55.782 - type: ndcg_at_3 value: 45.091 - type: ndcg_at_5 value: 47.19 - type: precision_at_1 value: 40.510000000000005 - type: precision_at_10 value: 9.363000000000001 - type: precision_at_100 value: 1.51 - type: precision_at_1000 value: 0.196 - type: precision_at_3 value: 21.741 - type: precision_at_5 value: 15.465000000000002 - type: recall_at_1 value: 32.614 - type: recall_at_10 value: 59.782000000000004 - type: recall_at_100 value: 78.012 - type: recall_at_1000 value: 90.319 - type: recall_at_3 value: 46.825 - type: recall_at_5 value: 52.688 - type: map_at_1 value: 40.266000000000005 - type: map_at_10 value: 53.756 - type: map_at_100 value: 54.809 - type: map_at_1000 value: 54.855 - type: map_at_3 value: 50.073 - type: map_at_5 value: 52.293 - type: mrr_at_1 value: 46.332 - type: mrr_at_10 value: 57.116 - type: mrr_at_100 value: 57.767 - type: mrr_at_1000 value: 57.791000000000004 - type: mrr_at_3 value: 54.461999999999996 - type: mrr_at_5 value: 56.092 - type: ndcg_at_1 value: 46.332 - type: ndcg_at_10 value: 60.092 - type: ndcg_at_100 value: 64.034 - type: ndcg_at_1000 value: 64.937 - type: 
ndcg_at_3 value: 54.071000000000005 - type: ndcg_at_5 value: 57.254000000000005 - type: precision_at_1 value: 46.332 - type: precision_at_10 value: 9.799 - type: precision_at_100 value: 1.278 - type: precision_at_1000 value: 0.13899999999999998 - type: precision_at_3 value: 24.368000000000002 - type: precision_at_5 value: 16.89 - type: recall_at_1 value: 40.266000000000005 - type: recall_at_10 value: 75.41499999999999 - type: recall_at_100 value: 92.01700000000001 - type: recall_at_1000 value: 98.379 - type: recall_at_3 value: 59.476 - type: recall_at_5 value: 67.297 - type: map_at_1 value: 28.589 - type: map_at_10 value: 37.755 - type: map_at_100 value: 38.881 - type: map_at_1000 value: 38.954 - type: map_at_3 value: 34.759 - type: map_at_5 value: 36.544 - type: mrr_at_1 value: 30.734 - type: mrr_at_10 value: 39.742 - type: mrr_at_100 value: 40.774 - type: mrr_at_1000 value: 40.824 - type: mrr_at_3 value: 37.137 - type: mrr_at_5 value: 38.719 - type: ndcg_at_1 value: 30.734 - type: ndcg_at_10 value: 42.978 - type: ndcg_at_100 value: 48.309000000000005 - type: ndcg_at_1000 value: 50.068 - type: ndcg_at_3 value: 37.361 - type: ndcg_at_5 value: 40.268 - type: precision_at_1 value: 30.734 - type: precision_at_10 value: 6.565 - type: precision_at_100 value: 0.964 - type: precision_at_1000 value: 0.11499999999999999 - type: precision_at_3 value: 15.744 - type: precision_at_5 value: 11.096 - type: recall_at_1 value: 28.589 - type: recall_at_10 value: 57.126999999999995 - type: recall_at_100 value: 81.051 - type: recall_at_1000 value: 94.027 - type: recall_at_3 value: 42.045 - type: recall_at_5 value: 49.019 - type: map_at_1 value: 18.5 - type: map_at_10 value: 27.950999999999997 - type: map_at_100 value: 29.186 - type: map_at_1000 value: 29.298000000000002 - type: map_at_3 value: 25.141000000000002 - type: map_at_5 value: 26.848 - type: mrr_at_1 value: 22.637 - type: mrr_at_10 value: 32.572 - type: mrr_at_100 value: 33.472 - type: mrr_at_1000 value: 33.533 - type: mrr_at_3 value: 29.747 - type: mrr_at_5 value: 31.482 - type: ndcg_at_1 value: 22.637 - type: ndcg_at_10 value: 33.73 - type: ndcg_at_100 value: 39.568 - type: ndcg_at_1000 value: 42.201 - type: ndcg_at_3 value: 28.505999999999997 - type: ndcg_at_5 value: 31.255 - type: precision_at_1 value: 22.637 - type: precision_at_10 value: 6.281000000000001 - type: precision_at_100 value: 1.073 - type: precision_at_1000 value: 0.14300000000000002 - type: precision_at_3 value: 13.847000000000001 - type: precision_at_5 value: 10.224 - type: recall_at_1 value: 18.5 - type: recall_at_10 value: 46.744 - type: recall_at_100 value: 72.072 - type: recall_at_1000 value: 91.03999999999999 - type: recall_at_3 value: 32.551 - type: recall_at_5 value: 39.533 - type: map_at_1 value: 30.602 - type: map_at_10 value: 42.18 - type: map_at_100 value: 43.6 - type: map_at_1000 value: 43.704 - type: map_at_3 value: 38.413000000000004 - type: map_at_5 value: 40.626 - type: mrr_at_1 value: 37.344 - type: mrr_at_10 value: 47.638000000000005 - type: mrr_at_100 value: 48.485 - type: mrr_at_1000 value: 48.52 - type: mrr_at_3 value: 44.867000000000004 - type: mrr_at_5 value: 46.566 - type: ndcg_at_1 value: 37.344 - type: ndcg_at_10 value: 48.632 - type: ndcg_at_100 value: 54.215 - type: ndcg_at_1000 value: 55.981 - type: ndcg_at_3 value: 42.681999999999995 - type: ndcg_at_5 value: 45.732 - type: precision_at_1 value: 37.344 - type: precision_at_10 value: 8.932 - type: precision_at_100 value: 1.376 - type: precision_at_1000 value: 0.17099999999999999 - type: precision_at_3 
value: 20.276 - type: precision_at_5 value: 14.726 - type: recall_at_1 value: 30.602 - type: recall_at_10 value: 62.273 - type: recall_at_100 value: 85.12100000000001 - type: recall_at_1000 value: 96.439 - type: recall_at_3 value: 45.848 - type: recall_at_5 value: 53.615 - type: map_at_1 value: 23.952 - type: map_at_10 value: 35.177 - type: map_at_100 value: 36.59 - type: map_at_1000 value: 36.703 - type: map_at_3 value: 31.261 - type: map_at_5 value: 33.222 - type: mrr_at_1 value: 29.337999999999997 - type: mrr_at_10 value: 40.152 - type: mrr_at_100 value: 40.963 - type: mrr_at_1000 value: 41.016999999999996 - type: mrr_at_3 value: 36.91 - type: mrr_at_5 value: 38.685 - type: ndcg_at_1 value: 29.337999999999997 - type: ndcg_at_10 value: 41.994 - type: ndcg_at_100 value: 47.587 - type: ndcg_at_1000 value: 49.791000000000004 - type: ndcg_at_3 value: 35.27 - type: ndcg_at_5 value: 38.042 - type: precision_at_1 value: 29.337999999999997 - type: precision_at_10 value: 8.276 - type: precision_at_100 value: 1.276 - type: precision_at_1000 value: 0.164 - type: precision_at_3 value: 17.161 - type: precision_at_5 value: 12.671 - type: recall_at_1 value: 23.952 - type: recall_at_10 value: 57.267 - type: recall_at_100 value: 80.886 - type: recall_at_1000 value: 95.611 - type: recall_at_3 value: 38.622 - type: recall_at_5 value: 45.811 - type: map_at_1 value: 27.092083333333335 - type: map_at_10 value: 37.2925 - type: map_at_100 value: 38.57041666666666 - type: map_at_1000 value: 38.68141666666667 - type: map_at_3 value: 34.080000000000005 - type: map_at_5 value: 35.89958333333333 - type: mrr_at_1 value: 31.94758333333333 - type: mrr_at_10 value: 41.51049999999999 - type: mrr_at_100 value: 42.36099999999999 - type: mrr_at_1000 value: 42.4125 - type: mrr_at_3 value: 38.849583333333335 - type: mrr_at_5 value: 40.448249999999994 - type: ndcg_at_1 value: 31.94758333333333 - type: ndcg_at_10 value: 43.17633333333333 - type: ndcg_at_100 value: 48.45241666666668 - type: ndcg_at_1000 value: 50.513999999999996 - type: ndcg_at_3 value: 37.75216666666667 - type: ndcg_at_5 value: 40.393833333333326 - type: precision_at_1 value: 31.94758333333333 - type: precision_at_10 value: 7.688916666666666 - type: precision_at_100 value: 1.2250833333333333 - type: precision_at_1000 value: 0.1595 - type: precision_at_3 value: 17.465999999999998 - type: precision_at_5 value: 12.548083333333333 - type: recall_at_1 value: 27.092083333333335 - type: recall_at_10 value: 56.286583333333326 - type: recall_at_100 value: 79.09033333333333 - type: recall_at_1000 value: 93.27483333333335 - type: recall_at_3 value: 41.35325 - type: recall_at_5 value: 48.072750000000006 - type: map_at_1 value: 25.825 - type: map_at_10 value: 33.723 - type: map_at_100 value: 34.74 - type: map_at_1000 value: 34.824 - type: map_at_3 value: 31.369000000000003 - type: map_at_5 value: 32.533 - type: mrr_at_1 value: 29.293999999999997 - type: mrr_at_10 value: 36.84 - type: mrr_at_100 value: 37.681 - type: mrr_at_1000 value: 37.742 - type: mrr_at_3 value: 34.79 - type: mrr_at_5 value: 35.872 - type: ndcg_at_1 value: 29.293999999999997 - type: ndcg_at_10 value: 38.385999999999996 - type: ndcg_at_100 value: 43.327 - type: ndcg_at_1000 value: 45.53 - type: ndcg_at_3 value: 33.985 - type: ndcg_at_5 value: 35.817 - type: precision_at_1 value: 29.293999999999997 - type: precision_at_10 value: 6.12 - type: precision_at_100 value: 0.9329999999999999 - type: precision_at_1000 value: 0.11900000000000001 - type: precision_at_3 value: 14.621999999999998 - type: precision_at_5 
value: 10.030999999999999 - type: recall_at_1 value: 25.825 - type: recall_at_10 value: 49.647000000000006 - type: recall_at_100 value: 72.32300000000001 - type: recall_at_1000 value: 88.62400000000001 - type: recall_at_3 value: 37.366 - type: recall_at_5 value: 41.957 - type: map_at_1 value: 18.139 - type: map_at_10 value: 26.107000000000003 - type: map_at_100 value: 27.406999999999996 - type: map_at_1000 value: 27.535999999999998 - type: map_at_3 value: 23.445 - type: map_at_5 value: 24.916 - type: mrr_at_1 value: 21.817 - type: mrr_at_10 value: 29.99 - type: mrr_at_100 value: 31.052000000000003 - type: mrr_at_1000 value: 31.128 - type: mrr_at_3 value: 27.627000000000002 - type: mrr_at_5 value: 29.005 - type: ndcg_at_1 value: 21.817 - type: ndcg_at_10 value: 31.135 - type: ndcg_at_100 value: 37.108000000000004 - type: ndcg_at_1000 value: 39.965 - type: ndcg_at_3 value: 26.439 - type: ndcg_at_5 value: 28.655 - type: precision_at_1 value: 21.817 - type: precision_at_10 value: 5.757000000000001 - type: precision_at_100 value: 1.036 - type: precision_at_1000 value: 0.147 - type: precision_at_3 value: 12.537 - type: precision_at_5 value: 9.229 - type: recall_at_1 value: 18.139 - type: recall_at_10 value: 42.272999999999996 - type: recall_at_100 value: 68.657 - type: recall_at_1000 value: 88.93799999999999 - type: recall_at_3 value: 29.266 - type: recall_at_5 value: 34.892 - type: map_at_1 value: 27.755000000000003 - type: map_at_10 value: 37.384 - type: map_at_100 value: 38.56 - type: map_at_1000 value: 38.655 - type: map_at_3 value: 34.214 - type: map_at_5 value: 35.96 - type: mrr_at_1 value: 32.369 - type: mrr_at_10 value: 41.625 - type: mrr_at_100 value: 42.449 - type: mrr_at_1000 value: 42.502 - type: mrr_at_3 value: 38.899 - type: mrr_at_5 value: 40.489999999999995 - type: ndcg_at_1 value: 32.369 - type: ndcg_at_10 value: 43.287 - type: ndcg_at_100 value: 48.504999999999995 - type: ndcg_at_1000 value: 50.552 - type: ndcg_at_3 value: 37.549 - type: ndcg_at_5 value: 40.204 - type: precision_at_1 value: 32.369 - type: precision_at_10 value: 7.425 - type: precision_at_100 value: 1.134 - type: precision_at_1000 value: 0.14200000000000002 - type: precision_at_3 value: 17.102 - type: precision_at_5 value: 12.107999999999999 - type: recall_at_1 value: 27.755000000000003 - type: recall_at_10 value: 57.071000000000005 - type: recall_at_100 value: 79.456 - type: recall_at_1000 value: 93.54299999999999 - type: recall_at_3 value: 41.298 - type: recall_at_5 value: 48.037 - type: map_at_1 value: 24.855 - type: map_at_10 value: 34.53 - type: map_at_100 value: 36.167 - type: map_at_1000 value: 36.394999999999996 - type: map_at_3 value: 31.037 - type: map_at_5 value: 33.119 - type: mrr_at_1 value: 30.631999999999998 - type: mrr_at_10 value: 39.763999999999996 - type: mrr_at_100 value: 40.77 - type: mrr_at_1000 value: 40.826 - type: mrr_at_3 value: 36.495 - type: mrr_at_5 value: 38.561 - type: ndcg_at_1 value: 30.631999999999998 - type: ndcg_at_10 value: 40.942 - type: ndcg_at_100 value: 47.07 - type: ndcg_at_1000 value: 49.363 - type: ndcg_at_3 value: 35.038000000000004 - type: ndcg_at_5 value: 38.161 - type: precision_at_1 value: 30.631999999999998 - type: precision_at_10 value: 7.983999999999999 - type: precision_at_100 value: 1.6070000000000002 - type: precision_at_1000 value: 0.246 - type: precision_at_3 value: 16.206 - type: precision_at_5 value: 12.253 - type: recall_at_1 value: 24.855 - type: recall_at_10 value: 53.291999999999994 - type: recall_at_100 value: 80.283 - type: recall_at_1000 value: 
94.309 - type: recall_at_3 value: 37.257 - type: recall_at_5 value: 45.282 - type: map_at_1 value: 21.208 - type: map_at_10 value: 30.512 - type: map_at_100 value: 31.496000000000002 - type: map_at_1000 value: 31.595000000000002 - type: map_at_3 value: 27.904 - type: map_at_5 value: 29.491 - type: mrr_at_1 value: 22.736 - type: mrr_at_10 value: 32.379999999999995 - type: mrr_at_100 value: 33.245000000000005 - type: mrr_at_1000 value: 33.315 - type: mrr_at_3 value: 29.945 - type: mrr_at_5 value: 31.488 - type: ndcg_at_1 value: 22.736 - type: ndcg_at_10 value: 35.643 - type: ndcg_at_100 value: 40.535 - type: ndcg_at_1000 value: 43.042 - type: ndcg_at_3 value: 30.625000000000004 - type: ndcg_at_5 value: 33.323 - type: precision_at_1 value: 22.736 - type: precision_at_10 value: 5.6930000000000005 - type: precision_at_100 value: 0.889 - type: precision_at_1000 value: 0.122 - type: precision_at_3 value: 13.431999999999999 - type: precision_at_5 value: 9.575 - type: recall_at_1 value: 21.208 - type: recall_at_10 value: 49.47 - type: recall_at_100 value: 71.71499999999999 - type: recall_at_1000 value: 90.55499999999999 - type: recall_at_3 value: 36.124 - type: recall_at_5 value: 42.606 - task: type: Retrieval dataset: name: MTEB ClimateFEVER type: climate-fever config: default split: test revision: None metrics: - type: map_at_1 value: 11.363 - type: map_at_10 value: 20.312 - type: map_at_100 value: 22.225 - type: map_at_1000 value: 22.411 - type: map_at_3 value: 16.68 - type: map_at_5 value: 18.608 - type: mrr_at_1 value: 25.537 - type: mrr_at_10 value: 37.933 - type: mrr_at_100 value: 38.875 - type: mrr_at_1000 value: 38.911 - type: mrr_at_3 value: 34.387 - type: mrr_at_5 value: 36.51 - type: ndcg_at_1 value: 25.537 - type: ndcg_at_10 value: 28.82 - type: ndcg_at_100 value: 36.341 - type: ndcg_at_1000 value: 39.615 - type: ndcg_at_3 value: 23.01 - type: ndcg_at_5 value: 25.269000000000002 - type: precision_at_1 value: 25.537 - type: precision_at_10 value: 9.153 - type: precision_at_100 value: 1.7319999999999998 - type: precision_at_1000 value: 0.234 - type: precision_at_3 value: 17.22 - type: precision_at_5 value: 13.629 - type: recall_at_1 value: 11.363 - type: recall_at_10 value: 35.382999999999996 - type: recall_at_100 value: 61.367000000000004 - type: recall_at_1000 value: 79.699 - type: recall_at_3 value: 21.495 - type: recall_at_5 value: 27.42 - task: type: Retrieval dataset: name: MTEB DBPedia type: dbpedia-entity config: default split: test revision: None metrics: - type: map_at_1 value: 9.65 - type: map_at_10 value: 20.742 - type: map_at_100 value: 29.614 - type: map_at_1000 value: 31.373 - type: map_at_3 value: 14.667 - type: map_at_5 value: 17.186 - type: mrr_at_1 value: 69.75 - type: mrr_at_10 value: 76.762 - type: mrr_at_100 value: 77.171 - type: mrr_at_1000 value: 77.179 - type: mrr_at_3 value: 75.125 - type: mrr_at_5 value: 76.287 - type: ndcg_at_1 value: 57.62500000000001 - type: ndcg_at_10 value: 42.370999999999995 - type: ndcg_at_100 value: 47.897 - type: ndcg_at_1000 value: 55.393 - type: ndcg_at_3 value: 46.317 - type: ndcg_at_5 value: 43.906 - type: precision_at_1 value: 69.75 - type: precision_at_10 value: 33.95 - type: precision_at_100 value: 10.885 - type: precision_at_1000 value: 2.2239999999999998 - type: precision_at_3 value: 49.75 - type: precision_at_5 value: 42.3 - type: recall_at_1 value: 9.65 - type: recall_at_10 value: 26.117 - type: recall_at_100 value: 55.084 - type: recall_at_1000 value: 78.62400000000001 - type: recall_at_3 value: 15.823 - type: recall_at_5 
value: 19.652 - task: type: Classification dataset: name: MTEB EmotionClassification type: mteb/emotion config: default split: test revision: 4f58c6b202a23cf9a4da393831edf4f9183cad37 metrics: - type: accuracy value: 47.885 - type: f1 value: 42.99567641346983 - task: type: Retrieval dataset: name: MTEB FEVER type: fever config: default split: test revision: None metrics: - type: map_at_1 value: 70.97 - type: map_at_10 value: 80.34599999999999 - type: map_at_100 value: 80.571 - type: map_at_1000 value: 80.584 - type: map_at_3 value: 79.279 - type: map_at_5 value: 79.94 - type: mrr_at_1 value: 76.613 - type: mrr_at_10 value: 85.15700000000001 - type: mrr_at_100 value: 85.249 - type: mrr_at_1000 value: 85.252 - type: mrr_at_3 value: 84.33800000000001 - type: mrr_at_5 value: 84.89 - type: ndcg_at_1 value: 76.613 - type: ndcg_at_10 value: 84.53399999999999 - type: ndcg_at_100 value: 85.359 - type: ndcg_at_1000 value: 85.607 - type: ndcg_at_3 value: 82.76599999999999 - type: ndcg_at_5 value: 83.736 - type: precision_at_1 value: 76.613 - type: precision_at_10 value: 10.206 - type: precision_at_100 value: 1.083 - type: precision_at_1000 value: 0.11199999999999999 - type: precision_at_3 value: 31.913000000000004 - type: precision_at_5 value: 19.769000000000002 - type: recall_at_1 value: 70.97 - type: recall_at_10 value: 92.674 - type: recall_at_100 value: 95.985 - type: recall_at_1000 value: 97.57000000000001 - type: recall_at_3 value: 87.742 - type: recall_at_5 value: 90.28 - task: type: Retrieval dataset: name: MTEB FiQA2018 type: fiqa config: default split: test revision: None metrics: - type: map_at_1 value: 22.494 - type: map_at_10 value: 36.491 - type: map_at_100 value: 38.550000000000004 - type: map_at_1000 value: 38.726 - type: map_at_3 value: 31.807000000000002 - type: map_at_5 value: 34.299 - type: mrr_at_1 value: 44.907000000000004 - type: mrr_at_10 value: 53.146 - type: mrr_at_100 value: 54.013999999999996 - type: mrr_at_1000 value: 54.044000000000004 - type: mrr_at_3 value: 50.952 - type: mrr_at_5 value: 52.124 - type: ndcg_at_1 value: 44.907000000000004 - type: ndcg_at_10 value: 44.499 - type: ndcg_at_100 value: 51.629000000000005 - type: ndcg_at_1000 value: 54.367 - type: ndcg_at_3 value: 40.900999999999996 - type: ndcg_at_5 value: 41.737 - type: precision_at_1 value: 44.907000000000004 - type: precision_at_10 value: 12.346 - type: precision_at_100 value: 1.974 - type: precision_at_1000 value: 0.246 - type: precision_at_3 value: 27.366 - type: precision_at_5 value: 19.846 - type: recall_at_1 value: 22.494 - type: recall_at_10 value: 51.156 - type: recall_at_100 value: 77.11200000000001 - type: recall_at_1000 value: 93.44 - type: recall_at_3 value: 36.574 - type: recall_at_5 value: 42.361 - task: type: Retrieval dataset: name: MTEB HotpotQA type: hotpotqa config: default split: test revision: None metrics: - type: map_at_1 value: 38.568999999999996 - type: map_at_10 value: 58.485 - type: map_at_100 value: 59.358999999999995 - type: map_at_1000 value: 59.429 - type: map_at_3 value: 55.217000000000006 - type: map_at_5 value: 57.236 - type: mrr_at_1 value: 77.137 - type: mrr_at_10 value: 82.829 - type: mrr_at_100 value: 83.04599999999999 - type: mrr_at_1000 value: 83.05399999999999 - type: mrr_at_3 value: 81.904 - type: mrr_at_5 value: 82.50800000000001 - type: ndcg_at_1 value: 77.137 - type: ndcg_at_10 value: 67.156 - type: ndcg_at_100 value: 70.298 - type: ndcg_at_1000 value: 71.65700000000001 - type: ndcg_at_3 value: 62.535 - type: ndcg_at_5 value: 65.095 - type: precision_at_1 value: 
77.137 - type: precision_at_10 value: 13.911999999999999 - type: precision_at_100 value: 1.6389999999999998 - type: precision_at_1000 value: 0.182 - type: precision_at_3 value: 39.572 - type: precision_at_5 value: 25.766 - type: recall_at_1 value: 38.568999999999996 - type: recall_at_10 value: 69.56099999999999 - type: recall_at_100 value: 81.931 - type: recall_at_1000 value: 90.91799999999999 - type: recall_at_3 value: 59.358999999999995 - type: recall_at_5 value: 64.416 - task: type: Classification dataset: name: MTEB ImdbClassification type: mteb/imdb config: default split: test revision: 3d86128a09e091d6018b6d26cad27f2739fc2db7 metrics: - type: accuracy value: 88.45600000000002 - type: ap value: 84.09725115338568 - type: f1 value: 88.41874909080512 - task: type: Retrieval dataset: name: MTEB MSMARCO type: msmarco config: default split: dev revision: None metrics: - type: map_at_1 value: 21.404999999999998 - type: map_at_10 value: 33.921 - type: map_at_100 value: 35.116 - type: map_at_1000 value: 35.164 - type: map_at_3 value: 30.043999999999997 - type: map_at_5 value: 32.327 - type: mrr_at_1 value: 21.977 - type: mrr_at_10 value: 34.505 - type: mrr_at_100 value: 35.638999999999996 - type: mrr_at_1000 value: 35.68 - type: mrr_at_3 value: 30.703999999999997 - type: mrr_at_5 value: 32.96 - type: ndcg_at_1 value: 21.963 - type: ndcg_at_10 value: 40.859 - type: ndcg_at_100 value: 46.614 - type: ndcg_at_1000 value: 47.789 - type: ndcg_at_3 value: 33.007999999999996 - type: ndcg_at_5 value: 37.084 - type: precision_at_1 value: 21.963 - type: precision_at_10 value: 6.493 - type: precision_at_100 value: 0.938 - type: precision_at_1000 value: 0.104 - type: precision_at_3 value: 14.155000000000001 - type: precision_at_5 value: 10.544 - type: recall_at_1 value: 21.404999999999998 - type: recall_at_10 value: 62.175000000000004 - type: recall_at_100 value: 88.786 - type: recall_at_1000 value: 97.738 - type: recall_at_3 value: 40.925 - type: recall_at_5 value: 50.722 - task: type: Classification dataset: name: MTEB MTOPDomainClassification (en) type: mteb/mtop_domain config: en split: test revision: d80d48c1eb48d3562165c59d59d0034df9fff0bf metrics: - type: accuracy value: 93.50661194710442 - type: f1 value: 93.30311193153668 - task: type: Classification dataset: name: MTEB MTOPIntentClassification (en) type: mteb/mtop_intent config: en split: test revision: ae001d0e6b1228650b7bd1c2c65fb50ad11a8aba metrics: - type: accuracy value: 73.24669402644778 - type: f1 value: 54.23122108002977 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (en) type: mteb/amazon_massive_intent config: en split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 72.61936785474109 - type: f1 value: 70.52644941025565 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (en) type: mteb/amazon_massive_scenario config: en split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 76.76529926025555 - type: f1 value: 77.26872729322514 - task: type: Clustering dataset: name: MTEB MedrxivClusteringP2P type: mteb/medrxiv-clustering-p2p config: default split: test revision: e7a26af6f3ae46b30dde8737f02c07b1505bcc73 metrics: - type: v_measure value: 33.39450293021839 - task: type: Clustering dataset: name: MTEB MedrxivClusteringS2S type: mteb/medrxiv-clustering-s2s config: default split: test revision: 35191c8c0dca72d8ff3efcd72aa802307d469663 metrics: - type: v_measure value: 31.757796879839294 - task: type: 
Reranking dataset: name: MTEB MindSmallReranking type: mteb/mind_small config: default split: test revision: 3bdac13927fdc888b903db93b2ffdbd90b295a69 metrics: - type: map value: 32.62512146657428 - type: mrr value: 33.84624322066173 - task: type: Retrieval dataset: name: MTEB NFCorpus type: nfcorpus config: default split: test revision: None metrics: - type: map_at_1 value: 6.462 - type: map_at_10 value: 14.947 - type: map_at_100 value: 19.344 - type: map_at_1000 value: 20.933 - type: map_at_3 value: 10.761999999999999 - type: map_at_5 value: 12.744 - type: mrr_at_1 value: 47.988 - type: mrr_at_10 value: 57.365 - type: mrr_at_100 value: 57.931 - type: mrr_at_1000 value: 57.96 - type: mrr_at_3 value: 54.85 - type: mrr_at_5 value: 56.569 - type: ndcg_at_1 value: 46.129999999999995 - type: ndcg_at_10 value: 38.173 - type: ndcg_at_100 value: 35.983 - type: ndcg_at_1000 value: 44.507000000000005 - type: ndcg_at_3 value: 42.495 - type: ndcg_at_5 value: 41.019 - type: precision_at_1 value: 47.678 - type: precision_at_10 value: 28.731 - type: precision_at_100 value: 9.232 - type: precision_at_1000 value: 2.202 - type: precision_at_3 value: 39.628 - type: precision_at_5 value: 35.851 - type: recall_at_1 value: 6.462 - type: recall_at_10 value: 18.968 - type: recall_at_100 value: 37.131 - type: recall_at_1000 value: 67.956 - type: recall_at_3 value: 11.905000000000001 - type: recall_at_5 value: 15.097 - task: type: Retrieval dataset: name: MTEB NQ type: nq config: default split: test revision: None metrics: - type: map_at_1 value: 30.335 - type: map_at_10 value: 46.611999999999995 - type: map_at_100 value: 47.632000000000005 - type: map_at_1000 value: 47.661 - type: map_at_3 value: 41.876999999999995 - type: map_at_5 value: 44.799 - type: mrr_at_1 value: 34.125 - type: mrr_at_10 value: 49.01 - type: mrr_at_100 value: 49.75 - type: mrr_at_1000 value: 49.768 - type: mrr_at_3 value: 45.153 - type: mrr_at_5 value: 47.589999999999996 - type: ndcg_at_1 value: 34.125 - type: ndcg_at_10 value: 54.777 - type: ndcg_at_100 value: 58.914 - type: ndcg_at_1000 value: 59.521 - type: ndcg_at_3 value: 46.015 - type: ndcg_at_5 value: 50.861000000000004 - type: precision_at_1 value: 34.125 - type: precision_at_10 value: 9.166 - type: precision_at_100 value: 1.149 - type: precision_at_1000 value: 0.121 - type: precision_at_3 value: 21.147 - type: precision_at_5 value: 15.469 - type: recall_at_1 value: 30.335 - type: recall_at_10 value: 77.194 - type: recall_at_100 value: 94.812 - type: recall_at_1000 value: 99.247 - type: recall_at_3 value: 54.681000000000004 - type: recall_at_5 value: 65.86800000000001 - task: type: Retrieval dataset: name: MTEB QuoraRetrieval type: quora config: default split: test revision: None metrics: - type: map_at_1 value: 70.62 - type: map_at_10 value: 84.536 - type: map_at_100 value: 85.167 - type: map_at_1000 value: 85.184 - type: map_at_3 value: 81.607 - type: map_at_5 value: 83.423 - type: mrr_at_1 value: 81.36 - type: mrr_at_10 value: 87.506 - type: mrr_at_100 value: 87.601 - type: mrr_at_1000 value: 87.601 - type: mrr_at_3 value: 86.503 - type: mrr_at_5 value: 87.179 - type: ndcg_at_1 value: 81.36 - type: ndcg_at_10 value: 88.319 - type: ndcg_at_100 value: 89.517 - type: ndcg_at_1000 value: 89.60900000000001 - type: ndcg_at_3 value: 85.423 - type: ndcg_at_5 value: 86.976 - type: precision_at_1 value: 81.36 - type: precision_at_10 value: 13.415 - type: precision_at_100 value: 1.529 - type: precision_at_1000 value: 0.157 - type: precision_at_3 value: 37.342999999999996 - type: 
precision_at_5 value: 24.534 - type: recall_at_1 value: 70.62 - type: recall_at_10 value: 95.57600000000001 - type: recall_at_100 value: 99.624 - type: recall_at_1000 value: 99.991 - type: recall_at_3 value: 87.22 - type: recall_at_5 value: 91.654 - task: type: Clustering dataset: name: MTEB RedditClustering type: mteb/reddit-clustering config: default split: test revision: 24640382cdbf8abc73003fb0fa6d111a705499eb metrics: - type: v_measure value: 60.826438478212744 - task: type: Clustering dataset: name: MTEB RedditClusteringP2P type: mteb/reddit-clustering-p2p config: default split: test revision: 282350215ef01743dc01b456c7f5241fa8937f16 metrics: - type: v_measure value: 64.24027467551447 - task: type: Retrieval dataset: name: MTEB SCIDOCS type: scidocs config: default split: test revision: None metrics: - type: map_at_1 value: 4.997999999999999 - type: map_at_10 value: 14.267 - type: map_at_100 value: 16.843 - type: map_at_1000 value: 17.229 - type: map_at_3 value: 9.834 - type: map_at_5 value: 11.92 - type: mrr_at_1 value: 24.7 - type: mrr_at_10 value: 37.685 - type: mrr_at_100 value: 38.704 - type: mrr_at_1000 value: 38.747 - type: mrr_at_3 value: 34.150000000000006 - type: mrr_at_5 value: 36.075 - type: ndcg_at_1 value: 24.7 - type: ndcg_at_10 value: 23.44 - type: ndcg_at_100 value: 32.617000000000004 - type: ndcg_at_1000 value: 38.628 - type: ndcg_at_3 value: 21.747 - type: ndcg_at_5 value: 19.076 - type: precision_at_1 value: 24.7 - type: precision_at_10 value: 12.47 - type: precision_at_100 value: 2.564 - type: precision_at_1000 value: 0.4 - type: precision_at_3 value: 20.767 - type: precision_at_5 value: 17.06 - type: recall_at_1 value: 4.997999999999999 - type: recall_at_10 value: 25.3 - type: recall_at_100 value: 52.048 - type: recall_at_1000 value: 81.093 - type: recall_at_3 value: 12.642999999999999 - type: recall_at_5 value: 17.312 - task: type: STS dataset: name: MTEB SICK-R type: mteb/sickr-sts config: default split: test revision: a6ea5a8cab320b040a23452cc28066d9beae2cee metrics: - type: cos_sim_pearson value: 85.44942006292234 - type: cos_sim_spearman value: 79.80930790660699 - type: euclidean_pearson value: 82.93400777494863 - type: euclidean_spearman value: 80.04664991110705 - type: manhattan_pearson value: 82.93551681854949 - type: manhattan_spearman value: 80.03156736837379 - task: type: STS dataset: name: MTEB STS12 type: mteb/sts12-sts config: default split: test revision: a0d554a64d88156834ff5ae9920b964011b16384 metrics: - type: cos_sim_pearson value: 85.63574059135726 - type: cos_sim_spearman value: 76.80552915288186 - type: euclidean_pearson value: 82.46368529820518 - type: euclidean_spearman value: 76.60338474719275 - type: manhattan_pearson value: 82.4558617035968 - type: manhattan_spearman value: 76.57936082895705 - task: type: STS dataset: name: MTEB STS13 type: mteb/sts13-sts config: default split: test revision: 7e90230a92c190f1bf69ae9002b8cea547a64cca metrics: - type: cos_sim_pearson value: 86.24116811084211 - type: cos_sim_spearman value: 88.10998662068769 - type: euclidean_pearson value: 87.04961732352689 - type: euclidean_spearman value: 88.12543945864087 - type: manhattan_pearson value: 86.9905224528854 - type: manhattan_spearman value: 88.07827944705546 - task: type: STS dataset: name: MTEB STS14 type: mteb/sts14-sts config: default split: test revision: 6031580fec1f6af667f0bd2da0a551cf4f0b2375 metrics: - type: cos_sim_pearson value: 84.74847296555048 - type: cos_sim_spearman value: 82.66200957916445 - type: euclidean_pearson value: 84.48132256004965 
- type: euclidean_spearman value: 82.67915286000596 - type: manhattan_pearson value: 84.44950477268334 - type: manhattan_spearman value: 82.63327639173352 - task: type: STS dataset: name: MTEB STS15 type: mteb/sts15-sts config: default split: test revision: ae752c7c21bf194d8b67fd573edf7ae58183cbe3 metrics: - type: cos_sim_pearson value: 87.23056258027053 - type: cos_sim_spearman value: 88.92791680286955 - type: euclidean_pearson value: 88.13819235461933 - type: euclidean_spearman value: 88.87294661361716 - type: manhattan_pearson value: 88.14212133687899 - type: manhattan_spearman value: 88.88551854529777 - task: type: STS dataset: name: MTEB STS16 type: mteb/sts16-sts config: default split: test revision: 4d8694f8f0e0100860b497b999b3dbed754a0513 metrics: - type: cos_sim_pearson value: 82.64179522732887 - type: cos_sim_spearman value: 84.25028809903114 - type: euclidean_pearson value: 83.40175015236979 - type: euclidean_spearman value: 84.23369296429406 - type: manhattan_pearson value: 83.43768174261321 - type: manhattan_spearman value: 84.27855229214734 - task: type: STS dataset: name: MTEB STS17 (en-en) type: mteb/sts17-crosslingual-sts config: en-en split: test revision: af5e6fb845001ecf41f4c1e033ce921939a2a68d metrics: - type: cos_sim_pearson value: 88.20378955494732 - type: cos_sim_spearman value: 88.46863559173111 - type: euclidean_pearson value: 88.8249295811663 - type: euclidean_spearman value: 88.6312737724905 - type: manhattan_pearson value: 88.87744466378827 - type: manhattan_spearman value: 88.82908423767314 - task: type: STS dataset: name: MTEB STS22 (en) type: mteb/sts22-crosslingual-sts config: en split: test revision: 6d1ba47164174a496b7fa5d3569dae26a6813b80 metrics: - type: cos_sim_pearson value: 69.91342028796086 - type: cos_sim_spearman value: 69.71495021867864 - type: euclidean_pearson value: 70.65334330405646 - type: euclidean_spearman value: 69.4321253472211 - type: manhattan_pearson value: 70.59743494727465 - type: manhattan_spearman value: 69.11695509297482 - task: type: STS dataset: name: MTEB STSBenchmark type: mteb/stsbenchmark-sts config: default split: test revision: b0fddb56ed78048fa8b90373c8a3cfc37b684831 metrics: - type: cos_sim_pearson value: 85.42451709766952 - type: cos_sim_spearman value: 86.07166710670508 - type: euclidean_pearson value: 86.12711421258899 - type: euclidean_spearman value: 86.05232086925126 - type: manhattan_pearson value: 86.15591089932126 - type: manhattan_spearman value: 86.0890128623439 - task: type: Reranking dataset: name: MTEB SciDocsRR type: mteb/scidocs-reranking config: default split: test revision: d3c5e1fc0b855ab6097bf1cda04dd73947d7caab metrics: - type: map value: 87.1976344717285 - type: mrr value: 96.3703145075694 - task: type: Retrieval dataset: name: MTEB SciFact type: scifact config: default split: test revision: None metrics: - type: map_at_1 value: 59.511 - type: map_at_10 value: 69.724 - type: map_at_100 value: 70.208 - type: map_at_1000 value: 70.22800000000001 - type: map_at_3 value: 66.986 - type: map_at_5 value: 68.529 - type: mrr_at_1 value: 62.333000000000006 - type: mrr_at_10 value: 70.55 - type: mrr_at_100 value: 70.985 - type: mrr_at_1000 value: 71.004 - type: mrr_at_3 value: 68.611 - type: mrr_at_5 value: 69.728 - type: ndcg_at_1 value: 62.333000000000006 - type: ndcg_at_10 value: 74.265 - type: ndcg_at_100 value: 76.361 - type: ndcg_at_1000 value: 76.82900000000001 - type: ndcg_at_3 value: 69.772 - type: ndcg_at_5 value: 71.94800000000001 - type: precision_at_1 value: 62.333000000000006 - type: 
precision_at_10 value: 9.9 - type: precision_at_100 value: 1.093 - type: precision_at_1000 value: 0.11299999999999999 - type: precision_at_3 value: 27.444000000000003 - type: precision_at_5 value: 18 - type: recall_at_1 value: 59.511 - type: recall_at_10 value: 87.156 - type: recall_at_100 value: 96.5 - type: recall_at_1000 value: 100 - type: recall_at_3 value: 75.2 - type: recall_at_5 value: 80.661 - task: type: PairClassification dataset: name: MTEB SprintDuplicateQuestions type: mteb/sprintduplicatequestions-pairclassification config: default split: test revision: d66bd1f72af766a5cc4b0ca5e00c162f89e8cc46 metrics: - type: cos_sim_accuracy value: 99.81683168316832 - type: cos_sim_ap value: 95.74716566563774 - type: cos_sim_f1 value: 90.64238745574103 - type: cos_sim_precision value: 91.7093142272262 - type: cos_sim_recall value: 89.60000000000001 - type: dot_accuracy value: 99.69405940594059 - type: dot_ap value: 91.09013507754594 - type: dot_f1 value: 84.54227113556779 - type: dot_precision value: 84.58458458458459 - type: dot_recall value: 84.5 - type: euclidean_accuracy value: 99.81782178217821 - type: euclidean_ap value: 95.6324301072609 - type: euclidean_f1 value: 90.58341862845445 - type: euclidean_precision value: 92.76729559748428 - type: euclidean_recall value: 88.5 - type: manhattan_accuracy value: 99.81980198019802 - type: manhattan_ap value: 95.68510494437183 - type: manhattan_f1 value: 90.58945191313342 - type: manhattan_precision value: 93.79014989293361 - type: manhattan_recall value: 87.6 - type: max_accuracy value: 99.81980198019802 - type: max_ap value: 95.74716566563774 - type: max_f1 value: 90.64238745574103 - task: type: Clustering dataset: name: MTEB StackExchangeClustering type: mteb/stackexchange-clustering config: default split: test revision: 6cbc1f7b2bc0622f2e39d2c77fa502909748c259 metrics: - type: v_measure value: 67.63761899427078 - task: type: Clustering dataset: name: MTEB StackExchangeClusteringP2P type: mteb/stackexchange-clustering-p2p config: default split: test revision: 815ca46b2622cec33ccafc3735d572c266efdb44 metrics: - type: v_measure value: 36.572473369697235 - task: type: Reranking dataset: name: MTEB StackOverflowDupQuestions type: mteb/stackoverflowdupquestions-reranking config: default split: test revision: e185fbe320c72810689fc5848eb6114e1ef5ec69 metrics: - type: map value: 53.63000245208579 - type: mrr value: 54.504193722943725 - task: type: Summarization dataset: name: MTEB SummEval type: mteb/summeval config: default split: test revision: cda12ad7615edc362dbf25a00fdd61d3b1eaf93c metrics: - type: cos_sim_pearson value: 30.300791939416545 - type: cos_sim_spearman value: 31.662904057924123 - type: dot_pearson value: 26.21198530758316 - type: dot_spearman value: 27.006921548904263 - task: type: Retrieval dataset: name: MTEB TRECCOVID type: trec-covid config: default split: test revision: None metrics: - type: map_at_1 value: 0.197 - type: map_at_10 value: 1.752 - type: map_at_100 value: 10.795 - type: map_at_1000 value: 27.18 - type: map_at_3 value: 0.5890000000000001 - type: map_at_5 value: 0.938 - type: mrr_at_1 value: 74 - type: mrr_at_10 value: 85.833 - type: mrr_at_100 value: 85.833 - type: mrr_at_1000 value: 85.833 - type: mrr_at_3 value: 85.333 - type: mrr_at_5 value: 85.833 - type: ndcg_at_1 value: 69 - type: ndcg_at_10 value: 70.22 - type: ndcg_at_100 value: 55.785 - type: ndcg_at_1000 value: 52.93600000000001 - type: ndcg_at_3 value: 72.084 - type: ndcg_at_5 value: 71.184 - type: precision_at_1 value: 74 - type: precision_at_10 value: 
75.2 - type: precision_at_100 value: 57.3 - type: precision_at_1000 value: 23.302 - type: precision_at_3 value: 77.333 - type: precision_at_5 value: 75.6 - type: recall_at_1 value: 0.197 - type: recall_at_10 value: 2.019 - type: recall_at_100 value: 14.257 - type: recall_at_1000 value: 50.922 - type: recall_at_3 value: 0.642 - type: recall_at_5 value: 1.043 - task: type: Retrieval dataset: name: MTEB Touche2020 type: webis-touche2020 config: default split: test revision: None metrics: - type: map_at_1 value: 2.803 - type: map_at_10 value: 10.407 - type: map_at_100 value: 16.948 - type: map_at_1000 value: 18.424 - type: map_at_3 value: 5.405 - type: map_at_5 value: 6.908 - type: mrr_at_1 value: 36.735 - type: mrr_at_10 value: 50.221000000000004 - type: mrr_at_100 value: 51.388 - type: mrr_at_1000 value: 51.402 - type: mrr_at_3 value: 47.278999999999996 - type: mrr_at_5 value: 49.626 - type: ndcg_at_1 value: 34.694 - type: ndcg_at_10 value: 25.507 - type: ndcg_at_100 value: 38.296 - type: ndcg_at_1000 value: 49.492000000000004 - type: ndcg_at_3 value: 29.006999999999998 - type: ndcg_at_5 value: 25.979000000000003 - type: precision_at_1 value: 36.735 - type: precision_at_10 value: 22.041 - type: precision_at_100 value: 8.02 - type: precision_at_1000 value: 1.567 - type: precision_at_3 value: 28.571 - type: precision_at_5 value: 24.490000000000002 - type: recall_at_1 value: 2.803 - type: recall_at_10 value: 16.378 - type: recall_at_100 value: 50.489 - type: recall_at_1000 value: 85.013 - type: recall_at_3 value: 6.505 - type: recall_at_5 value: 9.243 - task: type: Classification dataset: name: MTEB ToxicConversationsClassification type: mteb/toxic_conversations_50k config: default split: test revision: d7c0de2777da35d6aae2200a62c6e0e5af397c4c metrics: - type: accuracy value: 70.55579999999999 - type: ap value: 14.206982753316227 - type: f1 value: 54.372142814964285 - task: type: Classification dataset: name: MTEB TweetSentimentExtractionClassification type: mteb/tweet_sentiment_extraction config: default split: test revision: d604517c81ca91fe16a244d1248fc021f9ecee7a metrics: - type: accuracy value: 56.57611771363893 - type: f1 value: 56.924172639063144 - task: type: Clustering dataset: name: MTEB TwentyNewsgroupsClustering type: mteb/twentynewsgroups-clustering config: default split: test revision: 6125ec4e24fa026cec8a478383ee943acfbd5449 metrics: - type: v_measure value: 52.82304915719759 - task: type: PairClassification dataset: name: MTEB TwitterSemEval2015 type: mteb/twittersemeval2015-pairclassification config: default split: test revision: 70970daeab8776df92f5ea462b6173c0b46fd2d1 metrics: - type: cos_sim_accuracy value: 85.92716218632653 - type: cos_sim_ap value: 73.73359122546046 - type: cos_sim_f1 value: 68.42559487116262 - type: cos_sim_precision value: 64.22124508215691 - type: cos_sim_recall value: 73.21899736147758 - type: dot_accuracy value: 80.38981939560112 - type: dot_ap value: 54.61060862444974 - type: dot_f1 value: 53.45710627400769 - type: dot_precision value: 44.87638839125761 - type: dot_recall value: 66.09498680738787 - type: euclidean_accuracy value: 86.02849138701794 - type: euclidean_ap value: 73.95673761922404 - type: euclidean_f1 value: 68.6783042394015 - type: euclidean_precision value: 65.1063829787234 - type: euclidean_recall value: 72.66490765171504 - type: manhattan_accuracy value: 85.9808070572808 - type: manhattan_ap value: 73.9050720058029 - type: manhattan_f1 value: 68.57560618983794 - type: manhattan_precision value: 63.70839936608558 - type: 
manhattan_recall value: 74.24802110817942 - type: max_accuracy value: 86.02849138701794 - type: max_ap value: 73.95673761922404 - type: max_f1 value: 68.6783042394015 - task: type: PairClassification dataset: name: MTEB TwitterURLCorpus type: mteb/twitterurlcorpus-pairclassification config: default split: test revision: 8b6510b0b1fa4e4c4f879467980e9be563ec1cdf metrics: - type: cos_sim_accuracy value: 88.72783017037295 - type: cos_sim_ap value: 85.52705223340233 - type: cos_sim_f1 value: 77.91659078492079 - type: cos_sim_precision value: 73.93378032764221 - type: cos_sim_recall value: 82.35294117647058 - type: dot_accuracy value: 85.41739434159972 - type: dot_ap value: 77.17734818118443 - type: dot_f1 value: 71.63473589973144 - type: dot_precision value: 66.96123719622415 - type: dot_recall value: 77.00954727440714 - type: euclidean_accuracy value: 88.68125897465751 - type: euclidean_ap value: 85.47712213906692 - type: euclidean_f1 value: 77.81419950830664 - type: euclidean_precision value: 75.37162649733006 - type: euclidean_recall value: 80.42038805050817 - type: manhattan_accuracy value: 88.67349710870494 - type: manhattan_ap value: 85.46506475241955 - type: manhattan_f1 value: 77.87259084890393 - type: manhattan_precision value: 74.54929577464789 - type: manhattan_recall value: 81.50600554357868 - type: max_accuracy value: 88.72783017037295 - type: max_ap value: 85.52705223340233 - type: max_f1 value: 77.91659078492079 ---

# gte-large

General Text Embeddings (GTE) model. [Towards General Text Embeddings with Multi-stage Contrastive Learning](https://arxiv.org/abs/2308.03281)

The GTE models are trained by Alibaba DAMO Academy. They are mainly based on the BERT framework and currently offer three different sizes of models, including [GTE-large](https://huggingface.co/thenlper/gte-large), [GTE-base](https://huggingface.co/thenlper/gte-base), and [GTE-small](https://huggingface.co/thenlper/gte-small). The GTE models are trained on a large-scale corpus of relevance text pairs, covering a wide range of domains and scenarios. This enables the GTE models to be applied to various downstream tasks of text embeddings, including **information retrieval**, **semantic textual similarity**, **text reranking**, etc.

## Metrics

We compared the performance of the GTE models with other popular text embedding models on the MTEB benchmark. For more detailed comparison results, please refer to the [MTEB leaderboard](https://huggingface.co/spaces/mteb/leaderboard).
| Model Name | Model Size (GB) | Dimension | Sequence Length | Average (56) | Clustering (11) | Pair Classification (3) | Reranking (4) | Retrieval (15) | STS (10) | Summarization (1) | Classification (12) |
|:----:|:---:|:---:|:---:|:---:|:---:|:---:|:---:|:---:|:---:|:---:|:---:|
| [**gte-large**](https://huggingface.co/thenlper/gte-large) | 0.67 | 1024 | 512 | **63.13** | 46.84 | 85.00 | 59.13 | 52.22 | 83.35 | 31.66 | 73.33 |
| [**gte-base**](https://huggingface.co/thenlper/gte-base) | 0.22 | 768 | 512 | **62.39** | 46.2 | 84.57 | 58.61 | 51.14 | 82.3 | 31.17 | 73.01 |
| [e5-large-v2](https://huggingface.co/intfloat/e5-large-v2) | 1.34 | 1024 | 512 | 62.25 | 44.49 | 86.03 | 56.61 | 50.56 | 82.05 | 30.19 | 75.24 |
| [e5-base-v2](https://huggingface.co/intfloat/e5-base-v2) | 0.44 | 768 | 512 | 61.5 | 43.80 | 85.73 | 55.91 | 50.29 | 81.05 | 30.28 | 73.84 |
| [**gte-small**](https://huggingface.co/thenlper/gte-small) | 0.07 | 384 | 512 | **61.36** | 44.89 | 83.54 | 57.7 | 49.46 | 82.07 | 30.42 | 72.31 |
| [text-embedding-ada-002](https://platform.openai.com/docs/guides/embeddings) | - | 1536 | 8192 | 60.99 | 45.9 | 84.89 | 56.32 | 49.25 | 80.97 | 30.8 | 70.93 |
| [e5-small-v2](https://huggingface.co/intfloat/e5-small-v2) | 0.13 | 384 | 512 | 59.93 | 39.92 | 84.67 | 54.32 | 49.04 | 80.39 | 31.16 | 72.94 |
| [sentence-t5-xxl](https://huggingface.co/sentence-transformers/sentence-t5-xxl) | 9.73 | 768 | 512 | 59.51 | 43.72 | 85.06 | 56.42 | 42.24 | 82.63 | 30.08 | 73.42 |
| [all-mpnet-base-v2](https://huggingface.co/sentence-transformers/all-mpnet-base-v2) | 0.44 | 768 | 514 | 57.78 | 43.69 | 83.04 | 59.36 | 43.81 | 80.28 | 27.49 | 65.07 |
| [sgpt-bloom-7b1-msmarco](https://huggingface.co/bigscience/sgpt-bloom-7b1-msmarco) | 28.27 | 4096 | 2048 | 57.59 | 38.93 | 81.9 | 55.65 | 48.22 | 77.74 | 33.6 | 66.19 |
| [all-MiniLM-L12-v2](https://huggingface.co/sentence-transformers/all-MiniLM-L12-v2) | 0.13 | 384 | 512 | 56.53 | 41.81 | 82.41 | 58.44 | 42.69 | 79.8 | 27.9 | 63.21 |
| [all-MiniLM-L6-v2](https://huggingface.co/sentence-transformers/all-MiniLM-L6-v2) | 0.09 | 384 | 512 | 56.26 | 42.35 | 82.37 | 58.04 | 41.95 | 78.9 | 30.81 | 63.05 |
| [contriever-base-msmarco](https://huggingface.co/nthakur/contriever-base-msmarco) | 0.44 | 768 | 512 | 56.00 | 41.1 | 82.54 | 53.14 | 41.88 | 76.51 | 30.36 | 66.68 |
| [sentence-t5-base](https://huggingface.co/sentence-transformers/sentence-t5-base) | 0.22 | 768 | 512 | 55.27 | 40.21 | 85.18 | 53.09 | 33.63 | 81.14 | 31.39 | 69.81 |

## Usage

Code example with `transformers`:

```python
import torch.nn.functional as F
from torch import Tensor
from transformers import AutoTokenizer, AutoModel


def average_pool(last_hidden_states: Tensor, attention_mask: Tensor) -> Tensor:
    # Mean pooling over token embeddings, ignoring padding positions.
    last_hidden = last_hidden_states.masked_fill(~attention_mask[..., None].bool(), 0.0)
    return last_hidden.sum(dim=1) / attention_mask.sum(dim=1)[..., None]


input_texts = [
    "what is the capital of China?",
    "how to implement quick sort in python?",
    "Beijing",
    "sorting algorithms"
]

tokenizer = AutoTokenizer.from_pretrained("thenlper/gte-large")
model = AutoModel.from_pretrained("thenlper/gte-large")

# Tokenize the input texts
batch_dict = tokenizer(input_texts, max_length=512, padding=True, truncation=True, return_tensors='pt')

outputs = model(**batch_dict)
embeddings = average_pool(outputs.last_hidden_state, batch_dict['attention_mask'])

# (Optionally) normalize embeddings
embeddings = F.normalize(embeddings, p=2, dim=1)

# Similarity of the first text (the query) against the remaining texts.
scores = (embeddings[:1] @ embeddings[1:].T) * 100
print(scores.tolist())
```
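The snippet above scores one query against three candidate texts in a single batch. As a minimal sketch of how the same embeddings could drive a small retrieval setup, the helper below ranks a list of passages for a query by cosine similarity. The `embed` and `rank_passages` helpers and the toy corpus are illustrative additions, not part of the original card, and they assume the `tokenizer`, `model`, and `average_pool` defined in the example above.

```python
import torch
import torch.nn.functional as F


def embed(texts):
    # Re-uses tokenizer, model and average_pool from the example above.
    batch = tokenizer(texts, max_length=512, padding=True, truncation=True, return_tensors='pt')
    with torch.no_grad():
        out = model(**batch)
    return F.normalize(average_pool(out.last_hidden_state, batch['attention_mask']), p=2, dim=1)


def rank_passages(query, passages, top_k=3):
    # With L2-normalized embeddings, cosine similarity reduces to a dot product.
    scores = (embed([query]) @ embed(passages).T).squeeze(0)
    order = scores.argsort(descending=True)[:top_k].tolist()
    return [(passages[i], round(scores[i].item(), 4)) for i in order]


corpus = [
    "Beijing is the capital of China.",
    "Quick sort is a divide-and-conquer sorting algorithm.",
    "The Great Wall is a series of fortifications in northern China.",
]
print(rank_passages("what is the capital of China?", corpus, top_k=2))
```

For larger corpora, the passage embeddings would typically be pre-computed once and stored in a vector index rather than re-encoded per query.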
Use with sentence-transformers:

```python
from sentence_transformers import SentenceTransformer
from sentence_transformers.util import cos_sim

sentences = ['That is a happy person', 'That is a very happy person']

model = SentenceTransformer('thenlper/gte-large')
embeddings = model.encode(sentences)
print(cos_sim(embeddings[0], embeddings[1]))
```

### Limitation

This model caters exclusively to English texts, and any lengthy text will be truncated to a maximum of 512 tokens. One possible way to embed longer inputs by chunking is sketched after the citation.

### Citation

If you find our paper or models helpful, please consider citing them as follows:

```
@article{li2023towards,
  title={Towards general text embeddings with multi-stage contrastive learning},
  author={Li, Zehan and Zhang, Xin and Zhang, Yanzhao and Long, Dingkun and Xie, Pengjun and Zhang, Meishan},
  journal={arXiv preprint arXiv:2308.03281},
  year={2023}
}
```
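As noted in the limitation above, inputs longer than 512 tokens are truncated. One common workaround, shown here only as a hedged sketch and not part of the original card, is to split a long document into overlapping token windows, embed each window, and mean-pool the window embeddings. The `embed_long_text` helper below is a hypothetical name and assumes the `tokenizer`, `model`, and `average_pool` from the `transformers` usage example above.

```python
import torch
import torch.nn.functional as F


def embed_long_text(text, max_tokens=512, stride=256):
    # Tokenize once without special tokens, then slide a window over the token ids.
    ids = tokenizer(text, add_special_tokens=False)["input_ids"]
    windows = [ids[i:i + max_tokens - 2] for i in range(0, max(len(ids), 1), stride)]

    window_embeddings = []
    for window in windows:
        # Decode and re-encode so special tokens and the attention mask are handled for us.
        batch = tokenizer(tokenizer.decode(window), max_length=max_tokens,
                          truncation=True, return_tensors="pt")
        with torch.no_grad():
            out = model(**batch)
        window_embeddings.append(average_pool(out.last_hidden_state, batch["attention_mask"]))

    # Mean-pool the per-window embeddings into a single vector and re-normalize.
    pooled = torch.cat(window_embeddings, dim=0).mean(dim=0, keepdim=True)
    return F.normalize(pooled, p=2, dim=1)
```

Whether mean-pooling chunk embeddings is adequate depends on the downstream task; for retrieval it is often preferable to index each chunk separately.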
[ "BIOSSES", "SCIFACT" ]
thenlper/gte-base
thenlper
sentence-similarity
[ "sentence-transformers", "pytorch", "onnx", "safetensors", "openvino", "bert", "mteb", "sentence-similarity", "Sentence Transformers", "en", "arxiv:2308.03281", "license:mit", "model-index", "autotrain_compatible", "text-embeddings-inference", "endpoints_compatible", "region:us" ]
"2023-07-27T03:21:20Z"
2024-11-16T08:09:34+00:00
424,333
113
--- language: - en license: mit tags: - mteb - sentence-similarity - sentence-transformers - Sentence Transformers model-index: - name: gte-base results: - task: type: Classification dataset: name: MTEB AmazonCounterfactualClassification (en) type: mteb/amazon_counterfactual config: en split: test revision: e8379541af4e31359cca9fbcf4b00f2671dba205 metrics: - type: accuracy value: 74.17910447761193 - type: ap value: 36.827146398068926 - type: f1 value: 68.11292888046363 - task: type: Classification dataset: name: MTEB AmazonPolarityClassification type: mteb/amazon_polarity config: default split: test revision: e2d317d38cd51312af73b3d32a06d1a08b442046 metrics: - type: accuracy value: 91.77345000000001 - type: ap value: 88.33530426691347 - type: f1 value: 91.76549906404642 - task: type: Classification dataset: name: MTEB AmazonReviewsClassification (en) type: mteb/amazon_reviews_multi config: en split: test revision: 1399c76144fd37290681b995c656ef9b2e06e26d metrics: - type: accuracy value: 48.964 - type: f1 value: 48.22995586184998 - task: type: Retrieval dataset: name: MTEB ArguAna type: arguana config: default split: test revision: None metrics: - type: map_at_1 value: 32.147999999999996 - type: map_at_10 value: 48.253 - type: map_at_100 value: 49.038 - type: map_at_1000 value: 49.042 - type: map_at_3 value: 43.433 - type: map_at_5 value: 46.182 - type: mrr_at_1 value: 32.717 - type: mrr_at_10 value: 48.467 - type: mrr_at_100 value: 49.252 - type: mrr_at_1000 value: 49.254999999999995 - type: mrr_at_3 value: 43.599 - type: mrr_at_5 value: 46.408 - type: ndcg_at_1 value: 32.147999999999996 - type: ndcg_at_10 value: 57.12199999999999 - type: ndcg_at_100 value: 60.316 - type: ndcg_at_1000 value: 60.402 - type: ndcg_at_3 value: 47.178 - type: ndcg_at_5 value: 52.146 - type: precision_at_1 value: 32.147999999999996 - type: precision_at_10 value: 8.542 - type: precision_at_100 value: 0.9900000000000001 - type: precision_at_1000 value: 0.1 - type: precision_at_3 value: 19.346 - type: precision_at_5 value: 14.026 - type: recall_at_1 value: 32.147999999999996 - type: recall_at_10 value: 85.42 - type: recall_at_100 value: 99.004 - type: recall_at_1000 value: 99.644 - type: recall_at_3 value: 58.037000000000006 - type: recall_at_5 value: 70.128 - task: type: Clustering dataset: name: MTEB ArxivClusteringP2P type: mteb/arxiv-clustering-p2p config: default split: test revision: a122ad7f3f0291bf49cc6f4d32aa80929df69d5d metrics: - type: v_measure value: 48.59706013699614 - task: type: Clustering dataset: name: MTEB ArxivClusteringS2S type: mteb/arxiv-clustering-s2s config: default split: test revision: f910caf1a6075f7329cdf8c1a6135696f37dbd53 metrics: - type: v_measure value: 43.01463593002057 - task: type: Reranking dataset: name: MTEB AskUbuntuDupQuestions type: mteb/askubuntudupquestions-reranking config: default split: test revision: 2000358ca161889fa9c082cb41daa8dcfb161a54 metrics: - type: map value: 61.80250355752458 - type: mrr value: 74.79455216989844 - task: type: STS dataset: name: MTEB BIOSSES type: mteb/biosses-sts config: default split: test revision: d3fb88f8f02e40887cd149695127462bbcf29b4a metrics: - type: cos_sim_pearson value: 89.87448576082345 - type: cos_sim_spearman value: 87.64235843637468 - type: euclidean_pearson value: 88.4901825511062 - type: euclidean_spearman value: 87.74537283182033 - type: manhattan_pearson value: 88.39040638362911 - type: manhattan_spearman value: 87.62669542888003 - task: type: Classification dataset: name: MTEB Banking77Classification type: mteb/banking77 
config: default split: test revision: 0fd18e25b25c072e09e0d92ab615fda904d66300 metrics: - type: accuracy value: 85.06818181818183 - type: f1 value: 85.02524460098233 - task: type: Clustering dataset: name: MTEB BiorxivClusteringP2P type: mteb/biorxiv-clustering-p2p config: default split: test revision: 65b79d1d13f80053f67aca9498d9402c2d9f1f40 metrics: - type: v_measure value: 38.20471092679967 - task: type: Clustering dataset: name: MTEB BiorxivClusteringS2S type: mteb/biorxiv-clustering-s2s config: default split: test revision: 258694dd0231531bc1fd9de6ceb52a0853c6d908 metrics: - type: v_measure value: 36.58967592147641 - task: type: Retrieval dataset: name: MTEB CQADupstackAndroidRetrieval type: BeIR/cqadupstack config: default split: test revision: None metrics: - type: map_at_1 value: 32.411 - type: map_at_10 value: 45.162 - type: map_at_100 value: 46.717 - type: map_at_1000 value: 46.836 - type: map_at_3 value: 41.428 - type: map_at_5 value: 43.54 - type: mrr_at_1 value: 39.914 - type: mrr_at_10 value: 51.534 - type: mrr_at_100 value: 52.185 - type: mrr_at_1000 value: 52.22 - type: mrr_at_3 value: 49.046 - type: mrr_at_5 value: 50.548 - type: ndcg_at_1 value: 39.914 - type: ndcg_at_10 value: 52.235 - type: ndcg_at_100 value: 57.4 - type: ndcg_at_1000 value: 58.982 - type: ndcg_at_3 value: 47.332 - type: ndcg_at_5 value: 49.62 - type: precision_at_1 value: 39.914 - type: precision_at_10 value: 10.258000000000001 - type: precision_at_100 value: 1.6219999999999999 - type: precision_at_1000 value: 0.20500000000000002 - type: precision_at_3 value: 23.462 - type: precision_at_5 value: 16.71 - type: recall_at_1 value: 32.411 - type: recall_at_10 value: 65.408 - type: recall_at_100 value: 87.248 - type: recall_at_1000 value: 96.951 - type: recall_at_3 value: 50.349999999999994 - type: recall_at_5 value: 57.431 - type: map_at_1 value: 31.911 - type: map_at_10 value: 42.608000000000004 - type: map_at_100 value: 43.948 - type: map_at_1000 value: 44.089 - type: map_at_3 value: 39.652 - type: map_at_5 value: 41.236 - type: mrr_at_1 value: 40.064 - type: mrr_at_10 value: 48.916 - type: mrr_at_100 value: 49.539 - type: mrr_at_1000 value: 49.583 - type: mrr_at_3 value: 46.741 - type: mrr_at_5 value: 48.037 - type: ndcg_at_1 value: 40.064 - type: ndcg_at_10 value: 48.442 - type: ndcg_at_100 value: 52.798 - type: ndcg_at_1000 value: 54.871 - type: ndcg_at_3 value: 44.528 - type: ndcg_at_5 value: 46.211 - type: precision_at_1 value: 40.064 - type: precision_at_10 value: 9.178 - type: precision_at_100 value: 1.452 - type: precision_at_1000 value: 0.193 - type: precision_at_3 value: 21.614 - type: precision_at_5 value: 15.185 - type: recall_at_1 value: 31.911 - type: recall_at_10 value: 58.155 - type: recall_at_100 value: 76.46300000000001 - type: recall_at_1000 value: 89.622 - type: recall_at_3 value: 46.195 - type: recall_at_5 value: 51.288999999999994 - type: map_at_1 value: 40.597 - type: map_at_10 value: 54.290000000000006 - type: map_at_100 value: 55.340999999999994 - type: map_at_1000 value: 55.388999999999996 - type: map_at_3 value: 50.931000000000004 - type: map_at_5 value: 52.839999999999996 - type: mrr_at_1 value: 46.646 - type: mrr_at_10 value: 57.524 - type: mrr_at_100 value: 58.225 - type: mrr_at_1000 value: 58.245999999999995 - type: mrr_at_3 value: 55.235 - type: mrr_at_5 value: 56.589 - type: ndcg_at_1 value: 46.646 - type: ndcg_at_10 value: 60.324999999999996 - type: ndcg_at_100 value: 64.30900000000001 - type: ndcg_at_1000 value: 65.19 - type: ndcg_at_3 value: 54.983000000000004 - type: 
ndcg_at_5 value: 57.621 - type: precision_at_1 value: 46.646 - type: precision_at_10 value: 9.774 - type: precision_at_100 value: 1.265 - type: precision_at_1000 value: 0.13799999999999998 - type: precision_at_3 value: 24.911 - type: precision_at_5 value: 16.977999999999998 - type: recall_at_1 value: 40.597 - type: recall_at_10 value: 74.773 - type: recall_at_100 value: 91.61200000000001 - type: recall_at_1000 value: 97.726 - type: recall_at_3 value: 60.458 - type: recall_at_5 value: 66.956 - type: map_at_1 value: 27.122 - type: map_at_10 value: 36.711 - type: map_at_100 value: 37.775 - type: map_at_1000 value: 37.842999999999996 - type: map_at_3 value: 33.693 - type: map_at_5 value: 35.607 - type: mrr_at_1 value: 29.153000000000002 - type: mrr_at_10 value: 38.873999999999995 - type: mrr_at_100 value: 39.739000000000004 - type: mrr_at_1000 value: 39.794000000000004 - type: mrr_at_3 value: 36.102000000000004 - type: mrr_at_5 value: 37.876 - type: ndcg_at_1 value: 29.153000000000002 - type: ndcg_at_10 value: 42.048 - type: ndcg_at_100 value: 47.144999999999996 - type: ndcg_at_1000 value: 48.901 - type: ndcg_at_3 value: 36.402 - type: ndcg_at_5 value: 39.562999999999995 - type: precision_at_1 value: 29.153000000000002 - type: precision_at_10 value: 6.4750000000000005 - type: precision_at_100 value: 0.951 - type: precision_at_1000 value: 0.11299999999999999 - type: precision_at_3 value: 15.479999999999999 - type: precision_at_5 value: 11.028 - type: recall_at_1 value: 27.122 - type: recall_at_10 value: 56.279999999999994 - type: recall_at_100 value: 79.597 - type: recall_at_1000 value: 92.804 - type: recall_at_3 value: 41.437000000000005 - type: recall_at_5 value: 49.019 - type: map_at_1 value: 17.757 - type: map_at_10 value: 26.739 - type: map_at_100 value: 28.015 - type: map_at_1000 value: 28.127999999999997 - type: map_at_3 value: 23.986 - type: map_at_5 value: 25.514 - type: mrr_at_1 value: 22.015 - type: mrr_at_10 value: 31.325999999999997 - type: mrr_at_100 value: 32.368 - type: mrr_at_1000 value: 32.426 - type: mrr_at_3 value: 28.897000000000002 - type: mrr_at_5 value: 30.147000000000002 - type: ndcg_at_1 value: 22.015 - type: ndcg_at_10 value: 32.225 - type: ndcg_at_100 value: 38.405 - type: ndcg_at_1000 value: 40.932 - type: ndcg_at_3 value: 27.403 - type: ndcg_at_5 value: 29.587000000000003 - type: precision_at_1 value: 22.015 - type: precision_at_10 value: 5.9830000000000005 - type: precision_at_100 value: 1.051 - type: precision_at_1000 value: 0.13899999999999998 - type: precision_at_3 value: 13.391 - type: precision_at_5 value: 9.602 - type: recall_at_1 value: 17.757 - type: recall_at_10 value: 44.467 - type: recall_at_100 value: 71.53699999999999 - type: recall_at_1000 value: 89.281 - type: recall_at_3 value: 31.095 - type: recall_at_5 value: 36.818 - type: map_at_1 value: 30.354 - type: map_at_10 value: 42.134 - type: map_at_100 value: 43.429 - type: map_at_1000 value: 43.532 - type: map_at_3 value: 38.491 - type: map_at_5 value: 40.736 - type: mrr_at_1 value: 37.247 - type: mrr_at_10 value: 47.775 - type: mrr_at_100 value: 48.522999999999996 - type: mrr_at_1000 value: 48.567 - type: mrr_at_3 value: 45.059 - type: mrr_at_5 value: 46.811 - type: ndcg_at_1 value: 37.247 - type: ndcg_at_10 value: 48.609 - type: ndcg_at_100 value: 53.782 - type: ndcg_at_1000 value: 55.666000000000004 - type: ndcg_at_3 value: 42.866 - type: ndcg_at_5 value: 46.001 - type: precision_at_1 value: 37.247 - type: precision_at_10 value: 8.892999999999999 - type: precision_at_100 value: 1.341 - type: 
precision_at_1000 value: 0.168 - type: precision_at_3 value: 20.5 - type: precision_at_5 value: 14.976 - type: recall_at_1 value: 30.354 - type: recall_at_10 value: 62.273 - type: recall_at_100 value: 83.65599999999999 - type: recall_at_1000 value: 95.82000000000001 - type: recall_at_3 value: 46.464 - type: recall_at_5 value: 54.225 - type: map_at_1 value: 26.949 - type: map_at_10 value: 37.230000000000004 - type: map_at_100 value: 38.644 - type: map_at_1000 value: 38.751999999999995 - type: map_at_3 value: 33.816 - type: map_at_5 value: 35.817 - type: mrr_at_1 value: 33.446999999999996 - type: mrr_at_10 value: 42.970000000000006 - type: mrr_at_100 value: 43.873 - type: mrr_at_1000 value: 43.922 - type: mrr_at_3 value: 40.467999999999996 - type: mrr_at_5 value: 41.861 - type: ndcg_at_1 value: 33.446999999999996 - type: ndcg_at_10 value: 43.403000000000006 - type: ndcg_at_100 value: 49.247 - type: ndcg_at_1000 value: 51.361999999999995 - type: ndcg_at_3 value: 38.155 - type: ndcg_at_5 value: 40.643 - type: precision_at_1 value: 33.446999999999996 - type: precision_at_10 value: 8.128 - type: precision_at_100 value: 1.274 - type: precision_at_1000 value: 0.163 - type: precision_at_3 value: 18.493000000000002 - type: precision_at_5 value: 13.333 - type: recall_at_1 value: 26.949 - type: recall_at_10 value: 56.006 - type: recall_at_100 value: 80.99199999999999 - type: recall_at_1000 value: 95.074 - type: recall_at_3 value: 40.809 - type: recall_at_5 value: 47.57 - type: map_at_1 value: 27.243583333333333 - type: map_at_10 value: 37.193250000000006 - type: map_at_100 value: 38.44833333333334 - type: map_at_1000 value: 38.56083333333333 - type: map_at_3 value: 34.06633333333333 - type: map_at_5 value: 35.87858333333334 - type: mrr_at_1 value: 32.291583333333335 - type: mrr_at_10 value: 41.482749999999996 - type: mrr_at_100 value: 42.33583333333333 - type: mrr_at_1000 value: 42.38683333333333 - type: mrr_at_3 value: 38.952999999999996 - type: mrr_at_5 value: 40.45333333333333 - type: ndcg_at_1 value: 32.291583333333335 - type: ndcg_at_10 value: 42.90533333333334 - type: ndcg_at_100 value: 48.138666666666666 - type: ndcg_at_1000 value: 50.229083333333335 - type: ndcg_at_3 value: 37.76133333333334 - type: ndcg_at_5 value: 40.31033333333334 - type: precision_at_1 value: 32.291583333333335 - type: precision_at_10 value: 7.585583333333333 - type: precision_at_100 value: 1.2045000000000001 - type: precision_at_1000 value: 0.15733333333333335 - type: precision_at_3 value: 17.485416666666666 - type: precision_at_5 value: 12.5145 - type: recall_at_1 value: 27.243583333333333 - type: recall_at_10 value: 55.45108333333334 - type: recall_at_100 value: 78.25858333333335 - type: recall_at_1000 value: 92.61716666666665 - type: recall_at_3 value: 41.130583333333334 - type: recall_at_5 value: 47.73133333333334 - type: map_at_1 value: 26.325 - type: map_at_10 value: 32.795 - type: map_at_100 value: 33.96 - type: map_at_1000 value: 34.054 - type: map_at_3 value: 30.64 - type: map_at_5 value: 31.771 - type: mrr_at_1 value: 29.908 - type: mrr_at_10 value: 35.83 - type: mrr_at_100 value: 36.868 - type: mrr_at_1000 value: 36.928 - type: mrr_at_3 value: 33.896 - type: mrr_at_5 value: 34.893 - type: ndcg_at_1 value: 29.908 - type: ndcg_at_10 value: 36.746 - type: ndcg_at_100 value: 42.225 - type: ndcg_at_1000 value: 44.523 - type: ndcg_at_3 value: 32.82 - type: ndcg_at_5 value: 34.583000000000006 - type: precision_at_1 value: 29.908 - type: precision_at_10 value: 5.6129999999999995 - type: precision_at_100 value: 
0.9079999999999999 - type: precision_at_1000 value: 0.11800000000000001 - type: precision_at_3 value: 13.753000000000002 - type: precision_at_5 value: 9.417 - type: recall_at_1 value: 26.325 - type: recall_at_10 value: 45.975 - type: recall_at_100 value: 70.393 - type: recall_at_1000 value: 87.217 - type: recall_at_3 value: 35.195 - type: recall_at_5 value: 39.69 - type: map_at_1 value: 17.828 - type: map_at_10 value: 25.759 - type: map_at_100 value: 26.961000000000002 - type: map_at_1000 value: 27.094 - type: map_at_3 value: 23.166999999999998 - type: map_at_5 value: 24.610000000000003 - type: mrr_at_1 value: 21.61 - type: mrr_at_10 value: 29.605999999999998 - type: mrr_at_100 value: 30.586000000000002 - type: mrr_at_1000 value: 30.664 - type: mrr_at_3 value: 27.214 - type: mrr_at_5 value: 28.571 - type: ndcg_at_1 value: 21.61 - type: ndcg_at_10 value: 30.740000000000002 - type: ndcg_at_100 value: 36.332 - type: ndcg_at_1000 value: 39.296 - type: ndcg_at_3 value: 26.11 - type: ndcg_at_5 value: 28.297 - type: precision_at_1 value: 21.61 - type: precision_at_10 value: 5.643 - type: precision_at_100 value: 1.0 - type: precision_at_1000 value: 0.14400000000000002 - type: precision_at_3 value: 12.4 - type: precision_at_5 value: 9.119 - type: recall_at_1 value: 17.828 - type: recall_at_10 value: 41.876000000000005 - type: recall_at_100 value: 66.648 - type: recall_at_1000 value: 87.763 - type: recall_at_3 value: 28.957 - type: recall_at_5 value: 34.494 - type: map_at_1 value: 27.921000000000003 - type: map_at_10 value: 37.156 - type: map_at_100 value: 38.399 - type: map_at_1000 value: 38.498 - type: map_at_3 value: 34.134 - type: map_at_5 value: 35.936 - type: mrr_at_1 value: 32.649 - type: mrr_at_10 value: 41.19 - type: mrr_at_100 value: 42.102000000000004 - type: mrr_at_1000 value: 42.157 - type: mrr_at_3 value: 38.464 - type: mrr_at_5 value: 40.148 - type: ndcg_at_1 value: 32.649 - type: ndcg_at_10 value: 42.679 - type: ndcg_at_100 value: 48.27 - type: ndcg_at_1000 value: 50.312 - type: ndcg_at_3 value: 37.269000000000005 - type: ndcg_at_5 value: 40.055 - type: precision_at_1 value: 32.649 - type: precision_at_10 value: 7.155 - type: precision_at_100 value: 1.124 - type: precision_at_1000 value: 0.14100000000000001 - type: precision_at_3 value: 16.791 - type: precision_at_5 value: 12.015 - type: recall_at_1 value: 27.921000000000003 - type: recall_at_10 value: 55.357 - type: recall_at_100 value: 79.476 - type: recall_at_1000 value: 93.314 - type: recall_at_3 value: 40.891 - type: recall_at_5 value: 47.851 - type: map_at_1 value: 25.524 - type: map_at_10 value: 35.135 - type: map_at_100 value: 36.665 - type: map_at_1000 value: 36.886 - type: map_at_3 value: 31.367 - type: map_at_5 value: 33.724 - type: mrr_at_1 value: 30.631999999999998 - type: mrr_at_10 value: 39.616 - type: mrr_at_100 value: 40.54 - type: mrr_at_1000 value: 40.585 - type: mrr_at_3 value: 36.462 - type: mrr_at_5 value: 38.507999999999996 - type: ndcg_at_1 value: 30.631999999999998 - type: ndcg_at_10 value: 41.61 - type: ndcg_at_100 value: 47.249 - type: ndcg_at_1000 value: 49.662 - type: ndcg_at_3 value: 35.421 - type: ndcg_at_5 value: 38.811 - type: precision_at_1 value: 30.631999999999998 - type: precision_at_10 value: 8.123 - type: precision_at_100 value: 1.5810000000000002 - type: precision_at_1000 value: 0.245 - type: precision_at_3 value: 16.337 - type: precision_at_5 value: 12.568999999999999 - type: recall_at_1 value: 25.524 - type: recall_at_10 value: 54.994 - type: recall_at_100 value: 80.03099999999999 - type: 
recall_at_1000 value: 95.25099999999999 - type: recall_at_3 value: 37.563 - type: recall_at_5 value: 46.428999999999995 - type: map_at_1 value: 22.224 - type: map_at_10 value: 30.599999999999998 - type: map_at_100 value: 31.526 - type: map_at_1000 value: 31.629 - type: map_at_3 value: 27.491 - type: map_at_5 value: 29.212 - type: mrr_at_1 value: 24.214 - type: mrr_at_10 value: 32.632 - type: mrr_at_100 value: 33.482 - type: mrr_at_1000 value: 33.550000000000004 - type: mrr_at_3 value: 29.852 - type: mrr_at_5 value: 31.451 - type: ndcg_at_1 value: 24.214 - type: ndcg_at_10 value: 35.802 - type: ndcg_at_100 value: 40.502 - type: ndcg_at_1000 value: 43.052 - type: ndcg_at_3 value: 29.847 - type: ndcg_at_5 value: 32.732 - type: precision_at_1 value: 24.214 - type: precision_at_10 value: 5.804 - type: precision_at_100 value: 0.885 - type: precision_at_1000 value: 0.121 - type: precision_at_3 value: 12.692999999999998 - type: precision_at_5 value: 9.242 - type: recall_at_1 value: 22.224 - type: recall_at_10 value: 49.849 - type: recall_at_100 value: 71.45 - type: recall_at_1000 value: 90.583 - type: recall_at_3 value: 34.153 - type: recall_at_5 value: 41.004000000000005 - task: type: Retrieval dataset: name: MTEB ClimateFEVER type: climate-fever config: default split: test revision: None metrics: - type: map_at_1 value: 12.386999999999999 - type: map_at_10 value: 20.182 - type: map_at_100 value: 21.86 - type: map_at_1000 value: 22.054000000000002 - type: map_at_3 value: 17.165 - type: map_at_5 value: 18.643 - type: mrr_at_1 value: 26.906000000000002 - type: mrr_at_10 value: 37.907999999999994 - type: mrr_at_100 value: 38.868 - type: mrr_at_1000 value: 38.913 - type: mrr_at_3 value: 34.853 - type: mrr_at_5 value: 36.567 - type: ndcg_at_1 value: 26.906000000000002 - type: ndcg_at_10 value: 28.103 - type: ndcg_at_100 value: 35.073 - type: ndcg_at_1000 value: 38.653 - type: ndcg_at_3 value: 23.345 - type: ndcg_at_5 value: 24.828 - type: precision_at_1 value: 26.906000000000002 - type: precision_at_10 value: 8.547 - type: precision_at_100 value: 1.617 - type: precision_at_1000 value: 0.22799999999999998 - type: precision_at_3 value: 17.025000000000002 - type: precision_at_5 value: 12.834000000000001 - type: recall_at_1 value: 12.386999999999999 - type: recall_at_10 value: 33.306999999999995 - type: recall_at_100 value: 57.516 - type: recall_at_1000 value: 77.74799999999999 - type: recall_at_3 value: 21.433 - type: recall_at_5 value: 25.915 - task: type: Retrieval dataset: name: MTEB DBPedia type: dbpedia-entity config: default split: test revision: None metrics: - type: map_at_1 value: 9.322 - type: map_at_10 value: 20.469 - type: map_at_100 value: 28.638 - type: map_at_1000 value: 30.433 - type: map_at_3 value: 14.802000000000001 - type: map_at_5 value: 17.297 - type: mrr_at_1 value: 68.75 - type: mrr_at_10 value: 76.29599999999999 - type: mrr_at_100 value: 76.62400000000001 - type: mrr_at_1000 value: 76.633 - type: mrr_at_3 value: 75.083 - type: mrr_at_5 value: 75.771 - type: ndcg_at_1 value: 54.87499999999999 - type: ndcg_at_10 value: 41.185 - type: ndcg_at_100 value: 46.400000000000006 - type: ndcg_at_1000 value: 54.223 - type: ndcg_at_3 value: 45.489000000000004 - type: ndcg_at_5 value: 43.161 - type: precision_at_1 value: 68.75 - type: precision_at_10 value: 32.300000000000004 - type: precision_at_100 value: 10.607999999999999 - type: precision_at_1000 value: 2.237 - type: precision_at_3 value: 49.083 - type: precision_at_5 value: 41.6 - type: recall_at_1 value: 9.322 - type: recall_at_10 
value: 25.696 - type: recall_at_100 value: 52.898 - type: recall_at_1000 value: 77.281 - type: recall_at_3 value: 15.943 - type: recall_at_5 value: 19.836000000000002 - task: type: Classification dataset: name: MTEB EmotionClassification type: mteb/emotion config: default split: test revision: 4f58c6b202a23cf9a4da393831edf4f9183cad37 metrics: - type: accuracy value: 48.650000000000006 - type: f1 value: 43.528467245539396 - task: type: Retrieval dataset: name: MTEB FEVER type: fever config: default split: test revision: None metrics: - type: map_at_1 value: 66.56 - type: map_at_10 value: 76.767 - type: map_at_100 value: 77.054 - type: map_at_1000 value: 77.068 - type: map_at_3 value: 75.29299999999999 - type: map_at_5 value: 76.24 - type: mrr_at_1 value: 71.842 - type: mrr_at_10 value: 81.459 - type: mrr_at_100 value: 81.58800000000001 - type: mrr_at_1000 value: 81.59100000000001 - type: mrr_at_3 value: 80.188 - type: mrr_at_5 value: 81.038 - type: ndcg_at_1 value: 71.842 - type: ndcg_at_10 value: 81.51899999999999 - type: ndcg_at_100 value: 82.544 - type: ndcg_at_1000 value: 82.829 - type: ndcg_at_3 value: 78.92 - type: ndcg_at_5 value: 80.406 - type: precision_at_1 value: 71.842 - type: precision_at_10 value: 10.066 - type: precision_at_100 value: 1.076 - type: precision_at_1000 value: 0.11199999999999999 - type: precision_at_3 value: 30.703000000000003 - type: precision_at_5 value: 19.301 - type: recall_at_1 value: 66.56 - type: recall_at_10 value: 91.55 - type: recall_at_100 value: 95.67099999999999 - type: recall_at_1000 value: 97.539 - type: recall_at_3 value: 84.46900000000001 - type: recall_at_5 value: 88.201 - task: type: Retrieval dataset: name: MTEB FiQA2018 type: fiqa config: default split: test revision: None metrics: - type: map_at_1 value: 20.087 - type: map_at_10 value: 32.830999999999996 - type: map_at_100 value: 34.814 - type: map_at_1000 value: 34.999 - type: map_at_3 value: 28.198 - type: map_at_5 value: 30.779 - type: mrr_at_1 value: 38.889 - type: mrr_at_10 value: 48.415 - type: mrr_at_100 value: 49.187 - type: mrr_at_1000 value: 49.226 - type: mrr_at_3 value: 45.705 - type: mrr_at_5 value: 47.225 - type: ndcg_at_1 value: 38.889 - type: ndcg_at_10 value: 40.758 - type: ndcg_at_100 value: 47.671 - type: ndcg_at_1000 value: 50.744 - type: ndcg_at_3 value: 36.296 - type: ndcg_at_5 value: 37.852999999999994 - type: precision_at_1 value: 38.889 - type: precision_at_10 value: 11.466 - type: precision_at_100 value: 1.8499999999999999 - type: precision_at_1000 value: 0.24 - type: precision_at_3 value: 24.126 - type: precision_at_5 value: 18.21 - type: recall_at_1 value: 20.087 - type: recall_at_10 value: 48.042 - type: recall_at_100 value: 73.493 - type: recall_at_1000 value: 91.851 - type: recall_at_3 value: 32.694 - type: recall_at_5 value: 39.099000000000004 - task: type: Retrieval dataset: name: MTEB HotpotQA type: hotpotqa config: default split: test revision: None metrics: - type: map_at_1 value: 38.096000000000004 - type: map_at_10 value: 56.99999999999999 - type: map_at_100 value: 57.914 - type: map_at_1000 value: 57.984 - type: map_at_3 value: 53.900999999999996 - type: map_at_5 value: 55.827000000000005 - type: mrr_at_1 value: 76.19200000000001 - type: mrr_at_10 value: 81.955 - type: mrr_at_100 value: 82.164 - type: mrr_at_1000 value: 82.173 - type: mrr_at_3 value: 80.963 - type: mrr_at_5 value: 81.574 - type: ndcg_at_1 value: 76.19200000000001 - type: ndcg_at_10 value: 65.75 - type: ndcg_at_100 value: 68.949 - type: ndcg_at_1000 value: 70.342 - type: ndcg_at_3 
value: 61.29 - type: ndcg_at_5 value: 63.747 - type: precision_at_1 value: 76.19200000000001 - type: precision_at_10 value: 13.571 - type: precision_at_100 value: 1.6070000000000002 - type: precision_at_1000 value: 0.179 - type: precision_at_3 value: 38.663 - type: precision_at_5 value: 25.136999999999997 - type: recall_at_1 value: 38.096000000000004 - type: recall_at_10 value: 67.853 - type: recall_at_100 value: 80.365 - type: recall_at_1000 value: 89.629 - type: recall_at_3 value: 57.995 - type: recall_at_5 value: 62.843 - task: type: Classification dataset: name: MTEB ImdbClassification type: mteb/imdb config: default split: test revision: 3d86128a09e091d6018b6d26cad27f2739fc2db7 metrics: - type: accuracy value: 85.95200000000001 - type: ap value: 80.73847277002109 - type: f1 value: 85.92406135678594 - task: type: Retrieval dataset: name: MTEB MSMARCO type: msmarco config: default split: dev revision: None metrics: - type: map_at_1 value: 20.916999999999998 - type: map_at_10 value: 33.23 - type: map_at_100 value: 34.427 - type: map_at_1000 value: 34.477000000000004 - type: map_at_3 value: 29.292 - type: map_at_5 value: 31.6 - type: mrr_at_1 value: 21.547 - type: mrr_at_10 value: 33.839999999999996 - type: mrr_at_100 value: 34.979 - type: mrr_at_1000 value: 35.022999999999996 - type: mrr_at_3 value: 29.988 - type: mrr_at_5 value: 32.259 - type: ndcg_at_1 value: 21.519 - type: ndcg_at_10 value: 40.209 - type: ndcg_at_100 value: 45.954 - type: ndcg_at_1000 value: 47.187 - type: ndcg_at_3 value: 32.227 - type: ndcg_at_5 value: 36.347 - type: precision_at_1 value: 21.519 - type: precision_at_10 value: 6.447 - type: precision_at_100 value: 0.932 - type: precision_at_1000 value: 0.104 - type: precision_at_3 value: 13.877999999999998 - type: precision_at_5 value: 10.404 - type: recall_at_1 value: 20.916999999999998 - type: recall_at_10 value: 61.7 - type: recall_at_100 value: 88.202 - type: recall_at_1000 value: 97.588 - type: recall_at_3 value: 40.044999999999995 - type: recall_at_5 value: 49.964999999999996 - task: type: Classification dataset: name: MTEB MTOPDomainClassification (en) type: mteb/mtop_domain config: en split: test revision: d80d48c1eb48d3562165c59d59d0034df9fff0bf metrics: - type: accuracy value: 93.02781577747379 - type: f1 value: 92.83653922768306 - task: type: Classification dataset: name: MTEB MTOPIntentClassification (en) type: mteb/mtop_intent config: en split: test revision: ae001d0e6b1228650b7bd1c2c65fb50ad11a8aba metrics: - type: accuracy value: 72.04286365709075 - type: f1 value: 53.43867658525793 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (en) type: mteb/amazon_massive_intent config: en split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 71.47276395427035 - type: f1 value: 69.77017399597342 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (en) type: mteb/amazon_massive_scenario config: en split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 76.3819771351715 - type: f1 value: 76.8484533435409 - task: type: Clustering dataset: name: MTEB MedrxivClusteringP2P type: mteb/medrxiv-clustering-p2p config: default split: test revision: e7a26af6f3ae46b30dde8737f02c07b1505bcc73 metrics: - type: v_measure value: 33.16515993299593 - task: type: Clustering dataset: name: MTEB MedrxivClusteringS2S type: mteb/medrxiv-clustering-s2s config: default split: test revision: 35191c8c0dca72d8ff3efcd72aa802307d469663 metrics: - type: 
v_measure value: 31.77145323314774 - task: type: Reranking dataset: name: MTEB MindSmallReranking type: mteb/mind_small config: default split: test revision: 3bdac13927fdc888b903db93b2ffdbd90b295a69 metrics: - type: map value: 32.53637706586391 - type: mrr value: 33.7312926288863 - task: type: Retrieval dataset: name: MTEB NFCorpus type: nfcorpus config: default split: test revision: None metrics: - type: map_at_1 value: 7.063999999999999 - type: map_at_10 value: 15.046999999999999 - type: map_at_100 value: 19.116 - type: map_at_1000 value: 20.702 - type: map_at_3 value: 10.932 - type: map_at_5 value: 12.751999999999999 - type: mrr_at_1 value: 50.464 - type: mrr_at_10 value: 58.189 - type: mrr_at_100 value: 58.733999999999995 - type: mrr_at_1000 value: 58.769000000000005 - type: mrr_at_3 value: 56.24400000000001 - type: mrr_at_5 value: 57.68299999999999 - type: ndcg_at_1 value: 48.142 - type: ndcg_at_10 value: 37.897 - type: ndcg_at_100 value: 35.264 - type: ndcg_at_1000 value: 44.033 - type: ndcg_at_3 value: 42.967 - type: ndcg_at_5 value: 40.815 - type: precision_at_1 value: 50.15500000000001 - type: precision_at_10 value: 28.235 - type: precision_at_100 value: 8.994 - type: precision_at_1000 value: 2.218 - type: precision_at_3 value: 40.041 - type: precision_at_5 value: 35.046 - type: recall_at_1 value: 7.063999999999999 - type: recall_at_10 value: 18.598 - type: recall_at_100 value: 35.577999999999996 - type: recall_at_1000 value: 67.43 - type: recall_at_3 value: 11.562999999999999 - type: recall_at_5 value: 14.771 - task: type: Retrieval dataset: name: MTEB NQ type: nq config: default split: test revision: None metrics: - type: map_at_1 value: 29.046 - type: map_at_10 value: 44.808 - type: map_at_100 value: 45.898 - type: map_at_1000 value: 45.927 - type: map_at_3 value: 40.19 - type: map_at_5 value: 42.897 - type: mrr_at_1 value: 32.706 - type: mrr_at_10 value: 47.275 - type: mrr_at_100 value: 48.075 - type: mrr_at_1000 value: 48.095 - type: mrr_at_3 value: 43.463 - type: mrr_at_5 value: 45.741 - type: ndcg_at_1 value: 32.706 - type: ndcg_at_10 value: 52.835 - type: ndcg_at_100 value: 57.345 - type: ndcg_at_1000 value: 57.985 - type: ndcg_at_3 value: 44.171 - type: ndcg_at_5 value: 48.661 - type: precision_at_1 value: 32.706 - type: precision_at_10 value: 8.895999999999999 - type: precision_at_100 value: 1.143 - type: precision_at_1000 value: 0.12 - type: precision_at_3 value: 20.238999999999997 - type: precision_at_5 value: 14.728 - type: recall_at_1 value: 29.046 - type: recall_at_10 value: 74.831 - type: recall_at_100 value: 94.192 - type: recall_at_1000 value: 98.897 - type: recall_at_3 value: 52.37500000000001 - type: recall_at_5 value: 62.732 - task: type: Retrieval dataset: name: MTEB QuoraRetrieval type: quora config: default split: test revision: None metrics: - type: map_at_1 value: 70.38799999999999 - type: map_at_10 value: 84.315 - type: map_at_100 value: 84.955 - type: map_at_1000 value: 84.971 - type: map_at_3 value: 81.33399999999999 - type: map_at_5 value: 83.21300000000001 - type: mrr_at_1 value: 81.03 - type: mrr_at_10 value: 87.395 - type: mrr_at_100 value: 87.488 - type: mrr_at_1000 value: 87.48899999999999 - type: mrr_at_3 value: 86.41499999999999 - type: mrr_at_5 value: 87.074 - type: ndcg_at_1 value: 81.04 - type: ndcg_at_10 value: 88.151 - type: ndcg_at_100 value: 89.38199999999999 - type: ndcg_at_1000 value: 89.479 - type: ndcg_at_3 value: 85.24000000000001 - type: ndcg_at_5 value: 86.856 - type: precision_at_1 value: 81.04 - type: precision_at_10 value: 
13.372 - type: precision_at_100 value: 1.526 - type: precision_at_1000 value: 0.157 - type: precision_at_3 value: 37.217 - type: precision_at_5 value: 24.502 - type: recall_at_1 value: 70.38799999999999 - type: recall_at_10 value: 95.452 - type: recall_at_100 value: 99.59700000000001 - type: recall_at_1000 value: 99.988 - type: recall_at_3 value: 87.11 - type: recall_at_5 value: 91.662 - task: type: Clustering dataset: name: MTEB RedditClustering type: mteb/reddit-clustering config: default split: test revision: 24640382cdbf8abc73003fb0fa6d111a705499eb metrics: - type: v_measure value: 59.334991029213235 - task: type: Clustering dataset: name: MTEB RedditClusteringP2P type: mteb/reddit-clustering-p2p config: default split: test revision: 282350215ef01743dc01b456c7f5241fa8937f16 metrics: - type: v_measure value: 62.586500854616666 - task: type: Retrieval dataset: name: MTEB SCIDOCS type: scidocs config: default split: test revision: None metrics: - type: map_at_1 value: 5.153 - type: map_at_10 value: 14.277000000000001 - type: map_at_100 value: 16.922 - type: map_at_1000 value: 17.302999999999997 - type: map_at_3 value: 9.961 - type: map_at_5 value: 12.257 - type: mrr_at_1 value: 25.4 - type: mrr_at_10 value: 37.458000000000006 - type: mrr_at_100 value: 38.681 - type: mrr_at_1000 value: 38.722 - type: mrr_at_3 value: 34.1 - type: mrr_at_5 value: 36.17 - type: ndcg_at_1 value: 25.4 - type: ndcg_at_10 value: 23.132 - type: ndcg_at_100 value: 32.908 - type: ndcg_at_1000 value: 38.754 - type: ndcg_at_3 value: 21.82 - type: ndcg_at_5 value: 19.353 - type: precision_at_1 value: 25.4 - type: precision_at_10 value: 12.1 - type: precision_at_100 value: 2.628 - type: precision_at_1000 value: 0.402 - type: precision_at_3 value: 20.732999999999997 - type: precision_at_5 value: 17.34 - type: recall_at_1 value: 5.153 - type: recall_at_10 value: 24.54 - type: recall_at_100 value: 53.293 - type: recall_at_1000 value: 81.57 - type: recall_at_3 value: 12.613 - type: recall_at_5 value: 17.577 - task: type: STS dataset: name: MTEB SICK-R type: mteb/sickr-sts config: default split: test revision: a6ea5a8cab320b040a23452cc28066d9beae2cee metrics: - type: cos_sim_pearson value: 84.86284404925333 - type: cos_sim_spearman value: 78.85870555294795 - type: euclidean_pearson value: 82.20105295276093 - type: euclidean_spearman value: 78.92125617009592 - type: manhattan_pearson value: 82.15840025289069 - type: manhattan_spearman value: 78.85955732900803 - task: type: STS dataset: name: MTEB STS12 type: mteb/sts12-sts config: default split: test revision: a0d554a64d88156834ff5ae9920b964011b16384 metrics: - type: cos_sim_pearson value: 84.98747423389027 - type: cos_sim_spearman value: 75.71298531799367 - type: euclidean_pearson value: 81.59709559192291 - type: euclidean_spearman value: 75.40622749225653 - type: manhattan_pearson value: 81.55553547608804 - type: manhattan_spearman value: 75.39380235424899 - task: type: STS dataset: name: MTEB STS13 type: mteb/sts13-sts config: default split: test revision: 7e90230a92c190f1bf69ae9002b8cea547a64cca metrics: - type: cos_sim_pearson value: 83.76861330695503 - type: cos_sim_spearman value: 85.72991921531624 - type: euclidean_pearson value: 84.84504307397536 - type: euclidean_spearman value: 86.02679162824732 - type: manhattan_pearson value: 84.79969439220142 - type: manhattan_spearman value: 85.99238837291625 - task: type: STS dataset: name: MTEB STS14 type: mteb/sts14-sts config: default split: test revision: 6031580fec1f6af667f0bd2da0a551cf4f0b2375 metrics: - type: 
cos_sim_pearson value: 83.31929747511796 - type: cos_sim_spearman value: 81.50806522502528 - type: euclidean_pearson value: 82.93936686512777 - type: euclidean_spearman value: 81.54403447993224 - type: manhattan_pearson value: 82.89696981900828 - type: manhattan_spearman value: 81.52817825470865 - task: type: STS dataset: name: MTEB STS15 type: mteb/sts15-sts config: default split: test revision: ae752c7c21bf194d8b67fd573edf7ae58183cbe3 metrics: - type: cos_sim_pearson value: 87.14413295332908 - type: cos_sim_spearman value: 88.81032027008195 - type: euclidean_pearson value: 88.19205563407645 - type: euclidean_spearman value: 88.89738339479216 - type: manhattan_pearson value: 88.11075942004189 - type: manhattan_spearman value: 88.8297061675564 - task: type: STS dataset: name: MTEB STS16 type: mteb/sts16-sts config: default split: test revision: 4d8694f8f0e0100860b497b999b3dbed754a0513 metrics: - type: cos_sim_pearson value: 82.15980075557017 - type: cos_sim_spearman value: 83.81896308594801 - type: euclidean_pearson value: 83.11195254311338 - type: euclidean_spearman value: 84.10479481755407 - type: manhattan_pearson value: 83.13915225100556 - type: manhattan_spearman value: 84.09895591027859 - task: type: STS dataset: name: MTEB STS17 (en-en) type: mteb/sts17-crosslingual-sts config: en-en split: test revision: af5e6fb845001ecf41f4c1e033ce921939a2a68d metrics: - type: cos_sim_pearson value: 87.93669480147919 - type: cos_sim_spearman value: 87.89861394614361 - type: euclidean_pearson value: 88.37316413202339 - type: euclidean_spearman value: 88.18033817842569 - type: manhattan_pearson value: 88.39427578879469 - type: manhattan_spearman value: 88.09185009236847 - task: type: STS dataset: name: MTEB STS22 (en) type: mteb/sts22-crosslingual-sts config: en split: test revision: 6d1ba47164174a496b7fa5d3569dae26a6813b80 metrics: - type: cos_sim_pearson value: 66.62215083348255 - type: cos_sim_spearman value: 67.33243665716736 - type: euclidean_pearson value: 67.60871701996284 - type: euclidean_spearman value: 66.75929225238659 - type: manhattan_pearson value: 67.63907838970992 - type: manhattan_spearman value: 66.79313656754846 - task: type: STS dataset: name: MTEB STSBenchmark type: mteb/stsbenchmark-sts config: default split: test revision: b0fddb56ed78048fa8b90373c8a3cfc37b684831 metrics: - type: cos_sim_pearson value: 84.65549191934764 - type: cos_sim_spearman value: 85.73266847750143 - type: euclidean_pearson value: 85.75609932254318 - type: euclidean_spearman value: 85.9452287759371 - type: manhattan_pearson value: 85.69717413063573 - type: manhattan_spearman value: 85.86546318377046 - task: type: Reranking dataset: name: MTEB SciDocsRR type: mteb/scidocs-reranking config: default split: test revision: d3c5e1fc0b855ab6097bf1cda04dd73947d7caab metrics: - type: map value: 87.08164129085783 - type: mrr value: 96.2877273416489 - task: type: Retrieval dataset: name: MTEB SciFact type: scifact config: default split: test revision: None metrics: - type: map_at_1 value: 62.09400000000001 - type: map_at_10 value: 71.712 - type: map_at_100 value: 72.128 - type: map_at_1000 value: 72.14399999999999 - type: map_at_3 value: 68.93 - type: map_at_5 value: 70.694 - type: mrr_at_1 value: 65.0 - type: mrr_at_10 value: 72.572 - type: mrr_at_100 value: 72.842 - type: mrr_at_1000 value: 72.856 - type: mrr_at_3 value: 70.44399999999999 - type: mrr_at_5 value: 71.744 - type: ndcg_at_1 value: 65.0 - type: ndcg_at_10 value: 76.178 - type: ndcg_at_100 value: 77.887 - type: ndcg_at_1000 value: 78.227 - type: 
ndcg_at_3 value: 71.367 - type: ndcg_at_5 value: 73.938 - type: precision_at_1 value: 65.0 - type: precision_at_10 value: 10.033 - type: precision_at_100 value: 1.097 - type: precision_at_1000 value: 0.11199999999999999 - type: precision_at_3 value: 27.667 - type: precision_at_5 value: 18.4 - type: recall_at_1 value: 62.09400000000001 - type: recall_at_10 value: 89.022 - type: recall_at_100 value: 96.833 - type: recall_at_1000 value: 99.333 - type: recall_at_3 value: 75.922 - type: recall_at_5 value: 82.428 - task: type: PairClassification dataset: name: MTEB SprintDuplicateQuestions type: mteb/sprintduplicatequestions-pairclassification config: default split: test revision: d66bd1f72af766a5cc4b0ca5e00c162f89e8cc46 metrics: - type: cos_sim_accuracy value: 99.82178217821782 - type: cos_sim_ap value: 95.71282508220798 - type: cos_sim_f1 value: 90.73120494335737 - type: cos_sim_precision value: 93.52441613588111 - type: cos_sim_recall value: 88.1 - type: dot_accuracy value: 99.73960396039604 - type: dot_ap value: 92.98534606529098 - type: dot_f1 value: 86.83024536805209 - type: dot_precision value: 86.96088264794383 - type: dot_recall value: 86.7 - type: euclidean_accuracy value: 99.82475247524752 - type: euclidean_ap value: 95.72927039014849 - type: euclidean_f1 value: 90.89974293059126 - type: euclidean_precision value: 93.54497354497354 - type: euclidean_recall value: 88.4 - type: manhattan_accuracy value: 99.82574257425742 - type: manhattan_ap value: 95.72142177390405 - type: manhattan_f1 value: 91.00152516522625 - type: manhattan_precision value: 92.55429162357808 - type: manhattan_recall value: 89.5 - type: max_accuracy value: 99.82574257425742 - type: max_ap value: 95.72927039014849 - type: max_f1 value: 91.00152516522625 - task: type: Clustering dataset: name: MTEB StackExchangeClustering type: mteb/stackexchange-clustering config: default split: test revision: 6cbc1f7b2bc0622f2e39d2c77fa502909748c259 metrics: - type: v_measure value: 66.63957663468679 - task: type: Clustering dataset: name: MTEB StackExchangeClusteringP2P type: mteb/stackexchange-clustering-p2p config: default split: test revision: 815ca46b2622cec33ccafc3735d572c266efdb44 metrics: - type: v_measure value: 36.003307257923964 - task: type: Reranking dataset: name: MTEB StackOverflowDupQuestions type: mteb/stackoverflowdupquestions-reranking config: default split: test revision: e185fbe320c72810689fc5848eb6114e1ef5ec69 metrics: - type: map value: 53.005825525863905 - type: mrr value: 53.854683919022165 - task: type: Summarization dataset: name: MTEB SummEval type: mteb/summeval config: default split: test revision: cda12ad7615edc362dbf25a00fdd61d3b1eaf93c metrics: - type: cos_sim_pearson value: 30.503611569974098 - type: cos_sim_spearman value: 31.17155564248449 - type: dot_pearson value: 26.740428413981306 - type: dot_spearman value: 26.55727635469746 - task: type: Retrieval dataset: name: MTEB TRECCOVID type: trec-covid config: default split: test revision: None metrics: - type: map_at_1 value: 0.23600000000000002 - type: map_at_10 value: 1.7670000000000001 - type: map_at_100 value: 10.208 - type: map_at_1000 value: 25.997999999999998 - type: map_at_3 value: 0.605 - type: map_at_5 value: 0.9560000000000001 - type: mrr_at_1 value: 84.0 - type: mrr_at_10 value: 90.167 - type: mrr_at_100 value: 90.167 - type: mrr_at_1000 value: 90.167 - type: mrr_at_3 value: 89.667 - type: mrr_at_5 value: 90.167 - type: ndcg_at_1 value: 77.0 - type: ndcg_at_10 value: 68.783 - type: ndcg_at_100 value: 54.196 - type: ndcg_at_1000 value: 
52.077 - type: ndcg_at_3 value: 71.642 - type: ndcg_at_5 value: 70.45700000000001 - type: precision_at_1 value: 84.0 - type: precision_at_10 value: 73.0 - type: precision_at_100 value: 55.48 - type: precision_at_1000 value: 23.102 - type: precision_at_3 value: 76.0 - type: precision_at_5 value: 74.8 - type: recall_at_1 value: 0.23600000000000002 - type: recall_at_10 value: 1.9869999999999999 - type: recall_at_100 value: 13.749 - type: recall_at_1000 value: 50.157 - type: recall_at_3 value: 0.633 - type: recall_at_5 value: 1.0290000000000001 - task: type: Retrieval dataset: name: MTEB Touche2020 type: webis-touche2020 config: default split: test revision: None metrics: - type: map_at_1 value: 1.437 - type: map_at_10 value: 8.791 - type: map_at_100 value: 15.001999999999999 - type: map_at_1000 value: 16.549 - type: map_at_3 value: 3.8080000000000003 - type: map_at_5 value: 5.632000000000001 - type: mrr_at_1 value: 20.408 - type: mrr_at_10 value: 36.96 - type: mrr_at_100 value: 37.912 - type: mrr_at_1000 value: 37.912 - type: mrr_at_3 value: 29.592000000000002 - type: mrr_at_5 value: 34.489999999999995 - type: ndcg_at_1 value: 19.387999999999998 - type: ndcg_at_10 value: 22.554 - type: ndcg_at_100 value: 35.197 - type: ndcg_at_1000 value: 46.58 - type: ndcg_at_3 value: 20.285 - type: ndcg_at_5 value: 21.924 - type: precision_at_1 value: 20.408 - type: precision_at_10 value: 21.837 - type: precision_at_100 value: 7.754999999999999 - type: precision_at_1000 value: 1.537 - type: precision_at_3 value: 21.769 - type: precision_at_5 value: 23.673 - type: recall_at_1 value: 1.437 - type: recall_at_10 value: 16.314999999999998 - type: recall_at_100 value: 47.635 - type: recall_at_1000 value: 82.963 - type: recall_at_3 value: 4.955 - type: recall_at_5 value: 8.805 - task: type: Classification dataset: name: MTEB ToxicConversationsClassification type: mteb/toxic_conversations_50k config: default split: test revision: d7c0de2777da35d6aae2200a62c6e0e5af397c4c metrics: - type: accuracy value: 71.6128 - type: ap value: 14.279639861175664 - type: f1 value: 54.922292491204274 - task: type: Classification dataset: name: MTEB TweetSentimentExtractionClassification type: mteb/tweet_sentiment_extraction config: default split: test revision: d604517c81ca91fe16a244d1248fc021f9ecee7a metrics: - type: accuracy value: 57.01188455008489 - type: f1 value: 57.377953019225515 - task: type: Clustering dataset: name: MTEB TwentyNewsgroupsClustering type: mteb/twentynewsgroups-clustering config: default split: test revision: 6125ec4e24fa026cec8a478383ee943acfbd5449 metrics: - type: v_measure value: 52.306769136544254 - task: type: PairClassification dataset: name: MTEB TwitterSemEval2015 type: mteb/twittersemeval2015-pairclassification config: default split: test revision: 70970daeab8776df92f5ea462b6173c0b46fd2d1 metrics: - type: cos_sim_accuracy value: 85.64701674912082 - type: cos_sim_ap value: 72.46600945328552 - type: cos_sim_f1 value: 67.96572367648784 - type: cos_sim_precision value: 61.21801649397336 - type: cos_sim_recall value: 76.38522427440633 - type: dot_accuracy value: 82.33295583238957 - type: dot_ap value: 62.54843443071716 - type: dot_f1 value: 60.38378562507096 - type: dot_precision value: 52.99980067769583 - type: dot_recall value: 70.15831134564644 - type: euclidean_accuracy value: 85.7423854085951 - type: euclidean_ap value: 72.76873850945174 - type: euclidean_f1 value: 68.23556960543262 - type: euclidean_precision value: 61.3344559040202 - type: euclidean_recall value: 76.88654353562005 - type: 
manhattan_accuracy value: 85.74834594981225 - type: manhattan_ap value: 72.66825372446462 - type: manhattan_f1 value: 68.21539194662853 - type: manhattan_precision value: 62.185056472632496 - type: manhattan_recall value: 75.54089709762533 - type: max_accuracy value: 85.74834594981225 - type: max_ap value: 72.76873850945174 - type: max_f1 value: 68.23556960543262 - task: type: PairClassification dataset: name: MTEB TwitterURLCorpus type: mteb/twitterurlcorpus-pairclassification config: default split: test revision: 8b6510b0b1fa4e4c4f879467980e9be563ec1cdf metrics: - type: cos_sim_accuracy value: 88.73171110334924 - type: cos_sim_ap value: 85.51855542063649 - type: cos_sim_f1 value: 77.95706775700934 - type: cos_sim_precision value: 74.12524298805887 - type: cos_sim_recall value: 82.20665229442562 - type: dot_accuracy value: 86.94842240074514 - type: dot_ap value: 80.90995345771762 - type: dot_f1 value: 74.20765027322403 - type: dot_precision value: 70.42594385285575 - type: dot_recall value: 78.41854019094548 - type: euclidean_accuracy value: 88.73753250281368 - type: euclidean_ap value: 85.54712254033734 - type: euclidean_f1 value: 78.07565728654365 - type: euclidean_precision value: 75.1120597652081 - type: euclidean_recall value: 81.282722513089 - type: manhattan_accuracy value: 88.72588970388482 - type: manhattan_ap value: 85.52118291594071 - type: manhattan_f1 value: 78.04428724070593 - type: manhattan_precision value: 74.83219105490002 - type: manhattan_recall value: 81.54450261780106 - type: max_accuracy value: 88.73753250281368 - type: max_ap value: 85.54712254033734 - type: max_f1 value: 78.07565728654365
---

# gte-base

General Text Embeddings (GTE) model. [Towards General Text Embeddings with Multi-stage Contrastive Learning](https://arxiv.org/abs/2308.03281)

The GTE models are trained by Alibaba DAMO Academy. They are mainly based on the BERT framework and currently come in three sizes: [GTE-large](https://huggingface.co/thenlper/gte-large), [GTE-base](https://huggingface.co/thenlper/gte-base), and [GTE-small](https://huggingface.co/thenlper/gte-small). The GTE models are trained on a large-scale corpus of relevant text pairs covering a wide range of domains and scenarios, which enables them to be applied to various downstream text embedding tasks, including **information retrieval**, **semantic textual similarity**, and **text reranking**.

## Metrics

We compared the performance of the GTE models with other popular text embedding models on the MTEB benchmark. For more detailed comparison results, please refer to the [MTEB leaderboard](https://huggingface.co/spaces/mteb/leaderboard).
| Model Name | Model Size (GB) | Dimension | Sequence Length | Average (56) | Clustering (11) | Pair Classification (3) | Reranking (4) | Retrieval (15) | STS (10) | Summarization (1) | Classification (12) |
|:----:|:---:|:---:|:---:|:---:|:---:|:---:|:---:|:---:|:---:|:---:|:---:|
| [**gte-large**](https://huggingface.co/thenlper/gte-large) | 0.67 | 1024 | 512 | **63.13** | 46.84 | 85.00 | 59.13 | 52.22 | 83.35 | 31.66 | 73.33 |
| [**gte-base**](https://huggingface.co/thenlper/gte-base) | 0.22 | 768 | 512 | **62.39** | 46.2 | 84.57 | 58.61 | 51.14 | 82.3 | 31.17 | 73.01 |
| [e5-large-v2](https://huggingface.co/intfloat/e5-large-v2) | 1.34 | 1024 | 512 | 62.25 | 44.49 | 86.03 | 56.61 | 50.56 | 82.05 | 30.19 | 75.24 |
| [e5-base-v2](https://huggingface.co/intfloat/e5-base-v2) | 0.44 | 768 | 512 | 61.5 | 43.80 | 85.73 | 55.91 | 50.29 | 81.05 | 30.28 | 73.84 |
| [**gte-small**](https://huggingface.co/thenlper/gte-small) | 0.07 | 384 | 512 | **61.36** | 44.89 | 83.54 | 57.7 | 49.46 | 82.07 | 30.42 | 72.31 |
| [text-embedding-ada-002](https://platform.openai.com/docs/guides/embeddings) | - | 1536 | 8192 | 60.99 | 45.9 | 84.89 | 56.32 | 49.25 | 80.97 | 30.8 | 70.93 |
| [e5-small-v2](https://huggingface.co/intfloat/e5-small-v2) | 0.13 | 384 | 512 | 59.93 | 39.92 | 84.67 | 54.32 | 49.04 | 80.39 | 31.16 | 72.94 |
| [sentence-t5-xxl](https://huggingface.co/sentence-transformers/sentence-t5-xxl) | 9.73 | 768 | 512 | 59.51 | 43.72 | 85.06 | 56.42 | 42.24 | 82.63 | 30.08 | 73.42 |
| [all-mpnet-base-v2](https://huggingface.co/sentence-transformers/all-mpnet-base-v2) | 0.44 | 768 | 514 | 57.78 | 43.69 | 83.04 | 59.36 | 43.81 | 80.28 | 27.49 | 65.07 |
| [sgpt-bloom-7b1-msmarco](https://huggingface.co/bigscience/sgpt-bloom-7b1-msmarco) | 28.27 | 4096 | 2048 | 57.59 | 38.93 | 81.9 | 55.65 | 48.22 | 77.74 | 33.6 | 66.19 |
| [all-MiniLM-L12-v2](https://huggingface.co/sentence-transformers/all-MiniLM-L12-v2) | 0.13 | 384 | 512 | 56.53 | 41.81 | 82.41 | 58.44 | 42.69 | 79.8 | 27.9 | 63.21 |
| [all-MiniLM-L6-v2](https://huggingface.co/sentence-transformers/all-MiniLM-L6-v2) | 0.09 | 384 | 512 | 56.26 | 42.35 | 82.37 | 58.04 | 41.95 | 78.9 | 30.81 | 63.05 |
| [contriever-base-msmarco](https://huggingface.co/nthakur/contriever-base-msmarco) | 0.44 | 768 | 512 | 56.00 | 41.1 | 82.54 | 53.14 | 41.88 | 76.51 | 30.36 | 66.68 |
| [sentence-t5-base](https://huggingface.co/sentence-transformers/sentence-t5-base) | 0.22 | 768 | 512 | 55.27 | 40.21 | 85.18 | 53.09 | 33.63 | 81.14 | 31.39 | 69.81 |

## Usage

Code example:

```python
import torch.nn.functional as F
from torch import Tensor
from transformers import AutoTokenizer, AutoModel


def average_pool(last_hidden_states: Tensor, attention_mask: Tensor) -> Tensor:
    last_hidden = last_hidden_states.masked_fill(~attention_mask[..., None].bool(), 0.0)
    return last_hidden.sum(dim=1) / attention_mask.sum(dim=1)[..., None]


input_texts = [
    "what is the capital of China?",
    "how to implement quick sort in python?",
    "Beijing",
    "sorting algorithms"
]

tokenizer = AutoTokenizer.from_pretrained("thenlper/gte-base")
model = AutoModel.from_pretrained("thenlper/gte-base")

# Tokenize the input texts
batch_dict = tokenizer(input_texts, max_length=512, padding=True, truncation=True, return_tensors='pt')

outputs = model(**batch_dict)
embeddings = average_pool(outputs.last_hidden_state, batch_dict['attention_mask'])

# (Optionally) normalize embeddings
embeddings = F.normalize(embeddings, p=2, dim=1)
scores = (embeddings[:1] @ embeddings[1:].T) * 100
print(scores.tolist())
```
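The `max_length=512` and `truncation=True` arguments above matter because the model accepts at most 512 tokens (see the Limitation section below). As a minimal sketch, assuming the same `thenlper/gte-base` tokenizer as above, you could check whether an input will be truncated before encoding it:

```python
from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("thenlper/gte-base")

# A deliberately over-long input; the repeated phrase is only an illustration.
long_text = "General Text Embeddings " * 300

# Token count without truncation vs. what is actually fed to the model.
full_length = len(tokenizer(long_text)["input_ids"])
batch = tokenizer(long_text, max_length=512, truncation=True, return_tensors="pt")

print(full_length)                  # well above 512
print(batch["input_ids"].shape[1])  # capped at 512
```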
Use with sentence-transformers:

```python
from sentence_transformers import SentenceTransformer
from sentence_transformers.util import cos_sim

sentences = ['That is a happy person', 'That is a very happy person']

model = SentenceTransformer('thenlper/gte-base')
embeddings = model.encode(sentences)
print(cos_sim(embeddings[0], embeddings[1]))
```

### Limitation

This model works on English texts only, and any input longer than 512 tokens is truncated to that maximum length.

### Citation

If you find our paper or models helpful, please consider citing them as follows:

```
@article{li2023towards,
  title={Towards general text embeddings with multi-stage contrastive learning},
  author={Li, Zehan and Zhang, Xin and Zhang, Yanzhao and Long, Dingkun and Xie, Pengjun and Zhang, Meishan},
  journal={arXiv preprint arXiv:2308.03281},
  year={2023}
}
```
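Since the card lists **information retrieval** among the downstream tasks, a small retrieval-style sketch may also help. It is an illustration only, using sentence-transformers' `util.semantic_search` with a made-up corpus and query:

```python
from sentence_transformers import SentenceTransformer, util

# Toy corpus and query, invented for illustration.
corpus = [
    "Beijing is the capital of China.",
    "Quicksort is a divide-and-conquer sorting algorithm.",
    "The Great Wall is a series of fortifications in northern China.",
]
query = "what is the capital of China?"

model = SentenceTransformer("thenlper/gte-base")
corpus_embeddings = model.encode(corpus, convert_to_tensor=True, normalize_embeddings=True)
query_embedding = model.encode(query, convert_to_tensor=True, normalize_embeddings=True)

# semantic_search returns, for each query, the top_k corpus entries ranked by cosine similarity.
hits = util.semantic_search(query_embedding, corpus_embeddings, top_k=2)
for hit in hits[0]:
    print(corpus[hit["corpus_id"]], round(hit["score"], 4))
```

The same pattern scales to larger collections; in practice you would pre-compute and cache the corpus embeddings rather than re-encoding them per query.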
[ "BIOSSES", "SCIFACT" ]
BAAI/bge-small-en
BAAI
feature-extraction
[ "transformers", "pytorch", "safetensors", "bert", "feature-extraction", "mteb", "sentence transformers", "en", "arxiv:2311.13534", "arxiv:2310.07554", "arxiv:2309.07597", "license:mit", "model-index", "text-embeddings-inference", "endpoints_compatible", "region:us" ]
"2023-08-05T08:04:07Z"
2023-12-13T03:53:21+00:00
419,855
74
--- language: - en license: mit tags: - mteb - sentence transformers model-index: - name: bge-small-en results: - task: type: Classification dataset: name: MTEB AmazonCounterfactualClassification (en) type: mteb/amazon_counterfactual config: en split: test revision: e8379541af4e31359cca9fbcf4b00f2671dba205 metrics: - type: accuracy value: 74.34328358208955 - type: ap value: 37.59947775195661 - type: f1 value: 68.548415491933 - task: type: Classification dataset: name: MTEB AmazonPolarityClassification type: mteb/amazon_polarity config: default split: test revision: e2d317d38cd51312af73b3d32a06d1a08b442046 metrics: - type: accuracy value: 93.04527499999999 - type: ap value: 89.60696356772135 - type: f1 value: 93.03361469382438 - task: type: Classification dataset: name: MTEB AmazonReviewsClassification (en) type: mteb/amazon_reviews_multi config: en split: test revision: 1399c76144fd37290681b995c656ef9b2e06e26d metrics: - type: accuracy value: 46.08 - type: f1 value: 45.66249835363254 - task: type: Retrieval dataset: name: MTEB ArguAna type: arguana config: default split: test revision: None metrics: - type: map_at_1 value: 35.205999999999996 - type: map_at_10 value: 50.782000000000004 - type: map_at_100 value: 51.547 - type: map_at_1000 value: 51.554 - type: map_at_3 value: 46.515 - type: map_at_5 value: 49.296 - type: mrr_at_1 value: 35.632999999999996 - type: mrr_at_10 value: 50.958999999999996 - type: mrr_at_100 value: 51.724000000000004 - type: mrr_at_1000 value: 51.731 - type: mrr_at_3 value: 46.669 - type: mrr_at_5 value: 49.439 - type: ndcg_at_1 value: 35.205999999999996 - type: ndcg_at_10 value: 58.835 - type: ndcg_at_100 value: 62.095 - type: ndcg_at_1000 value: 62.255 - type: ndcg_at_3 value: 50.255 - type: ndcg_at_5 value: 55.296 - type: precision_at_1 value: 35.205999999999996 - type: precision_at_10 value: 8.421 - type: precision_at_100 value: 0.984 - type: precision_at_1000 value: 0.1 - type: precision_at_3 value: 20.365 - type: precision_at_5 value: 14.680000000000001 - type: recall_at_1 value: 35.205999999999996 - type: recall_at_10 value: 84.211 - type: recall_at_100 value: 98.43499999999999 - type: recall_at_1000 value: 99.644 - type: recall_at_3 value: 61.095 - type: recall_at_5 value: 73.4 - task: type: Clustering dataset: name: MTEB ArxivClusteringP2P type: mteb/arxiv-clustering-p2p config: default split: test revision: a122ad7f3f0291bf49cc6f4d32aa80929df69d5d metrics: - type: v_measure value: 47.52644476278646 - task: type: Clustering dataset: name: MTEB ArxivClusteringS2S type: mteb/arxiv-clustering-s2s config: default split: test revision: f910caf1a6075f7329cdf8c1a6135696f37dbd53 metrics: - type: v_measure value: 39.973045724188964 - task: type: Reranking dataset: name: MTEB AskUbuntuDupQuestions type: mteb/askubuntudupquestions-reranking config: default split: test revision: 2000358ca161889fa9c082cb41daa8dcfb161a54 metrics: - type: map value: 62.28285314871488 - type: mrr value: 74.52743701358659 - task: type: STS dataset: name: MTEB BIOSSES type: mteb/biosses-sts config: default split: test revision: d3fb88f8f02e40887cd149695127462bbcf29b4a metrics: - type: cos_sim_pearson value: 80.09041909160327 - type: cos_sim_spearman value: 79.96266537706944 - type: euclidean_pearson value: 79.50774978162241 - type: euclidean_spearman value: 79.9144715078551 - type: manhattan_pearson value: 79.2062139879302 - type: manhattan_spearman value: 79.35000081468212 - task: type: Classification dataset: name: MTEB Banking77Classification type: mteb/banking77 config: default split: 
test revision: 0fd18e25b25c072e09e0d92ab615fda904d66300 metrics: - type: accuracy value: 85.31493506493506 - type: f1 value: 85.2704557977762 - task: type: Clustering dataset: name: MTEB BiorxivClusteringP2P type: mteb/biorxiv-clustering-p2p config: default split: test revision: 65b79d1d13f80053f67aca9498d9402c2d9f1f40 metrics: - type: v_measure value: 39.6837242810816 - task: type: Clustering dataset: name: MTEB BiorxivClusteringS2S type: mteb/biorxiv-clustering-s2s config: default split: test revision: 258694dd0231531bc1fd9de6ceb52a0853c6d908 metrics: - type: v_measure value: 35.38881249555897 - task: type: Retrieval dataset: name: MTEB CQADupstackAndroidRetrieval type: BeIR/cqadupstack config: default split: test revision: None metrics: - type: map_at_1 value: 27.884999999999998 - type: map_at_10 value: 39.574 - type: map_at_100 value: 40.993 - type: map_at_1000 value: 41.129 - type: map_at_3 value: 36.089 - type: map_at_5 value: 38.191 - type: mrr_at_1 value: 34.477999999999994 - type: mrr_at_10 value: 45.411 - type: mrr_at_100 value: 46.089999999999996 - type: mrr_at_1000 value: 46.147 - type: mrr_at_3 value: 42.346000000000004 - type: mrr_at_5 value: 44.292 - type: ndcg_at_1 value: 34.477999999999994 - type: ndcg_at_10 value: 46.123999999999995 - type: ndcg_at_100 value: 51.349999999999994 - type: ndcg_at_1000 value: 53.578 - type: ndcg_at_3 value: 40.824 - type: ndcg_at_5 value: 43.571 - type: precision_at_1 value: 34.477999999999994 - type: precision_at_10 value: 8.841000000000001 - type: precision_at_100 value: 1.4460000000000002 - type: precision_at_1000 value: 0.192 - type: precision_at_3 value: 19.742 - type: precision_at_5 value: 14.421000000000001 - type: recall_at_1 value: 27.884999999999998 - type: recall_at_10 value: 59.087 - type: recall_at_100 value: 80.609 - type: recall_at_1000 value: 95.054 - type: recall_at_3 value: 44.082 - type: recall_at_5 value: 51.593999999999994 - type: map_at_1 value: 30.639 - type: map_at_10 value: 40.047 - type: map_at_100 value: 41.302 - type: map_at_1000 value: 41.425 - type: map_at_3 value: 37.406 - type: map_at_5 value: 38.934000000000005 - type: mrr_at_1 value: 37.707 - type: mrr_at_10 value: 46.082 - type: mrr_at_100 value: 46.745 - type: mrr_at_1000 value: 46.786 - type: mrr_at_3 value: 43.980999999999995 - type: mrr_at_5 value: 45.287 - type: ndcg_at_1 value: 37.707 - type: ndcg_at_10 value: 45.525 - type: ndcg_at_100 value: 49.976 - type: ndcg_at_1000 value: 51.94499999999999 - type: ndcg_at_3 value: 41.704 - type: ndcg_at_5 value: 43.596000000000004 - type: precision_at_1 value: 37.707 - type: precision_at_10 value: 8.465 - type: precision_at_100 value: 1.375 - type: precision_at_1000 value: 0.183 - type: precision_at_3 value: 19.979 - type: precision_at_5 value: 14.115 - type: recall_at_1 value: 30.639 - type: recall_at_10 value: 54.775 - type: recall_at_100 value: 73.678 - type: recall_at_1000 value: 86.142 - type: recall_at_3 value: 43.230000000000004 - type: recall_at_5 value: 48.622 - type: map_at_1 value: 38.038 - type: map_at_10 value: 49.922 - type: map_at_100 value: 51.032 - type: map_at_1000 value: 51.085 - type: map_at_3 value: 46.664 - type: map_at_5 value: 48.588 - type: mrr_at_1 value: 43.95 - type: mrr_at_10 value: 53.566 - type: mrr_at_100 value: 54.318999999999996 - type: mrr_at_1000 value: 54.348 - type: mrr_at_3 value: 51.066 - type: mrr_at_5 value: 52.649 - type: ndcg_at_1 value: 43.95 - type: ndcg_at_10 value: 55.676 - type: ndcg_at_100 value: 60.126000000000005 - type: ndcg_at_1000 value: 61.208 - type: 
ndcg_at_3 value: 50.20400000000001 - type: ndcg_at_5 value: 53.038 - type: precision_at_1 value: 43.95 - type: precision_at_10 value: 8.953 - type: precision_at_100 value: 1.2109999999999999 - type: precision_at_1000 value: 0.135 - type: precision_at_3 value: 22.256999999999998 - type: precision_at_5 value: 15.524 - type: recall_at_1 value: 38.038 - type: recall_at_10 value: 69.15 - type: recall_at_100 value: 88.31599999999999 - type: recall_at_1000 value: 95.993 - type: recall_at_3 value: 54.663 - type: recall_at_5 value: 61.373 - type: map_at_1 value: 24.872 - type: map_at_10 value: 32.912 - type: map_at_100 value: 33.972 - type: map_at_1000 value: 34.046 - type: map_at_3 value: 30.361 - type: map_at_5 value: 31.704 - type: mrr_at_1 value: 26.779999999999998 - type: mrr_at_10 value: 34.812 - type: mrr_at_100 value: 35.754999999999995 - type: mrr_at_1000 value: 35.809000000000005 - type: mrr_at_3 value: 32.335 - type: mrr_at_5 value: 33.64 - type: ndcg_at_1 value: 26.779999999999998 - type: ndcg_at_10 value: 37.623 - type: ndcg_at_100 value: 42.924 - type: ndcg_at_1000 value: 44.856 - type: ndcg_at_3 value: 32.574 - type: ndcg_at_5 value: 34.842 - type: precision_at_1 value: 26.779999999999998 - type: precision_at_10 value: 5.729 - type: precision_at_100 value: 0.886 - type: precision_at_1000 value: 0.109 - type: precision_at_3 value: 13.559 - type: precision_at_5 value: 9.469 - type: recall_at_1 value: 24.872 - type: recall_at_10 value: 50.400999999999996 - type: recall_at_100 value: 74.954 - type: recall_at_1000 value: 89.56 - type: recall_at_3 value: 36.726 - type: recall_at_5 value: 42.138999999999996 - type: map_at_1 value: 16.803 - type: map_at_10 value: 24.348 - type: map_at_100 value: 25.56 - type: map_at_1000 value: 25.668000000000003 - type: map_at_3 value: 21.811 - type: map_at_5 value: 23.287 - type: mrr_at_1 value: 20.771 - type: mrr_at_10 value: 28.961 - type: mrr_at_100 value: 29.979 - type: mrr_at_1000 value: 30.046 - type: mrr_at_3 value: 26.555 - type: mrr_at_5 value: 28.060000000000002 - type: ndcg_at_1 value: 20.771 - type: ndcg_at_10 value: 29.335 - type: ndcg_at_100 value: 35.188 - type: ndcg_at_1000 value: 37.812 - type: ndcg_at_3 value: 24.83 - type: ndcg_at_5 value: 27.119 - type: precision_at_1 value: 20.771 - type: precision_at_10 value: 5.4350000000000005 - type: precision_at_100 value: 0.9480000000000001 - type: precision_at_1000 value: 0.13 - type: precision_at_3 value: 11.982 - type: precision_at_5 value: 8.831 - type: recall_at_1 value: 16.803 - type: recall_at_10 value: 40.039 - type: recall_at_100 value: 65.83200000000001 - type: recall_at_1000 value: 84.478 - type: recall_at_3 value: 27.682000000000002 - type: recall_at_5 value: 33.535 - type: map_at_1 value: 28.345 - type: map_at_10 value: 37.757000000000005 - type: map_at_100 value: 39.141 - type: map_at_1000 value: 39.262 - type: map_at_3 value: 35.183 - type: map_at_5 value: 36.592 - type: mrr_at_1 value: 34.649 - type: mrr_at_10 value: 43.586999999999996 - type: mrr_at_100 value: 44.481 - type: mrr_at_1000 value: 44.542 - type: mrr_at_3 value: 41.29 - type: mrr_at_5 value: 42.642 - type: ndcg_at_1 value: 34.649 - type: ndcg_at_10 value: 43.161 - type: ndcg_at_100 value: 48.734 - type: ndcg_at_1000 value: 51.046 - type: ndcg_at_3 value: 39.118 - type: ndcg_at_5 value: 41.022 - type: precision_at_1 value: 34.649 - type: precision_at_10 value: 7.603 - type: precision_at_100 value: 1.209 - type: precision_at_1000 value: 0.157 - type: precision_at_3 value: 18.319 - type: precision_at_5 value: 12.839 - 
type: recall_at_1 value: 28.345 - type: recall_at_10 value: 53.367 - type: recall_at_100 value: 76.453 - type: recall_at_1000 value: 91.82000000000001 - type: recall_at_3 value: 41.636 - type: recall_at_5 value: 46.760000000000005 - type: map_at_1 value: 22.419 - type: map_at_10 value: 31.716 - type: map_at_100 value: 33.152 - type: map_at_1000 value: 33.267 - type: map_at_3 value: 28.74 - type: map_at_5 value: 30.48 - type: mrr_at_1 value: 28.310999999999996 - type: mrr_at_10 value: 37.039 - type: mrr_at_100 value: 38.09 - type: mrr_at_1000 value: 38.145 - type: mrr_at_3 value: 34.437 - type: mrr_at_5 value: 36.024 - type: ndcg_at_1 value: 28.310999999999996 - type: ndcg_at_10 value: 37.41 - type: ndcg_at_100 value: 43.647999999999996 - type: ndcg_at_1000 value: 46.007 - type: ndcg_at_3 value: 32.509 - type: ndcg_at_5 value: 34.943999999999996 - type: precision_at_1 value: 28.310999999999996 - type: precision_at_10 value: 6.963 - type: precision_at_100 value: 1.1860000000000002 - type: precision_at_1000 value: 0.154 - type: precision_at_3 value: 15.867999999999999 - type: precision_at_5 value: 11.507000000000001 - type: recall_at_1 value: 22.419 - type: recall_at_10 value: 49.28 - type: recall_at_100 value: 75.802 - type: recall_at_1000 value: 92.032 - type: recall_at_3 value: 35.399 - type: recall_at_5 value: 42.027 - type: map_at_1 value: 24.669249999999998 - type: map_at_10 value: 33.332583333333325 - type: map_at_100 value: 34.557833333333335 - type: map_at_1000 value: 34.67141666666666 - type: map_at_3 value: 30.663166666666662 - type: map_at_5 value: 32.14883333333333 - type: mrr_at_1 value: 29.193833333333334 - type: mrr_at_10 value: 37.47625 - type: mrr_at_100 value: 38.3545 - type: mrr_at_1000 value: 38.413166666666676 - type: mrr_at_3 value: 35.06741666666667 - type: mrr_at_5 value: 36.450666666666656 - type: ndcg_at_1 value: 29.193833333333334 - type: ndcg_at_10 value: 38.505416666666676 - type: ndcg_at_100 value: 43.81125 - type: ndcg_at_1000 value: 46.09558333333333 - type: ndcg_at_3 value: 33.90916666666667 - type: ndcg_at_5 value: 36.07666666666666 - type: precision_at_1 value: 29.193833333333334 - type: precision_at_10 value: 6.7251666666666665 - type: precision_at_100 value: 1.1058333333333332 - type: precision_at_1000 value: 0.14833333333333332 - type: precision_at_3 value: 15.554166666666665 - type: precision_at_5 value: 11.079250000000002 - type: recall_at_1 value: 24.669249999999998 - type: recall_at_10 value: 49.75583333333332 - type: recall_at_100 value: 73.06908333333332 - type: recall_at_1000 value: 88.91316666666667 - type: recall_at_3 value: 36.913250000000005 - type: recall_at_5 value: 42.48641666666666 - type: map_at_1 value: 24.044999999999998 - type: map_at_10 value: 30.349999999999998 - type: map_at_100 value: 31.273 - type: map_at_1000 value: 31.362000000000002 - type: map_at_3 value: 28.508 - type: map_at_5 value: 29.369 - type: mrr_at_1 value: 26.994 - type: mrr_at_10 value: 33.12 - type: mrr_at_100 value: 33.904 - type: mrr_at_1000 value: 33.967000000000006 - type: mrr_at_3 value: 31.365 - type: mrr_at_5 value: 32.124 - type: ndcg_at_1 value: 26.994 - type: ndcg_at_10 value: 34.214 - type: ndcg_at_100 value: 38.681 - type: ndcg_at_1000 value: 40.926 - type: ndcg_at_3 value: 30.725 - type: ndcg_at_5 value: 31.967000000000002 - type: precision_at_1 value: 26.994 - type: precision_at_10 value: 5.215 - type: precision_at_100 value: 0.807 - type: precision_at_1000 value: 0.108 - type: precision_at_3 value: 12.986 - type: precision_at_5 value: 8.712 - type: 
recall_at_1 value: 24.044999999999998 - type: recall_at_10 value: 43.456 - type: recall_at_100 value: 63.675000000000004 - type: recall_at_1000 value: 80.05499999999999 - type: recall_at_3 value: 33.561 - type: recall_at_5 value: 36.767 - type: map_at_1 value: 15.672 - type: map_at_10 value: 22.641 - type: map_at_100 value: 23.75 - type: map_at_1000 value: 23.877000000000002 - type: map_at_3 value: 20.219 - type: map_at_5 value: 21.648 - type: mrr_at_1 value: 18.823 - type: mrr_at_10 value: 26.101999999999997 - type: mrr_at_100 value: 27.038 - type: mrr_at_1000 value: 27.118 - type: mrr_at_3 value: 23.669 - type: mrr_at_5 value: 25.173000000000002 - type: ndcg_at_1 value: 18.823 - type: ndcg_at_10 value: 27.176000000000002 - type: ndcg_at_100 value: 32.42 - type: ndcg_at_1000 value: 35.413 - type: ndcg_at_3 value: 22.756999999999998 - type: ndcg_at_5 value: 25.032 - type: precision_at_1 value: 18.823 - type: precision_at_10 value: 5.034000000000001 - type: precision_at_100 value: 0.895 - type: precision_at_1000 value: 0.132 - type: precision_at_3 value: 10.771 - type: precision_at_5 value: 8.1 - type: recall_at_1 value: 15.672 - type: recall_at_10 value: 37.296 - type: recall_at_100 value: 60.863 - type: recall_at_1000 value: 82.234 - type: recall_at_3 value: 25.330000000000002 - type: recall_at_5 value: 30.964000000000002 - type: map_at_1 value: 24.633 - type: map_at_10 value: 32.858 - type: map_at_100 value: 34.038000000000004 - type: map_at_1000 value: 34.141 - type: map_at_3 value: 30.209000000000003 - type: map_at_5 value: 31.567 - type: mrr_at_1 value: 28.358 - type: mrr_at_10 value: 36.433 - type: mrr_at_100 value: 37.352000000000004 - type: mrr_at_1000 value: 37.41 - type: mrr_at_3 value: 34.033 - type: mrr_at_5 value: 35.246 - type: ndcg_at_1 value: 28.358 - type: ndcg_at_10 value: 37.973 - type: ndcg_at_100 value: 43.411 - type: ndcg_at_1000 value: 45.747 - type: ndcg_at_3 value: 32.934999999999995 - type: ndcg_at_5 value: 35.013 - type: precision_at_1 value: 28.358 - type: precision_at_10 value: 6.418 - type: precision_at_100 value: 1.02 - type: precision_at_1000 value: 0.133 - type: precision_at_3 value: 14.677000000000001 - type: precision_at_5 value: 10.335999999999999 - type: recall_at_1 value: 24.633 - type: recall_at_10 value: 50.048 - type: recall_at_100 value: 73.821 - type: recall_at_1000 value: 90.046 - type: recall_at_3 value: 36.284 - type: recall_at_5 value: 41.370000000000005 - type: map_at_1 value: 23.133 - type: map_at_10 value: 31.491999999999997 - type: map_at_100 value: 33.062000000000005 - type: map_at_1000 value: 33.256 - type: map_at_3 value: 28.886 - type: map_at_5 value: 30.262 - type: mrr_at_1 value: 28.063 - type: mrr_at_10 value: 36.144 - type: mrr_at_100 value: 37.14 - type: mrr_at_1000 value: 37.191 - type: mrr_at_3 value: 33.762 - type: mrr_at_5 value: 34.997 - type: ndcg_at_1 value: 28.063 - type: ndcg_at_10 value: 36.951 - type: ndcg_at_100 value: 43.287 - type: ndcg_at_1000 value: 45.777 - type: ndcg_at_3 value: 32.786 - type: ndcg_at_5 value: 34.65 - type: precision_at_1 value: 28.063 - type: precision_at_10 value: 7.055 - type: precision_at_100 value: 1.476 - type: precision_at_1000 value: 0.22899999999999998 - type: precision_at_3 value: 15.481 - type: precision_at_5 value: 11.186 - type: recall_at_1 value: 23.133 - type: recall_at_10 value: 47.285 - type: recall_at_100 value: 76.176 - type: recall_at_1000 value: 92.176 - type: recall_at_3 value: 35.223 - type: recall_at_5 value: 40.142 - type: map_at_1 value: 19.547 - type: map_at_10 value: 
26.374 - type: map_at_100 value: 27.419 - type: map_at_1000 value: 27.539 - type: map_at_3 value: 23.882 - type: map_at_5 value: 25.163999999999998 - type: mrr_at_1 value: 21.442 - type: mrr_at_10 value: 28.458 - type: mrr_at_100 value: 29.360999999999997 - type: mrr_at_1000 value: 29.448999999999998 - type: mrr_at_3 value: 25.97 - type: mrr_at_5 value: 27.273999999999997 - type: ndcg_at_1 value: 21.442 - type: ndcg_at_10 value: 30.897000000000002 - type: ndcg_at_100 value: 35.99 - type: ndcg_at_1000 value: 38.832 - type: ndcg_at_3 value: 25.944 - type: ndcg_at_5 value: 28.126 - type: precision_at_1 value: 21.442 - type: precision_at_10 value: 4.9910000000000005 - type: precision_at_100 value: 0.8109999999999999 - type: precision_at_1000 value: 0.11800000000000001 - type: precision_at_3 value: 11.029 - type: precision_at_5 value: 7.911 - type: recall_at_1 value: 19.547 - type: recall_at_10 value: 42.886 - type: recall_at_100 value: 66.64999999999999 - type: recall_at_1000 value: 87.368 - type: recall_at_3 value: 29.143 - type: recall_at_5 value: 34.544000000000004 - task: type: Retrieval dataset: name: MTEB ClimateFEVER type: climate-fever config: default split: test revision: None metrics: - type: map_at_1 value: 15.572 - type: map_at_10 value: 25.312 - type: map_at_100 value: 27.062 - type: map_at_1000 value: 27.253 - type: map_at_3 value: 21.601 - type: map_at_5 value: 23.473 - type: mrr_at_1 value: 34.984 - type: mrr_at_10 value: 46.406 - type: mrr_at_100 value: 47.179 - type: mrr_at_1000 value: 47.21 - type: mrr_at_3 value: 43.485 - type: mrr_at_5 value: 45.322 - type: ndcg_at_1 value: 34.984 - type: ndcg_at_10 value: 34.344 - type: ndcg_at_100 value: 41.015 - type: ndcg_at_1000 value: 44.366 - type: ndcg_at_3 value: 29.119 - type: ndcg_at_5 value: 30.825999999999997 - type: precision_at_1 value: 34.984 - type: precision_at_10 value: 10.358 - type: precision_at_100 value: 1.762 - type: precision_at_1000 value: 0.23900000000000002 - type: precision_at_3 value: 21.368000000000002 - type: precision_at_5 value: 15.948 - type: recall_at_1 value: 15.572 - type: recall_at_10 value: 39.367999999999995 - type: recall_at_100 value: 62.183 - type: recall_at_1000 value: 80.92200000000001 - type: recall_at_3 value: 26.131999999999998 - type: recall_at_5 value: 31.635999999999996 - task: type: Retrieval dataset: name: MTEB DBPedia type: dbpedia-entity config: default split: test revision: None metrics: - type: map_at_1 value: 8.848 - type: map_at_10 value: 19.25 - type: map_at_100 value: 27.193 - type: map_at_1000 value: 28.721999999999998 - type: map_at_3 value: 13.968 - type: map_at_5 value: 16.283 - type: mrr_at_1 value: 68.75 - type: mrr_at_10 value: 76.25 - type: mrr_at_100 value: 76.534 - type: mrr_at_1000 value: 76.53999999999999 - type: mrr_at_3 value: 74.667 - type: mrr_at_5 value: 75.86699999999999 - type: ndcg_at_1 value: 56.00000000000001 - type: ndcg_at_10 value: 41.426 - type: ndcg_at_100 value: 45.660000000000004 - type: ndcg_at_1000 value: 53.02 - type: ndcg_at_3 value: 46.581 - type: ndcg_at_5 value: 43.836999999999996 - type: precision_at_1 value: 68.75 - type: precision_at_10 value: 32.800000000000004 - type: precision_at_100 value: 10.440000000000001 - type: precision_at_1000 value: 1.9980000000000002 - type: precision_at_3 value: 49.667 - type: precision_at_5 value: 42.25 - type: recall_at_1 value: 8.848 - type: recall_at_10 value: 24.467 - type: recall_at_100 value: 51.344 - type: recall_at_1000 value: 75.235 - type: recall_at_3 value: 15.329 - type: recall_at_5 value: 
18.892999999999997 - task: type: Classification dataset: name: MTEB EmotionClassification type: mteb/emotion config: default split: test revision: 4f58c6b202a23cf9a4da393831edf4f9183cad37 metrics: - type: accuracy value: 48.95 - type: f1 value: 43.44563593360779 - task: type: Retrieval dataset: name: MTEB FEVER type: fever config: default split: test revision: None metrics: - type: map_at_1 value: 78.036 - type: map_at_10 value: 85.639 - type: map_at_100 value: 85.815 - type: map_at_1000 value: 85.829 - type: map_at_3 value: 84.795 - type: map_at_5 value: 85.336 - type: mrr_at_1 value: 84.353 - type: mrr_at_10 value: 90.582 - type: mrr_at_100 value: 90.617 - type: mrr_at_1000 value: 90.617 - type: mrr_at_3 value: 90.132 - type: mrr_at_5 value: 90.447 - type: ndcg_at_1 value: 84.353 - type: ndcg_at_10 value: 89.003 - type: ndcg_at_100 value: 89.60000000000001 - type: ndcg_at_1000 value: 89.836 - type: ndcg_at_3 value: 87.81400000000001 - type: ndcg_at_5 value: 88.478 - type: precision_at_1 value: 84.353 - type: precision_at_10 value: 10.482 - type: precision_at_100 value: 1.099 - type: precision_at_1000 value: 0.11399999999999999 - type: precision_at_3 value: 33.257999999999996 - type: precision_at_5 value: 20.465 - type: recall_at_1 value: 78.036 - type: recall_at_10 value: 94.517 - type: recall_at_100 value: 96.828 - type: recall_at_1000 value: 98.261 - type: recall_at_3 value: 91.12 - type: recall_at_5 value: 92.946 - task: type: Retrieval dataset: name: MTEB FiQA2018 type: fiqa config: default split: test revision: None metrics: - type: map_at_1 value: 20.191 - type: map_at_10 value: 32.369 - type: map_at_100 value: 34.123999999999995 - type: map_at_1000 value: 34.317 - type: map_at_3 value: 28.71 - type: map_at_5 value: 30.607 - type: mrr_at_1 value: 40.894999999999996 - type: mrr_at_10 value: 48.842 - type: mrr_at_100 value: 49.599 - type: mrr_at_1000 value: 49.647000000000006 - type: mrr_at_3 value: 46.785 - type: mrr_at_5 value: 47.672 - type: ndcg_at_1 value: 40.894999999999996 - type: ndcg_at_10 value: 39.872 - type: ndcg_at_100 value: 46.126 - type: ndcg_at_1000 value: 49.476 - type: ndcg_at_3 value: 37.153000000000006 - type: ndcg_at_5 value: 37.433 - type: precision_at_1 value: 40.894999999999996 - type: precision_at_10 value: 10.818 - type: precision_at_100 value: 1.73 - type: precision_at_1000 value: 0.231 - type: precision_at_3 value: 25.051000000000002 - type: precision_at_5 value: 17.531 - type: recall_at_1 value: 20.191 - type: recall_at_10 value: 45.768 - type: recall_at_100 value: 68.82000000000001 - type: recall_at_1000 value: 89.133 - type: recall_at_3 value: 33.296 - type: recall_at_5 value: 38.022 - task: type: Retrieval dataset: name: MTEB HotpotQA type: hotpotqa config: default split: test revision: None metrics: - type: map_at_1 value: 39.257 - type: map_at_10 value: 61.467000000000006 - type: map_at_100 value: 62.364 - type: map_at_1000 value: 62.424 - type: map_at_3 value: 58.228 - type: map_at_5 value: 60.283 - type: mrr_at_1 value: 78.515 - type: mrr_at_10 value: 84.191 - type: mrr_at_100 value: 84.378 - type: mrr_at_1000 value: 84.385 - type: mrr_at_3 value: 83.284 - type: mrr_at_5 value: 83.856 - type: ndcg_at_1 value: 78.515 - type: ndcg_at_10 value: 69.78999999999999 - type: ndcg_at_100 value: 72.886 - type: ndcg_at_1000 value: 74.015 - type: ndcg_at_3 value: 65.23 - type: ndcg_at_5 value: 67.80199999999999 - type: precision_at_1 value: 78.515 - type: precision_at_10 value: 14.519000000000002 - type: precision_at_100 value: 1.694 - type: precision_at_1000 
value: 0.184 - type: precision_at_3 value: 41.702 - type: precision_at_5 value: 27.046999999999997 - type: recall_at_1 value: 39.257 - type: recall_at_10 value: 72.59299999999999 - type: recall_at_100 value: 84.679 - type: recall_at_1000 value: 92.12 - type: recall_at_3 value: 62.552 - type: recall_at_5 value: 67.616 - task: type: Classification dataset: name: MTEB ImdbClassification type: mteb/imdb config: default split: test revision: 3d86128a09e091d6018b6d26cad27f2739fc2db7 metrics: - type: accuracy value: 91.5152 - type: ap value: 87.64584669595709 - type: f1 value: 91.50605576428437 - task: type: Retrieval dataset: name: MTEB MSMARCO type: msmarco config: default split: dev revision: None metrics: - type: map_at_1 value: 21.926000000000002 - type: map_at_10 value: 34.049 - type: map_at_100 value: 35.213 - type: map_at_1000 value: 35.265 - type: map_at_3 value: 30.309 - type: map_at_5 value: 32.407000000000004 - type: mrr_at_1 value: 22.55 - type: mrr_at_10 value: 34.657 - type: mrr_at_100 value: 35.760999999999996 - type: mrr_at_1000 value: 35.807 - type: mrr_at_3 value: 30.989 - type: mrr_at_5 value: 33.039 - type: ndcg_at_1 value: 22.55 - type: ndcg_at_10 value: 40.842 - type: ndcg_at_100 value: 46.436 - type: ndcg_at_1000 value: 47.721999999999994 - type: ndcg_at_3 value: 33.209 - type: ndcg_at_5 value: 36.943 - type: precision_at_1 value: 22.55 - type: precision_at_10 value: 6.447 - type: precision_at_100 value: 0.9249999999999999 - type: precision_at_1000 value: 0.104 - type: precision_at_3 value: 14.136000000000001 - type: precision_at_5 value: 10.381 - type: recall_at_1 value: 21.926000000000002 - type: recall_at_10 value: 61.724999999999994 - type: recall_at_100 value: 87.604 - type: recall_at_1000 value: 97.421 - type: recall_at_3 value: 40.944 - type: recall_at_5 value: 49.915 - task: type: Classification dataset: name: MTEB MTOPDomainClassification (en) type: mteb/mtop_domain config: en split: test revision: d80d48c1eb48d3562165c59d59d0034df9fff0bf metrics: - type: accuracy value: 93.54765161878704 - type: f1 value: 93.3298945415573 - task: type: Classification dataset: name: MTEB MTOPIntentClassification (en) type: mteb/mtop_intent config: en split: test revision: ae001d0e6b1228650b7bd1c2c65fb50ad11a8aba metrics: - type: accuracy value: 75.71591427268582 - type: f1 value: 59.32113870474471 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (en) type: mteb/amazon_massive_intent config: en split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 75.83053127101547 - type: f1 value: 73.60757944876475 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (en) type: mteb/amazon_massive_scenario config: en split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 78.72562205783457 - type: f1 value: 78.63761662505502 - task: type: Clustering dataset: name: MTEB MedrxivClusteringP2P type: mteb/medrxiv-clustering-p2p config: default split: test revision: e7a26af6f3ae46b30dde8737f02c07b1505bcc73 metrics: - type: v_measure value: 33.37935633767996 - task: type: Clustering dataset: name: MTEB MedrxivClusteringS2S type: mteb/medrxiv-clustering-s2s config: default split: test revision: 35191c8c0dca72d8ff3efcd72aa802307d469663 metrics: - type: v_measure value: 31.55270546130387 - task: type: Reranking dataset: name: MTEB MindSmallReranking type: mteb/mind_small config: default split: test revision: 3bdac13927fdc888b903db93b2ffdbd90b295a69 metrics: - type: map 
value: 30.462692753143834 - type: mrr value: 31.497569753511563 - task: type: Retrieval dataset: name: MTEB NFCorpus type: nfcorpus config: default split: test revision: None metrics: - type: map_at_1 value: 5.646 - type: map_at_10 value: 12.498 - type: map_at_100 value: 15.486 - type: map_at_1000 value: 16.805999999999997 - type: map_at_3 value: 9.325 - type: map_at_5 value: 10.751 - type: mrr_at_1 value: 43.034 - type: mrr_at_10 value: 52.662 - type: mrr_at_100 value: 53.189 - type: mrr_at_1000 value: 53.25 - type: mrr_at_3 value: 50.929 - type: mrr_at_5 value: 51.92 - type: ndcg_at_1 value: 41.796 - type: ndcg_at_10 value: 33.477000000000004 - type: ndcg_at_100 value: 29.996000000000002 - type: ndcg_at_1000 value: 38.864 - type: ndcg_at_3 value: 38.940000000000005 - type: ndcg_at_5 value: 36.689 - type: precision_at_1 value: 43.034 - type: precision_at_10 value: 24.799 - type: precision_at_100 value: 7.432999999999999 - type: precision_at_1000 value: 1.9929999999999999 - type: precision_at_3 value: 36.842000000000006 - type: precision_at_5 value: 32.135999999999996 - type: recall_at_1 value: 5.646 - type: recall_at_10 value: 15.963 - type: recall_at_100 value: 29.492 - type: recall_at_1000 value: 61.711000000000006 - type: recall_at_3 value: 10.585 - type: recall_at_5 value: 12.753999999999998 - task: type: Retrieval dataset: name: MTEB NQ type: nq config: default split: test revision: None metrics: - type: map_at_1 value: 27.602 - type: map_at_10 value: 41.545 - type: map_at_100 value: 42.644999999999996 - type: map_at_1000 value: 42.685 - type: map_at_3 value: 37.261 - type: map_at_5 value: 39.706 - type: mrr_at_1 value: 31.141000000000002 - type: mrr_at_10 value: 44.139 - type: mrr_at_100 value: 44.997 - type: mrr_at_1000 value: 45.025999999999996 - type: mrr_at_3 value: 40.503 - type: mrr_at_5 value: 42.64 - type: ndcg_at_1 value: 31.141000000000002 - type: ndcg_at_10 value: 48.995 - type: ndcg_at_100 value: 53.788000000000004 - type: ndcg_at_1000 value: 54.730000000000004 - type: ndcg_at_3 value: 40.844 - type: ndcg_at_5 value: 44.955 - type: precision_at_1 value: 31.141000000000002 - type: precision_at_10 value: 8.233 - type: precision_at_100 value: 1.093 - type: precision_at_1000 value: 0.11800000000000001 - type: precision_at_3 value: 18.579 - type: precision_at_5 value: 13.533999999999999 - type: recall_at_1 value: 27.602 - type: recall_at_10 value: 69.216 - type: recall_at_100 value: 90.252 - type: recall_at_1000 value: 97.27 - type: recall_at_3 value: 47.987 - type: recall_at_5 value: 57.438 - task: type: Retrieval dataset: name: MTEB QuoraRetrieval type: quora config: default split: test revision: None metrics: - type: map_at_1 value: 70.949 - type: map_at_10 value: 84.89999999999999 - type: map_at_100 value: 85.531 - type: map_at_1000 value: 85.548 - type: map_at_3 value: 82.027 - type: map_at_5 value: 83.853 - type: mrr_at_1 value: 81.69999999999999 - type: mrr_at_10 value: 87.813 - type: mrr_at_100 value: 87.917 - type: mrr_at_1000 value: 87.91799999999999 - type: mrr_at_3 value: 86.938 - type: mrr_at_5 value: 87.53999999999999 - type: ndcg_at_1 value: 81.75 - type: ndcg_at_10 value: 88.55499999999999 - type: ndcg_at_100 value: 89.765 - type: ndcg_at_1000 value: 89.871 - type: ndcg_at_3 value: 85.905 - type: ndcg_at_5 value: 87.41 - type: precision_at_1 value: 81.75 - type: precision_at_10 value: 13.403 - type: precision_at_100 value: 1.528 - type: precision_at_1000 value: 0.157 - type: precision_at_3 value: 37.597 - type: precision_at_5 value: 24.69 - type: recall_at_1 
value: 70.949 - type: recall_at_10 value: 95.423 - type: recall_at_100 value: 99.509 - type: recall_at_1000 value: 99.982 - type: recall_at_3 value: 87.717 - type: recall_at_5 value: 92.032 - task: type: Clustering dataset: name: MTEB RedditClustering type: mteb/reddit-clustering config: default split: test revision: 24640382cdbf8abc73003fb0fa6d111a705499eb metrics: - type: v_measure value: 51.76962893449579 - task: type: Clustering dataset: name: MTEB RedditClusteringP2P type: mteb/reddit-clustering-p2p config: default split: test revision: 282350215ef01743dc01b456c7f5241fa8937f16 metrics: - type: v_measure value: 62.32897690686379 - task: type: Retrieval dataset: name: MTEB SCIDOCS type: scidocs config: default split: test revision: None metrics: - type: map_at_1 value: 4.478 - type: map_at_10 value: 11.994 - type: map_at_100 value: 13.977 - type: map_at_1000 value: 14.295 - type: map_at_3 value: 8.408999999999999 - type: map_at_5 value: 10.024 - type: mrr_at_1 value: 22.1 - type: mrr_at_10 value: 33.526 - type: mrr_at_100 value: 34.577000000000005 - type: mrr_at_1000 value: 34.632000000000005 - type: mrr_at_3 value: 30.217 - type: mrr_at_5 value: 31.962000000000003 - type: ndcg_at_1 value: 22.1 - type: ndcg_at_10 value: 20.191 - type: ndcg_at_100 value: 27.954 - type: ndcg_at_1000 value: 33.491 - type: ndcg_at_3 value: 18.787000000000003 - type: ndcg_at_5 value: 16.378999999999998 - type: precision_at_1 value: 22.1 - type: precision_at_10 value: 10.69 - type: precision_at_100 value: 2.1919999999999997 - type: precision_at_1000 value: 0.35200000000000004 - type: precision_at_3 value: 17.732999999999997 - type: precision_at_5 value: 14.499999999999998 - type: recall_at_1 value: 4.478 - type: recall_at_10 value: 21.657 - type: recall_at_100 value: 44.54 - type: recall_at_1000 value: 71.542 - type: recall_at_3 value: 10.778 - type: recall_at_5 value: 14.687 - task: type: STS dataset: name: MTEB SICK-R type: mteb/sickr-sts config: default split: test revision: a6ea5a8cab320b040a23452cc28066d9beae2cee metrics: - type: cos_sim_pearson value: 82.82325259156718 - type: cos_sim_spearman value: 79.2463589100662 - type: euclidean_pearson value: 80.48318380496771 - type: euclidean_spearman value: 79.34451935199979 - type: manhattan_pearson value: 80.39041824178759 - type: manhattan_spearman value: 79.23002892700211 - task: type: STS dataset: name: MTEB STS12 type: mteb/sts12-sts config: default split: test revision: a0d554a64d88156834ff5ae9920b964011b16384 metrics: - type: cos_sim_pearson value: 85.74130231431258 - type: cos_sim_spearman value: 78.36856568042397 - type: euclidean_pearson value: 82.48301631890303 - type: euclidean_spearman value: 78.28376980722732 - type: manhattan_pearson value: 82.43552075450525 - type: manhattan_spearman value: 78.22702443947126 - task: type: STS dataset: name: MTEB STS13 type: mteb/sts13-sts config: default split: test revision: 7e90230a92c190f1bf69ae9002b8cea547a64cca metrics: - type: cos_sim_pearson value: 79.96138619461459 - type: cos_sim_spearman value: 81.85436343502379 - type: euclidean_pearson value: 81.82895226665367 - type: euclidean_spearman value: 82.22707349602916 - type: manhattan_pearson value: 81.66303369445873 - type: manhattan_spearman value: 82.05030197179455 - task: type: STS dataset: name: MTEB STS14 type: mteb/sts14-sts config: default split: test revision: 6031580fec1f6af667f0bd2da0a551cf4f0b2375 metrics: - type: cos_sim_pearson value: 80.05481244198648 - type: cos_sim_spearman value: 80.85052504637808 - type: euclidean_pearson value: 
80.86728419744497 - type: euclidean_spearman value: 81.033786401512 - type: manhattan_pearson value: 80.90107531061103 - type: manhattan_spearman value: 81.11374116827795 - task: type: STS dataset: name: MTEB STS15 type: mteb/sts15-sts config: default split: test revision: ae752c7c21bf194d8b67fd573edf7ae58183cbe3 metrics: - type: cos_sim_pearson value: 84.615220756399 - type: cos_sim_spearman value: 86.46858500002092 - type: euclidean_pearson value: 86.08307800247586 - type: euclidean_spearman value: 86.72691443870013 - type: manhattan_pearson value: 85.96155594487269 - type: manhattan_spearman value: 86.605909505275 - task: type: STS dataset: name: MTEB STS16 type: mteb/sts16-sts config: default split: test revision: 4d8694f8f0e0100860b497b999b3dbed754a0513 metrics: - type: cos_sim_pearson value: 82.14363913634436 - type: cos_sim_spearman value: 84.48430226487102 - type: euclidean_pearson value: 83.75303424801902 - type: euclidean_spearman value: 84.56762380734538 - type: manhattan_pearson value: 83.6135447165928 - type: manhattan_spearman value: 84.39898212616731 - task: type: STS dataset: name: MTEB STS17 (en-en) type: mteb/sts17-crosslingual-sts config: en-en split: test revision: af5e6fb845001ecf41f4c1e033ce921939a2a68d metrics: - type: cos_sim_pearson value: 85.09909252554525 - type: cos_sim_spearman value: 85.70951402743276 - type: euclidean_pearson value: 87.1991936239908 - type: euclidean_spearman value: 86.07745840612071 - type: manhattan_pearson value: 87.25039137549952 - type: manhattan_spearman value: 85.99938746659761 - task: type: STS dataset: name: MTEB STS22 (en) type: mteb/sts22-crosslingual-sts config: en split: test revision: 6d1ba47164174a496b7fa5d3569dae26a6813b80 metrics: - type: cos_sim_pearson value: 63.529332093413615 - type: cos_sim_spearman value: 65.38177340147439 - type: euclidean_pearson value: 66.35278011412136 - type: euclidean_spearman value: 65.47147267032997 - type: manhattan_pearson value: 66.71804682408693 - type: manhattan_spearman value: 65.67406521423597 - task: type: STS dataset: name: MTEB STSBenchmark type: mteb/stsbenchmark-sts config: default split: test revision: b0fddb56ed78048fa8b90373c8a3cfc37b684831 metrics: - type: cos_sim_pearson value: 82.45802942885662 - type: cos_sim_spearman value: 84.8853341842566 - type: euclidean_pearson value: 84.60915021096707 - type: euclidean_spearman value: 85.11181242913666 - type: manhattan_pearson value: 84.38600521210364 - type: manhattan_spearman value: 84.89045417981723 - task: type: Reranking dataset: name: MTEB SciDocsRR type: mteb/scidocs-reranking config: default split: test revision: d3c5e1fc0b855ab6097bf1cda04dd73947d7caab metrics: - type: map value: 85.92793380635129 - type: mrr value: 95.85834191226348 - task: type: Retrieval dataset: name: MTEB SciFact type: scifact config: default split: test revision: None metrics: - type: map_at_1 value: 55.74400000000001 - type: map_at_10 value: 65.455 - type: map_at_100 value: 66.106 - type: map_at_1000 value: 66.129 - type: map_at_3 value: 62.719 - type: map_at_5 value: 64.441 - type: mrr_at_1 value: 58.667 - type: mrr_at_10 value: 66.776 - type: mrr_at_100 value: 67.363 - type: mrr_at_1000 value: 67.384 - type: mrr_at_3 value: 64.889 - type: mrr_at_5 value: 66.122 - type: ndcg_at_1 value: 58.667 - type: ndcg_at_10 value: 69.904 - type: ndcg_at_100 value: 72.807 - type: ndcg_at_1000 value: 73.423 - type: ndcg_at_3 value: 65.405 - type: ndcg_at_5 value: 67.86999999999999 - type: precision_at_1 value: 58.667 - type: precision_at_10 value: 9.3 - type: 
precision_at_100 value: 1.08 - type: precision_at_1000 value: 0.11299999999999999 - type: precision_at_3 value: 25.444 - type: precision_at_5 value: 17 - type: recall_at_1 value: 55.74400000000001 - type: recall_at_10 value: 82.122 - type: recall_at_100 value: 95.167 - type: recall_at_1000 value: 100 - type: recall_at_3 value: 70.14399999999999 - type: recall_at_5 value: 76.417 - task: type: PairClassification dataset: name: MTEB SprintDuplicateQuestions type: mteb/sprintduplicatequestions-pairclassification config: default split: test revision: d66bd1f72af766a5cc4b0ca5e00c162f89e8cc46 metrics: - type: cos_sim_accuracy value: 99.86534653465347 - type: cos_sim_ap value: 96.54142419791388 - type: cos_sim_f1 value: 93.07535641547861 - type: cos_sim_precision value: 94.81327800829875 - type: cos_sim_recall value: 91.4 - type: dot_accuracy value: 99.86435643564356 - type: dot_ap value: 96.53682260449868 - type: dot_f1 value: 92.98515104966718 - type: dot_precision value: 95.27806925498426 - type: dot_recall value: 90.8 - type: euclidean_accuracy value: 99.86336633663366 - type: euclidean_ap value: 96.5228676185697 - type: euclidean_f1 value: 92.9735234215886 - type: euclidean_precision value: 94.70954356846472 - type: euclidean_recall value: 91.3 - type: manhattan_accuracy value: 99.85841584158416 - type: manhattan_ap value: 96.50392760934032 - type: manhattan_f1 value: 92.84642321160581 - type: manhattan_precision value: 92.8928928928929 - type: manhattan_recall value: 92.80000000000001 - type: max_accuracy value: 99.86534653465347 - type: max_ap value: 96.54142419791388 - type: max_f1 value: 93.07535641547861 - task: type: Clustering dataset: name: MTEB StackExchangeClustering type: mteb/stackexchange-clustering config: default split: test revision: 6cbc1f7b2bc0622f2e39d2c77fa502909748c259 metrics: - type: v_measure value: 61.08285408766616 - task: type: Clustering dataset: name: MTEB StackExchangeClusteringP2P type: mteb/stackexchange-clustering-p2p config: default split: test revision: 815ca46b2622cec33ccafc3735d572c266efdb44 metrics: - type: v_measure value: 35.640675309010604 - task: type: Reranking dataset: name: MTEB StackOverflowDupQuestions type: mteb/stackoverflowdupquestions-reranking config: default split: test revision: e185fbe320c72810689fc5848eb6114e1ef5ec69 metrics: - type: map value: 53.20333913710715 - type: mrr value: 54.088813555725324 - task: type: Summarization dataset: name: MTEB SummEval type: mteb/summeval config: default split: test revision: cda12ad7615edc362dbf25a00fdd61d3b1eaf93c metrics: - type: cos_sim_pearson value: 30.79465221925075 - type: cos_sim_spearman value: 30.530816059163634 - type: dot_pearson value: 31.364837244718043 - type: dot_spearman value: 30.79726823684003 - task: type: Retrieval dataset: name: MTEB TRECCOVID type: trec-covid config: default split: test revision: None metrics: - type: map_at_1 value: 0.22599999999999998 - type: map_at_10 value: 1.735 - type: map_at_100 value: 8.978 - type: map_at_1000 value: 20.851 - type: map_at_3 value: 0.613 - type: map_at_5 value: 0.964 - type: mrr_at_1 value: 88 - type: mrr_at_10 value: 92.867 - type: mrr_at_100 value: 92.867 - type: mrr_at_1000 value: 92.867 - type: mrr_at_3 value: 92.667 - type: mrr_at_5 value: 92.667 - type: ndcg_at_1 value: 82 - type: ndcg_at_10 value: 73.164 - type: ndcg_at_100 value: 51.878 - type: ndcg_at_1000 value: 44.864 - type: ndcg_at_3 value: 79.184 - type: ndcg_at_5 value: 76.39 - type: precision_at_1 value: 88 - type: precision_at_10 value: 76.2 - type: precision_at_100 
value: 52.459999999999994 - type: precision_at_1000 value: 19.692 - type: precision_at_3 value: 82.667 - type: precision_at_5 value: 80 - type: recall_at_1 value: 0.22599999999999998 - type: recall_at_10 value: 1.942 - type: recall_at_100 value: 12.342 - type: recall_at_1000 value: 41.42 - type: recall_at_3 value: 0.637 - type: recall_at_5 value: 1.034 - task: type: Retrieval dataset: name: MTEB Touche2020 type: webis-touche2020 config: default split: test revision: None metrics: - type: map_at_1 value: 3.567 - type: map_at_10 value: 13.116 - type: map_at_100 value: 19.39 - type: map_at_1000 value: 20.988 - type: map_at_3 value: 7.109 - type: map_at_5 value: 9.950000000000001 - type: mrr_at_1 value: 42.857 - type: mrr_at_10 value: 57.404999999999994 - type: mrr_at_100 value: 58.021 - type: mrr_at_1000 value: 58.021 - type: mrr_at_3 value: 54.762 - type: mrr_at_5 value: 56.19 - type: ndcg_at_1 value: 38.775999999999996 - type: ndcg_at_10 value: 30.359 - type: ndcg_at_100 value: 41.284 - type: ndcg_at_1000 value: 52.30200000000001 - type: ndcg_at_3 value: 36.744 - type: ndcg_at_5 value: 34.326 - type: precision_at_1 value: 42.857 - type: precision_at_10 value: 26.122 - type: precision_at_100 value: 8.082 - type: precision_at_1000 value: 1.559 - type: precision_at_3 value: 40.136 - type: precision_at_5 value: 35.510000000000005 - type: recall_at_1 value: 3.567 - type: recall_at_10 value: 19.045 - type: recall_at_100 value: 49.979 - type: recall_at_1000 value: 84.206 - type: recall_at_3 value: 8.52 - type: recall_at_5 value: 13.103000000000002 - task: type: Classification dataset: name: MTEB ToxicConversationsClassification type: mteb/toxic_conversations_50k config: default split: test revision: d7c0de2777da35d6aae2200a62c6e0e5af397c4c metrics: - type: accuracy value: 68.8394 - type: ap value: 13.454399712443099 - type: f1 value: 53.04963076364322 - task: type: Classification dataset: name: MTEB TweetSentimentExtractionClassification type: mteb/tweet_sentiment_extraction config: default split: test revision: d604517c81ca91fe16a244d1248fc021f9ecee7a metrics: - type: accuracy value: 60.546123372948514 - type: f1 value: 60.86952793277713 - task: type: Clustering dataset: name: MTEB TwentyNewsgroupsClustering type: mteb/twentynewsgroups-clustering config: default split: test revision: 6125ec4e24fa026cec8a478383ee943acfbd5449 metrics: - type: v_measure value: 49.10042955060234 - task: type: PairClassification dataset: name: MTEB TwitterSemEval2015 type: mteb/twittersemeval2015-pairclassification config: default split: test revision: 70970daeab8776df92f5ea462b6173c0b46fd2d1 metrics: - type: cos_sim_accuracy value: 85.03308100375514 - type: cos_sim_ap value: 71.08284605869684 - type: cos_sim_f1 value: 65.42539436255494 - type: cos_sim_precision value: 64.14807302231237 - type: cos_sim_recall value: 66.75461741424802 - type: dot_accuracy value: 84.68736961316088 - type: dot_ap value: 69.20524036530992 - type: dot_f1 value: 63.54893953365829 - type: dot_precision value: 63.45698500394633 - type: dot_recall value: 63.641160949868066 - type: euclidean_accuracy value: 85.07480479227513 - type: euclidean_ap value: 71.14592761009864 - type: euclidean_f1 value: 65.43814432989691 - type: euclidean_precision value: 63.95465994962216 - type: euclidean_recall value: 66.99208443271768 - type: manhattan_accuracy value: 85.06288370984085 - type: manhattan_ap value: 71.07289742593868 - type: manhattan_f1 value: 65.37585421412301 - type: manhattan_precision value: 62.816147859922175 - type: manhattan_recall value: 
68.15303430079156 - type: max_accuracy value: 85.07480479227513 - type: max_ap value: 71.14592761009864 - type: max_f1 value: 65.43814432989691 - task: type: PairClassification dataset: name: MTEB TwitterURLCorpus type: mteb/twitterurlcorpus-pairclassification config: default split: test revision: 8b6510b0b1fa4e4c4f879467980e9be563ec1cdf metrics: - type: cos_sim_accuracy value: 87.79058485659952 - type: cos_sim_ap value: 83.7183187008759 - type: cos_sim_f1 value: 75.86921142180798 - type: cos_sim_precision value: 73.00683371298405 - type: cos_sim_recall value: 78.96519864490298 - type: dot_accuracy value: 87.0085768618776 - type: dot_ap value: 81.87467488474279 - type: dot_f1 value: 74.04188363990559 - type: dot_precision value: 72.10507114191901 - type: dot_recall value: 76.08561749307053 - type: euclidean_accuracy value: 87.8332751193387 - type: euclidean_ap value: 83.83585648120315 - type: euclidean_f1 value: 76.02582177042369 - type: euclidean_precision value: 73.36388371759989 - type: euclidean_recall value: 78.88820449645827 - type: manhattan_accuracy value: 87.87208444910156 - type: manhattan_ap value: 83.8101950642973 - type: manhattan_f1 value: 75.90454195535027 - type: manhattan_precision value: 72.44419564761039 - type: manhattan_recall value: 79.71204188481676 - type: max_accuracy value: 87.87208444910156 - type: max_ap value: 83.83585648120315 - type: max_f1 value: 76.02582177042369 --- **Recommend switching to newest [BAAI/bge-small-en-v1.5](https://huggingface.co/BAAI/bge-small-en-v1.5), which has more reasonable similarity distribution and same method of usage.** <h1 align="center">FlagEmbedding</h1> <h4 align="center"> <p> <a href=#model-list>Model List</a> | <a href=#frequently-asked-questions>FAQ</a> | <a href=#usage>Usage</a> | <a href="#evaluation">Evaluation</a> | <a href="#train">Train</a> | <a href="#citation">Citation</a> | <a href="#license">License</a> <p> </h4> More details please refer to our Github: [FlagEmbedding](https://github.com/FlagOpen/FlagEmbedding). [English](README.md) | [中文](https://github.com/FlagOpen/FlagEmbedding/blob/master/README_zh.md) FlagEmbedding focus on retrieval-augmented LLMs, consisting of following projects currently: - **Fine-tuning of LM** : [LM-Cocktail](https://github.com/FlagOpen/FlagEmbedding/tree/master/LM_Cocktail) - **Dense Retrieval**: [LLM Embedder](https://github.com/FlagOpen/FlagEmbedding/tree/master/FlagEmbedding/llm_embedder), [BGE Embedding](https://github.com/FlagOpen/FlagEmbedding/tree/master/FlagEmbedding/baai_general_embedding), [C-MTEB](https://github.com/FlagOpen/FlagEmbedding/tree/master/C_MTEB) - **Reranker Model**: [BGE Reranker](https://github.com/FlagOpen/FlagEmbedding/tree/master/FlagEmbedding/reranker) ## News - 11/23/2023: Release [LM-Cocktail](https://github.com/FlagOpen/FlagEmbedding/tree/master/LM_Cocktail), a method to maintain general capabilities during fine-tuning by merging multiple language models. [Technical Report](https://arxiv.org/abs/2311.13534) :fire: - 10/12/2023: Release [LLM-Embedder](https://github.com/FlagOpen/FlagEmbedding/tree/master/FlagEmbedding/llm_embedder), a unified embedding model to support diverse retrieval augmentation needs for LLMs. 
[Technical Report](https://arxiv.org/pdf/2310.07554.pdf) - 09/15/2023: The [technical report](https://arxiv.org/pdf/2309.07597.pdf) of BGE has been released - 09/15/2023: The [massive training data](https://data.baai.ac.cn/details/BAAI-MTP) of BGE has been released - 09/12/2023: New models: - **New reranker model**: release cross-encoder models `BAAI/bge-reranker-base` and `BAAI/bge-reranker-large`, which are more powerful than embedding model. We recommend to use/fine-tune them to re-rank top-k documents returned by embedding models. - **update embedding model**: release `bge-*-v1.5` embedding model to alleviate the issue of the similarity distribution, and enhance its retrieval ability without instruction. <details> <summary>More</summary> <!-- ### More --> - 09/07/2023: Update [fine-tune code](https://github.com/FlagOpen/FlagEmbedding/blob/master/FlagEmbedding/baai_general_embedding/README.md): Add script to mine hard negatives and support adding instruction during fine-tuning. - 08/09/2023: BGE Models are integrated into **Langchain**, you can use it like [this](#using-langchain); C-MTEB **leaderboard** is [available](https://huggingface.co/spaces/mteb/leaderboard). - 08/05/2023: Release base-scale and small-scale models, **best performance among the models of the same size 🤗** - 08/02/2023: Release `bge-large-*`(short for BAAI General Embedding) Models, **rank 1st on MTEB and C-MTEB benchmark!** :tada: :tada: - 08/01/2023: We release the [Chinese Massive Text Embedding Benchmark](https://github.com/FlagOpen/FlagEmbedding/blob/master/C_MTEB) (**C-MTEB**), consisting of 31 test dataset. </details> ## Model List `bge` is short for `BAAI general embedding`. | Model | Language | | Description | query instruction for retrieval [1] | |:-------------------------------|:--------:| :--------:| :--------:|:--------:| | [LM-Cocktail](https://huggingface.co/Shitao) | English | | fine-tuned models (Llama and BGE) which can be used to reproduce the results of LM-Cocktail | | | [BAAI/llm-embedder](https://huggingface.co/BAAI/llm-embedder) | English | [Inference](./FlagEmbedding/llm_embedder/README.md) [Fine-tune](./FlagEmbedding/llm_embedder/README.md) | a unified embedding model to support diverse retrieval augmentation needs for LLMs | See [README](./FlagEmbedding/llm_embedder/README.md) | | [BAAI/bge-reranker-large](https://huggingface.co/BAAI/bge-reranker-large) | Chinese and English | [Inference](#usage-for-reranker) [Fine-tune](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/reranker) | a cross-encoder model which is more accurate but less efficient [2] | | | [BAAI/bge-reranker-base](https://huggingface.co/BAAI/bge-reranker-base) | Chinese and English | [Inference](#usage-for-reranker) [Fine-tune](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/reranker) | a cross-encoder model which is more accurate but less efficient [2] | | | [BAAI/bge-large-en-v1.5](https://huggingface.co/BAAI/bge-large-en-v1.5) | English | [Inference](#usage-for-embedding-model) [Fine-tune](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/finetune) | version 1.5 with more reasonable similarity distribution | `Represent this sentence for searching relevant passages: ` | | [BAAI/bge-base-en-v1.5](https://huggingface.co/BAAI/bge-base-en-v1.5) | English | [Inference](#usage-for-embedding-model) [Fine-tune](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/finetune) | version 1.5 with more reasonable similarity distribution | `Represent this sentence for searching 
relevant passages: ` | | [BAAI/bge-small-en-v1.5](https://huggingface.co/BAAI/bge-small-en-v1.5) | English | [Inference](#usage-for-embedding-model) [Fine-tune](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/finetune) | version 1.5 with more reasonable similarity distribution | `Represent this sentence for searching relevant passages: ` | | [BAAI/bge-large-zh-v1.5](https://huggingface.co/BAAI/bge-large-zh-v1.5) | Chinese | [Inference](#usage-for-embedding-model) [Fine-tune](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/finetune) | version 1.5 with more reasonable similarity distribution | `为这个句子生成表示以用于检索相关文章:` | | [BAAI/bge-base-zh-v1.5](https://huggingface.co/BAAI/bge-base-zh-v1.5) | Chinese | [Inference](#usage-for-embedding-model) [Fine-tune](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/finetune) | version 1.5 with more reasonable similarity distribution | `为这个句子生成表示以用于检索相关文章:` | | [BAAI/bge-small-zh-v1.5](https://huggingface.co/BAAI/bge-small-zh-v1.5) | Chinese | [Inference](#usage-for-embedding-model) [Fine-tune](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/finetune) | version 1.5 with more reasonable similarity distribution | `为这个句子生成表示以用于检索相关文章:` | | [BAAI/bge-large-en](https://huggingface.co/BAAI/bge-large-en) | English | [Inference](#usage-for-embedding-model) [Fine-tune](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/finetune) | :trophy: rank **1st** in [MTEB](https://huggingface.co/spaces/mteb/leaderboard) leaderboard | `Represent this sentence for searching relevant passages: ` | | [BAAI/bge-base-en](https://huggingface.co/BAAI/bge-base-en) | English | [Inference](#usage-for-embedding-model) [Fine-tune](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/finetune) | a base-scale model but with similar ability to `bge-large-en` | `Represent this sentence for searching relevant passages: ` | | [BAAI/bge-small-en](https://huggingface.co/BAAI/bge-small-en) | English | [Inference](#usage-for-embedding-model) [Fine-tune](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/finetune) |a small-scale model but with competitive performance | `Represent this sentence for searching relevant passages: ` | | [BAAI/bge-large-zh](https://huggingface.co/BAAI/bge-large-zh) | Chinese | [Inference](#usage-for-embedding-model) [Fine-tune](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/finetune) | :trophy: rank **1st** in [C-MTEB](https://github.com/FlagOpen/FlagEmbedding/tree/master/C_MTEB) benchmark | `为这个句子生成表示以用于检索相关文章:` | | [BAAI/bge-base-zh](https://huggingface.co/BAAI/bge-base-zh) | Chinese | [Inference](#usage-for-embedding-model) [Fine-tune](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/finetune) | a base-scale model but with similar ability to `bge-large-zh` | `为这个句子生成表示以用于检索相关文章:` | | [BAAI/bge-small-zh](https://huggingface.co/BAAI/bge-small-zh) | Chinese | [Inference](#usage-for-embedding-model) [Fine-tune](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/finetune) | a small-scale model but with competitive performance | `为这个句子生成表示以用于检索相关文章:` | [1\]: If you need to search the relevant passages to a query, we suggest to add the instruction to the query; in other cases, no instruction is needed, just use the original query directly. In all cases, **no instruction** needs to be added to passages. [2\]: Different from embedding model, reranker uses question and document as input and directly output similarity instead of embedding. 
To balance accuracy and time cost, cross-encoders are widely used to re-rank the top-k documents retrieved by simpler models. For example, use a bge embedding model to retrieve the top 100 relevant documents, and then use a bge reranker to re-rank those 100 documents and obtain the final top-3 results (a minimal end-to-end sketch of this pipeline is shown at the start of the Usage section below).

All models have been uploaded to the Huggingface Hub, and you can find them at https://huggingface.co/BAAI. If you cannot access the Huggingface Hub, you can also download the models at https://model.baai.ac.cn/models .

## Frequently asked questions

<details>
  <summary>1. How to fine-tune bge embedding model?</summary>

  <!-- ### How to fine-tune bge embedding model? -->
Follow this [example](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/finetune) to prepare data and fine-tune your model.
Some suggestions:
- Mine hard negatives following this [example](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/finetune#hard-negatives), which can improve retrieval performance.
- If you pre-train bge on your data, the pre-trained model cannot be used to calculate similarity directly; it must be fine-tuned with contrastive learning before computing similarity.
- If the accuracy of the fine-tuned model is still not high enough, it is recommended to use/fine-tune the cross-encoder model (bge-reranker) to re-rank the top-k results. Hard negatives are also needed to fine-tune the reranker.

</details>

<details>
  <summary>2. The similarity score between two dissimilar sentences is higher than 0.5</summary>

  <!-- ### The similarity score between two dissimilar sentences is higher than 0.5 -->
**We suggest using bge v1.5, which alleviates the issue of the similarity distribution.**

Since we fine-tune the models by contrastive learning with a temperature of 0.01, the similarity distribution of the current BGE models lies roughly in the interval \[0.6, 1\]. So a similarity score greater than 0.5 does not indicate that the two sentences are similar.

For downstream tasks such as passage retrieval or semantic similarity, **what matters is the relative order of the scores, not their absolute values.**
If you need to filter similar sentences by a similarity threshold, please select an appropriate threshold based on the similarity distribution on your own data (such as 0.8, 0.85, or even 0.9).

</details>

<details>
  <summary>3. When does the query instruction need to be used</summary>

  <!-- ### When does the query instruction need to be used -->

For `bge-*-v1.5`, we improved its retrieval ability when no instruction is used; omitting the instruction causes only a slight degradation in retrieval performance compared with using it. So, for convenience, you can generate embeddings without an instruction in all cases.

For a retrieval task that uses short queries to find long related documents, it is recommended to add the instruction to these short queries.
**The best way to decide whether to add instructions to queries is to choose the setting that achieves better performance on your task.**
In all cases, the documents/passages do not need the instruction.

</details>

## Usage

### Usage for Embedding Model

Here are some examples of using `bge` models with
[FlagEmbedding](#using-flagembedding), [Sentence-Transformers](#using-sentence-transformers), [Langchain](#using-langchain), or [Huggingface Transformers](#using-huggingface-transformers).
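Before the per-library examples, here is a minimal sketch of the two-stage retrieve-then-rerank pipeline described in footnote [2] above, built only from the `FlagEmbedding` calls shown in the sections that follow. The tiny in-memory corpus, the model choices, and the top-k value are illustrative placeholders, not a fixed recommendation.

```python
import numpy as np
from FlagEmbedding import FlagModel, FlagReranker

query = "what is panda?"
corpus = [
    "hi",
    "The giant panda (Ailuropoda melanoleuca) is a bear species endemic to China.",
    "Paris is the capital of France.",
]

# Stage 1: dense retrieval with the bi-encoder (embedding model).
embedder = FlagModel(
    "BAAI/bge-large-en-v1.5",
    query_instruction_for_retrieval="Represent this sentence for searching relevant passages: ",
    use_fp16=True,
)
q_emb = embedder.encode_queries([query])  # the instruction is added to the query automatically
p_emb = embedder.encode(corpus)           # passages do not need the instruction
scores = (q_emb @ p_emb.T)[0]             # inner-product similarity, as in the examples below

top_k = 2
candidate_ids = np.argsort(-scores)[:top_k]

# Stage 2: re-rank the retrieved candidates with the cross-encoder.
reranker = FlagReranker("BAAI/bge-reranker-large", use_fp16=True)
rerank_scores = reranker.compute_score([[query, corpus[i]] for i in candidate_ids])

for idx, score in sorted(zip(candidate_ids, rerank_scores), key=lambda x: -x[1]):
    print(f"{score:.3f}\t{corpus[idx]}")
```

In practice, the first stage would run against a vector index over the full corpus; only the small candidate set is passed to the slower cross-encoder.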
#### Using FlagEmbedding
```
pip install -U FlagEmbedding
```
If this does not work for you, see [FlagEmbedding](https://github.com/FlagOpen/FlagEmbedding/blob/master/FlagEmbedding/baai_general_embedding/README.md) for other ways to install FlagEmbedding.

```python
from FlagEmbedding import FlagModel

sentences_1 = ["样例数据-1", "样例数据-2"]
sentences_2 = ["样例数据-3", "样例数据-4"]
model = FlagModel('BAAI/bge-large-zh-v1.5',
                  query_instruction_for_retrieval="为这个句子生成表示以用于检索相关文章:",
                  use_fp16=True)  # setting use_fp16 to True speeds up computation with a slight performance degradation
embeddings_1 = model.encode(sentences_1)
embeddings_2 = model.encode(sentences_2)
similarity = embeddings_1 @ embeddings_2.T
print(similarity)

# For s2p (short query to long passage) retrieval tasks, use encode_queries(), which automatically adds the instruction to each query.
# The corpus in a retrieval task can still use encode() or encode_corpus(), since passages do not need the instruction.
queries = ['query_1', 'query_2']
passages = ["样例文档-1", "样例文档-2"]
q_embeddings = model.encode_queries(queries)
p_embeddings = model.encode(passages)
scores = q_embeddings @ p_embeddings.T
```
For the value of the argument `query_instruction_for_retrieval`, see [Model List](https://github.com/FlagOpen/FlagEmbedding/tree/master#model-list).

By default, FlagModel will use all available GPUs when encoding. Please set `os.environ["CUDA_VISIBLE_DEVICES"]` to select specific GPUs.
You can also set `os.environ["CUDA_VISIBLE_DEVICES"]=""` to make all GPUs unavailable.

#### Using Sentence-Transformers

You can also use the `bge` models with [sentence-transformers](https://www.SBERT.net):

```
pip install -U sentence-transformers
```
```python
from sentence_transformers import SentenceTransformer

sentences_1 = ["样例数据-1", "样例数据-2"]
sentences_2 = ["样例数据-3", "样例数据-4"]
model = SentenceTransformer('BAAI/bge-large-zh-v1.5')
embeddings_1 = model.encode(sentences_1, normalize_embeddings=True)
embeddings_2 = model.encode(sentences_2, normalize_embeddings=True)
similarity = embeddings_1 @ embeddings_2.T
print(similarity)
```
For s2p (short query to long passage) retrieval tasks, each short query should start with an instruction (see [Model List](https://github.com/FlagOpen/FlagEmbedding/tree/master#model-list) for the instructions). The instruction is not needed for passages.

```python
from sentence_transformers import SentenceTransformer

queries = ['query_1', 'query_2']
passages = ["样例文档-1", "样例文档-2"]
instruction = "为这个句子生成表示以用于检索相关文章:"

model = SentenceTransformer('BAAI/bge-large-zh-v1.5')
q_embeddings = model.encode([instruction + q for q in queries], normalize_embeddings=True)
p_embeddings = model.encode(passages, normalize_embeddings=True)
scores = q_embeddings @ p_embeddings.T
```

#### Using Langchain

You can use `bge` in langchain like this:

```python
from langchain.embeddings import HuggingFaceBgeEmbeddings

model_name = "BAAI/bge-large-en-v1.5"
model_kwargs = {'device': 'cuda'}
encode_kwargs = {'normalize_embeddings': True}  # set True to compute cosine similarity
model = HuggingFaceBgeEmbeddings(
    model_name=model_name,
    model_kwargs=model_kwargs,
    encode_kwargs=encode_kwargs,
    query_instruction="为这个句子生成表示以用于检索相关文章:"
)
model.query_instruction = "为这个句子生成表示以用于检索相关文章:"
```

#### Using HuggingFace Transformers

With the transformers package, you can use the model like this: first, pass your input through the transformer model, then select the last hidden state of the first token (i.e., [CLS]) as the sentence embedding.
```python
from transformers import AutoTokenizer, AutoModel
import torch

# Sentences we want sentence embeddings for
sentences = ["样例数据-1", "样例数据-2"]

# Load model from HuggingFace Hub
tokenizer = AutoTokenizer.from_pretrained('BAAI/bge-large-zh-v1.5')
model = AutoModel.from_pretrained('BAAI/bge-large-zh-v1.5')
model.eval()

# Tokenize sentences
encoded_input = tokenizer(sentences, padding=True, truncation=True, return_tensors='pt')
# For s2p (short query to long passage) retrieval tasks, add an instruction to each query (do not add an instruction to passages):
# encoded_input = tokenizer([instruction + q for q in queries], padding=True, truncation=True, return_tensors='pt')

# Compute token embeddings
with torch.no_grad():
    model_output = model(**encoded_input)
    # Perform pooling. In this case, cls pooling.
    sentence_embeddings = model_output[0][:, 0]
# Normalize embeddings
sentence_embeddings = torch.nn.functional.normalize(sentence_embeddings, p=2, dim=1)
print("Sentence embeddings:", sentence_embeddings)
```

### Usage for Reranker

Different from the embedding model, the reranker takes a question and a document as input and directly outputs a similarity score instead of an embedding.
You can get a relevance score by passing a query and a passage to the reranker.
The reranker is optimized with a cross-entropy loss, so the relevance score is not bounded to a specific range.

#### Using FlagEmbedding
```
pip install -U FlagEmbedding
```

Get relevance scores (higher scores indicate more relevance):
```python
from FlagEmbedding import FlagReranker

reranker = FlagReranker('BAAI/bge-reranker-large', use_fp16=True)  # setting use_fp16 to True speeds up computation with a slight performance degradation

score = reranker.compute_score(['query', 'passage'])
print(score)

scores = reranker.compute_score([['what is panda?', 'hi'], ['what is panda?', 'The giant panda (Ailuropoda melanoleuca), sometimes called a panda bear or simply panda, is a bear species endemic to China.']])
print(scores)
```

#### Using Huggingface transformers
```python
import torch
from transformers import AutoModelForSequenceClassification, AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained('BAAI/bge-reranker-large')
model = AutoModelForSequenceClassification.from_pretrained('BAAI/bge-reranker-large')
model.eval()

pairs = [['what is panda?', 'hi'], ['what is panda?', 'The giant panda (Ailuropoda melanoleuca), sometimes called a panda bear or simply panda, is a bear species endemic to China.']]
with torch.no_grad():
    inputs = tokenizer(pairs, padding=True, truncation=True, return_tensors='pt', max_length=512)
    scores = model(**inputs, return_dict=True).logits.view(-1, ).float()
    print(scores)
```

## Evaluation

`baai-general-embedding` models achieve **state-of-the-art performance on both the MTEB and C-MTEB leaderboards!**
For more details and evaluation tools, see our [scripts](https://github.com/FlagOpen/FlagEmbedding/blob/master/C_MTEB/README.md).
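As a rough, hedged sketch (the official evaluation scripts linked above remain the reference), numbers like those in the tables below can be reproduced with the open-source `mteb` package. The two task names and the output folder are illustrative placeholders, and the `mteb` API has changed across versions, so consult its documentation before relying on this snippet.

```python
# pip install mteb sentence-transformers
from mteb import MTEB
from sentence_transformers import SentenceTransformer

# Wrap the embedding model with sentence-transformers and run a couple of MTEB tasks.
model = SentenceTransformer("BAAI/bge-base-en-v1.5")
evaluation = MTEB(tasks=["Banking77Classification", "SciFact"])
results = evaluation.run(model, output_folder="results/bge-base-en-v1.5")
print(results)
```

For retrieval tasks, keep in mind the query-instruction guidance from the FAQ above; a plain `SentenceTransformer` wrapper does not add the instruction to queries automatically.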
- **MTEB**: | Model Name | Dimension | Sequence Length | Average (56) | Retrieval (15) |Clustering (11) | Pair Classification (3) | Reranking (4) | STS (10) | Summarization (1) | Classification (12) | |:----:|:---:|:---:|:---:|:---:|:---:|:---:|:---:|:---:|:---:|:---:| | [BAAI/bge-large-en-v1.5](https://huggingface.co/BAAI/bge-large-en-v1.5) | 1024 | 512 | **64.23** | **54.29** | 46.08 | 87.12 | 60.03 | 83.11 | 31.61 | 75.97 | | [BAAI/bge-base-en-v1.5](https://huggingface.co/BAAI/bge-base-en-v1.5) | 768 | 512 | 63.55 | 53.25 | 45.77 | 86.55 | 58.86 | 82.4 | 31.07 | 75.53 | | [BAAI/bge-small-en-v1.5](https://huggingface.co/BAAI/bge-small-en-v1.5) | 384 | 512 | 62.17 |51.68 | 43.82 | 84.92 | 58.36 | 81.59 | 30.12 | 74.14 | | [bge-large-en](https://huggingface.co/BAAI/bge-large-en) | 1024 | 512 | 63.98 | 53.9 | 46.98 | 85.8 | 59.48 | 81.56 | 32.06 | 76.21 | | [bge-base-en](https://huggingface.co/BAAI/bge-base-en) | 768 | 512 | 63.36 | 53.0 | 46.32 | 85.86 | 58.7 | 81.84 | 29.27 | 75.27 | | [gte-large](https://huggingface.co/thenlper/gte-large) | 1024 | 512 | 63.13 | 52.22 | 46.84 | 85.00 | 59.13 | 83.35 | 31.66 | 73.33 | | [gte-base](https://huggingface.co/thenlper/gte-base) | 768 | 512 | 62.39 | 51.14 | 46.2 | 84.57 | 58.61 | 82.3 | 31.17 | 73.01 | | [e5-large-v2](https://huggingface.co/intfloat/e5-large-v2) | 1024| 512 | 62.25 | 50.56 | 44.49 | 86.03 | 56.61 | 82.05 | 30.19 | 75.24 | | [bge-small-en](https://huggingface.co/BAAI/bge-small-en) | 384 | 512 | 62.11 | 51.82 | 44.31 | 83.78 | 57.97 | 80.72 | 30.53 | 74.37 | | [instructor-xl](https://huggingface.co/hkunlp/instructor-xl) | 768 | 512 | 61.79 | 49.26 | 44.74 | 86.62 | 57.29 | 83.06 | 32.32 | 61.79 | | [e5-base-v2](https://huggingface.co/intfloat/e5-base-v2) | 768 | 512 | 61.5 | 50.29 | 43.80 | 85.73 | 55.91 | 81.05 | 30.28 | 73.84 | | [gte-small](https://huggingface.co/thenlper/gte-small) | 384 | 512 | 61.36 | 49.46 | 44.89 | 83.54 | 57.7 | 82.07 | 30.42 | 72.31 | | [text-embedding-ada-002](https://platform.openai.com/docs/guides/embeddings) | 1536 | 8192 | 60.99 | 49.25 | 45.9 | 84.89 | 56.32 | 80.97 | 30.8 | 70.93 | | [e5-small-v2](https://huggingface.co/intfloat/e5-base-v2) | 384 | 512 | 59.93 | 49.04 | 39.92 | 84.67 | 54.32 | 80.39 | 31.16 | 72.94 | | [sentence-t5-xxl](https://huggingface.co/sentence-transformers/sentence-t5-xxl) | 768 | 512 | 59.51 | 42.24 | 43.72 | 85.06 | 56.42 | 82.63 | 30.08 | 73.42 | | [all-mpnet-base-v2](https://huggingface.co/sentence-transformers/all-mpnet-base-v2) | 768 | 514 | 57.78 | 43.81 | 43.69 | 83.04 | 59.36 | 80.28 | 27.49 | 65.07 | | [sgpt-bloom-7b1-msmarco](https://huggingface.co/bigscience/sgpt-bloom-7b1-msmarco) | 4096 | 2048 | 57.59 | 48.22 | 38.93 | 81.9 | 55.65 | 77.74 | 33.6 | 66.19 | - **C-MTEB**: We create the benchmark C-MTEB for Chinese text embedding which consists of 31 datasets from 6 tasks. Please refer to [C_MTEB](https://github.com/FlagOpen/FlagEmbedding/blob/master/C_MTEB/README.md) for a detailed introduction. 
| Model | Embedding dimension | Avg | Retrieval | STS | PairClassification | Classification | Reranking | Clustering | |:-------------------------------|:--------:|:--------:|:--------:|:--------:|:--------:|:--------:|:--------:|:--------:| | [**BAAI/bge-large-zh-v1.5**](https://huggingface.co/BAAI/bge-large-zh-v1.5) | 1024 | **64.53** | 70.46 | 56.25 | 81.6 | 69.13 | 65.84 | 48.99 | | [BAAI/bge-base-zh-v1.5](https://huggingface.co/BAAI/bge-base-zh-v1.5) | 768 | 63.13 | 69.49 | 53.72 | 79.75 | 68.07 | 65.39 | 47.53 | | [BAAI/bge-small-zh-v1.5](https://huggingface.co/BAAI/bge-small-zh-v1.5) | 512 | 57.82 | 61.77 | 49.11 | 70.41 | 63.96 | 60.92 | 44.18 | | [BAAI/bge-large-zh](https://huggingface.co/BAAI/bge-large-zh) | 1024 | 64.20 | 71.53 | 54.98 | 78.94 | 68.32 | 65.11 | 48.39 | | [bge-large-zh-noinstruct](https://huggingface.co/BAAI/bge-large-zh-noinstruct) | 1024 | 63.53 | 70.55 | 53 | 76.77 | 68.58 | 64.91 | 50.01 | | [BAAI/bge-base-zh](https://huggingface.co/BAAI/bge-base-zh) | 768 | 62.96 | 69.53 | 54.12 | 77.5 | 67.07 | 64.91 | 47.63 | | [multilingual-e5-large](https://huggingface.co/intfloat/multilingual-e5-large) | 1024 | 58.79 | 63.66 | 48.44 | 69.89 | 67.34 | 56.00 | 48.23 | | [BAAI/bge-small-zh](https://huggingface.co/BAAI/bge-small-zh) | 512 | 58.27 | 63.07 | 49.45 | 70.35 | 63.64 | 61.48 | 45.09 | | [m3e-base](https://huggingface.co/moka-ai/m3e-base) | 768 | 57.10 | 56.91 | 50.47 | 63.99 | 67.52 | 59.34 | 47.68 | | [m3e-large](https://huggingface.co/moka-ai/m3e-large) | 1024 | 57.05 | 54.75 | 50.42 | 64.3 | 68.2 | 59.66 | 48.88 | | [multilingual-e5-base](https://huggingface.co/intfloat/multilingual-e5-base) | 768 | 55.48 | 61.63 | 46.49 | 67.07 | 65.35 | 54.35 | 40.68 | | [multilingual-e5-small](https://huggingface.co/intfloat/multilingual-e5-small) | 384 | 55.38 | 59.95 | 45.27 | 66.45 | 65.85 | 53.86 | 45.26 | | [text-embedding-ada-002(OpenAI)](https://platform.openai.com/docs/guides/embeddings/what-are-embeddings) | 1536 | 53.02 | 52.0 | 43.35 | 69.56 | 64.31 | 54.28 | 45.68 | | [luotuo](https://huggingface.co/silk-road/luotuo-bert-medium) | 1024 | 49.37 | 44.4 | 42.78 | 66.62 | 61 | 49.25 | 44.39 | | [text2vec-base](https://huggingface.co/shibing624/text2vec-base-chinese) | 768 | 47.63 | 38.79 | 43.41 | 67.41 | 62.19 | 49.45 | 37.66 | | [text2vec-large](https://huggingface.co/GanymedeNil/text2vec-large-chinese) | 1024 | 47.36 | 41.94 | 44.97 | 70.86 | 60.66 | 49.16 | 30.02 | - **Reranking**: See [C_MTEB](https://github.com/FlagOpen/FlagEmbedding/blob/master/C_MTEB/) for evaluation script. 
| Model | T2Reranking | T2RerankingZh2En\* | T2RerankingEn2Zh\* | MMarcoReranking | CMedQAv1 | CMedQAv2 | Avg |
|:-------------------------------|:--------:|:--------:|:--------:|:--------:|:--------:|:--------:|:--------:|
| text2vec-base-multilingual | 64.66 | 62.94 | 62.51 | 14.37 | 48.46 | 48.6 | 50.26 |
| multilingual-e5-small | 65.62 | 60.94 | 56.41 | 29.91 | 67.26 | 66.54 | 57.78 |
| multilingual-e5-large | 64.55 | 61.61 | 54.28 | 28.6 | 67.42 | 67.92 | 57.4 |
| multilingual-e5-base | 64.21 | 62.13 | 54.68 | 29.5 | 66.23 | 66.98 | 57.29 |
| m3e-base | 66.03 | 62.74 | 56.07 | 17.51 | 77.05 | 76.76 | 59.36 |
| m3e-large | 66.13 | 62.72 | 56.1 | 16.46 | 77.76 | 78.27 | 59.57 |
| bge-base-zh-v1.5 | 66.49 | 63.25 | 57.02 | 29.74 | 80.47 | 84.88 | 63.64 |
| bge-large-zh-v1.5 | 65.74 | 63.39 | 57.03 | 28.74 | 83.45 | 85.44 | 63.97 |
| [BAAI/bge-reranker-base](https://huggingface.co/BAAI/bge-reranker-base) | 67.28 | 63.95 | 60.45 | 35.46 | 81.26 | 84.1 | 65.42 |
| [BAAI/bge-reranker-large](https://huggingface.co/BAAI/bge-reranker-large) | 67.6 | 64.03 | 61.44 | 37.16 | 82.15 | 84.18 | 66.09 |

\* : T2RerankingZh2En and T2RerankingEn2Zh are cross-language retrieval tasks

## Train

### BAAI Embedding

We pre-train the models using [retromae](https://github.com/staoxiao/RetroMAE) and train them on large-scale pair data with contrastive learning.
**You can fine-tune the embedding model on your data following our [examples](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/finetune).**
We also provide a [pre-train example](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/pretrain).
Note that the goal of pre-training is to reconstruct the text, so the pre-trained model cannot be used for similarity calculation directly; it needs to be fine-tuned first.
For more training details for bge, see [baai_general_embedding](https://github.com/FlagOpen/FlagEmbedding/blob/master/FlagEmbedding/baai_general_embedding/README.md).

### BGE Reranker

A cross-encoder performs full attention over the input pair, which is more accurate than an embedding model (i.e., a bi-encoder) but also more time-consuming. Therefore, it can be used to re-rank the top-k documents returned by the embedding model.
We train the cross-encoder on multilingual pair data. The data format is the same as for the embedding model, so you can fine-tune it easily following our [example](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/reranker).
For more details, please refer to [./FlagEmbedding/reranker/README.md](https://github.com/FlagOpen/FlagEmbedding/tree/master/FlagEmbedding/reranker)

## Citation

If you find this repository useful, please consider giving it a star :star: and a citation:

```
@misc{bge_embedding,
      title={C-Pack: Packaged Resources To Advance General Chinese Embedding},
      author={Shitao Xiao and Zheng Liu and Peitian Zhang and Niklas Muennighoff},
      year={2023},
      eprint={2309.07597},
      archivePrefix={arXiv},
      primaryClass={cs.CL}
}
```

## License

FlagEmbedding is licensed under the [MIT License](https://github.com/FlagOpen/FlagEmbedding/blob/master/LICENSE). The released models can be used for commercial purposes free of charge.
[ "BEAR", "BIOSSES", "SCIFACT" ]
Snowflake/snowflake-arctic-embed-m
Snowflake
sentence-similarity
[ "sentence-transformers", "onnx", "safetensors", "bert", "feature-extraction", "sentence-similarity", "mteb", "arctic", "snowflake-arctic-embed", "transformers.js", "arxiv:2407.18887", "arxiv:2405.05374", "license:apache-2.0", "model-index", "autotrain_compatible", "text-embeddings-inference", "endpoints_compatible", "region:us" ]
"2024-04-11T11:07:56Z"
2024-12-13T20:51:22+00:00
418,779
153
--- license: apache-2.0 pipeline_tag: sentence-similarity tags: - sentence-transformers - feature-extraction - sentence-similarity - mteb - arctic - snowflake-arctic-embed - transformers.js new_version: Snowflake/snowflake-arctic-embed-m-v2.0 model-index: - name: snowflake-arctic-embed-m results: - task: type: Classification dataset: name: MTEB AmazonCounterfactualClassification (en) type: mteb/amazon_counterfactual config: en split: test revision: e8379541af4e31359cca9fbcf4b00f2671dba205 metrics: - type: accuracy value: 76.80597014925374 - type: ap value: 39.31198155789558 - type: f1 value: 70.48198448222148 - task: type: Classification dataset: name: MTEB AmazonPolarityClassification type: mteb/amazon_polarity config: default split: test revision: e2d317d38cd51312af73b3d32a06d1a08b442046 metrics: - type: accuracy value: 82.831525 - type: ap value: 77.4474050181638 - type: f1 value: 82.77204845110204 - task: type: Classification dataset: name: MTEB AmazonReviewsClassification (en) type: mteb/amazon_reviews_multi config: en split: test revision: 1399c76144fd37290681b995c656ef9b2e06e26d metrics: - type: accuracy value: 38.93000000000001 - type: f1 value: 37.98013371053459 - task: type: Retrieval dataset: name: MTEB ArguAna type: mteb/arguana config: default split: test revision: c22ab2a51041ffd869aaddef7af8d8215647e41a metrics: - type: map_at_1 value: 31.223 - type: map_at_10 value: 47.43 - type: map_at_100 value: 48.208 - type: map_at_1000 value: 48.211 - type: map_at_3 value: 42.579 - type: map_at_5 value: 45.263999999999996 - type: mrr_at_1 value: 31.65 - type: mrr_at_10 value: 47.573 - type: mrr_at_100 value: 48.359 - type: mrr_at_1000 value: 48.362 - type: mrr_at_3 value: 42.734 - type: mrr_at_5 value: 45.415 - type: ndcg_at_1 value: 31.223 - type: ndcg_at_10 value: 56.436 - type: ndcg_at_100 value: 59.657000000000004 - type: ndcg_at_1000 value: 59.731 - type: ndcg_at_3 value: 46.327 - type: ndcg_at_5 value: 51.178000000000004 - type: precision_at_1 value: 31.223 - type: precision_at_10 value: 8.527999999999999 - type: precision_at_100 value: 0.991 - type: precision_at_1000 value: 0.1 - type: precision_at_3 value: 19.061 - type: precision_at_5 value: 13.797999999999998 - type: recall_at_1 value: 31.223 - type: recall_at_10 value: 85.277 - type: recall_at_100 value: 99.075 - type: recall_at_1000 value: 99.644 - type: recall_at_3 value: 57.18299999999999 - type: recall_at_5 value: 68.99 - task: type: Clustering dataset: name: MTEB ArxivClusteringP2P type: mteb/arxiv-clustering-p2p config: default split: test revision: a122ad7f3f0291bf49cc6f4d32aa80929df69d5d metrics: - type: v_measure value: 47.23625429411296 - task: type: Clustering dataset: name: MTEB ArxivClusteringS2S type: mteb/arxiv-clustering-s2s config: default split: test revision: f910caf1a6075f7329cdf8c1a6135696f37dbd53 metrics: - type: v_measure value: 37.433880471403654 - task: type: Reranking dataset: name: MTEB AskUbuntuDupQuestions type: mteb/askubuntudupquestions-reranking config: default split: test revision: 2000358ca161889fa9c082cb41daa8dcfb161a54 metrics: - type: map value: 60.53175025582013 - type: mrr value: 74.51160796728664 - task: type: STS dataset: name: MTEB BIOSSES type: mteb/biosses-sts config: default split: test revision: d3fb88f8f02e40887cd149695127462bbcf29b4a metrics: - type: cos_sim_pearson value: 88.93746103286769 - type: cos_sim_spearman value: 86.62245567912619 - type: euclidean_pearson value: 87.154173907501 - type: euclidean_spearman value: 86.62245567912619 - type: manhattan_pearson value: 
87.17682026633462 - type: manhattan_spearman value: 86.74775973908348 - task: type: Classification dataset: name: MTEB Banking77Classification type: mteb/banking77 config: default split: test revision: 0fd18e25b25c072e09e0d92ab615fda904d66300 metrics: - type: accuracy value: 80.33766233766232 - type: f1 value: 79.64931422442245 - task: type: Clustering dataset: name: MTEB BigPatentClustering type: jinaai/big-patent-clustering config: default split: test revision: 62d5330920bca426ce9d3c76ea914f15fc83e891 metrics: - type: v_measure value: 19.116028913890613 - task: type: Clustering dataset: name: MTEB BiorxivClusteringP2P type: mteb/biorxiv-clustering-p2p config: default split: test revision: 65b79d1d13f80053f67aca9498d9402c2d9f1f40 metrics: - type: v_measure value: 36.966921852810174 - task: type: Clustering dataset: name: MTEB BiorxivClusteringS2S type: mteb/biorxiv-clustering-s2s config: default split: test revision: 258694dd0231531bc1fd9de6ceb52a0853c6d908 metrics: - type: v_measure value: 31.98019698537654 - task: type: Retrieval dataset: name: MTEB CQADupstackAndroidRetrieval type: mteb/cqadupstack-android config: default split: test revision: f46a197baaae43b4f621051089b82a364682dfeb metrics: - type: map_at_1 value: 34.079 - type: map_at_10 value: 46.35 - type: map_at_100 value: 47.785 - type: map_at_1000 value: 47.903 - type: map_at_3 value: 42.620999999999995 - type: map_at_5 value: 44.765 - type: mrr_at_1 value: 41.345 - type: mrr_at_10 value: 52.032000000000004 - type: mrr_at_100 value: 52.690000000000005 - type: mrr_at_1000 value: 52.727999999999994 - type: mrr_at_3 value: 49.428 - type: mrr_at_5 value: 51.093999999999994 - type: ndcg_at_1 value: 41.345 - type: ndcg_at_10 value: 53.027 - type: ndcg_at_100 value: 57.962 - type: ndcg_at_1000 value: 59.611999999999995 - type: ndcg_at_3 value: 47.687000000000005 - type: ndcg_at_5 value: 50.367 - type: precision_at_1 value: 41.345 - type: precision_at_10 value: 10.157 - type: precision_at_100 value: 1.567 - type: precision_at_1000 value: 0.199 - type: precision_at_3 value: 23.081 - type: precision_at_5 value: 16.738 - type: recall_at_1 value: 34.079 - type: recall_at_10 value: 65.93900000000001 - type: recall_at_100 value: 86.42699999999999 - type: recall_at_1000 value: 96.61 - type: recall_at_3 value: 50.56699999999999 - type: recall_at_5 value: 57.82000000000001 - task: type: Retrieval dataset: name: MTEB CQADupstackEnglishRetrieval type: mteb/cqadupstack-english config: default split: test revision: ad9991cb51e31e31e430383c75ffb2885547b5f0 metrics: - type: map_at_1 value: 33.289 - type: map_at_10 value: 43.681 - type: map_at_100 value: 45.056000000000004 - type: map_at_1000 value: 45.171 - type: map_at_3 value: 40.702 - type: map_at_5 value: 42.292 - type: mrr_at_1 value: 41.146 - type: mrr_at_10 value: 49.604 - type: mrr_at_100 value: 50.28399999999999 - type: mrr_at_1000 value: 50.322 - type: mrr_at_3 value: 47.611 - type: mrr_at_5 value: 48.717 - type: ndcg_at_1 value: 41.146 - type: ndcg_at_10 value: 49.43 - type: ndcg_at_100 value: 54.01899999999999 - type: ndcg_at_1000 value: 55.803000000000004 - type: ndcg_at_3 value: 45.503 - type: ndcg_at_5 value: 47.198 - type: precision_at_1 value: 41.146 - type: precision_at_10 value: 9.268 - type: precision_at_100 value: 1.4749999999999999 - type: precision_at_1000 value: 0.19 - type: precision_at_3 value: 21.932 - type: precision_at_5 value: 15.389 - type: recall_at_1 value: 33.289 - type: recall_at_10 value: 59.209999999999994 - type: recall_at_100 value: 78.676 - type: recall_at_1000 
value: 89.84100000000001 - type: recall_at_3 value: 47.351 - type: recall_at_5 value: 52.178999999999995 - task: type: Retrieval dataset: name: MTEB CQADupstackGamingRetrieval type: mteb/cqadupstack-gaming config: default split: test revision: 4885aa143210c98657558c04aaf3dc47cfb54340 metrics: - type: map_at_1 value: 44.483 - type: map_at_10 value: 56.862 - type: map_at_100 value: 57.901 - type: map_at_1000 value: 57.948 - type: map_at_3 value: 53.737 - type: map_at_5 value: 55.64 - type: mrr_at_1 value: 50.658 - type: mrr_at_10 value: 60.281 - type: mrr_at_100 value: 60.946 - type: mrr_at_1000 value: 60.967000000000006 - type: mrr_at_3 value: 58.192 - type: mrr_at_5 value: 59.531 - type: ndcg_at_1 value: 50.658 - type: ndcg_at_10 value: 62.339 - type: ndcg_at_100 value: 66.28399999999999 - type: ndcg_at_1000 value: 67.166 - type: ndcg_at_3 value: 57.458 - type: ndcg_at_5 value: 60.112 - type: precision_at_1 value: 50.658 - type: precision_at_10 value: 9.762 - type: precision_at_100 value: 1.26 - type: precision_at_1000 value: 0.13799999999999998 - type: precision_at_3 value: 25.329 - type: precision_at_5 value: 17.254 - type: recall_at_1 value: 44.483 - type: recall_at_10 value: 74.819 - type: recall_at_100 value: 91.702 - type: recall_at_1000 value: 97.84 - type: recall_at_3 value: 62.13999999999999 - type: recall_at_5 value: 68.569 - task: type: Retrieval dataset: name: MTEB CQADupstackGisRetrieval type: mteb/cqadupstack-gis config: default split: test revision: 5003b3064772da1887988e05400cf3806fe491f2 metrics: - type: map_at_1 value: 26.489 - type: map_at_10 value: 37.004999999999995 - type: map_at_100 value: 38.001000000000005 - type: map_at_1000 value: 38.085 - type: map_at_3 value: 34.239999999999995 - type: map_at_5 value: 35.934 - type: mrr_at_1 value: 28.362 - type: mrr_at_10 value: 38.807 - type: mrr_at_100 value: 39.671 - type: mrr_at_1000 value: 39.736 - type: mrr_at_3 value: 36.29 - type: mrr_at_5 value: 37.906 - type: ndcg_at_1 value: 28.362 - type: ndcg_at_10 value: 42.510999999999996 - type: ndcg_at_100 value: 47.226 - type: ndcg_at_1000 value: 49.226 - type: ndcg_at_3 value: 37.295 - type: ndcg_at_5 value: 40.165 - type: precision_at_1 value: 28.362 - type: precision_at_10 value: 6.633 - type: precision_at_100 value: 0.9490000000000001 - type: precision_at_1000 value: 0.11499999999999999 - type: precision_at_3 value: 16.234 - type: precision_at_5 value: 11.434999999999999 - type: recall_at_1 value: 26.489 - type: recall_at_10 value: 57.457 - type: recall_at_100 value: 78.712 - type: recall_at_1000 value: 93.565 - type: recall_at_3 value: 43.748 - type: recall_at_5 value: 50.589 - task: type: Retrieval dataset: name: MTEB CQADupstackMathematicaRetrieval type: mteb/cqadupstack-mathematica config: default split: test revision: 90fceea13679c63fe563ded68f3b6f06e50061de metrics: - type: map_at_1 value: 12.418999999999999 - type: map_at_10 value: 22.866 - type: map_at_100 value: 24.365000000000002 - type: map_at_1000 value: 24.479 - type: map_at_3 value: 19.965 - type: map_at_5 value: 21.684 - type: mrr_at_1 value: 14.677000000000001 - type: mrr_at_10 value: 26.316 - type: mrr_at_100 value: 27.514 - type: mrr_at_1000 value: 27.57 - type: mrr_at_3 value: 23.3 - type: mrr_at_5 value: 25.191000000000003 - type: ndcg_at_1 value: 14.677000000000001 - type: ndcg_at_10 value: 28.875 - type: ndcg_at_100 value: 35.607 - type: ndcg_at_1000 value: 38.237 - type: ndcg_at_3 value: 23.284 - type: ndcg_at_5 value: 26.226 - type: precision_at_1 value: 14.677000000000001 - type: precision_at_10 
value: 5.771 - type: precision_at_100 value: 1.058 - type: precision_at_1000 value: 0.14200000000000002 - type: precision_at_3 value: 11.940000000000001 - type: precision_at_5 value: 9.229 - type: recall_at_1 value: 12.418999999999999 - type: recall_at_10 value: 43.333 - type: recall_at_100 value: 71.942 - type: recall_at_1000 value: 90.67399999999999 - type: recall_at_3 value: 28.787000000000003 - type: recall_at_5 value: 35.638 - task: type: Retrieval dataset: name: MTEB CQADupstackPhysicsRetrieval type: mteb/cqadupstack-physics config: default split: test revision: 79531abbd1fb92d06c6d6315a0cbbbf5bb247ea4 metrics: - type: map_at_1 value: 31.686999999999998 - type: map_at_10 value: 42.331 - type: map_at_100 value: 43.655 - type: map_at_1000 value: 43.771 - type: map_at_3 value: 38.944 - type: map_at_5 value: 40.991 - type: mrr_at_1 value: 37.921 - type: mrr_at_10 value: 47.534 - type: mrr_at_100 value: 48.362 - type: mrr_at_1000 value: 48.405 - type: mrr_at_3 value: 44.995000000000005 - type: mrr_at_5 value: 46.617 - type: ndcg_at_1 value: 37.921 - type: ndcg_at_10 value: 48.236000000000004 - type: ndcg_at_100 value: 53.705000000000005 - type: ndcg_at_1000 value: 55.596000000000004 - type: ndcg_at_3 value: 43.11 - type: ndcg_at_5 value: 45.862 - type: precision_at_1 value: 37.921 - type: precision_at_10 value: 8.643 - type: precision_at_100 value: 1.336 - type: precision_at_1000 value: 0.166 - type: precision_at_3 value: 20.308 - type: precision_at_5 value: 14.514 - type: recall_at_1 value: 31.686999999999998 - type: recall_at_10 value: 60.126999999999995 - type: recall_at_100 value: 83.10600000000001 - type: recall_at_1000 value: 95.15 - type: recall_at_3 value: 46.098 - type: recall_at_5 value: 53.179 - task: type: Retrieval dataset: name: MTEB CQADupstackProgrammersRetrieval type: mteb/cqadupstack-programmers config: default split: test revision: 6184bc1440d2dbc7612be22b50686b8826d22b32 metrics: - type: map_at_1 value: 28.686 - type: map_at_10 value: 39.146 - type: map_at_100 value: 40.543 - type: map_at_1000 value: 40.644999999999996 - type: map_at_3 value: 36.195 - type: map_at_5 value: 37.919000000000004 - type: mrr_at_1 value: 35.160000000000004 - type: mrr_at_10 value: 44.711 - type: mrr_at_100 value: 45.609 - type: mrr_at_1000 value: 45.655 - type: mrr_at_3 value: 42.409 - type: mrr_at_5 value: 43.779 - type: ndcg_at_1 value: 35.160000000000004 - type: ndcg_at_10 value: 44.977000000000004 - type: ndcg_at_100 value: 50.663000000000004 - type: ndcg_at_1000 value: 52.794 - type: ndcg_at_3 value: 40.532000000000004 - type: ndcg_at_5 value: 42.641 - type: precision_at_1 value: 35.160000000000004 - type: precision_at_10 value: 8.014000000000001 - type: precision_at_100 value: 1.269 - type: precision_at_1000 value: 0.163 - type: precision_at_3 value: 19.444 - type: precision_at_5 value: 13.653 - type: recall_at_1 value: 28.686 - type: recall_at_10 value: 56.801 - type: recall_at_100 value: 80.559 - type: recall_at_1000 value: 95.052 - type: recall_at_3 value: 43.675999999999995 - type: recall_at_5 value: 49.703 - task: type: Retrieval dataset: name: MTEB CQADupstackRetrieval type: mteb/cqadupstack config: default split: test revision: 4ffe81d471b1924886b33c7567bfb200e9eec5c4 metrics: - type: map_at_1 value: 28.173833333333338 - type: map_at_10 value: 38.202083333333334 - type: map_at_100 value: 39.47475 - type: map_at_1000 value: 39.586499999999994 - type: map_at_3 value: 35.17308333333334 - type: map_at_5 value: 36.914 - type: mrr_at_1 value: 32.92958333333333 - type: mrr_at_10 value: 
42.16758333333333 - type: mrr_at_100 value: 43.04108333333333 - type: mrr_at_1000 value: 43.092499999999994 - type: mrr_at_3 value: 39.69166666666666 - type: mrr_at_5 value: 41.19458333333333 - type: ndcg_at_1 value: 32.92958333333333 - type: ndcg_at_10 value: 43.80583333333333 - type: ndcg_at_100 value: 49.060916666666664 - type: ndcg_at_1000 value: 51.127250000000004 - type: ndcg_at_3 value: 38.80383333333333 - type: ndcg_at_5 value: 41.29658333333333 - type: precision_at_1 value: 32.92958333333333 - type: precision_at_10 value: 7.655666666666666 - type: precision_at_100 value: 1.2094166666666668 - type: precision_at_1000 value: 0.15750000000000003 - type: precision_at_3 value: 17.87975 - type: precision_at_5 value: 12.741833333333332 - type: recall_at_1 value: 28.173833333333338 - type: recall_at_10 value: 56.219249999999995 - type: recall_at_100 value: 79.01416666666665 - type: recall_at_1000 value: 93.13425000000001 - type: recall_at_3 value: 42.39241666666667 - type: recall_at_5 value: 48.764833333333335 - task: type: Retrieval dataset: name: MTEB CQADupstackStatsRetrieval type: mteb/cqadupstack-stats config: default split: test revision: 65ac3a16b8e91f9cee4c9828cc7c335575432a2a metrics: - type: map_at_1 value: 25.625999999999998 - type: map_at_10 value: 32.808 - type: map_at_100 value: 33.951 - type: map_at_1000 value: 34.052 - type: map_at_3 value: 30.536 - type: map_at_5 value: 31.77 - type: mrr_at_1 value: 28.374 - type: mrr_at_10 value: 35.527 - type: mrr_at_100 value: 36.451 - type: mrr_at_1000 value: 36.522 - type: mrr_at_3 value: 33.410000000000004 - type: mrr_at_5 value: 34.537 - type: ndcg_at_1 value: 28.374 - type: ndcg_at_10 value: 37.172 - type: ndcg_at_100 value: 42.474000000000004 - type: ndcg_at_1000 value: 44.853 - type: ndcg_at_3 value: 32.931 - type: ndcg_at_5 value: 34.882999999999996 - type: precision_at_1 value: 28.374 - type: precision_at_10 value: 5.813 - type: precision_at_100 value: 0.928 - type: precision_at_1000 value: 0.121 - type: precision_at_3 value: 14.008000000000001 - type: precision_at_5 value: 9.754999999999999 - type: recall_at_1 value: 25.625999999999998 - type: recall_at_10 value: 47.812 - type: recall_at_100 value: 71.61800000000001 - type: recall_at_1000 value: 88.881 - type: recall_at_3 value: 35.876999999999995 - type: recall_at_5 value: 40.839 - task: type: Retrieval dataset: name: MTEB CQADupstackTexRetrieval type: mteb/cqadupstack-tex config: default split: test revision: 46989137a86843e03a6195de44b09deda022eec7 metrics: - type: map_at_1 value: 18.233 - type: map_at_10 value: 26.375999999999998 - type: map_at_100 value: 27.575 - type: map_at_1000 value: 27.706999999999997 - type: map_at_3 value: 23.619 - type: map_at_5 value: 25.217 - type: mrr_at_1 value: 22.023 - type: mrr_at_10 value: 30.122 - type: mrr_at_100 value: 31.083 - type: mrr_at_1000 value: 31.163999999999998 - type: mrr_at_3 value: 27.541 - type: mrr_at_5 value: 29.061999999999998 - type: ndcg_at_1 value: 22.023 - type: ndcg_at_10 value: 31.476 - type: ndcg_at_100 value: 37.114000000000004 - type: ndcg_at_1000 value: 39.981 - type: ndcg_at_3 value: 26.538 - type: ndcg_at_5 value: 29.016 - type: precision_at_1 value: 22.023 - type: precision_at_10 value: 5.819 - type: precision_at_100 value: 1.018 - type: precision_at_1000 value: 0.14300000000000002 - type: precision_at_3 value: 12.583 - type: precision_at_5 value: 9.36 - type: recall_at_1 value: 18.233 - type: recall_at_10 value: 43.029 - type: recall_at_100 value: 68.253 - type: recall_at_1000 value: 88.319 - type: 
recall_at_3 value: 29.541 - type: recall_at_5 value: 35.783 - task: type: Retrieval dataset: name: MTEB CQADupstackUnixRetrieval type: mteb/cqadupstack-unix config: default split: test revision: 6c6430d3a6d36f8d2a829195bc5dc94d7e063e53 metrics: - type: map_at_1 value: 28.923 - type: map_at_10 value: 39.231 - type: map_at_100 value: 40.483000000000004 - type: map_at_1000 value: 40.575 - type: map_at_3 value: 35.94 - type: map_at_5 value: 37.683 - type: mrr_at_1 value: 33.955 - type: mrr_at_10 value: 43.163000000000004 - type: mrr_at_100 value: 44.054 - type: mrr_at_1000 value: 44.099 - type: mrr_at_3 value: 40.361000000000004 - type: mrr_at_5 value: 41.905 - type: ndcg_at_1 value: 33.955 - type: ndcg_at_10 value: 45.068000000000005 - type: ndcg_at_100 value: 50.470000000000006 - type: ndcg_at_1000 value: 52.349000000000004 - type: ndcg_at_3 value: 39.298 - type: ndcg_at_5 value: 41.821999999999996 - type: precision_at_1 value: 33.955 - type: precision_at_10 value: 7.649 - type: precision_at_100 value: 1.173 - type: precision_at_1000 value: 0.14200000000000002 - type: precision_at_3 value: 17.817 - type: precision_at_5 value: 12.537 - type: recall_at_1 value: 28.923 - type: recall_at_10 value: 58.934 - type: recall_at_100 value: 81.809 - type: recall_at_1000 value: 94.71300000000001 - type: recall_at_3 value: 42.975 - type: recall_at_5 value: 49.501 - task: type: Retrieval dataset: name: MTEB CQADupstackWebmastersRetrieval type: mteb/cqadupstack-webmasters config: default split: test revision: 160c094312a0e1facb97e55eeddb698c0abe3571 metrics: - type: map_at_1 value: 28.596 - type: map_at_10 value: 38.735 - type: map_at_100 value: 40.264 - type: map_at_1000 value: 40.48 - type: map_at_3 value: 35.394999999999996 - type: map_at_5 value: 37.099 - type: mrr_at_1 value: 33.992 - type: mrr_at_10 value: 43.076 - type: mrr_at_100 value: 44.005 - type: mrr_at_1000 value: 44.043 - type: mrr_at_3 value: 40.415 - type: mrr_at_5 value: 41.957 - type: ndcg_at_1 value: 33.992 - type: ndcg_at_10 value: 44.896 - type: ndcg_at_100 value: 50.44499999999999 - type: ndcg_at_1000 value: 52.675000000000004 - type: ndcg_at_3 value: 39.783 - type: ndcg_at_5 value: 41.997 - type: precision_at_1 value: 33.992 - type: precision_at_10 value: 8.498 - type: precision_at_100 value: 1.585 - type: precision_at_1000 value: 0.248 - type: precision_at_3 value: 18.511 - type: precision_at_5 value: 13.241 - type: recall_at_1 value: 28.596 - type: recall_at_10 value: 56.885 - type: recall_at_100 value: 82.306 - type: recall_at_1000 value: 95.813 - type: recall_at_3 value: 42.168 - type: recall_at_5 value: 48.32 - task: type: Retrieval dataset: name: MTEB CQADupstackWordpressRetrieval type: mteb/cqadupstack-wordpress config: default split: test revision: 4ffe81d471b1924886b33c7567bfb200e9eec5c4 metrics: - type: map_at_1 value: 25.576 - type: map_at_10 value: 33.034 - type: map_at_100 value: 34.117999999999995 - type: map_at_1000 value: 34.222 - type: map_at_3 value: 30.183 - type: map_at_5 value: 31.974000000000004 - type: mrr_at_1 value: 27.542 - type: mrr_at_10 value: 34.838 - type: mrr_at_100 value: 35.824 - type: mrr_at_1000 value: 35.899 - type: mrr_at_3 value: 32.348 - type: mrr_at_5 value: 34.039 - type: ndcg_at_1 value: 27.542 - type: ndcg_at_10 value: 37.663000000000004 - type: ndcg_at_100 value: 42.762 - type: ndcg_at_1000 value: 45.235 - type: ndcg_at_3 value: 32.227 - type: ndcg_at_5 value: 35.27 - type: precision_at_1 value: 27.542 - type: precision_at_10 value: 5.840999999999999 - type: precision_at_100 value: 0.895 - 
type: precision_at_1000 value: 0.123 - type: precision_at_3 value: 13.370000000000001 - type: precision_at_5 value: 9.797 - type: recall_at_1 value: 25.576 - type: recall_at_10 value: 50.285000000000004 - type: recall_at_100 value: 73.06 - type: recall_at_1000 value: 91.15299999999999 - type: recall_at_3 value: 35.781 - type: recall_at_5 value: 43.058 - task: type: Retrieval dataset: name: MTEB ClimateFEVER type: mteb/climate-fever config: default split: test revision: 47f2ac6acb640fc46020b02a5b59fdda04d39380 metrics: - type: map_at_1 value: 17.061 - type: map_at_10 value: 29.464000000000002 - type: map_at_100 value: 31.552999999999997 - type: map_at_1000 value: 31.707 - type: map_at_3 value: 24.834999999999997 - type: map_at_5 value: 27.355 - type: mrr_at_1 value: 38.958 - type: mrr_at_10 value: 51.578 - type: mrr_at_100 value: 52.262 - type: mrr_at_1000 value: 52.283 - type: mrr_at_3 value: 48.599 - type: mrr_at_5 value: 50.404 - type: ndcg_at_1 value: 38.958 - type: ndcg_at_10 value: 39.367999999999995 - type: ndcg_at_100 value: 46.521 - type: ndcg_at_1000 value: 49.086999999999996 - type: ndcg_at_3 value: 33.442 - type: ndcg_at_5 value: 35.515 - type: precision_at_1 value: 38.958 - type: precision_at_10 value: 12.110999999999999 - type: precision_at_100 value: 1.982 - type: precision_at_1000 value: 0.247 - type: precision_at_3 value: 25.102999999999998 - type: precision_at_5 value: 18.971 - type: recall_at_1 value: 17.061 - type: recall_at_10 value: 45.198 - type: recall_at_100 value: 69.18900000000001 - type: recall_at_1000 value: 83.38499999999999 - type: recall_at_3 value: 30.241 - type: recall_at_5 value: 36.851 - task: type: Retrieval dataset: name: MTEB DBPedia type: mteb/dbpedia config: default split: test revision: c0f706b76e590d620bd6618b3ca8efdd34e2d659 metrics: - type: map_at_1 value: 9.398 - type: map_at_10 value: 21.421 - type: map_at_100 value: 31.649 - type: map_at_1000 value: 33.469 - type: map_at_3 value: 15.310000000000002 - type: map_at_5 value: 17.946 - type: mrr_at_1 value: 71 - type: mrr_at_10 value: 78.92099999999999 - type: mrr_at_100 value: 79.225 - type: mrr_at_1000 value: 79.23 - type: mrr_at_3 value: 77.792 - type: mrr_at_5 value: 78.467 - type: ndcg_at_1 value: 57.99999999999999 - type: ndcg_at_10 value: 44.733000000000004 - type: ndcg_at_100 value: 50.646 - type: ndcg_at_1000 value: 57.903999999999996 - type: ndcg_at_3 value: 49.175999999999995 - type: ndcg_at_5 value: 46.800999999999995 - type: precision_at_1 value: 71 - type: precision_at_10 value: 36.25 - type: precision_at_100 value: 12.135 - type: precision_at_1000 value: 2.26 - type: precision_at_3 value: 52.75 - type: precision_at_5 value: 45.65 - type: recall_at_1 value: 9.398 - type: recall_at_10 value: 26.596999999999998 - type: recall_at_100 value: 57.943 - type: recall_at_1000 value: 81.147 - type: recall_at_3 value: 16.634 - type: recall_at_5 value: 20.7 - task: type: Classification dataset: name: MTEB EmotionClassification type: mteb/emotion config: default split: test revision: 4f58c6b202a23cf9a4da393831edf4f9183cad37 metrics: - type: accuracy value: 46.535000000000004 - type: f1 value: 42.53702746452163 - task: type: Retrieval dataset: name: MTEB FEVER type: mteb/fever config: default split: test revision: bea83ef9e8fb933d90a2f1d5515737465d613e12 metrics: - type: map_at_1 value: 77.235 - type: map_at_10 value: 85.504 - type: map_at_100 value: 85.707 - type: map_at_1000 value: 85.718 - type: map_at_3 value: 84.425 - type: map_at_5 value: 85.13 - type: mrr_at_1 value: 83.363 - type: mrr_at_10 
value: 89.916 - type: mrr_at_100 value: 89.955 - type: mrr_at_1000 value: 89.956 - type: mrr_at_3 value: 89.32600000000001 - type: mrr_at_5 value: 89.79 - type: ndcg_at_1 value: 83.363 - type: ndcg_at_10 value: 89.015 - type: ndcg_at_100 value: 89.649 - type: ndcg_at_1000 value: 89.825 - type: ndcg_at_3 value: 87.45100000000001 - type: ndcg_at_5 value: 88.39399999999999 - type: precision_at_1 value: 83.363 - type: precision_at_10 value: 10.659 - type: precision_at_100 value: 1.122 - type: precision_at_1000 value: 0.11499999999999999 - type: precision_at_3 value: 33.338 - type: precision_at_5 value: 20.671999999999997 - type: recall_at_1 value: 77.235 - type: recall_at_10 value: 95.389 - type: recall_at_100 value: 97.722 - type: recall_at_1000 value: 98.744 - type: recall_at_3 value: 91.19800000000001 - type: recall_at_5 value: 93.635 - task: type: Retrieval dataset: name: MTEB FiQA2018 type: mteb/fiqa config: default split: test revision: 27a168819829fe9bcd655c2df245fb19452e8e06 metrics: - type: map_at_1 value: 20.835 - type: map_at_10 value: 34.459 - type: map_at_100 value: 36.335 - type: map_at_1000 value: 36.518 - type: map_at_3 value: 30.581000000000003 - type: map_at_5 value: 32.859 - type: mrr_at_1 value: 40.894999999999996 - type: mrr_at_10 value: 50.491 - type: mrr_at_100 value: 51.243 - type: mrr_at_1000 value: 51.286 - type: mrr_at_3 value: 47.994 - type: mrr_at_5 value: 49.429 - type: ndcg_at_1 value: 40.894999999999996 - type: ndcg_at_10 value: 42.403 - type: ndcg_at_100 value: 48.954 - type: ndcg_at_1000 value: 51.961 - type: ndcg_at_3 value: 39.11 - type: ndcg_at_5 value: 40.152 - type: precision_at_1 value: 40.894999999999996 - type: precision_at_10 value: 11.466 - type: precision_at_100 value: 1.833 - type: precision_at_1000 value: 0.23700000000000002 - type: precision_at_3 value: 25.874000000000002 - type: precision_at_5 value: 19.012 - type: recall_at_1 value: 20.835 - type: recall_at_10 value: 49.535000000000004 - type: recall_at_100 value: 73.39099999999999 - type: recall_at_1000 value: 91.01599999999999 - type: recall_at_3 value: 36.379 - type: recall_at_5 value: 42.059999999999995 - task: type: Retrieval dataset: name: MTEB HotpotQA type: mteb/hotpotqa config: default split: test revision: ab518f4d6fcca38d87c25209f94beba119d02014 metrics: - type: map_at_1 value: 40.945 - type: map_at_10 value: 65.376 - type: map_at_100 value: 66.278 - type: map_at_1000 value: 66.33 - type: map_at_3 value: 61.753 - type: map_at_5 value: 64.077 - type: mrr_at_1 value: 81.891 - type: mrr_at_10 value: 87.256 - type: mrr_at_100 value: 87.392 - type: mrr_at_1000 value: 87.395 - type: mrr_at_3 value: 86.442 - type: mrr_at_5 value: 86.991 - type: ndcg_at_1 value: 81.891 - type: ndcg_at_10 value: 73.654 - type: ndcg_at_100 value: 76.62299999999999 - type: ndcg_at_1000 value: 77.60000000000001 - type: ndcg_at_3 value: 68.71199999999999 - type: ndcg_at_5 value: 71.563 - type: precision_at_1 value: 81.891 - type: precision_at_10 value: 15.409 - type: precision_at_100 value: 1.77 - type: precision_at_1000 value: 0.19 - type: precision_at_3 value: 44.15 - type: precision_at_5 value: 28.732000000000003 - type: recall_at_1 value: 40.945 - type: recall_at_10 value: 77.04299999999999 - type: recall_at_100 value: 88.508 - type: recall_at_1000 value: 94.943 - type: recall_at_3 value: 66.226 - type: recall_at_5 value: 71.83 - task: type: Classification dataset: name: MTEB ImdbClassification type: mteb/imdb config: default split: test revision: 3d86128a09e091d6018b6d26cad27f2739fc2db7 metrics: - type: 
accuracy value: 74.08200000000001 - type: ap value: 68.10929101713998 - type: f1 value: 73.98447117652009 - task: type: Retrieval dataset: name: MTEB MSMARCO type: mteb/msmarco config: default split: dev revision: c5a29a104738b98a9e76336939199e264163d4a0 metrics: - type: map_at_1 value: 21.729000000000003 - type: map_at_10 value: 34.602 - type: map_at_100 value: 35.756 - type: map_at_1000 value: 35.803000000000004 - type: map_at_3 value: 30.619000000000003 - type: map_at_5 value: 32.914 - type: mrr_at_1 value: 22.364 - type: mrr_at_10 value: 35.183 - type: mrr_at_100 value: 36.287000000000006 - type: mrr_at_1000 value: 36.327999999999996 - type: mrr_at_3 value: 31.258000000000003 - type: mrr_at_5 value: 33.542 - type: ndcg_at_1 value: 22.364 - type: ndcg_at_10 value: 41.765 - type: ndcg_at_100 value: 47.293 - type: ndcg_at_1000 value: 48.457 - type: ndcg_at_3 value: 33.676 - type: ndcg_at_5 value: 37.783 - type: precision_at_1 value: 22.364 - type: precision_at_10 value: 6.662 - type: precision_at_100 value: 0.943 - type: precision_at_1000 value: 0.104 - type: precision_at_3 value: 14.435999999999998 - type: precision_at_5 value: 10.764999999999999 - type: recall_at_1 value: 21.729000000000003 - type: recall_at_10 value: 63.815999999999995 - type: recall_at_100 value: 89.265 - type: recall_at_1000 value: 98.149 - type: recall_at_3 value: 41.898 - type: recall_at_5 value: 51.76500000000001 - task: type: Classification dataset: name: MTEB MTOPDomainClassification (en) type: mteb/mtop_domain config: en split: test revision: d80d48c1eb48d3562165c59d59d0034df9fff0bf metrics: - type: accuracy value: 92.73141814865483 - type: f1 value: 92.17518476408004 - task: type: Classification dataset: name: MTEB MTOPIntentClassification (en) type: mteb/mtop_intent config: en split: test revision: ae001d0e6b1228650b7bd1c2c65fb50ad11a8aba metrics: - type: accuracy value: 65.18011855905152 - type: f1 value: 46.70999638311856 - task: type: Classification dataset: name: MTEB MasakhaNEWSClassification (eng) type: masakhane/masakhanews config: eng split: test revision: 8ccc72e69e65f40c70e117d8b3c08306bb788b60 metrics: - type: accuracy value: 75.24261603375525 - type: f1 value: 74.07895183913367 - task: type: Clustering dataset: name: MTEB MasakhaNEWSClusteringP2P (eng) type: masakhane/masakhanews config: eng split: test revision: 8ccc72e69e65f40c70e117d8b3c08306bb788b60 metrics: - type: v_measure value: 28.43855875387446 - type: v_measure value: 29.05331990256969 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (en) type: mteb/amazon_massive_intent config: en split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 66.92333557498318 - type: f1 value: 64.29789389602692 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (en) type: mteb/amazon_massive_scenario config: en split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 72.74714189643578 - type: f1 value: 71.672585608315 - task: type: Clustering dataset: name: MTEB MedrxivClusteringP2P type: mteb/medrxiv-clustering-p2p config: default split: test revision: e7a26af6f3ae46b30dde8737f02c07b1505bcc73 metrics: - type: v_measure value: 31.503564225501613 - task: type: Clustering dataset: name: MTEB MedrxivClusteringS2S type: mteb/medrxiv-clustering-s2s config: default split: test revision: 35191c8c0dca72d8ff3efcd72aa802307d469663 metrics: - type: v_measure value: 28.410225127136457 - task: type: Reranking dataset: name: MTEB 
MindSmallReranking type: mteb/mind_small config: default split: test revision: 3bdac13927fdc888b903db93b2ffdbd90b295a69 metrics: - type: map value: 29.170019896091908 - type: mrr value: 29.881276831500976 - task: type: Retrieval dataset: name: MTEB NFCorpus type: mteb/nfcorpus config: default split: test revision: ec0fa4fe99da2ff19ca1214b7966684033a58814 metrics: - type: map_at_1 value: 6.544 - type: map_at_10 value: 14.116999999999999 - type: map_at_100 value: 17.522 - type: map_at_1000 value: 19 - type: map_at_3 value: 10.369 - type: map_at_5 value: 12.189 - type: mrr_at_1 value: 47.988 - type: mrr_at_10 value: 56.84 - type: mrr_at_100 value: 57.367000000000004 - type: mrr_at_1000 value: 57.403000000000006 - type: mrr_at_3 value: 54.592 - type: mrr_at_5 value: 56.233 - type: ndcg_at_1 value: 45.82 - type: ndcg_at_10 value: 36.767 - type: ndcg_at_100 value: 33.356 - type: ndcg_at_1000 value: 42.062 - type: ndcg_at_3 value: 42.15 - type: ndcg_at_5 value: 40.355000000000004 - type: precision_at_1 value: 47.988 - type: precision_at_10 value: 27.121000000000002 - type: precision_at_100 value: 8.455 - type: precision_at_1000 value: 2.103 - type: precision_at_3 value: 39.628 - type: precision_at_5 value: 35.356 - type: recall_at_1 value: 6.544 - type: recall_at_10 value: 17.928 - type: recall_at_100 value: 32.843 - type: recall_at_1000 value: 65.752 - type: recall_at_3 value: 11.297 - type: recall_at_5 value: 14.357000000000001 - task: type: Retrieval dataset: name: MTEB NQ type: mteb/nq config: default split: test revision: b774495ed302d8c44a3a7ea25c90dbce03968f31 metrics: - type: map_at_1 value: 39.262 - type: map_at_10 value: 55.095000000000006 - type: map_at_100 value: 55.93900000000001 - type: map_at_1000 value: 55.955999999999996 - type: map_at_3 value: 50.93 - type: map_at_5 value: 53.491 - type: mrr_at_1 value: 43.598 - type: mrr_at_10 value: 57.379999999999995 - type: mrr_at_100 value: 57.940999999999995 - type: mrr_at_1000 value: 57.952000000000005 - type: mrr_at_3 value: 53.998000000000005 - type: mrr_at_5 value: 56.128 - type: ndcg_at_1 value: 43.598 - type: ndcg_at_10 value: 62.427 - type: ndcg_at_100 value: 65.759 - type: ndcg_at_1000 value: 66.133 - type: ndcg_at_3 value: 54.745999999999995 - type: ndcg_at_5 value: 58.975 - type: precision_at_1 value: 43.598 - type: precision_at_10 value: 9.789 - type: precision_at_100 value: 1.171 - type: precision_at_1000 value: 0.121 - type: precision_at_3 value: 24.295 - type: precision_at_5 value: 17.028 - type: recall_at_1 value: 39.262 - type: recall_at_10 value: 82.317 - type: recall_at_100 value: 96.391 - type: recall_at_1000 value: 99.116 - type: recall_at_3 value: 62.621 - type: recall_at_5 value: 72.357 - task: type: Classification dataset: name: MTEB NewsClassification type: ag_news config: default split: test revision: eb185aade064a813bc0b7f42de02595523103ca4 metrics: - type: accuracy value: 78.17500000000001 - type: f1 value: 78.01940892857273 - task: type: PairClassification dataset: name: MTEB OpusparcusPC (en) type: GEM/opusparcus config: en split: test revision: 9e9b1f8ef51616073f47f306f7f47dd91663f86a metrics: - type: cos_sim_accuracy value: 99.89816700610999 - type: cos_sim_ap value: 100 - type: cos_sim_f1 value: 99.9490575649516 - type: cos_sim_precision value: 100 - type: cos_sim_recall value: 99.89816700610999 - type: dot_accuracy value: 99.89816700610999 - type: dot_ap value: 100 - type: dot_f1 value: 99.9490575649516 - type: dot_precision value: 100 - type: dot_recall value: 99.89816700610999 - type: euclidean_accuracy 
value: 99.89816700610999 - type: euclidean_ap value: 100 - type: euclidean_f1 value: 99.9490575649516 - type: euclidean_precision value: 100 - type: euclidean_recall value: 99.89816700610999 - type: manhattan_accuracy value: 99.89816700610999 - type: manhattan_ap value: 100 - type: manhattan_f1 value: 99.9490575649516 - type: manhattan_precision value: 100 - type: manhattan_recall value: 99.89816700610999 - type: max_accuracy value: 99.89816700610999 - type: max_ap value: 100 - type: max_f1 value: 99.9490575649516 - task: type: PairClassification dataset: name: MTEB PawsX (en) type: paws-x config: en split: test revision: 8a04d940a42cd40658986fdd8e3da561533a3646 metrics: - type: cos_sim_accuracy value: 61 - type: cos_sim_ap value: 59.630757252602464 - type: cos_sim_f1 value: 62.37521514629949 - type: cos_sim_precision value: 45.34534534534534 - type: cos_sim_recall value: 99.88974641675854 - type: dot_accuracy value: 61 - type: dot_ap value: 59.631527308059006 - type: dot_f1 value: 62.37521514629949 - type: dot_precision value: 45.34534534534534 - type: dot_recall value: 99.88974641675854 - type: euclidean_accuracy value: 61 - type: euclidean_ap value: 59.630757252602464 - type: euclidean_f1 value: 62.37521514629949 - type: euclidean_precision value: 45.34534534534534 - type: euclidean_recall value: 99.88974641675854 - type: manhattan_accuracy value: 60.9 - type: manhattan_ap value: 59.613947780462254 - type: manhattan_f1 value: 62.37521514629949 - type: manhattan_precision value: 45.34534534534534 - type: manhattan_recall value: 99.88974641675854 - type: max_accuracy value: 61 - type: max_ap value: 59.631527308059006 - type: max_f1 value: 62.37521514629949 - task: type: Retrieval dataset: name: MTEB QuoraRetrieval type: mteb/quora config: default split: test revision: e4e08e0b7dbe3c8700f0daef558ff32256715259 metrics: - type: map_at_1 value: 69.963 - type: map_at_10 value: 83.59400000000001 - type: map_at_100 value: 84.236 - type: map_at_1000 value: 84.255 - type: map_at_3 value: 80.69800000000001 - type: map_at_5 value: 82.568 - type: mrr_at_1 value: 80.58999999999999 - type: mrr_at_10 value: 86.78200000000001 - type: mrr_at_100 value: 86.89099999999999 - type: mrr_at_1000 value: 86.893 - type: mrr_at_3 value: 85.757 - type: mrr_at_5 value: 86.507 - type: ndcg_at_1 value: 80.60000000000001 - type: ndcg_at_10 value: 87.41799999999999 - type: ndcg_at_100 value: 88.723 - type: ndcg_at_1000 value: 88.875 - type: ndcg_at_3 value: 84.565 - type: ndcg_at_5 value: 86.236 - type: precision_at_1 value: 80.60000000000001 - type: precision_at_10 value: 13.239 - type: precision_at_100 value: 1.5150000000000001 - type: precision_at_1000 value: 0.156 - type: precision_at_3 value: 36.947 - type: precision_at_5 value: 24.354 - type: recall_at_1 value: 69.963 - type: recall_at_10 value: 94.553 - type: recall_at_100 value: 99.104 - type: recall_at_1000 value: 99.872 - type: recall_at_3 value: 86.317 - type: recall_at_5 value: 91.023 - task: type: Clustering dataset: name: MTEB RedditClustering type: mteb/reddit-clustering config: default split: test revision: 24640382cdbf8abc73003fb0fa6d111a705499eb metrics: - type: v_measure value: 47.52890410998761 - task: type: Clustering dataset: name: MTEB RedditClusteringP2P type: mteb/reddit-clustering-p2p config: default split: test revision: 385e3cb46b4cfa89021f56c4380204149d0efe33 metrics: - type: v_measure value: 62.760692287940486 - task: type: Retrieval dataset: name: MTEB SCIDOCS type: mteb/scidocs config: default split: test revision: 
f8c2fcf00f625baaa80f62ec5bd9e1fff3b8ae88 metrics: - type: map_at_1 value: 5.093 - type: map_at_10 value: 12.695 - type: map_at_100 value: 14.824000000000002 - type: map_at_1000 value: 15.123000000000001 - type: map_at_3 value: 8.968 - type: map_at_5 value: 10.828 - type: mrr_at_1 value: 25.1 - type: mrr_at_10 value: 35.894999999999996 - type: mrr_at_100 value: 36.966 - type: mrr_at_1000 value: 37.019999999999996 - type: mrr_at_3 value: 32.467 - type: mrr_at_5 value: 34.416999999999994 - type: ndcg_at_1 value: 25.1 - type: ndcg_at_10 value: 21.096999999999998 - type: ndcg_at_100 value: 29.202 - type: ndcg_at_1000 value: 34.541 - type: ndcg_at_3 value: 19.875 - type: ndcg_at_5 value: 17.497 - type: precision_at_1 value: 25.1 - type: precision_at_10 value: 10.9 - type: precision_at_100 value: 2.255 - type: precision_at_1000 value: 0.35400000000000004 - type: precision_at_3 value: 18.367 - type: precision_at_5 value: 15.299999999999999 - type: recall_at_1 value: 5.093 - type: recall_at_10 value: 22.092 - type: recall_at_100 value: 45.778 - type: recall_at_1000 value: 71.985 - type: recall_at_3 value: 11.167 - type: recall_at_5 value: 15.501999999999999 - task: type: STS dataset: name: MTEB SICK-R type: mteb/sickr-sts config: default split: test revision: 20a6d6f312dd54037fe07a32d58e5e168867909d metrics: - type: cos_sim_pearson value: 74.04386981759481 - type: cos_sim_spearman value: 69.12484963763646 - type: euclidean_pearson value: 71.49384353291062 - type: euclidean_spearman value: 69.12484548317074 - type: manhattan_pearson value: 71.49828173987272 - type: manhattan_spearman value: 69.08350274367014 - task: type: STS dataset: name: MTEB STS12 type: mteb/sts12-sts config: default split: test revision: a0d554a64d88156834ff5ae9920b964011b16384 metrics: - type: cos_sim_pearson value: 66.95372527615659 - type: cos_sim_spearman value: 66.96821894433991 - type: euclidean_pearson value: 64.675348002074 - type: euclidean_spearman value: 66.96821894433991 - type: manhattan_pearson value: 64.5965887073831 - type: manhattan_spearman value: 66.88569076794741 - task: type: STS dataset: name: MTEB STS13 type: mteb/sts13-sts config: default split: test revision: 7e90230a92c190f1bf69ae9002b8cea547a64cca metrics: - type: cos_sim_pearson value: 77.34698437961983 - type: cos_sim_spearman value: 79.1153001117325 - type: euclidean_pearson value: 78.53562874696966 - type: euclidean_spearman value: 79.11530018205724 - type: manhattan_pearson value: 78.46484988944093 - type: manhattan_spearman value: 79.01416027493104 - task: type: STS dataset: name: MTEB STS14 type: mteb/sts14-sts config: default split: test revision: 6031580fec1f6af667f0bd2da0a551cf4f0b2375 metrics: - type: cos_sim_pearson value: 68.81220371935373 - type: cos_sim_spearman value: 68.50538405089604 - type: euclidean_pearson value: 68.69204272683749 - type: euclidean_spearman value: 68.50534223912419 - type: manhattan_pearson value: 68.67300120149523 - type: manhattan_spearman value: 68.45404301623115 - task: type: STS dataset: name: MTEB STS15 type: mteb/sts15-sts config: default split: test revision: ae752c7c21bf194d8b67fd573edf7ae58183cbe3 metrics: - type: cos_sim_pearson value: 78.2464678879813 - type: cos_sim_spearman value: 79.92003940566667 - type: euclidean_pearson value: 79.8080778793964 - type: euclidean_spearman value: 79.92003940566667 - type: manhattan_pearson value: 79.80153621444681 - type: manhattan_spearman value: 79.91293261418134 - task: type: STS dataset: name: MTEB STS16 type: mteb/sts16-sts config: default split: test revision: 
4d8694f8f0e0100860b497b999b3dbed754a0513 metrics: - type: cos_sim_pearson value: 76.31179207708662 - type: cos_sim_spearman value: 78.65597349856115 - type: euclidean_pearson value: 78.76937027472678 - type: euclidean_spearman value: 78.65597349856115 - type: manhattan_pearson value: 78.77129513300605 - type: manhattan_spearman value: 78.62640467680775 - task: type: STS dataset: name: MTEB STS17 (en-en) type: mteb/sts17-crosslingual-sts config: en-en split: test revision: af5e6fb845001ecf41f4c1e033ce921939a2a68d metrics: - type: cos_sim_pearson value: 79.43158429552561 - type: cos_sim_spearman value: 81.46108646565362 - type: euclidean_pearson value: 81.47071791452292 - type: euclidean_spearman value: 81.46108646565362 - type: manhattan_pearson value: 81.56920643846031 - type: manhattan_spearman value: 81.42226241399516 - task: type: STS dataset: name: MTEB STS22 (en) type: mteb/sts22-crosslingual-sts config: en split: test revision: eea2b4fe26a775864c896887d910b76a8098ad3f metrics: - type: cos_sim_pearson value: 66.89546474141514 - type: cos_sim_spearman value: 65.8393752170531 - type: euclidean_pearson value: 67.2580522762307 - type: euclidean_spearman value: 65.8393752170531 - type: manhattan_pearson value: 67.45157729300522 - type: manhattan_spearman value: 66.19470854403802 - task: type: STS dataset: name: MTEB STSBenchmark type: mteb/stsbenchmark-sts config: default split: test revision: b0fddb56ed78048fa8b90373c8a3cfc37b684831 metrics: - type: cos_sim_pearson value: 71.39566306334434 - type: cos_sim_spearman value: 74.0981396086974 - type: euclidean_pearson value: 73.7834496259745 - type: euclidean_spearman value: 74.09803741302046 - type: manhattan_pearson value: 73.79958138780945 - type: manhattan_spearman value: 74.09894837555905 - task: type: STS dataset: name: MTEB STSBenchmarkMultilingualSTS (en) type: PhilipMay/stsb_multi_mt config: en split: test revision: 93d57ef91790589e3ce9c365164337a8a78b7632 metrics: - type: cos_sim_pearson value: 71.39566311006806 - type: cos_sim_spearman value: 74.0981396086974 - type: euclidean_pearson value: 73.78344970897099 - type: euclidean_spearman value: 74.09803741302046 - type: manhattan_pearson value: 73.79958147136705 - type: manhattan_spearman value: 74.09894837555905 - task: type: Reranking dataset: name: MTEB SciDocsRR type: mteb/scidocs-reranking config: default split: test revision: d3c5e1fc0b855ab6097bf1cda04dd73947d7caab metrics: - type: map value: 80.81059564334683 - type: mrr value: 94.62696617108381 - task: type: Retrieval dataset: name: MTEB SciFact type: mteb/scifact config: default split: test revision: 0228b52cf27578f30900b9e5271d331663a030d7 metrics: - type: map_at_1 value: 57.760999999999996 - type: map_at_10 value: 68.614 - type: map_at_100 value: 69.109 - type: map_at_1000 value: 69.134 - type: map_at_3 value: 65.735 - type: map_at_5 value: 67.42099999999999 - type: mrr_at_1 value: 60.667 - type: mrr_at_10 value: 69.94200000000001 - type: mrr_at_100 value: 70.254 - type: mrr_at_1000 value: 70.28 - type: mrr_at_3 value: 67.72200000000001 - type: mrr_at_5 value: 69.18900000000001 - type: ndcg_at_1 value: 60.667 - type: ndcg_at_10 value: 73.548 - type: ndcg_at_100 value: 75.381 - type: ndcg_at_1000 value: 75.991 - type: ndcg_at_3 value: 68.685 - type: ndcg_at_5 value: 71.26 - type: precision_at_1 value: 60.667 - type: precision_at_10 value: 9.833 - type: precision_at_100 value: 1.08 - type: precision_at_1000 value: 0.11299999999999999 - type: precision_at_3 value: 26.889000000000003 - type: precision_at_5 value: 17.8 - type: 
recall_at_1 value: 57.760999999999996 - type: recall_at_10 value: 87.13300000000001 - type: recall_at_100 value: 95 - type: recall_at_1000 value: 99.667 - type: recall_at_3 value: 74.211 - type: recall_at_5 value: 80.63900000000001 - task: type: PairClassification dataset: name: MTEB SprintDuplicateQuestions type: mteb/sprintduplicatequestions-pairclassification config: default split: test revision: d66bd1f72af766a5cc4b0ca5e00c162f89e8cc46 metrics: - type: cos_sim_accuracy value: 99.81881188118813 - type: cos_sim_ap value: 95.21196473745837 - type: cos_sim_f1 value: 90.69767441860465 - type: cos_sim_precision value: 91.71779141104295 - type: cos_sim_recall value: 89.7 - type: dot_accuracy value: 99.81881188118813 - type: dot_ap value: 95.21196473745837 - type: dot_f1 value: 90.69767441860465 - type: dot_precision value: 91.71779141104295 - type: dot_recall value: 89.7 - type: euclidean_accuracy value: 99.81881188118813 - type: euclidean_ap value: 95.21196473745839 - type: euclidean_f1 value: 90.69767441860465 - type: euclidean_precision value: 91.71779141104295 - type: euclidean_recall value: 89.7 - type: manhattan_accuracy value: 99.81287128712871 - type: manhattan_ap value: 95.16667174835017 - type: manhattan_f1 value: 90.41095890410959 - type: manhattan_precision value: 91.7610710607621 - type: manhattan_recall value: 89.1 - type: max_accuracy value: 99.81881188118813 - type: max_ap value: 95.21196473745839 - type: max_f1 value: 90.69767441860465 - task: type: Clustering dataset: name: MTEB StackExchangeClustering type: mteb/stackexchange-clustering config: default split: test revision: 6cbc1f7b2bc0622f2e39d2c77fa502909748c259 metrics: - type: v_measure value: 59.54942204515638 - task: type: Clustering dataset: name: MTEB StackExchangeClusteringP2P type: mteb/stackexchange-clustering-p2p config: default split: test revision: 815ca46b2622cec33ccafc3735d572c266efdb44 metrics: - type: v_measure value: 39.42892282672948 - task: type: Reranking dataset: name: MTEB StackOverflowDupQuestions type: mteb/stackoverflowdupquestions-reranking config: default split: test revision: e185fbe320c72810689fc5848eb6114e1ef5ec69 metrics: - type: map value: 51.189033075914324 - type: mrr value: 51.97014790764791 - task: type: Summarization dataset: name: MTEB SummEval type: mteb/summeval config: default split: test revision: cda12ad7615edc362dbf25a00fdd61d3b1eaf93c metrics: - type: cos_sim_pearson value: 30.09466569775977 - type: cos_sim_spearman value: 30.31058660775912 - type: dot_pearson value: 30.09466438861689 - type: dot_spearman value: 30.31058660775912 - task: type: Retrieval dataset: name: MTEB TRECCOVID type: mteb/trec-covid config: default split: test revision: bb9466bac8153a0349341eb1b22e06409e78ef4e metrics: - type: map_at_1 value: 0.253 - type: map_at_10 value: 2.07 - type: map_at_100 value: 12.679000000000002 - type: map_at_1000 value: 30.412 - type: map_at_3 value: 0.688 - type: map_at_5 value: 1.079 - type: mrr_at_1 value: 96 - type: mrr_at_10 value: 98 - type: mrr_at_100 value: 98 - type: mrr_at_1000 value: 98 - type: mrr_at_3 value: 98 - type: mrr_at_5 value: 98 - type: ndcg_at_1 value: 89 - type: ndcg_at_10 value: 79.646 - type: ndcg_at_100 value: 62.217999999999996 - type: ndcg_at_1000 value: 55.13400000000001 - type: ndcg_at_3 value: 83.458 - type: ndcg_at_5 value: 80.982 - type: precision_at_1 value: 96 - type: precision_at_10 value: 84.6 - type: precision_at_100 value: 64.34 - type: precision_at_1000 value: 24.534 - type: precision_at_3 value: 88.667 - type: precision_at_5 value: 85.6 
- type: recall_at_1 value: 0.253 - type: recall_at_10 value: 2.253 - type: recall_at_100 value: 15.606 - type: recall_at_1000 value: 51.595 - type: recall_at_3 value: 0.7100000000000001 - type: recall_at_5 value: 1.139 - task: type: Retrieval dataset: name: MTEB Touche2020 type: mteb/touche2020 config: default split: test revision: a34f9a33db75fa0cbb21bb5cfc3dae8dc8bec93f metrics: - type: map_at_1 value: 3.0540000000000003 - type: map_at_10 value: 13.078999999999999 - type: map_at_100 value: 19.468 - type: map_at_1000 value: 21.006 - type: map_at_3 value: 6.8629999999999995 - type: map_at_5 value: 9.187 - type: mrr_at_1 value: 42.857 - type: mrr_at_10 value: 56.735 - type: mrr_at_100 value: 57.352000000000004 - type: mrr_at_1000 value: 57.352000000000004 - type: mrr_at_3 value: 52.721 - type: mrr_at_5 value: 54.66 - type: ndcg_at_1 value: 38.775999999999996 - type: ndcg_at_10 value: 31.469 - type: ndcg_at_100 value: 42.016999999999996 - type: ndcg_at_1000 value: 52.60399999999999 - type: ndcg_at_3 value: 35.894 - type: ndcg_at_5 value: 33.873 - type: precision_at_1 value: 42.857 - type: precision_at_10 value: 27.346999999999998 - type: precision_at_100 value: 8.327 - type: precision_at_1000 value: 1.551 - type: precision_at_3 value: 36.735 - type: precision_at_5 value: 33.469 - type: recall_at_1 value: 3.0540000000000003 - type: recall_at_10 value: 19.185 - type: recall_at_100 value: 51.056000000000004 - type: recall_at_1000 value: 82.814 - type: recall_at_3 value: 7.961 - type: recall_at_5 value: 11.829 - task: type: Classification dataset: name: MTEB ToxicConversationsClassification type: mteb/toxic_conversations_50k config: default split: test revision: edfaf9da55d3dd50d43143d90c1ac476895ae6de metrics: - type: accuracy value: 64.9346 - type: ap value: 12.121605736777527 - type: f1 value: 50.169902005887955 - task: type: Classification dataset: name: MTEB TweetSentimentExtractionClassification type: mteb/tweet_sentiment_extraction config: default split: test revision: d604517c81ca91fe16a244d1248fc021f9ecee7a metrics: - type: accuracy value: 56.72608941709111 - type: f1 value: 57.0702928875253 - task: type: Clustering dataset: name: MTEB TwentyNewsgroupsClustering type: mteb/twentynewsgroups-clustering config: default split: test revision: 6125ec4e24fa026cec8a478383ee943acfbd5449 metrics: - type: v_measure value: 37.72671554400943 - task: type: PairClassification dataset: name: MTEB TwitterSemEval2015 type: mteb/twittersemeval2015-pairclassification config: default split: test revision: 70970daeab8776df92f5ea462b6173c0b46fd2d1 metrics: - type: cos_sim_accuracy value: 82.84556237706384 - type: cos_sim_ap value: 63.28364215788651 - type: cos_sim_f1 value: 60.00000000000001 - type: cos_sim_precision value: 54.45161290322581 - type: cos_sim_recall value: 66.80738786279683 - type: dot_accuracy value: 82.84556237706384 - type: dot_ap value: 63.28364302860433 - type: dot_f1 value: 60.00000000000001 - type: dot_precision value: 54.45161290322581 - type: dot_recall value: 66.80738786279683 - type: euclidean_accuracy value: 82.84556237706384 - type: euclidean_ap value: 63.28363625097978 - type: euclidean_f1 value: 60.00000000000001 - type: euclidean_precision value: 54.45161290322581 - type: euclidean_recall value: 66.80738786279683 - type: manhattan_accuracy value: 82.86940454193241 - type: manhattan_ap value: 63.244773709836764 - type: manhattan_f1 value: 60.12680942696495 - type: manhattan_precision value: 55.00109433136353 - type: manhattan_recall value: 66.3060686015831 - type: max_accuracy 
value: 82.86940454193241 - type: max_ap value: 63.28364302860433 - type: max_f1 value: 60.12680942696495 - task: type: PairClassification dataset: name: MTEB TwitterURLCorpus type: mteb/twitterurlcorpus-pairclassification config: default split: test revision: 8b6510b0b1fa4e4c4f879467980e9be563ec1cdf metrics: - type: cos_sim_accuracy value: 88.32033220786278 - type: cos_sim_ap value: 84.71928176006863 - type: cos_sim_f1 value: 76.51483333969684 - type: cos_sim_precision value: 75.89184276300841 - type: cos_sim_recall value: 77.14813674160764 - type: dot_accuracy value: 88.32033220786278 - type: dot_ap value: 84.71928330149228 - type: dot_f1 value: 76.51483333969684 - type: dot_precision value: 75.89184276300841 - type: dot_recall value: 77.14813674160764 - type: euclidean_accuracy value: 88.32033220786278 - type: euclidean_ap value: 84.71928045384345 - type: euclidean_f1 value: 76.51483333969684 - type: euclidean_precision value: 75.89184276300841 - type: euclidean_recall value: 77.14813674160764 - type: manhattan_accuracy value: 88.27570147863545 - type: manhattan_ap value: 84.68523541579755 - type: manhattan_f1 value: 76.51512269355146 - type: manhattan_precision value: 75.62608107091825 - type: manhattan_recall value: 77.42531567600862 - type: max_accuracy value: 88.32033220786278 - type: max_ap value: 84.71928330149228 - type: max_f1 value: 76.51512269355146 - task: type: Clustering dataset: name: MTEB WikiCitiesClustering type: jinaai/cities_wiki_clustering config: default split: test revision: ddc9ee9242fa65332597f70e967ecc38b9d734fa metrics: - type: v_measure value: 85.30624598674467
---

<h1 align="center">Snowflake's Arctic-embed-m</h1>
<h4 align="center">
   <p>
       <a href=#news>News</a> |
       <a href=#models>Models</a> |
       <a href=#usage>Usage</a> |
       <a href="#evaluation">Evaluation</a> |
       <a href="#contact">Contact</a> |
       <a href="#faq">FAQ</a> |
       <a href="#license">License</a> |
       <a href="#acknowledgement">Acknowledgement</a>
   <p>
</h4>

## News

12/04/2024: Release of [snowflake-arctic-embed-l-v2.0](https://huggingface.co/Snowflake/snowflake-arctic-embed-l-v2.0) and [snowflake-arctic-embed-m-v2.0](https://huggingface.co/Snowflake/snowflake-arctic-embed-m-v2.0), our newest models built with multilingual workloads in mind. These models outperform prior versions of Arctic Embed, and we suggest they replace the prior versions!

07/26/2024: Release preprint [[2407.18887] Embedding And Clustering Your Data Can Improve Contrastive Pretraining](https://arxiv.org/abs/2407.18887) on arXiv.

07/18/2024: Release of `snowflake-arctic-embed-m-v1.5`, capable of producing highly compressible embedding vectors that preserve quality even when squished as small as 128 bytes per vector. Details about the development of this model are available in the [launch post on the Snowflake engineering blog](https://www.snowflake.com/engineering-blog/arctic-embed-m-v1-5-enterprise-retrieval/).

05/10/2024: Release of the [technical report on Arctic Embed](https://arxiv.org/abs/2405.05374).

04/16/2024: Release of the **snowflake-arctic-embed** family of text embedding models. The releases are state-of-the-art for retrieval quality at each of their representative size profiles. [Technical Report]() is coming shortly. For more details, please refer to our GitHub: [Arctic-Text-Embed](https://github.com/Snowflake-Labs/arctic-embed).

## Models

snowflake-arctic-embed is a suite of text embedding models that focuses on creating high-quality retrieval models optimized for performance.

The `snowflake-arctic-embedding` models achieve **state-of-the-art performance on the MTEB/BEIR leaderboard** for each of their size variants. Evaluation is performed using these [scripts](https://github.com/Snowflake-Labs/snowflake-arctic-embed/tree/main/src). As shown below, each class of model size achieves SOTA retrieval accuracy compared to other top models.

The models are trained by leveraging existing open-source text representation models, such as bert-base-uncased, and are trained in a multi-stage pipeline to optimize their retrieval performance. First, the models are trained with large batches of query-document pairs in which negatives are derived in-batch; this pretraining leverages about 400m samples of a mix of public datasets and proprietary web search data. After pretraining, the models are further optimized with long training on a smaller dataset (about 1m samples) of triplets of query, positive document, and negative document derived from hard negative mining. Mining of the negatives and data curation are crucial to retrieval accuracy. A detailed technical report can be found [here](https://arxiv.org/abs/2405.05374).
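To make the in-batch negative setup described above concrete, here is a minimal, illustrative sketch of an InfoNCE-style contrastive loss over a batch of query-document pairs. This is not the actual Arctic Embed training code; the encoder, batch size, and temperature are placeholders chosen only for the example.

```python
# Illustrative sketch of an in-batch-negatives contrastive (InfoNCE-style) loss.
# NOT the actual Arctic Embed training code; any encoder producing L2-normalized
# embeddings could stand in for the random tensors used below.
import torch
import torch.nn.functional as F


def in_batch_contrastive_loss(query_emb: torch.Tensor,
                              doc_emb: torch.Tensor,
                              temperature: float = 0.02) -> torch.Tensor:
    """query_emb, doc_emb: (batch, dim) L2-normalized embeddings of paired (query, positive doc)."""
    # Similarity of every query against every document in the batch:
    # the diagonal holds the positives, off-diagonal entries act as negatives.
    logits = query_emb @ doc_emb.T / temperature          # (batch, batch)
    targets = torch.arange(logits.size(0), device=logits.device)
    return F.cross_entropy(logits, targets)


# Toy usage with random "embeddings"
q = F.normalize(torch.randn(8, 768), dim=-1)
d = F.normalize(torch.randn(8, 768), dim=-1)
print(in_batch_contrastive_loss(q, d))
```

In this formulation every other document in the batch serves as a negative for a given query, which is why large batches matter during the pretraining stage before the hard-negative fine-tuning described above.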
| Name | MTEB Retrieval Score (NDCG @ 10) | Parameters (Millions) | Embedding Dimension |
| --- | --- | --- | --- |
| [snowflake-arctic-embed-xs](https://huggingface.co/Snowflake/snowflake-arctic-embed-xs/) | 50.15 | 22 | 384 |
| [snowflake-arctic-embed-s](https://huggingface.co/Snowflake/snowflake-arctic-embed-s/) | 51.98 | 33 | 384 |
| [snowflake-arctic-embed-m](https://huggingface.co/Snowflake/snowflake-arctic-embed-m/) | 54.90 | 110 | 768 |
| [snowflake-arctic-embed-m-long](https://huggingface.co/Snowflake/snowflake-arctic-embed-m-long/) | 54.83 | 137 | 768 |
| [snowflake-arctic-embed-l](https://huggingface.co/Snowflake/snowflake-arctic-embed-l/) | 55.98 | 335 | 1024 |

Aside from being great open-source models, the largest model, [snowflake-arctic-embed-l](https://huggingface.co/Snowflake/snowflake-arctic-embed-l/), can serve as a natural replacement for closed-source embedding APIs, as shown below.

| Model Name | MTEB Retrieval Score (NDCG @ 10) |
| --- | --- |
| [snowflake-arctic-embed-l](https://huggingface.co/Snowflake/snowflake-arctic-embed-l/) | 55.98 |
| Google-gecko-text-embedding | 55.7 |
| text-embedding-3-large | 55.44 |
| Cohere-embed-english-v3.0 | 55.00 |
| bge-large-en-v1.5 | 54.29 |

### [snowflake-arctic-embed-xs](https://huggingface.co/Snowflake/snowflake-arctic-embed-xs)

This tiny model packs quite the punch. Based on the [all-MiniLM-L6-v2](https://huggingface.co/sentence-transformers/all-MiniLM-L6-v2) model with only 22m parameters and 384 dimensions, this model should meet even the strictest latency/TCO budgets. Despite its size, its retrieval accuracy is closer to that of models with 100m parameters.

| Model Name | MTEB Retrieval Score (NDCG @ 10) |
| --- | --- |
| [snowflake-arctic-embed-xs](https://huggingface.co/Snowflake/snowflake-arctic-embed-xs/) | 50.15 |
| GIST-all-MiniLM-L6-v2 | 45.12 |
| gte-tiny | 44.92 |
| all-MiniLM-L6-v2 | 41.95 |
| bge-micro-v2 | 42.56 |

### [snowflake-arctic-embed-s](https://huggingface.co/Snowflake/snowflake-arctic-embed-s)

Based on the [intfloat/e5-small-unsupervised](https://huggingface.co/intfloat/e5-small-unsupervised) model, this small model does not trade off retrieval accuracy for its small size. With only 33m parameters and 384 dimensions, this model should easily allow scaling to large datasets.

| Model Name | MTEB Retrieval Score (NDCG @ 10) |
| --- | --- |
| [snowflake-arctic-embed-s](https://huggingface.co/Snowflake/snowflake-arctic-embed-s/) | 51.98 |
| bge-small-en-v1.5 | 51.68 |
| Cohere-embed-english-light-v3.0 | 51.34 |
| text-embedding-3-small | 51.08 |
| e5-small-v2 | 49.04 |

### [snowflake-arctic-embed-m](https://huggingface.co/Snowflake/snowflake-arctic-embed-m/)

Based on the [intfloat/e5-base-unsupervised](https://huggingface.co/intfloat/e5-base-unsupervised) model, this medium model is the workhorse that provides the best retrieval performance without slowing down inference.

| Model Name | MTEB Retrieval Score (NDCG @ 10) |
| --- | --- |
| [snowflake-arctic-embed-m](https://huggingface.co/Snowflake/snowflake-arctic-embed-m/) | 54.90 |
| bge-base-en-v1.5 | 53.25 |
| nomic-embed-text-v1.5 | 53.25 |
| GIST-Embedding-v0 | 52.31 |
| gte-base | 52.31 |

### [snowflake-arctic-embed-m-long](https://huggingface.co/Snowflake/snowflake-arctic-embed-m-long/)

Based on the [nomic-ai/nomic-embed-text-v1-unsupervised](https://huggingface.co/nomic-ai/nomic-embed-text-v1-unsupervised) model, this long-context variant of our medium-sized model is perfect for workloads that can be constrained by the regular 512-token context of our other models. Without the use of RPE, this model supports up to 2048 tokens. With RPE, it can scale to 8192!

| Model Name | MTEB Retrieval Score (NDCG @ 10) |
| --- | --- |
| [snowflake-arctic-embed-m-long](https://huggingface.co/Snowflake/snowflake-arctic-embed-m-long/) | 54.83 |
| nomic-embed-text-v1.5 | 53.01 |
| nomic-embed-text-v1 | 52.81 |

### [snowflake-arctic-embed-l](https://huggingface.co/Snowflake/snowflake-arctic-embed-l/)

Based on the [intfloat/e5-large-unsupervised](https://huggingface.co/intfloat/e5-large-unsupervised) model, this large model is a direct drop-in for closed APIs and delivers the most accurate retrieval experience.

| Model Name | MTEB Retrieval Score (NDCG @ 10) |
| --- | --- |
| [snowflake-arctic-embed-l](https://huggingface.co/Snowflake/snowflake-arctic-embed-l/) | 55.98 |
| UAE-Large-V1 | 54.66 |
| bge-large-en-v1.5 | 54.29 |
| mxbai-embed-large-v1 | 54.39 |
| e5-Large-v2 | 50.56 |

## Usage

### Using Sentence Transformers

You can use the sentence-transformers package to use a snowflake-arctic-embed model, as shown below.
```python
from sentence_transformers import SentenceTransformer

model = SentenceTransformer("Snowflake/snowflake-arctic-embed-m")

queries = ['what is snowflake?', 'Where can I get the best tacos?']
documents = ['The Data Cloud!', 'Mexico City of Course!']

query_embeddings = model.encode(queries, prompt_name="query")
document_embeddings = model.encode(documents)

scores = query_embeddings @ document_embeddings.T
for query, query_scores in zip(queries, scores):
    doc_score_pairs = list(zip(documents, query_scores))
    doc_score_pairs = sorted(doc_score_pairs, key=lambda x: x[1], reverse=True)
    # Output passages & scores
    print("Query:", query)
    for document, score in doc_score_pairs:
        print(score, document)
```

Produces:

```
Query: what is snowflake?
0.2747492 The Data Cloud!
0.19998045 Mexico City of Course!
Query: Where can I get the best tacos?
0.29974818 Mexico City of Course!
0.2344071 The Data Cloud!
```

### Using Huggingface transformers

You can use the transformers package to use a snowflake-arctic-embed model, as shown below. For optimal retrieval quality, use the CLS token to embed each text portion and use the query prefix below (just on the query).

```python
import torch
from transformers import AutoModel, AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained('Snowflake/snowflake-arctic-embed-m')
model = AutoModel.from_pretrained('Snowflake/snowflake-arctic-embed-m', add_pooling_layer=False)
model.eval()

query_prefix = 'Represent this sentence for searching relevant passages: '
queries = ['what is snowflake?', 'Where can I get the best tacos?']
queries_with_prefix = ["{}{}".format(query_prefix, i) for i in queries]
query_tokens = tokenizer(queries_with_prefix, padding=True, truncation=True, return_tensors='pt', max_length=512)

documents = ['The Data Cloud!', 'Mexico City of Course!']
document_tokens = tokenizer(documents, padding=True, truncation=True, return_tensors='pt', max_length=512)

# Compute token embeddings
with torch.no_grad():
    query_embeddings = model(**query_tokens)[0][:, 0]
    document_embeddings = model(**document_tokens)[0][:, 0]

# normalize embeddings
query_embeddings = torch.nn.functional.normalize(query_embeddings, p=2, dim=1)
document_embeddings = torch.nn.functional.normalize(document_embeddings, p=2, dim=1)

scores = torch.mm(query_embeddings, document_embeddings.transpose(0, 1))
for query, query_scores in zip(queries, scores):
    doc_score_pairs = list(zip(documents, query_scores))
    doc_score_pairs = sorted(doc_score_pairs, key=lambda x: x[1], reverse=True)
    # Output passages & scores
    print("Query:", query)
    for document, score in doc_score_pairs:
        print(score, document)
```

### Using Transformers.js

If you haven't already, you can install the [Transformers.js](https://huggingface.co/docs/transformers.js) JavaScript library from [NPM](https://www.npmjs.com/package/@xenova/transformers) by running:

```bash
npm i @xenova/transformers
```

You can then use the model to compute embeddings as follows:

```js
import { pipeline, dot } from '@xenova/transformers';

// Create feature extraction pipeline
const extractor = await pipeline('feature-extraction', 'Snowflake/snowflake-arctic-embed-m', {
    quantized: false, // Comment out this line to use the quantized version
});

// Generate sentence embeddings
const sentences = [
    'Represent this sentence for searching relevant passages: Where can I get the best tacos?',
    'The Data Cloud!',
    'Mexico City of Course!',
]
const output = await extractor(sentences, { normalize: true, pooling: 'cls' });

// Compute similarity scores
const [ source_embeddings, ...document_embeddings ] = output.tolist();
const similarities = document_embeddings.map(x => dot(source_embeddings, x));
console.log(similarities); // [0.15664823859882132, 0.24481869975470627]
```

## Using Infinity

OpenAI compatible API deployment with [Infinity](https://github.com/michaelfeil/infinity) and Docker.

```bash
docker run --gpus all -v $PWD/data:/app/.cache -p "7997":"7997" \
michaelf34/infinity:0.0.70 \
v2 --model-id Snowflake/snowflake-arctic-embed-m --dtype float16 --batch-size 32 --engine torch --port 7997
```
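Once the container above is running, clients talk to it over its OpenAI-compatible embeddings API. The snippet below is only a sketch of what such a call might look like with the `openai` Python client; the base URL, route, and placeholder API key are assumptions to verify against the Infinity documentation for your version, and the query prefix is applied client-side as in the examples above.

```python
# Hypothetical client-side sketch (not taken verbatim from the Infinity docs):
# assumes the container launched above is reachable on localhost:7997 and exposes
# an OpenAI-compatible embeddings route. Some deployments expect the base URL to
# end in /v1, so verify this against your Infinity version.
from openai import OpenAI

client = OpenAI(base_url="http://localhost:7997", api_key="not-needed")

response = client.embeddings.create(
    model="Snowflake/snowflake-arctic-embed-m",
    input=[
        "Represent this sentence for searching relevant passages: what is snowflake?",
        "The Data Cloud!",
    ],
)

# One embedding vector per input string
for item in response.data:
    print(len(item.embedding))
```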
## FAQ

TBD

## Contact

Feel free to open an issue or pull request if you have any questions or suggestions about this project. You can also email Daniel Campos ([email protected]).

## License

Arctic is licensed under the [Apache-2](https://www.apache.org/licenses/LICENSE-2.0). The released models can be used for commercial purposes free of charge.

## Acknowledgement

We want to thank the open-source community, which has provided the great building blocks upon which we could make our models. We thank our modeling engineers, Danmei Xu, Luke Merrick, Gaurav Nuti, and Daniel Campos, for making these great models possible. We thank our leadership, Himabindu Pucha, Kelvin So, Vivek Raghunathan, and Sridhar Ramaswamy, for supporting this work. We also thank the open-source community for producing the great models we could build on top of and making these releases possible. Finally, we thank the researchers who created the BEIR and MTEB benchmarks. It is largely thanks to their tireless work to define what better looks like that we could improve model performance.

<img referrerpolicy="no-referrer-when-downgrade" src="https://static.scarf.sh/a.png?x-pxid=bda4e7d8-e0d8-4f43-8ecc-7bc1d1c4ed04" />
[ "BIOSSES", "SCIFACT" ]
intfloat/e5-base-v2
intfloat
sentence-similarity
[ "sentence-transformers", "pytorch", "onnx", "safetensors", "openvino", "bert", "mteb", "Sentence Transformers", "sentence-similarity", "en", "arxiv:2212.03533", "arxiv:2104.08663", "arxiv:2210.07316", "license:mit", "model-index", "autotrain_compatible", "text-embeddings-inference", "endpoints_compatible", "region:us" ]
"2023-05-19T07:21:14Z"
2025-02-17T03:25:40+00:00
396,638
110
--- language: - en license: mit tags: - mteb - Sentence Transformers - sentence-similarity - sentence-transformers model-index: - name: e5-base-v2 results: - task: type: Classification dataset: name: MTEB AmazonCounterfactualClassification (en) type: mteb/amazon_counterfactual config: en split: test revision: e8379541af4e31359cca9fbcf4b00f2671dba205 metrics: - type: accuracy value: 77.77611940298506 - type: ap value: 42.052710266606056 - type: f1 value: 72.12040628266567 - task: type: Classification dataset: name: MTEB AmazonPolarityClassification type: mteb/amazon_polarity config: default split: test revision: e2d317d38cd51312af73b3d32a06d1a08b442046 metrics: - type: accuracy value: 92.81012500000001 - type: ap value: 89.4213700757244 - type: f1 value: 92.8039091197065 - task: type: Classification dataset: name: MTEB AmazonReviewsClassification (en) type: mteb/amazon_reviews_multi config: en split: test revision: 1399c76144fd37290681b995c656ef9b2e06e26d metrics: - type: accuracy value: 46.711999999999996 - type: f1 value: 46.11544975436018 - task: type: Retrieval dataset: name: MTEB ArguAna type: arguana config: default split: test revision: None metrics: - type: map_at_1 value: 23.186 - type: map_at_10 value: 36.632999999999996 - type: map_at_100 value: 37.842 - type: map_at_1000 value: 37.865 - type: map_at_3 value: 32.278 - type: map_at_5 value: 34.760999999999996 - type: mrr_at_1 value: 23.400000000000002 - type: mrr_at_10 value: 36.721 - type: mrr_at_100 value: 37.937 - type: mrr_at_1000 value: 37.96 - type: mrr_at_3 value: 32.302 - type: mrr_at_5 value: 34.894 - type: ndcg_at_1 value: 23.186 - type: ndcg_at_10 value: 44.49 - type: ndcg_at_100 value: 50.065000000000005 - type: ndcg_at_1000 value: 50.629999999999995 - type: ndcg_at_3 value: 35.461 - type: ndcg_at_5 value: 39.969 - type: precision_at_1 value: 23.186 - type: precision_at_10 value: 6.97 - type: precision_at_100 value: 0.951 - type: precision_at_1000 value: 0.099 - type: precision_at_3 value: 14.912 - type: precision_at_5 value: 11.152 - type: recall_at_1 value: 23.186 - type: recall_at_10 value: 69.70100000000001 - type: recall_at_100 value: 95.092 - type: recall_at_1000 value: 99.431 - type: recall_at_3 value: 44.737 - type: recall_at_5 value: 55.761 - task: type: Clustering dataset: name: MTEB ArxivClusteringP2P type: mteb/arxiv-clustering-p2p config: default split: test revision: a122ad7f3f0291bf49cc6f4d32aa80929df69d5d metrics: - type: v_measure value: 46.10312401440185 - task: type: Clustering dataset: name: MTEB ArxivClusteringS2S type: mteb/arxiv-clustering-s2s config: default split: test revision: f910caf1a6075f7329cdf8c1a6135696f37dbd53 metrics: - type: v_measure value: 39.67275326095384 - task: type: Reranking dataset: name: MTEB AskUbuntuDupQuestions type: mteb/askubuntudupquestions-reranking config: default split: test revision: 2000358ca161889fa9c082cb41daa8dcfb161a54 metrics: - type: map value: 58.97793816337376 - type: mrr value: 72.76832431957087 - task: type: STS dataset: name: MTEB BIOSSES type: mteb/biosses-sts config: default split: test revision: d3fb88f8f02e40887cd149695127462bbcf29b4a metrics: - type: cos_sim_pearson value: 83.11646947018187 - type: cos_sim_spearman value: 81.40064994975234 - type: euclidean_pearson value: 82.37355689019232 - type: euclidean_spearman value: 81.6777646977348 - type: manhattan_pearson value: 82.61101422716945 - type: manhattan_spearman value: 81.80427360442245 - task: type: Classification dataset: name: MTEB Banking77Classification type: mteb/banking77 config: 
default split: test revision: 0fd18e25b25c072e09e0d92ab615fda904d66300 metrics: - type: accuracy value: 83.52922077922076 - type: f1 value: 83.45298679360866 - task: type: Clustering dataset: name: MTEB BiorxivClusteringP2P type: mteb/biorxiv-clustering-p2p config: default split: test revision: 65b79d1d13f80053f67aca9498d9402c2d9f1f40 metrics: - type: v_measure value: 37.495115019668496 - task: type: Clustering dataset: name: MTEB BiorxivClusteringS2S type: mteb/biorxiv-clustering-s2s config: default split: test revision: 258694dd0231531bc1fd9de6ceb52a0853c6d908 metrics: - type: v_measure value: 32.724792944166765 - task: type: Retrieval dataset: name: MTEB CQADupstackAndroidRetrieval type: BeIR/cqadupstack config: default split: test revision: None metrics: - type: map_at_1 value: 32.361000000000004 - type: map_at_10 value: 43.765 - type: map_at_100 value: 45.224 - type: map_at_1000 value: 45.35 - type: map_at_3 value: 40.353 - type: map_at_5 value: 42.195 - type: mrr_at_1 value: 40.629 - type: mrr_at_10 value: 50.458000000000006 - type: mrr_at_100 value: 51.06699999999999 - type: mrr_at_1000 value: 51.12 - type: mrr_at_3 value: 47.902 - type: mrr_at_5 value: 49.447 - type: ndcg_at_1 value: 40.629 - type: ndcg_at_10 value: 50.376 - type: ndcg_at_100 value: 55.065 - type: ndcg_at_1000 value: 57.196000000000005 - type: ndcg_at_3 value: 45.616 - type: ndcg_at_5 value: 47.646 - type: precision_at_1 value: 40.629 - type: precision_at_10 value: 9.785 - type: precision_at_100 value: 1.562 - type: precision_at_1000 value: 0.2 - type: precision_at_3 value: 22.031 - type: precision_at_5 value: 15.737000000000002 - type: recall_at_1 value: 32.361000000000004 - type: recall_at_10 value: 62.214000000000006 - type: recall_at_100 value: 81.464 - type: recall_at_1000 value: 95.905 - type: recall_at_3 value: 47.5 - type: recall_at_5 value: 53.69500000000001 - type: map_at_1 value: 27.971 - type: map_at_10 value: 37.444 - type: map_at_100 value: 38.607 - type: map_at_1000 value: 38.737 - type: map_at_3 value: 34.504000000000005 - type: map_at_5 value: 36.234 - type: mrr_at_1 value: 35.35 - type: mrr_at_10 value: 43.441 - type: mrr_at_100 value: 44.147999999999996 - type: mrr_at_1000 value: 44.196000000000005 - type: mrr_at_3 value: 41.285 - type: mrr_at_5 value: 42.552 - type: ndcg_at_1 value: 35.35 - type: ndcg_at_10 value: 42.903999999999996 - type: ndcg_at_100 value: 47.406 - type: ndcg_at_1000 value: 49.588 - type: ndcg_at_3 value: 38.778 - type: ndcg_at_5 value: 40.788000000000004 - type: precision_at_1 value: 35.35 - type: precision_at_10 value: 8.083 - type: precision_at_100 value: 1.313 - type: precision_at_1000 value: 0.18 - type: precision_at_3 value: 18.769 - type: precision_at_5 value: 13.439 - type: recall_at_1 value: 27.971 - type: recall_at_10 value: 52.492000000000004 - type: recall_at_100 value: 71.642 - type: recall_at_1000 value: 85.488 - type: recall_at_3 value: 40.1 - type: recall_at_5 value: 45.800000000000004 - type: map_at_1 value: 39.898 - type: map_at_10 value: 51.819 - type: map_at_100 value: 52.886 - type: map_at_1000 value: 52.941 - type: map_at_3 value: 48.619 - type: map_at_5 value: 50.493 - type: mrr_at_1 value: 45.391999999999996 - type: mrr_at_10 value: 55.230000000000004 - type: mrr_at_100 value: 55.887 - type: mrr_at_1000 value: 55.916 - type: mrr_at_3 value: 52.717000000000006 - type: mrr_at_5 value: 54.222 - type: ndcg_at_1 value: 45.391999999999996 - type: ndcg_at_10 value: 57.586999999999996 - type: ndcg_at_100 value: 61.745000000000005 - type: ndcg_at_1000 value: 
62.83800000000001 - type: ndcg_at_3 value: 52.207 - type: ndcg_at_5 value: 54.925999999999995 - type: precision_at_1 value: 45.391999999999996 - type: precision_at_10 value: 9.21 - type: precision_at_100 value: 1.226 - type: precision_at_1000 value: 0.136 - type: precision_at_3 value: 23.177 - type: precision_at_5 value: 16.038 - type: recall_at_1 value: 39.898 - type: recall_at_10 value: 71.18900000000001 - type: recall_at_100 value: 89.082 - type: recall_at_1000 value: 96.865 - type: recall_at_3 value: 56.907 - type: recall_at_5 value: 63.397999999999996 - type: map_at_1 value: 22.706 - type: map_at_10 value: 30.818 - type: map_at_100 value: 32.038 - type: map_at_1000 value: 32.123000000000005 - type: map_at_3 value: 28.077 - type: map_at_5 value: 29.709999999999997 - type: mrr_at_1 value: 24.407 - type: mrr_at_10 value: 32.555 - type: mrr_at_100 value: 33.692 - type: mrr_at_1000 value: 33.751 - type: mrr_at_3 value: 29.848999999999997 - type: mrr_at_5 value: 31.509999999999998 - type: ndcg_at_1 value: 24.407 - type: ndcg_at_10 value: 35.624 - type: ndcg_at_100 value: 41.454 - type: ndcg_at_1000 value: 43.556 - type: ndcg_at_3 value: 30.217 - type: ndcg_at_5 value: 33.111000000000004 - type: precision_at_1 value: 24.407 - type: precision_at_10 value: 5.548 - type: precision_at_100 value: 0.8869999999999999 - type: precision_at_1000 value: 0.11100000000000002 - type: precision_at_3 value: 12.731 - type: precision_at_5 value: 9.22 - type: recall_at_1 value: 22.706 - type: recall_at_10 value: 48.772 - type: recall_at_100 value: 75.053 - type: recall_at_1000 value: 90.731 - type: recall_at_3 value: 34.421 - type: recall_at_5 value: 41.427 - type: map_at_1 value: 13.424 - type: map_at_10 value: 21.09 - type: map_at_100 value: 22.264999999999997 - type: map_at_1000 value: 22.402 - type: map_at_3 value: 18.312 - type: map_at_5 value: 19.874 - type: mrr_at_1 value: 16.915 - type: mrr_at_10 value: 25.258000000000003 - type: mrr_at_100 value: 26.228 - type: mrr_at_1000 value: 26.31 - type: mrr_at_3 value: 22.492 - type: mrr_at_5 value: 24.04 - type: ndcg_at_1 value: 16.915 - type: ndcg_at_10 value: 26.266000000000002 - type: ndcg_at_100 value: 32.08 - type: ndcg_at_1000 value: 35.086 - type: ndcg_at_3 value: 21.049 - type: ndcg_at_5 value: 23.508000000000003 - type: precision_at_1 value: 16.915 - type: precision_at_10 value: 5.1 - type: precision_at_100 value: 0.9329999999999999 - type: precision_at_1000 value: 0.131 - type: precision_at_3 value: 10.282 - type: precision_at_5 value: 7.836 - type: recall_at_1 value: 13.424 - type: recall_at_10 value: 38.179 - type: recall_at_100 value: 63.906 - type: recall_at_1000 value: 84.933 - type: recall_at_3 value: 23.878 - type: recall_at_5 value: 30.037999999999997 - type: map_at_1 value: 26.154 - type: map_at_10 value: 35.912 - type: map_at_100 value: 37.211 - type: map_at_1000 value: 37.327 - type: map_at_3 value: 32.684999999999995 - type: map_at_5 value: 34.562 - type: mrr_at_1 value: 32.435 - type: mrr_at_10 value: 41.411 - type: mrr_at_100 value: 42.297000000000004 - type: mrr_at_1000 value: 42.345 - type: mrr_at_3 value: 38.771 - type: mrr_at_5 value: 40.33 - type: ndcg_at_1 value: 32.435 - type: ndcg_at_10 value: 41.785 - type: ndcg_at_100 value: 47.469 - type: ndcg_at_1000 value: 49.685 - type: ndcg_at_3 value: 36.618 - type: ndcg_at_5 value: 39.101 - type: precision_at_1 value: 32.435 - type: precision_at_10 value: 7.642 - type: precision_at_100 value: 1.244 - type: precision_at_1000 value: 0.163 - type: precision_at_3 value: 17.485 - type: 
precision_at_5 value: 12.57 - type: recall_at_1 value: 26.154 - type: recall_at_10 value: 54.111 - type: recall_at_100 value: 78.348 - type: recall_at_1000 value: 92.996 - type: recall_at_3 value: 39.189 - type: recall_at_5 value: 45.852 - type: map_at_1 value: 26.308999999999997 - type: map_at_10 value: 35.524 - type: map_at_100 value: 36.774 - type: map_at_1000 value: 36.891 - type: map_at_3 value: 32.561 - type: map_at_5 value: 34.034 - type: mrr_at_1 value: 31.735000000000003 - type: mrr_at_10 value: 40.391 - type: mrr_at_100 value: 41.227000000000004 - type: mrr_at_1000 value: 41.288000000000004 - type: mrr_at_3 value: 37.938 - type: mrr_at_5 value: 39.193 - type: ndcg_at_1 value: 31.735000000000003 - type: ndcg_at_10 value: 41.166000000000004 - type: ndcg_at_100 value: 46.702 - type: ndcg_at_1000 value: 49.157000000000004 - type: ndcg_at_3 value: 36.274 - type: ndcg_at_5 value: 38.177 - type: precision_at_1 value: 31.735000000000003 - type: precision_at_10 value: 7.5569999999999995 - type: precision_at_100 value: 1.2109999999999999 - type: precision_at_1000 value: 0.16 - type: precision_at_3 value: 17.199 - type: precision_at_5 value: 12.123000000000001 - type: recall_at_1 value: 26.308999999999997 - type: recall_at_10 value: 53.083000000000006 - type: recall_at_100 value: 76.922 - type: recall_at_1000 value: 93.767 - type: recall_at_3 value: 39.262 - type: recall_at_5 value: 44.413000000000004 - type: map_at_1 value: 24.391250000000003 - type: map_at_10 value: 33.280166666666666 - type: map_at_100 value: 34.49566666666667 - type: map_at_1000 value: 34.61533333333333 - type: map_at_3 value: 30.52183333333333 - type: map_at_5 value: 32.06608333333333 - type: mrr_at_1 value: 29.105083333333337 - type: mrr_at_10 value: 37.44766666666666 - type: mrr_at_100 value: 38.32491666666667 - type: mrr_at_1000 value: 38.385666666666665 - type: mrr_at_3 value: 35.06883333333333 - type: mrr_at_5 value: 36.42066666666667 - type: ndcg_at_1 value: 29.105083333333337 - type: ndcg_at_10 value: 38.54358333333333 - type: ndcg_at_100 value: 43.833583333333344 - type: ndcg_at_1000 value: 46.215333333333334 - type: ndcg_at_3 value: 33.876 - type: ndcg_at_5 value: 36.05208333333333 - type: precision_at_1 value: 29.105083333333337 - type: precision_at_10 value: 6.823416666666665 - type: precision_at_100 value: 1.1270833333333334 - type: precision_at_1000 value: 0.15208333333333332 - type: precision_at_3 value: 15.696750000000002 - type: precision_at_5 value: 11.193499999999998 - type: recall_at_1 value: 24.391250000000003 - type: recall_at_10 value: 49.98808333333333 - type: recall_at_100 value: 73.31616666666666 - type: recall_at_1000 value: 89.96291666666667 - type: recall_at_3 value: 36.86666666666667 - type: recall_at_5 value: 42.54350000000001 - type: map_at_1 value: 21.995 - type: map_at_10 value: 28.807 - type: map_at_100 value: 29.813000000000002 - type: map_at_1000 value: 29.903000000000002 - type: map_at_3 value: 26.636 - type: map_at_5 value: 27.912 - type: mrr_at_1 value: 24.847 - type: mrr_at_10 value: 31.494 - type: mrr_at_100 value: 32.381 - type: mrr_at_1000 value: 32.446999999999996 - type: mrr_at_3 value: 29.473 - type: mrr_at_5 value: 30.7 - type: ndcg_at_1 value: 24.847 - type: ndcg_at_10 value: 32.818999999999996 - type: ndcg_at_100 value: 37.835 - type: ndcg_at_1000 value: 40.226 - type: ndcg_at_3 value: 28.811999999999998 - type: ndcg_at_5 value: 30.875999999999998 - type: precision_at_1 value: 24.847 - type: precision_at_10 value: 5.244999999999999 - type: precision_at_100 value: 0.856 - 
type: precision_at_1000 value: 0.11299999999999999 - type: precision_at_3 value: 12.577 - type: precision_at_5 value: 8.895999999999999 - type: recall_at_1 value: 21.995 - type: recall_at_10 value: 42.479 - type: recall_at_100 value: 65.337 - type: recall_at_1000 value: 83.23700000000001 - type: recall_at_3 value: 31.573 - type: recall_at_5 value: 36.684 - type: map_at_1 value: 15.751000000000001 - type: map_at_10 value: 21.909 - type: map_at_100 value: 23.064 - type: map_at_1000 value: 23.205000000000002 - type: map_at_3 value: 20.138 - type: map_at_5 value: 20.973 - type: mrr_at_1 value: 19.305 - type: mrr_at_10 value: 25.647 - type: mrr_at_100 value: 26.659 - type: mrr_at_1000 value: 26.748 - type: mrr_at_3 value: 23.933 - type: mrr_at_5 value: 24.754 - type: ndcg_at_1 value: 19.305 - type: ndcg_at_10 value: 25.886 - type: ndcg_at_100 value: 31.56 - type: ndcg_at_1000 value: 34.799 - type: ndcg_at_3 value: 22.708000000000002 - type: ndcg_at_5 value: 23.838 - type: precision_at_1 value: 19.305 - type: precision_at_10 value: 4.677 - type: precision_at_100 value: 0.895 - type: precision_at_1000 value: 0.136 - type: precision_at_3 value: 10.771 - type: precision_at_5 value: 7.46 - type: recall_at_1 value: 15.751000000000001 - type: recall_at_10 value: 34.156 - type: recall_at_100 value: 59.899 - type: recall_at_1000 value: 83.08 - type: recall_at_3 value: 24.772 - type: recall_at_5 value: 28.009 - type: map_at_1 value: 23.34 - type: map_at_10 value: 32.383 - type: map_at_100 value: 33.629999999999995 - type: map_at_1000 value: 33.735 - type: map_at_3 value: 29.68 - type: map_at_5 value: 31.270999999999997 - type: mrr_at_1 value: 27.612 - type: mrr_at_10 value: 36.381 - type: mrr_at_100 value: 37.351 - type: mrr_at_1000 value: 37.411 - type: mrr_at_3 value: 33.893 - type: mrr_at_5 value: 35.353 - type: ndcg_at_1 value: 27.612 - type: ndcg_at_10 value: 37.714999999999996 - type: ndcg_at_100 value: 43.525000000000006 - type: ndcg_at_1000 value: 45.812999999999995 - type: ndcg_at_3 value: 32.796 - type: ndcg_at_5 value: 35.243 - type: precision_at_1 value: 27.612 - type: precision_at_10 value: 6.465 - type: precision_at_100 value: 1.0619999999999998 - type: precision_at_1000 value: 0.13699999999999998 - type: precision_at_3 value: 15.049999999999999 - type: precision_at_5 value: 10.764999999999999 - type: recall_at_1 value: 23.34 - type: recall_at_10 value: 49.856 - type: recall_at_100 value: 75.334 - type: recall_at_1000 value: 91.156 - type: recall_at_3 value: 36.497 - type: recall_at_5 value: 42.769 - type: map_at_1 value: 25.097 - type: map_at_10 value: 34.599999999999994 - type: map_at_100 value: 36.174 - type: map_at_1000 value: 36.398 - type: map_at_3 value: 31.781 - type: map_at_5 value: 33.22 - type: mrr_at_1 value: 31.225 - type: mrr_at_10 value: 39.873 - type: mrr_at_100 value: 40.853 - type: mrr_at_1000 value: 40.904 - type: mrr_at_3 value: 37.681 - type: mrr_at_5 value: 38.669 - type: ndcg_at_1 value: 31.225 - type: ndcg_at_10 value: 40.586 - type: ndcg_at_100 value: 46.226 - type: ndcg_at_1000 value: 48.788 - type: ndcg_at_3 value: 36.258 - type: ndcg_at_5 value: 37.848 - type: precision_at_1 value: 31.225 - type: precision_at_10 value: 7.707999999999999 - type: precision_at_100 value: 1.536 - type: precision_at_1000 value: 0.242 - type: precision_at_3 value: 17.26 - type: precision_at_5 value: 12.253 - type: recall_at_1 value: 25.097 - type: recall_at_10 value: 51.602000000000004 - type: recall_at_100 value: 76.854 - type: recall_at_1000 value: 93.303 - type: recall_at_3 value: 
38.68 - type: recall_at_5 value: 43.258 - type: map_at_1 value: 17.689 - type: map_at_10 value: 25.291000000000004 - type: map_at_100 value: 26.262 - type: map_at_1000 value: 26.372 - type: map_at_3 value: 22.916 - type: map_at_5 value: 24.315 - type: mrr_at_1 value: 19.409000000000002 - type: mrr_at_10 value: 27.233 - type: mrr_at_100 value: 28.109 - type: mrr_at_1000 value: 28.192 - type: mrr_at_3 value: 24.892 - type: mrr_at_5 value: 26.278000000000002 - type: ndcg_at_1 value: 19.409000000000002 - type: ndcg_at_10 value: 29.809 - type: ndcg_at_100 value: 34.936 - type: ndcg_at_1000 value: 37.852000000000004 - type: ndcg_at_3 value: 25.179000000000002 - type: ndcg_at_5 value: 27.563 - type: precision_at_1 value: 19.409000000000002 - type: precision_at_10 value: 4.861 - type: precision_at_100 value: 0.8 - type: precision_at_1000 value: 0.116 - type: precision_at_3 value: 11.029 - type: precision_at_5 value: 7.985 - type: recall_at_1 value: 17.689 - type: recall_at_10 value: 41.724 - type: recall_at_100 value: 65.95299999999999 - type: recall_at_1000 value: 88.094 - type: recall_at_3 value: 29.621 - type: recall_at_5 value: 35.179 - task: type: Retrieval dataset: name: MTEB ClimateFEVER type: climate-fever config: default split: test revision: None metrics: - type: map_at_1 value: 10.581 - type: map_at_10 value: 18.944 - type: map_at_100 value: 20.812 - type: map_at_1000 value: 21.002000000000002 - type: map_at_3 value: 15.661 - type: map_at_5 value: 17.502000000000002 - type: mrr_at_1 value: 23.388 - type: mrr_at_10 value: 34.263 - type: mrr_at_100 value: 35.364000000000004 - type: mrr_at_1000 value: 35.409 - type: mrr_at_3 value: 30.586000000000002 - type: mrr_at_5 value: 32.928000000000004 - type: ndcg_at_1 value: 23.388 - type: ndcg_at_10 value: 26.56 - type: ndcg_at_100 value: 34.248 - type: ndcg_at_1000 value: 37.779 - type: ndcg_at_3 value: 21.179000000000002 - type: ndcg_at_5 value: 23.504 - type: precision_at_1 value: 23.388 - type: precision_at_10 value: 8.476 - type: precision_at_100 value: 1.672 - type: precision_at_1000 value: 0.233 - type: precision_at_3 value: 15.852 - type: precision_at_5 value: 12.73 - type: recall_at_1 value: 10.581 - type: recall_at_10 value: 32.512 - type: recall_at_100 value: 59.313 - type: recall_at_1000 value: 79.25 - type: recall_at_3 value: 19.912 - type: recall_at_5 value: 25.832 - task: type: Retrieval dataset: name: MTEB DBPedia type: dbpedia-entity config: default split: test revision: None metrics: - type: map_at_1 value: 9.35 - type: map_at_10 value: 20.134 - type: map_at_100 value: 28.975 - type: map_at_1000 value: 30.709999999999997 - type: map_at_3 value: 14.513000000000002 - type: map_at_5 value: 16.671 - type: mrr_at_1 value: 69.75 - type: mrr_at_10 value: 77.67699999999999 - type: mrr_at_100 value: 77.97500000000001 - type: mrr_at_1000 value: 77.985 - type: mrr_at_3 value: 76.292 - type: mrr_at_5 value: 77.179 - type: ndcg_at_1 value: 56.49999999999999 - type: ndcg_at_10 value: 42.226 - type: ndcg_at_100 value: 47.562 - type: ndcg_at_1000 value: 54.923 - type: ndcg_at_3 value: 46.564 - type: ndcg_at_5 value: 43.830000000000005 - type: precision_at_1 value: 69.75 - type: precision_at_10 value: 33.525 - type: precision_at_100 value: 11.035 - type: precision_at_1000 value: 2.206 - type: precision_at_3 value: 49.75 - type: precision_at_5 value: 42 - type: recall_at_1 value: 9.35 - type: recall_at_10 value: 25.793 - type: recall_at_100 value: 54.186 - type: recall_at_1000 value: 77.81 - type: recall_at_3 value: 15.770000000000001 - type: 
recall_at_5 value: 19.09 - task: type: Classification dataset: name: MTEB EmotionClassification type: mteb/emotion config: default split: test revision: 4f58c6b202a23cf9a4da393831edf4f9183cad37 metrics: - type: accuracy value: 46.945 - type: f1 value: 42.07407842992542 - task: type: Retrieval dataset: name: MTEB FEVER type: fever config: default split: test revision: None metrics: - type: map_at_1 value: 71.04599999999999 - type: map_at_10 value: 80.718 - type: map_at_100 value: 80.961 - type: map_at_1000 value: 80.974 - type: map_at_3 value: 79.49199999999999 - type: map_at_5 value: 80.32000000000001 - type: mrr_at_1 value: 76.388 - type: mrr_at_10 value: 85.214 - type: mrr_at_100 value: 85.302 - type: mrr_at_1000 value: 85.302 - type: mrr_at_3 value: 84.373 - type: mrr_at_5 value: 84.979 - type: ndcg_at_1 value: 76.388 - type: ndcg_at_10 value: 84.987 - type: ndcg_at_100 value: 85.835 - type: ndcg_at_1000 value: 86.04899999999999 - type: ndcg_at_3 value: 83.04 - type: ndcg_at_5 value: 84.22500000000001 - type: precision_at_1 value: 76.388 - type: precision_at_10 value: 10.35 - type: precision_at_100 value: 1.099 - type: precision_at_1000 value: 0.11399999999999999 - type: precision_at_3 value: 32.108 - type: precision_at_5 value: 20.033 - type: recall_at_1 value: 71.04599999999999 - type: recall_at_10 value: 93.547 - type: recall_at_100 value: 96.887 - type: recall_at_1000 value: 98.158 - type: recall_at_3 value: 88.346 - type: recall_at_5 value: 91.321 - task: type: Retrieval dataset: name: MTEB FiQA2018 type: fiqa config: default split: test revision: None metrics: - type: map_at_1 value: 19.8 - type: map_at_10 value: 31.979999999999997 - type: map_at_100 value: 33.876 - type: map_at_1000 value: 34.056999999999995 - type: map_at_3 value: 28.067999999999998 - type: map_at_5 value: 30.066 - type: mrr_at_1 value: 38.735 - type: mrr_at_10 value: 47.749 - type: mrr_at_100 value: 48.605 - type: mrr_at_1000 value: 48.644999999999996 - type: mrr_at_3 value: 45.165 - type: mrr_at_5 value: 46.646 - type: ndcg_at_1 value: 38.735 - type: ndcg_at_10 value: 39.883 - type: ndcg_at_100 value: 46.983000000000004 - type: ndcg_at_1000 value: 50.043000000000006 - type: ndcg_at_3 value: 35.943000000000005 - type: ndcg_at_5 value: 37.119 - type: precision_at_1 value: 38.735 - type: precision_at_10 value: 10.940999999999999 - type: precision_at_100 value: 1.836 - type: precision_at_1000 value: 0.23900000000000002 - type: precision_at_3 value: 23.817 - type: precision_at_5 value: 17.346 - type: recall_at_1 value: 19.8 - type: recall_at_10 value: 47.082 - type: recall_at_100 value: 73.247 - type: recall_at_1000 value: 91.633 - type: recall_at_3 value: 33.201 - type: recall_at_5 value: 38.81 - task: type: Retrieval dataset: name: MTEB HotpotQA type: hotpotqa config: default split: test revision: None metrics: - type: map_at_1 value: 38.102999999999994 - type: map_at_10 value: 60.547 - type: map_at_100 value: 61.466 - type: map_at_1000 value: 61.526 - type: map_at_3 value: 56.973 - type: map_at_5 value: 59.244 - type: mrr_at_1 value: 76.205 - type: mrr_at_10 value: 82.816 - type: mrr_at_100 value: 83.002 - type: mrr_at_1000 value: 83.009 - type: mrr_at_3 value: 81.747 - type: mrr_at_5 value: 82.467 - type: ndcg_at_1 value: 76.205 - type: ndcg_at_10 value: 69.15 - type: ndcg_at_100 value: 72.297 - type: ndcg_at_1000 value: 73.443 - type: ndcg_at_3 value: 64.07000000000001 - type: ndcg_at_5 value: 66.96600000000001 - type: precision_at_1 value: 76.205 - type: precision_at_10 value: 14.601 - type: precision_at_100 
value: 1.7049999999999998 - type: precision_at_1000 value: 0.186 - type: precision_at_3 value: 41.202 - type: precision_at_5 value: 27.006000000000004 - type: recall_at_1 value: 38.102999999999994 - type: recall_at_10 value: 73.005 - type: recall_at_100 value: 85.253 - type: recall_at_1000 value: 92.795 - type: recall_at_3 value: 61.803 - type: recall_at_5 value: 67.515 - task: type: Classification dataset: name: MTEB ImdbClassification type: mteb/imdb config: default split: test revision: 3d86128a09e091d6018b6d26cad27f2739fc2db7 metrics: - type: accuracy value: 86.15 - type: ap value: 80.36282825265391 - type: f1 value: 86.07368510726472 - task: type: Retrieval dataset: name: MTEB MSMARCO type: msmarco config: default split: dev revision: None metrics: - type: map_at_1 value: 22.6 - type: map_at_10 value: 34.887 - type: map_at_100 value: 36.069 - type: map_at_1000 value: 36.115 - type: map_at_3 value: 31.067 - type: map_at_5 value: 33.300000000000004 - type: mrr_at_1 value: 23.238 - type: mrr_at_10 value: 35.47 - type: mrr_at_100 value: 36.599 - type: mrr_at_1000 value: 36.64 - type: mrr_at_3 value: 31.735999999999997 - type: mrr_at_5 value: 33.939 - type: ndcg_at_1 value: 23.252 - type: ndcg_at_10 value: 41.765 - type: ndcg_at_100 value: 47.402 - type: ndcg_at_1000 value: 48.562 - type: ndcg_at_3 value: 34.016999999999996 - type: ndcg_at_5 value: 38.016 - type: precision_at_1 value: 23.252 - type: precision_at_10 value: 6.569 - type: precision_at_100 value: 0.938 - type: precision_at_1000 value: 0.104 - type: precision_at_3 value: 14.479000000000001 - type: precision_at_5 value: 10.722 - type: recall_at_1 value: 22.6 - type: recall_at_10 value: 62.919000000000004 - type: recall_at_100 value: 88.82 - type: recall_at_1000 value: 97.71600000000001 - type: recall_at_3 value: 41.896 - type: recall_at_5 value: 51.537 - task: type: Classification dataset: name: MTEB MTOPDomainClassification (en) type: mteb/mtop_domain config: en split: test revision: d80d48c1eb48d3562165c59d59d0034df9fff0bf metrics: - type: accuracy value: 93.69357045143639 - type: f1 value: 93.55489858177597 - task: type: Classification dataset: name: MTEB MTOPIntentClassification (en) type: mteb/mtop_intent config: en split: test revision: ae001d0e6b1228650b7bd1c2c65fb50ad11a8aba metrics: - type: accuracy value: 75.31235750114 - type: f1 value: 57.891491963121155 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (en) type: mteb/amazon_massive_intent config: en split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 73.04303967720243 - type: f1 value: 70.51516022297616 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (en) type: mteb/amazon_massive_scenario config: en split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 77.65299260255549 - type: f1 value: 77.49059766538576 - task: type: Clustering dataset: name: MTEB MedrxivClusteringP2P type: mteb/medrxiv-clustering-p2p config: default split: test revision: e7a26af6f3ae46b30dde8737f02c07b1505bcc73 metrics: - type: v_measure value: 31.458906115906597 - task: type: Clustering dataset: name: MTEB MedrxivClusteringS2S type: mteb/medrxiv-clustering-s2s config: default split: test revision: 35191c8c0dca72d8ff3efcd72aa802307d469663 metrics: - type: v_measure value: 28.9851513122443 - task: type: Reranking dataset: name: MTEB MindSmallReranking type: mteb/mind_small config: default split: test revision: 3bdac13927fdc888b903db93b2ffdbd90b295a69 
metrics: - type: map value: 31.2916268497217 - type: mrr value: 32.328276715593816 - task: type: Retrieval dataset: name: MTEB NFCorpus type: nfcorpus config: default split: test revision: None metrics: - type: map_at_1 value: 6.3740000000000006 - type: map_at_10 value: 13.089999999999998 - type: map_at_100 value: 16.512 - type: map_at_1000 value: 18.014 - type: map_at_3 value: 9.671000000000001 - type: map_at_5 value: 11.199 - type: mrr_at_1 value: 46.749 - type: mrr_at_10 value: 55.367 - type: mrr_at_100 value: 56.021 - type: mrr_at_1000 value: 56.058 - type: mrr_at_3 value: 53.30200000000001 - type: mrr_at_5 value: 54.773 - type: ndcg_at_1 value: 45.046 - type: ndcg_at_10 value: 35.388999999999996 - type: ndcg_at_100 value: 32.175 - type: ndcg_at_1000 value: 41.018 - type: ndcg_at_3 value: 40.244 - type: ndcg_at_5 value: 38.267 - type: precision_at_1 value: 46.749 - type: precision_at_10 value: 26.563 - type: precision_at_100 value: 8.074 - type: precision_at_1000 value: 2.099 - type: precision_at_3 value: 37.358000000000004 - type: precision_at_5 value: 33.003 - type: recall_at_1 value: 6.3740000000000006 - type: recall_at_10 value: 16.805999999999997 - type: recall_at_100 value: 31.871 - type: recall_at_1000 value: 64.098 - type: recall_at_3 value: 10.383000000000001 - type: recall_at_5 value: 13.166 - task: type: Retrieval dataset: name: MTEB NQ type: nq config: default split: test revision: None metrics: - type: map_at_1 value: 34.847 - type: map_at_10 value: 50.532 - type: map_at_100 value: 51.504000000000005 - type: map_at_1000 value: 51.528 - type: map_at_3 value: 46.219 - type: map_at_5 value: 48.868 - type: mrr_at_1 value: 39.137 - type: mrr_at_10 value: 53.157 - type: mrr_at_100 value: 53.839999999999996 - type: mrr_at_1000 value: 53.857 - type: mrr_at_3 value: 49.667 - type: mrr_at_5 value: 51.847 - type: ndcg_at_1 value: 39.108 - type: ndcg_at_10 value: 58.221000000000004 - type: ndcg_at_100 value: 62.021 - type: ndcg_at_1000 value: 62.57 - type: ndcg_at_3 value: 50.27199999999999 - type: ndcg_at_5 value: 54.623999999999995 - type: precision_at_1 value: 39.108 - type: precision_at_10 value: 9.397 - type: precision_at_100 value: 1.1520000000000001 - type: precision_at_1000 value: 0.12 - type: precision_at_3 value: 22.644000000000002 - type: precision_at_5 value: 16.141 - type: recall_at_1 value: 34.847 - type: recall_at_10 value: 78.945 - type: recall_at_100 value: 94.793 - type: recall_at_1000 value: 98.904 - type: recall_at_3 value: 58.56 - type: recall_at_5 value: 68.535 - task: type: Retrieval dataset: name: MTEB QuoraRetrieval type: quora config: default split: test revision: None metrics: - type: map_at_1 value: 68.728 - type: map_at_10 value: 82.537 - type: map_at_100 value: 83.218 - type: map_at_1000 value: 83.238 - type: map_at_3 value: 79.586 - type: map_at_5 value: 81.416 - type: mrr_at_1 value: 79.17999999999999 - type: mrr_at_10 value: 85.79299999999999 - type: mrr_at_100 value: 85.937 - type: mrr_at_1000 value: 85.938 - type: mrr_at_3 value: 84.748 - type: mrr_at_5 value: 85.431 - type: ndcg_at_1 value: 79.17 - type: ndcg_at_10 value: 86.555 - type: ndcg_at_100 value: 88.005 - type: ndcg_at_1000 value: 88.146 - type: ndcg_at_3 value: 83.557 - type: ndcg_at_5 value: 85.152 - type: precision_at_1 value: 79.17 - type: precision_at_10 value: 13.163 - type: precision_at_100 value: 1.52 - type: precision_at_1000 value: 0.156 - type: precision_at_3 value: 36.53 - type: precision_at_5 value: 24.046 - type: recall_at_1 value: 68.728 - type: recall_at_10 value: 94.217 - 
type: recall_at_100 value: 99.295 - type: recall_at_1000 value: 99.964 - type: recall_at_3 value: 85.646 - type: recall_at_5 value: 90.113 - task: type: Clustering dataset: name: MTEB RedditClustering type: mteb/reddit-clustering config: default split: test revision: 24640382cdbf8abc73003fb0fa6d111a705499eb metrics: - type: v_measure value: 56.15680266226348 - task: type: Clustering dataset: name: MTEB RedditClusteringP2P type: mteb/reddit-clustering-p2p config: default split: test revision: 282350215ef01743dc01b456c7f5241fa8937f16 metrics: - type: v_measure value: 63.4318549229047 - task: type: Retrieval dataset: name: MTEB SCIDOCS type: scidocs config: default split: test revision: None metrics: - type: map_at_1 value: 4.353 - type: map_at_10 value: 10.956000000000001 - type: map_at_100 value: 12.873999999999999 - type: map_at_1000 value: 13.177 - type: map_at_3 value: 7.854 - type: map_at_5 value: 9.327 - type: mrr_at_1 value: 21.4 - type: mrr_at_10 value: 31.948999999999998 - type: mrr_at_100 value: 33.039 - type: mrr_at_1000 value: 33.106 - type: mrr_at_3 value: 28.449999999999996 - type: mrr_at_5 value: 30.535 - type: ndcg_at_1 value: 21.4 - type: ndcg_at_10 value: 18.694 - type: ndcg_at_100 value: 26.275 - type: ndcg_at_1000 value: 31.836 - type: ndcg_at_3 value: 17.559 - type: ndcg_at_5 value: 15.372 - type: precision_at_1 value: 21.4 - type: precision_at_10 value: 9.790000000000001 - type: precision_at_100 value: 2.0709999999999997 - type: precision_at_1000 value: 0.34099999999999997 - type: precision_at_3 value: 16.467000000000002 - type: precision_at_5 value: 13.54 - type: recall_at_1 value: 4.353 - type: recall_at_10 value: 19.892000000000003 - type: recall_at_100 value: 42.067 - type: recall_at_1000 value: 69.268 - type: recall_at_3 value: 10.042 - type: recall_at_5 value: 13.741999999999999 - task: type: STS dataset: name: MTEB SICK-R type: mteb/sickr-sts config: default split: test revision: a6ea5a8cab320b040a23452cc28066d9beae2cee metrics: - type: cos_sim_pearson value: 83.75433886279843 - type: cos_sim_spearman value: 78.29727771767095 - type: euclidean_pearson value: 80.83057828506621 - type: euclidean_spearman value: 78.35203149750356 - type: manhattan_pearson value: 80.7403553891142 - type: manhattan_spearman value: 78.33670488531051 - task: type: STS dataset: name: MTEB STS12 type: mteb/sts12-sts config: default split: test revision: a0d554a64d88156834ff5ae9920b964011b16384 metrics: - type: cos_sim_pearson value: 84.59999465280839 - type: cos_sim_spearman value: 75.79279003980383 - type: euclidean_pearson value: 82.29895375956758 - type: euclidean_spearman value: 77.33856514102094 - type: manhattan_pearson value: 82.22694214534756 - type: manhattan_spearman value: 77.3028993008695 - task: type: STS dataset: name: MTEB STS13 type: mteb/sts13-sts config: default split: test revision: 7e90230a92c190f1bf69ae9002b8cea547a64cca metrics: - type: cos_sim_pearson value: 83.09296929691297 - type: cos_sim_spearman value: 83.58056936846941 - type: euclidean_pearson value: 83.84067483060005 - type: euclidean_spearman value: 84.45155680480985 - type: manhattan_pearson value: 83.82353052971942 - type: manhattan_spearman value: 84.43030567861112 - task: type: STS dataset: name: MTEB STS14 type: mteb/sts14-sts config: default split: test revision: 6031580fec1f6af667f0bd2da0a551cf4f0b2375 metrics: - type: cos_sim_pearson value: 82.74616852320915 - type: cos_sim_spearman value: 79.948683747966 - type: euclidean_pearson value: 81.55702283757084 - type: euclidean_spearman value: 
80.1721505114231 - type: manhattan_pearson value: 81.52251518619441 - type: manhattan_spearman value: 80.1469800135577 - task: type: STS dataset: name: MTEB STS15 type: mteb/sts15-sts config: default split: test revision: ae752c7c21bf194d8b67fd573edf7ae58183cbe3 metrics: - type: cos_sim_pearson value: 87.97170104226318 - type: cos_sim_spearman value: 88.82021731518206 - type: euclidean_pearson value: 87.92950547187615 - type: euclidean_spearman value: 88.67043634645866 - type: manhattan_pearson value: 87.90668112827639 - type: manhattan_spearman value: 88.64471082785317 - task: type: STS dataset: name: MTEB STS16 type: mteb/sts16-sts config: default split: test revision: 4d8694f8f0e0100860b497b999b3dbed754a0513 metrics: - type: cos_sim_pearson value: 83.02790375770599 - type: cos_sim_spearman value: 84.46308496590792 - type: euclidean_pearson value: 84.29430000414911 - type: euclidean_spearman value: 84.77298303589936 - type: manhattan_pearson value: 84.23919291368665 - type: manhattan_spearman value: 84.75272234871308 - task: type: STS dataset: name: MTEB STS17 (en-en) type: mteb/sts17-crosslingual-sts config: en-en split: test revision: af5e6fb845001ecf41f4c1e033ce921939a2a68d metrics: - type: cos_sim_pearson value: 87.62885108477064 - type: cos_sim_spearman value: 87.58456196391622 - type: euclidean_pearson value: 88.2602775281007 - type: euclidean_spearman value: 87.51556278299846 - type: manhattan_pearson value: 88.11224053672842 - type: manhattan_spearman value: 87.4336094383095 - task: type: STS dataset: name: MTEB STS22 (en) type: mteb/sts22-crosslingual-sts config: en split: test revision: 6d1ba47164174a496b7fa5d3569dae26a6813b80 metrics: - type: cos_sim_pearson value: 63.98187965128411 - type: cos_sim_spearman value: 64.0653163219731 - type: euclidean_pearson value: 62.30616725924099 - type: euclidean_spearman value: 61.556971332295916 - type: manhattan_pearson value: 62.07642330128549 - type: manhattan_spearman value: 61.155494129828 - task: type: STS dataset: name: MTEB STSBenchmark type: mteb/stsbenchmark-sts config: default split: test revision: b0fddb56ed78048fa8b90373c8a3cfc37b684831 metrics: - type: cos_sim_pearson value: 85.6089703921826 - type: cos_sim_spearman value: 86.52303197250791 - type: euclidean_pearson value: 85.95801955963246 - type: euclidean_spearman value: 86.25242424112962 - type: manhattan_pearson value: 85.88829100470312 - type: manhattan_spearman value: 86.18742955805165 - task: type: Reranking dataset: name: MTEB SciDocsRR type: mteb/scidocs-reranking config: default split: test revision: d3c5e1fc0b855ab6097bf1cda04dd73947d7caab metrics: - type: map value: 83.02282098487036 - type: mrr value: 95.05126409538174 - task: type: Retrieval dataset: name: MTEB SciFact type: scifact config: default split: test revision: None metrics: - type: map_at_1 value: 55.928 - type: map_at_10 value: 67.308 - type: map_at_100 value: 67.89500000000001 - type: map_at_1000 value: 67.91199999999999 - type: map_at_3 value: 65.091 - type: map_at_5 value: 66.412 - type: mrr_at_1 value: 58.667 - type: mrr_at_10 value: 68.401 - type: mrr_at_100 value: 68.804 - type: mrr_at_1000 value: 68.819 - type: mrr_at_3 value: 66.72200000000001 - type: mrr_at_5 value: 67.72200000000001 - type: ndcg_at_1 value: 58.667 - type: ndcg_at_10 value: 71.944 - type: ndcg_at_100 value: 74.464 - type: ndcg_at_1000 value: 74.82799999999999 - type: ndcg_at_3 value: 68.257 - type: ndcg_at_5 value: 70.10300000000001 - type: precision_at_1 value: 58.667 - type: precision_at_10 value: 9.533 - type: 
precision_at_100 value: 1.09 - type: precision_at_1000 value: 0.11199999999999999 - type: precision_at_3 value: 27.222 - type: precision_at_5 value: 17.533 - type: recall_at_1 value: 55.928 - type: recall_at_10 value: 84.65 - type: recall_at_100 value: 96.267 - type: recall_at_1000 value: 99 - type: recall_at_3 value: 74.656 - type: recall_at_5 value: 79.489 - task: type: PairClassification dataset: name: MTEB SprintDuplicateQuestions type: mteb/sprintduplicatequestions-pairclassification config: default split: test revision: d66bd1f72af766a5cc4b0ca5e00c162f89e8cc46 metrics: - type: cos_sim_accuracy value: 99.79009900990098 - type: cos_sim_ap value: 94.5795129511524 - type: cos_sim_f1 value: 89.34673366834171 - type: cos_sim_precision value: 89.79797979797979 - type: cos_sim_recall value: 88.9 - type: dot_accuracy value: 99.53465346534654 - type: dot_ap value: 81.56492504352725 - type: dot_f1 value: 76.33816908454227 - type: dot_precision value: 76.37637637637637 - type: dot_recall value: 76.3 - type: euclidean_accuracy value: 99.78514851485149 - type: euclidean_ap value: 94.59134620408962 - type: euclidean_f1 value: 88.96484375 - type: euclidean_precision value: 86.92748091603053 - type: euclidean_recall value: 91.10000000000001 - type: manhattan_accuracy value: 99.78415841584159 - type: manhattan_ap value: 94.5190197328845 - type: manhattan_f1 value: 88.84462151394423 - type: manhattan_precision value: 88.4920634920635 - type: manhattan_recall value: 89.2 - type: max_accuracy value: 99.79009900990098 - type: max_ap value: 94.59134620408962 - type: max_f1 value: 89.34673366834171 - task: type: Clustering dataset: name: MTEB StackExchangeClustering type: mteb/stackexchange-clustering config: default split: test revision: 6cbc1f7b2bc0622f2e39d2c77fa502909748c259 metrics: - type: v_measure value: 65.1487505617497 - task: type: Clustering dataset: name: MTEB StackExchangeClusteringP2P type: mteb/stackexchange-clustering-p2p config: default split: test revision: 815ca46b2622cec33ccafc3735d572c266efdb44 metrics: - type: v_measure value: 32.502518166001856 - task: type: Reranking dataset: name: MTEB StackOverflowDupQuestions type: mteb/stackoverflowdupquestions-reranking config: default split: test revision: e185fbe320c72810689fc5848eb6114e1ef5ec69 metrics: - type: map value: 50.33775480236701 - type: mrr value: 51.17302223919871 - task: type: Summarization dataset: name: MTEB SummEval type: mteb/summeval config: default split: test revision: cda12ad7615edc362dbf25a00fdd61d3b1eaf93c metrics: - type: cos_sim_pearson value: 30.561111309808208 - type: cos_sim_spearman value: 30.2839254379273 - type: dot_pearson value: 29.560242291401973 - type: dot_spearman value: 30.51527274679116 - task: type: Retrieval dataset: name: MTEB TRECCOVID type: trec-covid config: default split: test revision: None metrics: - type: map_at_1 value: 0.215 - type: map_at_10 value: 1.752 - type: map_at_100 value: 9.258 - type: map_at_1000 value: 23.438 - type: map_at_3 value: 0.6 - type: map_at_5 value: 0.968 - type: mrr_at_1 value: 84 - type: mrr_at_10 value: 91.333 - type: mrr_at_100 value: 91.333 - type: mrr_at_1000 value: 91.333 - type: mrr_at_3 value: 91.333 - type: mrr_at_5 value: 91.333 - type: ndcg_at_1 value: 75 - type: ndcg_at_10 value: 69.596 - type: ndcg_at_100 value: 51.970000000000006 - type: ndcg_at_1000 value: 48.864999999999995 - type: ndcg_at_3 value: 73.92699999999999 - type: ndcg_at_5 value: 73.175 - type: precision_at_1 value: 84 - type: precision_at_10 value: 74 - type: precision_at_100 value: 53.2 - 
type: precision_at_1000 value: 21.836 - type: precision_at_3 value: 79.333 - type: precision_at_5 value: 78.4 - type: recall_at_1 value: 0.215 - type: recall_at_10 value: 1.9609999999999999 - type: recall_at_100 value: 12.809999999999999 - type: recall_at_1000 value: 46.418 - type: recall_at_3 value: 0.6479999999999999 - type: recall_at_5 value: 1.057 - task: type: Retrieval dataset: name: MTEB Touche2020 type: webis-touche2020 config: default split: test revision: None metrics: - type: map_at_1 value: 3.066 - type: map_at_10 value: 10.508000000000001 - type: map_at_100 value: 16.258 - type: map_at_1000 value: 17.705000000000002 - type: map_at_3 value: 6.157 - type: map_at_5 value: 7.510999999999999 - type: mrr_at_1 value: 34.694 - type: mrr_at_10 value: 48.786 - type: mrr_at_100 value: 49.619 - type: mrr_at_1000 value: 49.619 - type: mrr_at_3 value: 45.918 - type: mrr_at_5 value: 46.837 - type: ndcg_at_1 value: 31.633 - type: ndcg_at_10 value: 26.401999999999997 - type: ndcg_at_100 value: 37.139 - type: ndcg_at_1000 value: 48.012 - type: ndcg_at_3 value: 31.875999999999998 - type: ndcg_at_5 value: 27.383000000000003 - type: precision_at_1 value: 34.694 - type: precision_at_10 value: 22.857 - type: precision_at_100 value: 7.611999999999999 - type: precision_at_1000 value: 1.492 - type: precision_at_3 value: 33.333 - type: precision_at_5 value: 26.122 - type: recall_at_1 value: 3.066 - type: recall_at_10 value: 16.239 - type: recall_at_100 value: 47.29 - type: recall_at_1000 value: 81.137 - type: recall_at_3 value: 7.069 - type: recall_at_5 value: 9.483 - task: type: Classification dataset: name: MTEB ToxicConversationsClassification type: mteb/toxic_conversations_50k config: default split: test revision: d7c0de2777da35d6aae2200a62c6e0e5af397c4c metrics: - type: accuracy value: 72.1126 - type: ap value: 14.710862719285753 - type: f1 value: 55.437808972378846 - task: type: Classification dataset: name: MTEB TweetSentimentExtractionClassification type: mteb/tweet_sentiment_extraction config: default split: test revision: d604517c81ca91fe16a244d1248fc021f9ecee7a metrics: - type: accuracy value: 60.39049235993209 - type: f1 value: 60.69810537250234 - task: type: Clustering dataset: name: MTEB TwentyNewsgroupsClustering type: mteb/twentynewsgroups-clustering config: default split: test revision: 6125ec4e24fa026cec8a478383ee943acfbd5449 metrics: - type: v_measure value: 48.15576640316866 - task: type: PairClassification dataset: name: MTEB TwitterSemEval2015 type: mteb/twittersemeval2015-pairclassification config: default split: test revision: 70970daeab8776df92f5ea462b6173c0b46fd2d1 metrics: - type: cos_sim_accuracy value: 86.52917684925792 - type: cos_sim_ap value: 75.97497873817315 - type: cos_sim_f1 value: 70.01151926276718 - type: cos_sim_precision value: 67.98409147402435 - type: cos_sim_recall value: 72.16358839050132 - type: dot_accuracy value: 82.47004828038385 - type: dot_ap value: 62.48739894974198 - type: dot_f1 value: 59.13107511045656 - type: dot_precision value: 55.27765029830197 - type: dot_recall value: 63.562005277044854 - type: euclidean_accuracy value: 86.46361089586935 - type: euclidean_ap value: 75.59282886839452 - type: euclidean_f1 value: 69.6465443945099 - type: euclidean_precision value: 64.52847175331982 - type: euclidean_recall value: 75.64643799472296 - type: manhattan_accuracy value: 86.43380818978363 - type: manhattan_ap value: 75.5742420974403 - type: manhattan_f1 value: 69.8636926889715 - type: manhattan_precision value: 65.8644859813084 - type: manhattan_recall 
value: 74.37994722955145 - type: max_accuracy value: 86.52917684925792 - type: max_ap value: 75.97497873817315 - type: max_f1 value: 70.01151926276718 - task: type: PairClassification dataset: name: MTEB TwitterURLCorpus type: mteb/twitterurlcorpus-pairclassification config: default split: test revision: 8b6510b0b1fa4e4c4f879467980e9be563ec1cdf metrics: - type: cos_sim_accuracy value: 89.29056545193464 - type: cos_sim_ap value: 86.63028865482376 - type: cos_sim_f1 value: 79.18166458532285 - type: cos_sim_precision value: 75.70585756426465 - type: cos_sim_recall value: 82.99199260856174 - type: dot_accuracy value: 85.23305002522606 - type: dot_ap value: 76.0482687263196 - type: dot_f1 value: 70.80484330484332 - type: dot_precision value: 65.86933474688577 - type: dot_recall value: 76.53988296889437 - type: euclidean_accuracy value: 89.26145845461248 - type: euclidean_ap value: 86.54073288416006 - type: euclidean_f1 value: 78.9721371479794 - type: euclidean_precision value: 76.68649354417525 - type: euclidean_recall value: 81.39821373575609 - type: manhattan_accuracy value: 89.22847052431405 - type: manhattan_ap value: 86.51250729037905 - type: manhattan_f1 value: 78.94601825044894 - type: manhattan_precision value: 75.32694594027555 - type: manhattan_recall value: 82.93039728980598 - type: max_accuracy value: 89.29056545193464 - type: max_ap value: 86.63028865482376 - type: max_f1 value: 79.18166458532285
---

# E5-base-v2

[Text Embeddings by Weakly-Supervised Contrastive Pre-training](https://arxiv.org/pdf/2212.03533.pdf).
Liang Wang, Nan Yang, Xiaolong Huang, Binxing Jiao, Linjun Yang, Daxin Jiang, Rangan Majumder, Furu Wei, arXiv 2022

This model has 12 layers and the embedding size is 768.

## Usage

Below is an example to encode queries and passages from the MS-MARCO passage ranking dataset.

```python
import torch.nn.functional as F

from torch import Tensor
from transformers import AutoTokenizer, AutoModel


def average_pool(last_hidden_states: Tensor,
                 attention_mask: Tensor) -> Tensor:
    last_hidden = last_hidden_states.masked_fill(~attention_mask[..., None].bool(), 0.0)
    return last_hidden.sum(dim=1) / attention_mask.sum(dim=1)[..., None]


# Each input text should start with "query: " or "passage: ".
# For tasks other than retrieval, you can simply use the "query: " prefix.
input_texts = ['query: how much protein should a female eat',
               'query: summit define',
               "passage: As a general guideline, the CDC's average requirement of protein for women ages 19 to 70 is 46 grams per day. But, as you can see from this chart, you'll need to increase that if you're expecting or training for a marathon. Check out the chart below to see how much protein you should be eating each day.",
               "passage: Definition of summit for English Language Learners. : 1 the highest point of a mountain : the top of a mountain. : 2 the highest level. : 3 a meeting or series of meetings between the leaders of two or more governments."]

tokenizer = AutoTokenizer.from_pretrained('intfloat/e5-base-v2')
model = AutoModel.from_pretrained('intfloat/e5-base-v2')

# Tokenize the input texts
batch_dict = tokenizer(input_texts, max_length=512, padding=True, truncation=True, return_tensors='pt')

outputs = model(**batch_dict)
embeddings = average_pool(outputs.last_hidden_state, batch_dict['attention_mask'])

# normalize embeddings
embeddings = F.normalize(embeddings, p=2, dim=1)
scores = (embeddings[:2] @ embeddings[2:].T) * 100
print(scores.tolist())
```

## Training Details

Please refer to our paper at [https://arxiv.org/pdf/2212.03533.pdf](https://arxiv.org/pdf/2212.03533.pdf).

## Benchmark Evaluation

Check out [unilm/e5](https://github.com/microsoft/unilm/tree/master/e5) to reproduce evaluation results on the [BEIR](https://arxiv.org/abs/2104.08663) and [MTEB benchmark](https://arxiv.org/abs/2210.07316).

## Support for Sentence Transformers

Below is an example for usage with sentence_transformers.

```python
from sentence_transformers import SentenceTransformer

model = SentenceTransformer('intfloat/e5-base-v2')
input_texts = [
    'query: how much protein should a female eat',
    'query: summit define',
    "passage: As a general guideline, the CDC's average requirement of protein for women ages 19 to 70 is 46 grams per day. But, as you can see from this chart, you'll need to increase that if you're expecting or training for a marathon. Check out the chart below to see how much protein you should be eating each day.",
    "passage: Definition of summit for English Language Learners. : 1 the highest point of a mountain : the top of a mountain. : 2 the highest level. : 3 a meeting or series of meetings between the leaders of two or more governments."
]
embeddings = model.encode(input_texts, normalize_embeddings=True)
```

Package requirements

`pip install sentence_transformers~=2.2.2`

Contributors: [michaelfeil](https://huggingface.co/michaelfeil)

## FAQ

**1. Do I need to add the prefix "query: " and "passage: " to input texts?**

Yes, this is how the model is trained, otherwise you will see a performance degradation.

Here are some rules of thumb:
- Use "query: " and "passage: " correspondingly for asymmetric tasks such as passage retrieval in open QA, ad-hoc information retrieval.
- Use "query: " prefix for symmetric tasks such as semantic similarity, paraphrase retrieval.
- Use "query: " prefix if you want to use embeddings as features, such as linear probing classification, clustering.

**2. Why are my reproduced results slightly different from those reported in the model card?**

Different versions of `transformers` and `pytorch` could cause negligible but non-zero performance differences.

**3. Why do the cosine similarity scores distribute around 0.7 to 1.0?**

This is a known and expected behavior as we use a low temperature of 0.01 for the InfoNCE contrastive loss. For text embedding tasks like text retrieval or semantic similarity, what matters is the relative order of the scores instead of the absolute values, so this should not be an issue.
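As a small illustration of that last point, the sketch below ranks candidate passages for a query by cosine similarity using the Sentence Transformers path shown above. The query and passage texts are abbreviated placeholders; because the embeddings are L2-normalized, the dot product equals cosine similarity.

```python
import numpy as np
from sentence_transformers import SentenceTransformer

model = SentenceTransformer('intfloat/e5-base-v2')

query = 'query: how much protein should a female eat'
passages = [
    "passage: As a general guideline, the CDC's average requirement of protein for women ages 19 to 70 is 46 grams per day.",
    "passage: Definition of summit for English Language Learners: the highest point of a mountain.",
]

# Normalized embeddings, so the dot product below is the cosine similarity.
query_emb = model.encode([query], normalize_embeddings=True)
passage_embs = model.encode(passages, normalize_embeddings=True)

scores = (query_emb @ passage_embs.T)[0]
for idx in np.argsort(-scores):  # rank passages by score, highest first
    print(f"{scores[idx]:.4f}  {passages[idx]}")
```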
## Citation

If you find our paper or models helpful, please consider citing as follows:

```
@article{wang2022text,
  title={Text Embeddings by Weakly-Supervised Contrastive Pre-training},
  author={Wang, Liang and Yang, Nan and Huang, Xiaolong and Jiao, Binxing and Yang, Linjun and Jiang, Daxin and Majumder, Rangan and Wei, Furu},
  journal={arXiv preprint arXiv:2212.03533},
  year={2022}
}
```

## Limitations

This model only works for English texts. Long texts will be truncated to at most 512 tokens.
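A quick way to check whether an input will be truncated is to count tokens with the model's tokenizer before encoding. This is a minimal sketch; the sample text is a hypothetical long passage used only to illustrate the length check.

```python
from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained('intfloat/e5-base-v2')

# Hypothetical long passage, repeated to exceed the 512-token limit.
text = "passage: " + "protein intake guidelines " * 150
n_tokens = len(tokenizer(text)['input_ids'])
print(f"{n_tokens} tokens -> " + ("will be truncated at 512" if n_tokens > 512 else "fits within 512 tokens"))
```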
[ "BIOSSES", "SCIFACT" ]
minishlab/potion-base-8M
minishlab
null
[ "model2vec", "onnx", "safetensors", "embeddings", "static-embeddings", "mteb", "sentence-transformers", "license:mit", "model-index", "region:us" ]
"2024-10-29T09:35:47Z"
2025-01-21T17:53:06+00:00
325,384
50
--- library_name: model2vec license: mit tags: - embeddings - static-embeddings - mteb - sentence-transformers model-index: - name: potion-base-8M results: - task: type: Classification dataset: name: MTEB AmazonCounterfactualClassification (en-ext) type: mteb/amazon_counterfactual config: en-ext split: test revision: e8379541af4e31359cca9fbcf4b00f2671dba205 metrics: - type: accuracy value: 72.15142428785607 - type: ap value: 20.626102291010103 - type: ap_weighted value: 20.626102291010103 - type: f1 value: 59.187001923736894 - type: f1_weighted value: 77.34906471545477 - type: main_score value: 72.15142428785607 - task: type: Classification dataset: name: MTEB AmazonCounterfactualClassification (en) type: mteb/amazon_counterfactual config: en split: test revision: e8379541af4e31359cca9fbcf4b00f2671dba205 metrics: - type: accuracy value: 71.7910447761194 - type: ap value: 33.038020188116036 - type: ap_weighted value: 33.038020188116036 - type: f1 value: 65.03799728338926 - type: f1_weighted value: 74.32788084269461 - type: main_score value: 71.7910447761194 - task: type: Classification dataset: name: MTEB AmazonPolarityClassification (default) type: mteb/amazon_polarity config: default split: test revision: e2d317d38cd51312af73b3d32a06d1a08b442046 metrics: - type: accuracy value: 72.47644999999999 - type: ap value: 66.91002822830875 - type: ap_weighted value: 66.91002822830875 - type: f1 value: 72.2600863044581 - type: f1_weighted value: 72.2600863044581 - type: main_score value: 72.47644999999999 - task: type: Classification dataset: name: MTEB AmazonReviewsClassification (en) type: mteb/amazon_reviews_multi config: en split: test revision: 1399c76144fd37290681b995c656ef9b2e06e26d metrics: - type: accuracy value: 36.012 - type: f1 value: 35.38209336470206 - type: f1_weighted value: 35.38209336470206 - type: main_score value: 36.012 - task: type: Retrieval dataset: name: MTEB ArguAna (default) type: mteb/arguana config: default split: test revision: c22ab2a51041ffd869aaddef7af8d8215647e41a metrics: - type: main_score value: 41.966 - type: map_at_1 value: 21.124000000000002 - type: map_at_10 value: 34.335 - type: map_at_100 value: 35.618 - type: map_at_1000 value: 35.653 - type: map_at_20 value: 35.21 - type: map_at_3 value: 30.287 - type: map_at_5 value: 32.364 - type: mrr_at_1 value: 21.62162162162162 - type: mrr_at_10 value: 34.509104969631224 - type: mrr_at_100 value: 35.79229946325059 - type: mrr_at_1000 value: 35.82767320968403 - type: mrr_at_20 value: 35.38485605181455 - type: mrr_at_3 value: 30.405405405405343 - type: mrr_at_5 value: 32.539118065433755 - type: nauc_map_at_1000_diff1 value: 7.960826255212609 - type: nauc_map_at_1000_max value: -0.036381315067780806 - type: nauc_map_at_1000_std value: 4.317766293607543 - type: nauc_map_at_100_diff1 value: 7.96318422584977 - type: nauc_map_at_100_max value: -0.007800758201736421 - type: nauc_map_at_100_std value: 4.362078927714198 - type: nauc_map_at_10_diff1 value: 7.718022643886373 - type: nauc_map_at_10_max value: -0.28312250079415263 - type: nauc_map_at_10_std value: 4.079196099329437 - type: nauc_map_at_1_diff1 value: 9.240393281366906 - type: nauc_map_at_1_max value: -4.35798405693968 - type: nauc_map_at_1_std value: 1.5076565659508505 - type: nauc_map_at_20_diff1 value: 8.028053857747947 - type: nauc_map_at_20_max value: 0.0719807687813251 - type: nauc_map_at_20_std value: 4.394812024847373 - type: nauc_map_at_3_diff1 value: 7.953781299828595 - type: nauc_map_at_3_max value: -0.573072664182506 - type: nauc_map_at_3_std value: 
3.110821611511372 - type: nauc_map_at_5_diff1 value: 7.3135486297676415 - type: nauc_map_at_5_max value: -1.2456304709603878 - type: nauc_map_at_5_std value: 3.2332006196074805 - type: nauc_mrr_at_1000_diff1 value: 6.511595076207588 - type: nauc_mrr_at_1000_max value: -0.4777573692286575 - type: nauc_mrr_at_1000_std value: 4.19518565742107 - type: nauc_mrr_at_100_diff1 value: 6.515632481906436 - type: nauc_mrr_at_100_max value: -0.44877259463397945 - type: nauc_mrr_at_100_std value: 4.23945026873963 - type: nauc_mrr_at_10_diff1 value: 6.325261150908693 - type: nauc_mrr_at_10_max value: -0.6968688229450172 - type: nauc_mrr_at_10_std value: 3.9631303923167294 - type: nauc_mrr_at_1_diff1 value: 7.4844946822832785 - type: nauc_mrr_at_1_max value: -4.0195803039697315 - type: nauc_mrr_at_1_std value: 1.3908984330415426 - type: nauc_mrr_at_20_diff1 value: 6.596479652899773 - type: nauc_mrr_at_20_max value: -0.3643520262705732 - type: nauc_mrr_at_20_std value: 4.273437423781988 - type: nauc_mrr_at_3_diff1 value: 6.3669450211955745 - type: nauc_mrr_at_3_max value: -1.2252447747465325 - type: nauc_mrr_at_3_std value: 2.941708547001192 - type: nauc_mrr_at_5_diff1 value: 5.907234785613739 - type: nauc_mrr_at_5_max value: -1.6860364992754489 - type: nauc_mrr_at_5_std value: 3.0737345356263406 - type: nauc_ndcg_at_1000_diff1 value: 7.9706658500975704 - type: nauc_ndcg_at_1000_max value: 1.5533941879318276 - type: nauc_ndcg_at_1000_std value: 5.933724413159287 - type: nauc_ndcg_at_100_diff1 value: 8.107414913432397 - type: nauc_ndcg_at_100_max value: 2.5869418793842778 - type: nauc_ndcg_at_100_std value: 7.322146884970876 - type: nauc_ndcg_at_10_diff1 value: 7.669807780113455 - type: nauc_ndcg_at_10_max value: 1.886214180834648 - type: nauc_ndcg_at_10_std value: 6.055781567147952 - type: nauc_ndcg_at_1_diff1 value: 9.240393281366906 - type: nauc_ndcg_at_1_max value: -4.35798405693968 - type: nauc_ndcg_at_1_std value: 1.5076565659508505 - type: nauc_ndcg_at_20_diff1 value: 8.661303229272372 - type: nauc_ndcg_at_20_max value: 3.303174862536166 - type: nauc_ndcg_at_20_std value: 7.493758825967179 - type: nauc_ndcg_at_3_diff1 value: 7.858281169135036 - type: nauc_ndcg_at_3_max value: 0.7079724865506055 - type: nauc_ndcg_at_3_std value: 3.7402042497720958 - type: nauc_ndcg_at_5_diff1 value: 6.68694262946663 - type: nauc_ndcg_at_5_max value: -0.43002529778264326 - type: nauc_ndcg_at_5_std value: 3.9597009492387265 - type: nauc_precision_at_1000_diff1 value: -28.217119971169463 - type: nauc_precision_at_1000_max value: 17.425278660692022 - type: nauc_precision_at_1000_std value: 46.7473304347162 - type: nauc_precision_at_100_diff1 value: 8.738254686624805 - type: nauc_precision_at_100_max value: 32.88945783040687 - type: nauc_precision_at_100_std value: 48.42583030760342 - type: nauc_precision_at_10_diff1 value: 7.873361516017592 - type: nauc_precision_at_10_max value: 9.802552072953949 - type: nauc_precision_at_10_std value: 13.506647301311148 - type: nauc_precision_at_1_diff1 value: 9.240393281366906 - type: nauc_precision_at_1_max value: -4.35798405693968 - type: nauc_precision_at_1_std value: 1.5076565659508505 - type: nauc_precision_at_20_diff1 value: 13.008220519097161 - type: nauc_precision_at_20_max value: 20.829507014709748 - type: nauc_precision_at_20_std value: 25.02998005000373 - type: nauc_precision_at_3_diff1 value: 7.685752623087433 - type: nauc_precision_at_3_max value: 4.126629771323765 - type: nauc_precision_at_3_std value: 5.440817692025366 - type: nauc_precision_at_5_diff1 value: 
4.879990376967901 - type: nauc_precision_at_5_max value: 1.7076492862153407 - type: nauc_precision_at_5_std value: 6.009634283832547 - type: nauc_recall_at_1000_diff1 value: -28.217119971166543 - type: nauc_recall_at_1000_max value: 17.425278660689965 - type: nauc_recall_at_1000_std value: 46.74733043471749 - type: nauc_recall_at_100_diff1 value: 8.738254686625181 - type: nauc_recall_at_100_max value: 32.8894578304071 - type: nauc_recall_at_100_std value: 48.425830307603746 - type: nauc_recall_at_10_diff1 value: 7.87336151601764 - type: nauc_recall_at_10_max value: 9.802552072953997 - type: nauc_recall_at_10_std value: 13.506647301311201 - type: nauc_recall_at_1_diff1 value: 9.240393281366906 - type: nauc_recall_at_1_max value: -4.35798405693968 - type: nauc_recall_at_1_std value: 1.5076565659508505 - type: nauc_recall_at_20_diff1 value: 13.008220519097097 - type: nauc_recall_at_20_max value: 20.82950701470975 - type: nauc_recall_at_20_std value: 25.02998005000377 - type: nauc_recall_at_3_diff1 value: 7.685752623087458 - type: nauc_recall_at_3_max value: 4.126629771323791 - type: nauc_recall_at_3_std value: 5.440817692025401 - type: nauc_recall_at_5_diff1 value: 4.879990376967856 - type: nauc_recall_at_5_max value: 1.7076492862153638 - type: nauc_recall_at_5_std value: 6.009634283832578 - type: ndcg_at_1 value: 21.124000000000002 - type: ndcg_at_10 value: 41.966 - type: ndcg_at_100 value: 47.751 - type: ndcg_at_1000 value: 48.635 - type: ndcg_at_20 value: 45.08 - type: ndcg_at_3 value: 33.505 - type: ndcg_at_5 value: 37.266 - type: precision_at_1 value: 21.124000000000002 - type: precision_at_10 value: 6.643000000000001 - type: precision_at_100 value: 0.9249999999999999 - type: precision_at_1000 value: 0.099 - type: precision_at_20 value: 3.93 - type: precision_at_3 value: 14.296000000000001 - type: precision_at_5 value: 10.413 - type: recall_at_1 value: 21.124000000000002 - type: recall_at_10 value: 66.43 - type: recall_at_100 value: 92.461 - type: recall_at_1000 value: 99.289 - type: recall_at_20 value: 78.592 - type: recall_at_3 value: 42.888 - type: recall_at_5 value: 52.063 - task: type: Clustering dataset: name: MTEB ArxivClusteringP2P (default) type: mteb/arxiv-clustering-p2p config: default split: test revision: a122ad7f3f0291bf49cc6f4d32aa80929df69d5d metrics: - type: main_score value: 35.387660145946825 - type: v_measure value: 35.387660145946825 - type: v_measure_std value: 14.022525689022785 - task: type: Clustering dataset: name: MTEB ArxivClusteringS2S (default) type: mteb/arxiv-clustering-s2s config: default split: test revision: f910caf1a6075f7329cdf8c1a6135696f37dbd53 metrics: - type: main_score value: 25.26058942964131 - type: v_measure value: 25.26058942964131 - type: v_measure_std value: 14.850432186356857 - task: type: Reranking dataset: name: MTEB AskUbuntuDupQuestions (default) type: mteb/askubuntudupquestions-reranking config: default split: test revision: 2000358ca161889fa9c082cb41daa8dcfb161a54 metrics: - type: main_score value: 54.13950871400633 - type: map value: 54.13950871400633 - type: mrr value: 68.87437892978059 - type: nAUC_map_diff1 value: 3.489277672557011 - type: nAUC_map_max value: 15.848457273691064 - type: nAUC_map_std value: 5.166813098270773 - type: nAUC_mrr_diff1 value: 4.9924344024669765 - type: nAUC_mrr_max value: 21.861692980537956 - type: nAUC_mrr_std value: 8.256966784037171 - task: type: STS dataset: name: MTEB BIOSSES (default) type: mteb/biosses-sts config: default split: test revision: d3fb88f8f02e40887cd149695127462bbcf29b4a metrics: - 
type: cosine_pearson value: 79.11612010879227 - type: cosine_spearman value: 75.85775256673794 - type: euclidean_pearson value: 77.46080265077437 - type: euclidean_spearman value: 75.85775256673794 - type: main_score value: 75.85775256673794 - type: manhattan_pearson value: 77.73191375456281 - type: manhattan_spearman value: 75.98908086034702 - type: pearson value: 79.11612010879227 - type: spearman value: 75.85775256673794 - task: type: Classification dataset: name: MTEB Banking77Classification (default) type: mteb/banking77 config: default split: test revision: 0fd18e25b25c072e09e0d92ab615fda904d66300 metrics: - type: accuracy value: 72.63636363636363 - type: f1 value: 71.69751597573539 - type: f1_weighted value: 71.69751597573539 - type: main_score value: 72.63636363636363 - task: type: Clustering dataset: name: MTEB BiorxivClusteringP2P (default) type: mteb/biorxiv-clustering-p2p config: default split: test revision: 65b79d1d13f80053f67aca9498d9402c2d9f1f40 metrics: - type: main_score value: 30.861840536151014 - type: v_measure value: 30.861840536151014 - type: v_measure_std value: 0.8096483751274005 - task: type: Clustering dataset: name: MTEB BiorxivClusteringS2S (default) type: mteb/biorxiv-clustering-s2s config: default split: test revision: 258694dd0231531bc1fd9de6ceb52a0853c6d908 metrics: - type: main_score value: 20.219544420664455 - type: v_measure value: 20.219544420664455 - type: v_measure_std value: 0.7431903039116942 - task: type: Retrieval dataset: name: MTEB CQADupstackAndroidRetrieval (default) type: mteb/cqadupstack-android config: default split: test revision: f46a197baaae43b4f621051089b82a364682dfeb metrics: - type: main_score value: 31.835 - type: map_at_1 value: 19.939 - type: map_at_10 value: 26.924 - type: map_at_100 value: 28.16 - type: map_at_1000 value: 28.316999999999997 - type: map_at_20 value: 27.554000000000002 - type: map_at_3 value: 24.45 - type: map_at_5 value: 25.751 - type: mrr_at_1 value: 25.894134477825464 - type: mrr_at_10 value: 32.65152031246451 - type: mrr_at_100 value: 33.58362210177363 - type: mrr_at_1000 value: 33.66415578481638 - type: mrr_at_20 value: 33.158616397714056 - type: mrr_at_3 value: 30.51979017644255 - type: mrr_at_5 value: 31.67143538388174 - type: nauc_map_at_1000_diff1 value: 43.61649840733464 - type: nauc_map_at_1000_max value: 27.361709993841355 - type: nauc_map_at_1000_std value: -1.47509416166404 - type: nauc_map_at_100_diff1 value: 43.63694784277137 - type: nauc_map_at_100_max value: 27.3675446795805 - type: nauc_map_at_100_std value: -1.4918015679743737 - type: nauc_map_at_10_diff1 value: 43.85263484013946 - type: nauc_map_at_10_max value: 26.810142038619045 - type: nauc_map_at_10_std value: -1.9884710880957612 - type: nauc_map_at_1_diff1 value: 48.66149039458694 - type: nauc_map_at_1_max value: 25.719796249226828 - type: nauc_map_at_1_std value: -3.291830544258096 - type: nauc_map_at_20_diff1 value: 43.70511471916722 - type: nauc_map_at_20_max value: 27.211922285560092 - type: nauc_map_at_20_std value: -1.621254133243609 - type: nauc_map_at_3_diff1 value: 45.678378884966854 - type: nauc_map_at_3_max value: 26.263363796878807 - type: nauc_map_at_3_std value: -3.067861673919005 - type: nauc_map_at_5_diff1 value: 44.28820868486158 - type: nauc_map_at_5_max value: 27.02028586800064 - type: nauc_map_at_5_std value: -2.8993536712942554 - type: nauc_mrr_at_1000_diff1 value: 41.91452307309703 - type: nauc_mrr_at_1000_max value: 28.25542784321284 - type: nauc_mrr_at_1000_std value: -1.2881473492995474 - type: 
nauc_mrr_at_100_diff1 value: 41.887361041816355 - type: nauc_mrr_at_100_max value: 28.242674898536045 - type: nauc_mrr_at_100_std value: -1.2962789057617752 - type: nauc_mrr_at_10_diff1 value: 41.839392429152184 - type: nauc_mrr_at_10_max value: 28.18109937160502 - type: nauc_mrr_at_10_std value: -1.760338307129395 - type: nauc_mrr_at_1_diff1 value: 46.97337896088234 - type: nauc_mrr_at_1_max value: 28.47299575870196 - type: nauc_mrr_at_1_std value: -2.699423724792112 - type: nauc_mrr_at_20_diff1 value: 41.87609128070427 - type: nauc_mrr_at_20_max value: 28.275298954521837 - type: nauc_mrr_at_20_std value: -1.3019240483529069 - type: nauc_mrr_at_3_diff1 value: 43.7337496151517 - type: nauc_mrr_at_3_max value: 27.798267478018285 - type: nauc_mrr_at_3_std value: -2.840593072947404 - type: nauc_mrr_at_5_diff1 value: 42.334483231228894 - type: nauc_mrr_at_5_max value: 28.312298246235912 - type: nauc_mrr_at_5_std value: -2.4627148837425574 - type: nauc_ndcg_at_1000_diff1 value: 41.15727539315947 - type: nauc_ndcg_at_1000_max value: 28.221291832726013 - type: nauc_ndcg_at_1000_std value: 2.0023108110987686 - type: nauc_ndcg_at_100_diff1 value: 40.696711368737986 - type: nauc_ndcg_at_100_max value: 28.3380433133816 - type: nauc_ndcg_at_100_std value: 1.6747741379499974 - type: nauc_ndcg_at_10_diff1 value: 40.68084799209197 - type: nauc_ndcg_at_10_max value: 27.001668531808047 - type: nauc_ndcg_at_10_std value: -0.6698055635076909 - type: nauc_ndcg_at_1_diff1 value: 46.97337896088234 - type: nauc_ndcg_at_1_max value: 28.47299575870196 - type: nauc_ndcg_at_1_std value: -2.699423724792112 - type: nauc_ndcg_at_20_diff1 value: 40.66080469225681 - type: nauc_ndcg_at_20_max value: 27.65886977082646 - type: nauc_ndcg_at_20_std value: 0.7450066458769301 - type: nauc_ndcg_at_3_diff1 value: 42.76104820392522 - type: nauc_ndcg_at_3_max value: 26.519613853147632 - type: nauc_ndcg_at_3_std value: -2.4350130293906034 - type: nauc_ndcg_at_5_diff1 value: 41.019172353488194 - type: nauc_ndcg_at_5_max value: 27.496046368143357 - type: nauc_ndcg_at_5_std value: -2.2882580326645177 - type: nauc_precision_at_1000_diff1 value: -14.261675661323125 - type: nauc_precision_at_1000_max value: -1.183805005826827 - type: nauc_precision_at_1000_std value: 3.344837871953594 - type: nauc_precision_at_100_diff1 value: 2.705968352361474 - type: nauc_precision_at_100_max value: 15.123914801051598 - type: nauc_precision_at_100_std value: 6.622282531987529 - type: nauc_precision_at_10_diff1 value: 21.143497652137974 - type: nauc_precision_at_10_max value: 22.754667045964673 - type: nauc_precision_at_10_std value: 2.56769270957959 - type: nauc_precision_at_1_diff1 value: 46.97337896088234 - type: nauc_precision_at_1_max value: 28.47299575870196 - type: nauc_precision_at_1_std value: -2.699423724792112 - type: nauc_precision_at_20_diff1 value: 15.750482341955857 - type: nauc_precision_at_20_max value: 22.860380841938827 - type: nauc_precision_at_20_std value: 4.22745838192582 - type: nauc_precision_at_3_diff1 value: 35.61809209460161 - type: nauc_precision_at_3_max value: 27.0006337531976 - type: nauc_precision_at_3_std value: -1.4556398881692423 - type: nauc_precision_at_5_diff1 value: 28.851808861899496 - type: nauc_precision_at_5_max value: 27.469054608601784 - type: nauc_precision_at_5_std value: -1.1421142808937477 - type: nauc_recall_at_1000_diff1 value: 33.27567106545891 - type: nauc_recall_at_1000_max value: 30.098997951989325 - type: nauc_recall_at_1000_std value: 37.339251250157766 - type: nauc_recall_at_100_diff1 value: 
29.072377336992822 - type: nauc_recall_at_100_max value: 28.48476566182903 - type: nauc_recall_at_100_std value: 14.360417936748082 - type: nauc_recall_at_10_diff1 value: 32.83564819819592 - type: nauc_recall_at_10_max value: 24.465508171060677 - type: nauc_recall_at_10_std value: 3.332253149508536 - type: nauc_recall_at_1_diff1 value: 48.66149039458694 - type: nauc_recall_at_1_max value: 25.719796249226828 - type: nauc_recall_at_1_std value: -3.291830544258096 - type: nauc_recall_at_20_diff1 value: 31.185350107155045 - type: nauc_recall_at_20_max value: 25.812923152751406 - type: nauc_recall_at_20_std value: 8.353054109145367 - type: nauc_recall_at_3_diff1 value: 40.27297484569938 - type: nauc_recall_at_3_max value: 23.81327189620511 - type: nauc_recall_at_3_std value: -2.526830052534271 - type: nauc_recall_at_5_diff1 value: 34.64896359382995 - type: nauc_recall_at_5_max value: 25.750218989139317 - type: nauc_recall_at_5_std value: -1.3789317138918638 - type: ndcg_at_1 value: 25.894000000000002 - type: ndcg_at_10 value: 31.835 - type: ndcg_at_100 value: 37.325 - type: ndcg_at_1000 value: 40.586 - type: ndcg_at_20 value: 33.714 - type: ndcg_at_3 value: 28.143 - type: ndcg_at_5 value: 29.648999999999997 - type: precision_at_1 value: 25.894000000000002 - type: precision_at_10 value: 6.194999999999999 - type: precision_at_100 value: 1.126 - type: precision_at_1000 value: 0.173 - type: precision_at_20 value: 3.7199999999999998 - type: precision_at_3 value: 13.543 - type: precision_at_5 value: 9.757 - type: recall_at_1 value: 19.939 - type: recall_at_10 value: 40.537 - type: recall_at_100 value: 64.717 - type: recall_at_1000 value: 87.01299999999999 - type: recall_at_20 value: 47.677 - type: recall_at_3 value: 29.301 - type: recall_at_5 value: 33.918 - task: type: Retrieval dataset: name: MTEB CQADupstackEnglishRetrieval (default) type: mteb/cqadupstack-english config: default split: test revision: ad9991cb51e31e31e430383c75ffb2885547b5f0 metrics: - type: main_score value: 25.734 - type: map_at_1 value: 16.601 - type: map_at_10 value: 22.07 - type: map_at_100 value: 22.958000000000002 - type: map_at_1000 value: 23.074 - type: map_at_20 value: 22.52 - type: map_at_3 value: 20.137 - type: map_at_5 value: 21.315 - type: mrr_at_1 value: 20.382165605095544 - type: mrr_at_10 value: 25.95447881912849 - type: mrr_at_100 value: 26.72268332839149 - type: mrr_at_1000 value: 26.79228081014276 - type: mrr_at_20 value: 26.372942687112676 - type: mrr_at_3 value: 24.097664543524406 - type: mrr_at_5 value: 25.269639065817373 - type: nauc_map_at_1000_diff1 value: 39.97979443324452 - type: nauc_map_at_1000_max value: 13.65503993855689 - type: nauc_map_at_1000_std value: -2.0265680574493286 - type: nauc_map_at_100_diff1 value: 40.04134376146643 - type: nauc_map_at_100_max value: 13.602473622919186 - type: nauc_map_at_100_std value: -2.1531627932652073 - type: nauc_map_at_10_diff1 value: 40.321538712092966 - type: nauc_map_at_10_max value: 13.5001803982381 - type: nauc_map_at_10_std value: -2.628320244096416 - type: nauc_map_at_1_diff1 value: 47.528556920568896 - type: nauc_map_at_1_max value: 15.848152314768068 - type: nauc_map_at_1_std value: -3.8515029742454763 - type: nauc_map_at_20_diff1 value: 40.22452252482904 - type: nauc_map_at_20_max value: 13.501820277821633 - type: nauc_map_at_20_std value: -2.4849480670127835 - type: nauc_map_at_3_diff1 value: 41.68152420395297 - type: nauc_map_at_3_max value: 13.993359536648425 - type: nauc_map_at_3_std value: -4.120472655476033 - type: nauc_map_at_5_diff1 value: 
40.72541498326932 - type: nauc_map_at_5_max value: 13.706855573979945 - type: nauc_map_at_5_std value: -3.168857069165899 - type: nauc_mrr_at_1000_diff1 value: 37.9361528126572 - type: nauc_mrr_at_1000_max value: 14.435169065764649 - type: nauc_mrr_at_1000_std value: -0.3672502634006242 - type: nauc_mrr_at_100_diff1 value: 37.94986436229442 - type: nauc_mrr_at_100_max value: 14.435994989813192 - type: nauc_mrr_at_100_std value: -0.37576385382293837 - type: nauc_mrr_at_10_diff1 value: 38.11900316449423 - type: nauc_mrr_at_10_max value: 14.472293540608746 - type: nauc_mrr_at_10_std value: -0.43716209085613345 - type: nauc_mrr_at_1_diff1 value: 44.21976115137286 - type: nauc_mrr_at_1_max value: 17.82290497090946 - type: nauc_mrr_at_1_std value: -1.547820761457578 - type: nauc_mrr_at_20_diff1 value: 38.024147471792524 - type: nauc_mrr_at_20_max value: 14.385378851779368 - type: nauc_mrr_at_20_std value: -0.47797312999005215 - type: nauc_mrr_at_3_diff1 value: 39.15186528374059 - type: nauc_mrr_at_3_max value: 15.21927102759239 - type: nauc_mrr_at_3_std value: -1.5215890424003806 - type: nauc_mrr_at_5_diff1 value: 38.45626599850357 - type: nauc_mrr_at_5_max value: 14.640408888284732 - type: nauc_mrr_at_5_std value: -0.7311075454359176 - type: nauc_ndcg_at_1000_diff1 value: 36.09833573033763 - type: nauc_ndcg_at_1000_max value: 13.245365815282575 - type: nauc_ndcg_at_1000_std value: 1.5761746506032988 - type: nauc_ndcg_at_100_diff1 value: 36.904025539005644 - type: nauc_ndcg_at_100_max value: 12.957957928970645 - type: nauc_ndcg_at_100_std value: 0.4532239536005292 - type: nauc_ndcg_at_10_diff1 value: 37.32497182133629 - type: nauc_ndcg_at_10_max value: 12.490853969491074 - type: nauc_ndcg_at_10_std value: -0.7416415504597471 - type: nauc_ndcg_at_1_diff1 value: 44.21976115137286 - type: nauc_ndcg_at_1_max value: 17.82290497090946 - type: nauc_ndcg_at_1_std value: -1.547820761457578 - type: nauc_ndcg_at_20_diff1 value: 37.28170904668032 - type: nauc_ndcg_at_20_max value: 12.268080858587759 - type: nauc_ndcg_at_20_std value: -0.7360183931126623 - type: nauc_ndcg_at_3_diff1 value: 39.02888999235542 - type: nauc_ndcg_at_3_max value: 13.901334459489329 - type: nauc_ndcg_at_3_std value: -2.7172751935367647 - type: nauc_ndcg_at_5_diff1 value: 38.02752207740974 - type: nauc_ndcg_at_5_max value: 13.02646174038431 - type: nauc_ndcg_at_5_std value: -1.609904028585218 - type: nauc_precision_at_1000_diff1 value: -6.66757757004073 - type: nauc_precision_at_1000_max value: 9.0023204523236 - type: nauc_precision_at_1000_std value: 23.5060357363243 - type: nauc_precision_at_100_diff1 value: 6.113195112414238 - type: nauc_precision_at_100_max value: 11.685619926894306 - type: nauc_precision_at_100_std value: 19.46517809799074 - type: nauc_precision_at_10_diff1 value: 20.39466712905433 - type: nauc_precision_at_10_max value: 11.42898255449916 - type: nauc_precision_at_10_std value: 9.716462445452729 - type: nauc_precision_at_1_diff1 value: 44.21976115137286 - type: nauc_precision_at_1_max value: 17.82290497090946 - type: nauc_precision_at_1_std value: -1.547820761457578 - type: nauc_precision_at_20_diff1 value: 16.658730057271427 - type: nauc_precision_at_20_max value: 11.1652114440581 - type: nauc_precision_at_20_std value: 11.300027272107469 - type: nauc_precision_at_3_diff1 value: 30.28030907617402 - type: nauc_precision_at_3_max value: 13.794055418422083 - type: nauc_precision_at_3_std value: 0.6048823642224063 - type: nauc_precision_at_5_diff1 value: 25.663334758638058 - type: nauc_precision_at_5_max value: 
12.249908938864056 - type: nauc_precision_at_5_std value: 5.0045410071189425 - type: nauc_recall_at_1000_diff1 value: 21.220572448408245 - type: nauc_recall_at_1000_max value: 9.691420267810058 - type: nauc_recall_at_1000_std value: 12.85759827330056 - type: nauc_recall_at_100_diff1 value: 28.21527141094479 - type: nauc_recall_at_100_max value: 9.83831880254868 - type: nauc_recall_at_100_std value: 5.435149253402134 - type: nauc_recall_at_10_diff1 value: 30.716014201487262 - type: nauc_recall_at_10_max value: 8.051593782800182 - type: nauc_recall_at_10_std value: 0.4471610378184442 - type: nauc_recall_at_1_diff1 value: 47.528556920568896 - type: nauc_recall_at_1_max value: 15.848152314768068 - type: nauc_recall_at_1_std value: -3.8515029742454763 - type: nauc_recall_at_20_diff1 value: 29.800603042147905 - type: nauc_recall_at_20_max value: 7.042808403898776 - type: nauc_recall_at_20_std value: 0.8179034283502986 - type: nauc_recall_at_3_diff1 value: 36.05311584515151 - type: nauc_recall_at_3_max value: 11.03138015792514 - type: nauc_recall_at_3_std value: -4.298332543889119 - type: nauc_recall_at_5_diff1 value: 33.34542113435848 - type: nauc_recall_at_5_max value: 9.391429367517976 - type: nauc_recall_at_5_std value: -1.5174868347878459 - type: ndcg_at_1 value: 20.382 - type: ndcg_at_10 value: 25.734 - type: ndcg_at_100 value: 29.952 - type: ndcg_at_1000 value: 32.618 - type: ndcg_at_20 value: 27.181 - type: ndcg_at_3 value: 22.445999999999998 - type: ndcg_at_5 value: 24.162 - type: precision_at_1 value: 20.382 - type: precision_at_10 value: 4.662 - type: precision_at_100 value: 0.8580000000000001 - type: precision_at_1000 value: 0.133 - type: precision_at_20 value: 2.828 - type: precision_at_3 value: 10.446 - type: precision_at_5 value: 7.682 - type: recall_at_1 value: 16.601 - type: recall_at_10 value: 32.882 - type: recall_at_100 value: 51.273 - type: recall_at_1000 value: 69.33200000000001 - type: recall_at_20 value: 38.22 - type: recall_at_3 value: 23.54 - type: recall_at_5 value: 28.054000000000002 - task: type: Retrieval dataset: name: MTEB CQADupstackGamingRetrieval (default) type: mteb/cqadupstack-gaming config: default split: test revision: 4885aa143210c98657558c04aaf3dc47cfb54340 metrics: - type: main_score value: 39.235 - type: map_at_1 value: 25.386999999999997 - type: map_at_10 value: 34.183 - type: map_at_100 value: 35.198 - type: map_at_1000 value: 35.292 - type: map_at_20 value: 34.756 - type: map_at_3 value: 31.466 - type: map_at_5 value: 33.037 - type: mrr_at_1 value: 29.404388714733543 - type: mrr_at_10 value: 37.51880877742944 - type: mrr_at_100 value: 38.30457109532953 - type: mrr_at_1000 value: 38.3645245292866 - type: mrr_at_20 value: 37.94776237222878 - type: mrr_at_3 value: 35.15151515151513 - type: mrr_at_5 value: 36.530825496342715 - type: nauc_map_at_1000_diff1 value: 41.249973220934464 - type: nauc_map_at_1000_max value: 23.416302755877073 - type: nauc_map_at_1000_std value: -10.207899212437999 - type: nauc_map_at_100_diff1 value: 41.24390045906369 - type: nauc_map_at_100_max value: 23.393682611799267 - type: nauc_map_at_100_std value: -10.254556576082482 - type: nauc_map_at_10_diff1 value: 41.382354597936995 - type: nauc_map_at_10_max value: 23.176782265492363 - type: nauc_map_at_10_std value: -10.849718292221906 - type: nauc_map_at_1_diff1 value: 45.39686265513208 - type: nauc_map_at_1_max value: 19.620871905273706 - type: nauc_map_at_1_std value: -12.904987428165654 - type: nauc_map_at_20_diff1 value: 41.27244082919643 - type: nauc_map_at_20_max value: 
23.302684773349597 - type: nauc_map_at_20_std value: -10.441842806985154 - type: nauc_map_at_3_diff1 value: 41.8919220244127 - type: nauc_map_at_3_max value: 22.254220793423723 - type: nauc_map_at_3_std value: -12.130298439753705 - type: nauc_map_at_5_diff1 value: 41.58025783631085 - type: nauc_map_at_5_max value: 22.90826213564573 - type: nauc_map_at_5_std value: -11.165811549758352 - type: nauc_mrr_at_1000_diff1 value: 40.53152598499822 - type: nauc_mrr_at_1000_max value: 25.11227665851315 - type: nauc_mrr_at_1000_std value: -8.08741271282522 - type: nauc_mrr_at_100_diff1 value: 40.51963005358264 - type: nauc_mrr_at_100_max value: 25.120293035347625 - type: nauc_mrr_at_100_std value: -8.08477757772673 - type: nauc_mrr_at_10_diff1 value: 40.630254919734845 - type: nauc_mrr_at_10_max value: 25.192263018985 - type: nauc_mrr_at_10_std value: -8.343786686430308 - type: nauc_mrr_at_1_diff1 value: 45.24802769641752 - type: nauc_mrr_at_1_max value: 22.81400229887994 - type: nauc_mrr_at_1_std value: -11.030374885452746 - type: nauc_mrr_at_20_diff1 value: 40.527874579465404 - type: nauc_mrr_at_20_max value: 25.09785309228408 - type: nauc_mrr_at_20_std value: -8.178961300984005 - type: nauc_mrr_at_3_diff1 value: 40.9982110047705 - type: nauc_mrr_at_3_max value: 24.89415486978485 - type: nauc_mrr_at_3_std value: -9.326777261347539 - type: nauc_mrr_at_5_diff1 value: 40.80630420274428 - type: nauc_mrr_at_5_max value: 25.27575084878062 - type: nauc_mrr_at_5_std value: -8.546736722404525 - type: nauc_ndcg_at_1000_diff1 value: 39.53378645935715 - type: nauc_ndcg_at_1000_max value: 25.526492849521226 - type: nauc_ndcg_at_1000_std value: -6.007063152931765 - type: nauc_ndcg_at_100_diff1 value: 39.0880907026097 - type: nauc_ndcg_at_100_max value: 25.27434977919565 - type: nauc_ndcg_at_100_std value: -6.494059729717049 - type: nauc_ndcg_at_10_diff1 value: 39.75643189392527 - type: nauc_ndcg_at_10_max value: 24.79335502116443 - type: nauc_ndcg_at_10_std value: -8.786781322519788 - type: nauc_ndcg_at_1_diff1 value: 45.24802769641752 - type: nauc_ndcg_at_1_max value: 22.81400229887994 - type: nauc_ndcg_at_1_std value: -11.030374885452746 - type: nauc_ndcg_at_20_diff1 value: 39.38115636990762 - type: nauc_ndcg_at_20_max value: 24.830948061340973 - type: nauc_ndcg_at_20_std value: -7.74514857483731 - type: nauc_ndcg_at_3_diff1 value: 40.597424968913295 - type: nauc_ndcg_at_3_max value: 23.83761797284813 - type: nauc_ndcg_at_3_std value: -10.826014984199753 - type: nauc_ndcg_at_5_diff1 value: 40.160243884240955 - type: nauc_ndcg_at_5_max value: 24.641005184802403 - type: nauc_ndcg_at_5_std value: -9.394573143721122 - type: nauc_precision_at_1000_diff1 value: -0.26775483855404 - type: nauc_precision_at_1000_max value: 23.052779599626216 - type: nauc_precision_at_1000_std value: 24.978867586645737 - type: nauc_precision_at_100_diff1 value: 9.73599417323489 - type: nauc_precision_at_100_max value: 26.664612833573067 - type: nauc_precision_at_100_std value: 15.747547424892522 - type: nauc_precision_at_10_diff1 value: 25.384143998683495 - type: nauc_precision_at_10_max value: 28.77515164969203 - type: nauc_precision_at_10_std value: 1.334799782027906 - type: nauc_precision_at_1_diff1 value: 45.24802769641752 - type: nauc_precision_at_1_max value: 22.81400229887994 - type: nauc_precision_at_1_std value: -11.030374885452746 - type: nauc_precision_at_20_diff1 value: 20.21252517032333 - type: nauc_precision_at_20_max value: 28.092242647209847 - type: nauc_precision_at_20_std value: 7.13292725544981 - type: 
nauc_precision_at_3_diff1 value: 33.31087126292084 - type: nauc_precision_at_3_max value: 28.144729235595268 - type: nauc_precision_at_3_std value: -6.680273865904818 - type: nauc_precision_at_5_diff1 value: 29.65876394876068 - type: nauc_precision_at_5_max value: 29.35126830830009 - type: nauc_precision_at_5_std value: -1.6373943088766274 - type: nauc_recall_at_1000_diff1 value: 28.93648565815677 - type: nauc_recall_at_1000_max value: 35.83681303333163 - type: nauc_recall_at_1000_std value: 33.065249002817446 - type: nauc_recall_at_100_diff1 value: 27.743019102171594 - type: nauc_recall_at_100_max value: 28.027951033595023 - type: nauc_recall_at_100_std value: 9.499502949546343 - type: nauc_recall_at_10_diff1 value: 33.975592980890205 - type: nauc_recall_at_10_max value: 25.654266106207007 - type: nauc_recall_at_10_std value: -4.889087003341999 - type: nauc_recall_at_1_diff1 value: 45.39686265513208 - type: nauc_recall_at_1_max value: 19.620871905273706 - type: nauc_recall_at_1_std value: -12.904987428165654 - type: nauc_recall_at_20_diff1 value: 32.428638046562156 - type: nauc_recall_at_20_max value: 25.811049662670854 - type: nauc_recall_at_20_std value: -1.084167664066214 - type: nauc_recall_at_3_diff1 value: 36.80239523147669 - type: nauc_recall_at_3_max value: 23.70115293826517 - type: nauc_recall_at_3_std value: -10.179865917816631 - type: nauc_recall_at_5_diff1 value: 35.481273082880385 - type: nauc_recall_at_5_max value: 25.22699895557444 - type: nauc_recall_at_5_std value: -6.928154160954265 - type: ndcg_at_1 value: 29.404000000000003 - type: ndcg_at_10 value: 39.235 - type: ndcg_at_100 value: 44.072 - type: ndcg_at_1000 value: 46.272999999999996 - type: ndcg_at_20 value: 40.983000000000004 - type: ndcg_at_3 value: 34.292 - type: ndcg_at_5 value: 36.735 - type: precision_at_1 value: 29.404000000000003 - type: precision_at_10 value: 6.539000000000001 - type: precision_at_100 value: 0.984 - type: precision_at_1000 value: 0.125 - type: precision_at_20 value: 3.752 - type: precision_at_3 value: 15.423 - type: precision_at_5 value: 10.984 - type: recall_at_1 value: 25.386999999999997 - type: recall_at_10 value: 51.256 - type: recall_at_100 value: 73.53699999999999 - type: recall_at_1000 value: 89.522 - type: recall_at_20 value: 57.687 - type: recall_at_3 value: 37.830999999999996 - type: recall_at_5 value: 43.811 - task: type: Retrieval dataset: name: MTEB CQADupstackGisRetrieval (default) type: mteb/cqadupstack-gis config: default split: test revision: 5003b3064772da1887988e05400cf3806fe491f2 metrics: - type: main_score value: 19.197 - type: map_at_1 value: 10.832 - type: map_at_10 value: 16.154 - type: map_at_100 value: 16.863 - type: map_at_1000 value: 16.979 - type: map_at_20 value: 16.494 - type: map_at_3 value: 14.654 - type: map_at_5 value: 15.634 - type: mrr_at_1 value: 11.751412429378531 - type: mrr_at_10 value: 17.286476549188407 - type: mrr_at_100 value: 18.019080515365157 - type: mrr_at_1000 value: 18.122220740371624 - type: mrr_at_20 value: 17.643986643881693 - type: mrr_at_3 value: 15.70621468926553 - type: mrr_at_5 value: 16.774011299435024 - type: nauc_map_at_1000_diff1 value: 37.927063185916786 - type: nauc_map_at_1000_max value: 14.15651072891371 - type: nauc_map_at_1000_std value: -8.124962552251457 - type: nauc_map_at_100_diff1 value: 37.93525025821844 - type: nauc_map_at_100_max value: 14.131523699537288 - type: nauc_map_at_100_std value: -8.170583771371396 - type: nauc_map_at_10_diff1 value: 38.42813636094302 - type: nauc_map_at_10_max value: 14.282120499977891 - 
type: nauc_map_at_10_std value: -8.577031812934745 - type: nauc_map_at_1_diff1 value: 51.66692699481996 - type: nauc_map_at_1_max value: 17.664646674047123 - type: nauc_map_at_1_std value: -11.782621031162968 - type: nauc_map_at_20_diff1 value: 38.17853788871855 - type: nauc_map_at_20_max value: 14.256213676574742 - type: nauc_map_at_20_std value: -8.310926163301415 - type: nauc_map_at_3_diff1 value: 40.16070984262913 - type: nauc_map_at_3_max value: 14.268693118841725 - type: nauc_map_at_3_std value: -9.133251481752447 - type: nauc_map_at_5_diff1 value: 38.83714248320578 - type: nauc_map_at_5_max value: 14.547528919229999 - type: nauc_map_at_5_std value: -8.916871955060776 - type: nauc_mrr_at_1000_diff1 value: 36.5899689047331 - type: nauc_mrr_at_1000_max value: 15.113884206534985 - type: nauc_mrr_at_1000_std value: -7.170934224974719 - type: nauc_mrr_at_100_diff1 value: 36.58290352969189 - type: nauc_mrr_at_100_max value: 15.10461015425463 - type: nauc_mrr_at_100_std value: -7.193153133255972 - type: nauc_mrr_at_10_diff1 value: 36.886787941126755 - type: nauc_mrr_at_10_max value: 15.127743773603711 - type: nauc_mrr_at_10_std value: -7.450354111586159 - type: nauc_mrr_at_1_diff1 value: 50.4303551964735 - type: nauc_mrr_at_1_max value: 18.974353633454818 - type: nauc_mrr_at_1_std value: -10.667048661688531 - type: nauc_mrr_at_20_diff1 value: 36.748056497939466 - type: nauc_mrr_at_20_max value: 15.240859680475241 - type: nauc_mrr_at_20_std value: -7.288016407850428 - type: nauc_mrr_at_3_diff1 value: 38.37428302171742 - type: nauc_mrr_at_3_max value: 14.8093219575286 - type: nauc_mrr_at_3_std value: -7.809230035161326 - type: nauc_mrr_at_5_diff1 value: 37.2144623683964 - type: nauc_mrr_at_5_max value: 15.28601324524152 - type: nauc_mrr_at_5_std value: -7.7340060832485 - type: nauc_ndcg_at_1000_diff1 value: 32.12453348510246 - type: nauc_ndcg_at_1000_max value: 13.157455004954915 - type: nauc_ndcg_at_1000_std value: -4.92622356811411 - type: nauc_ndcg_at_100_diff1 value: 32.06154877919635 - type: nauc_ndcg_at_100_max value: 12.373862596941047 - type: nauc_ndcg_at_100_std value: -5.679273924705311 - type: nauc_ndcg_at_10_diff1 value: 34.0105889334877 - type: nauc_ndcg_at_10_max value: 13.45850179368671 - type: nauc_ndcg_at_10_std value: -7.129474197823981 - type: nauc_ndcg_at_1_diff1 value: 50.4303551964735 - type: nauc_ndcg_at_1_max value: 18.974353633454818 - type: nauc_ndcg_at_1_std value: -10.667048661688531 - type: nauc_ndcg_at_20_diff1 value: 33.17001669466592 - type: nauc_ndcg_at_20_max value: 13.32565385671001 - type: nauc_ndcg_at_20_std value: -6.284897809311489 - type: nauc_ndcg_at_3_diff1 value: 36.583009335894786 - type: nauc_ndcg_at_3_max value: 13.3100798018976 - type: nauc_ndcg_at_3_std value: -8.166653842277874 - type: nauc_ndcg_at_5_diff1 value: 34.663883470713714 - type: nauc_ndcg_at_5_max value: 13.925348847790179 - type: nauc_ndcg_at_5_std value: -7.8134139319246705 - type: nauc_precision_at_1000_diff1 value: 3.267820129824429 - type: nauc_precision_at_1000_max value: 13.475739290072998 - type: nauc_precision_at_1000_std value: 9.817456700342868 - type: nauc_precision_at_100_diff1 value: 14.543473928222502 - type: nauc_precision_at_100_max value: 9.536842145225432 - type: nauc_precision_at_100_std value: 2.367980716410962 - type: nauc_precision_at_10_diff1 value: 22.83690357863953 - type: nauc_precision_at_10_max value: 12.377338528340081 - type: nauc_precision_at_10_std value: -2.7413618512966442 - type: nauc_precision_at_1_diff1 value: 50.4303551964735 - type: 
nauc_precision_at_1_max value: 18.974353633454818 - type: nauc_precision_at_1_std value: -10.667048661688531 - type: nauc_precision_at_20_diff1 value: 20.379974384537427 - type: nauc_precision_at_20_max value: 12.277432490519853 - type: nauc_precision_at_20_std value: -0.023357415290595228 - type: nauc_precision_at_3_diff1 value: 28.00128059605776 - type: nauc_precision_at_3_max value: 12.115949162806704 - type: nauc_precision_at_3_std value: -5.111345494119332 - type: nauc_precision_at_5_diff1 value: 23.931333166517064 - type: nauc_precision_at_5_max value: 13.460490076263444 - type: nauc_precision_at_5_std value: -4.566369591299022 - type: nauc_recall_at_1000_diff1 value: 13.901980638817474 - type: nauc_recall_at_1000_max value: 8.169301488452522 - type: nauc_recall_at_1000_std value: 6.977530327014011 - type: nauc_recall_at_100_diff1 value: 18.54699849728289 - type: nauc_recall_at_100_max value: 5.40051681338299 - type: nauc_recall_at_100_std value: -0.2998165893044503 - type: nauc_recall_at_10_diff1 value: 25.158691029447162 - type: nauc_recall_at_10_max value: 10.698096715728344 - type: nauc_recall_at_10_std value: -4.90677955177619 - type: nauc_recall_at_1_diff1 value: 51.66692699481996 - type: nauc_recall_at_1_max value: 17.664646674047123 - type: nauc_recall_at_1_std value: -11.782621031162968 - type: nauc_recall_at_20_diff1 value: 22.315869507893193 - type: nauc_recall_at_20_max value: 9.799239845339486 - type: nauc_recall_at_20_std value: -2.255295176195769 - type: nauc_recall_at_3_diff1 value: 30.21846457670379 - type: nauc_recall_at_3_max value: 10.958491456074727 - type: nauc_recall_at_3_std value: -6.746808382770713 - type: nauc_recall_at_5_diff1 value: 26.24302256225738 - type: nauc_recall_at_5_max value: 11.682268465161725 - type: nauc_recall_at_5_std value: -6.292007648799524 - type: ndcg_at_1 value: 11.751000000000001 - type: ndcg_at_10 value: 19.197 - type: ndcg_at_100 value: 23.159 - type: ndcg_at_1000 value: 26.453 - type: ndcg_at_20 value: 20.448 - type: ndcg_at_3 value: 16.186 - type: ndcg_at_5 value: 17.936 - type: precision_at_1 value: 11.751000000000001 - type: precision_at_10 value: 3.1189999999999998 - type: precision_at_100 value: 0.54 - type: precision_at_1000 value: 0.086 - type: precision_at_20 value: 1.859 - type: precision_at_3 value: 7.194000000000001 - type: precision_at_5 value: 5.311 - type: recall_at_1 value: 10.832 - type: recall_at_10 value: 27.472 - type: recall_at_100 value: 46.471000000000004 - type: recall_at_1000 value: 71.91199999999999 - type: recall_at_20 value: 32.213 - type: recall_at_3 value: 19.417 - type: recall_at_5 value: 23.577 - task: type: Retrieval dataset: name: MTEB CQADupstackMathematicaRetrieval (default) type: mteb/cqadupstack-mathematica config: default split: test revision: 90fceea13679c63fe563ded68f3b6f06e50061de metrics: - type: main_score value: 12.145 - type: map_at_1 value: 6.019 - type: map_at_10 value: 9.584 - type: map_at_100 value: 10.433 - type: map_at_1000 value: 10.562000000000001 - type: map_at_20 value: 10.024 - type: map_at_3 value: 8.351 - type: map_at_5 value: 9.005 - type: mrr_at_1 value: 7.213930348258707 - type: mrr_at_10 value: 11.619827450051332 - type: mrr_at_100 value: 12.469229814971346 - type: mrr_at_1000 value: 12.577286932589695 - type: mrr_at_20 value: 12.072514356821353 - type: mrr_at_3 value: 10.157545605306801 - type: mrr_at_5 value: 10.89759535655058 - type: nauc_map_at_1000_diff1 value: 18.60219400887139 - type: nauc_map_at_1000_max value: 6.951583595979727 - type: nauc_map_at_1000_std 
value: -0.36466683994108184 - type: nauc_map_at_100_diff1 value: 18.660733139389524 - type: nauc_map_at_100_max value: 6.903072765131549 - type: nauc_map_at_100_std value: -0.48390217802549257 - type: nauc_map_at_10_diff1 value: 18.573179595835647 - type: nauc_map_at_10_max value: 6.992666771720911 - type: nauc_map_at_10_std value: -0.8874423543023089 - type: nauc_map_at_1_diff1 value: 33.90106432523568 - type: nauc_map_at_1_max value: 9.289205840089235 - type: nauc_map_at_1_std value: 2.1852128418717705 - type: nauc_map_at_20_diff1 value: 18.334656889783485 - type: nauc_map_at_20_max value: 6.931684308001437 - type: nauc_map_at_20_std value: -0.7124186564380448 - type: nauc_map_at_3_diff1 value: 20.32895393313974 - type: nauc_map_at_3_max value: 5.887419026571198 - type: nauc_map_at_3_std value: -0.015273865884840596 - type: nauc_map_at_5_diff1 value: 19.15574225963634 - type: nauc_map_at_5_max value: 6.175933890525402 - type: nauc_map_at_5_std value: -1.468261999387673 - type: nauc_mrr_at_1000_diff1 value: 18.0560339880594 - type: nauc_mrr_at_1000_max value: 8.653214727915024 - type: nauc_mrr_at_1000_std value: 1.6650523107666824 - type: nauc_mrr_at_100_diff1 value: 18.067266124955946 - type: nauc_mrr_at_100_max value: 8.645444544074266 - type: nauc_mrr_at_100_std value: 1.605397143432772 - type: nauc_mrr_at_10_diff1 value: 18.227604303918422 - type: nauc_mrr_at_10_max value: 8.980990643614946 - type: nauc_mrr_at_10_std value: 1.625956129526598 - type: nauc_mrr_at_1_diff1 value: 33.145174271418576 - type: nauc_mrr_at_1_max value: 10.674348159869123 - type: nauc_mrr_at_1_std value: 2.5718912675260843 - type: nauc_mrr_at_20_diff1 value: 17.85361170315467 - type: nauc_mrr_at_20_max value: 8.689966423383293 - type: nauc_mrr_at_20_std value: 1.4845343622374683 - type: nauc_mrr_at_3_diff1 value: 19.72873972100882 - type: nauc_mrr_at_3_max value: 7.818757201820606 - type: nauc_mrr_at_3_std value: 2.317801166782217 - type: nauc_mrr_at_5_diff1 value: 18.70515159747826 - type: nauc_mrr_at_5_max value: 7.8553636278171055 - type: nauc_mrr_at_5_std value: 0.8593300223901442 - type: nauc_ndcg_at_1000_diff1 value: 14.777764985527059 - type: nauc_ndcg_at_1000_max value: 8.001133085293265 - type: nauc_ndcg_at_1000_std value: 2.715094827482056 - type: nauc_ndcg_at_100_diff1 value: 15.873494520058037 - type: nauc_ndcg_at_100_max value: 7.5190091115119 - type: nauc_ndcg_at_100_std value: 0.7430533500967327 - type: nauc_ndcg_at_10_diff1 value: 14.950829327092022 - type: nauc_ndcg_at_10_max value: 7.999425322307154 - type: nauc_ndcg_at_10_std value: -0.5911692617165382 - type: nauc_ndcg_at_1_diff1 value: 33.145174271418576 - type: nauc_ndcg_at_1_max value: 10.674348159869123 - type: nauc_ndcg_at_1_std value: 2.5718912675260843 - type: nauc_ndcg_at_20_diff1 value: 14.28695753335748 - type: nauc_ndcg_at_20_max value: 7.460341211112809 - type: nauc_ndcg_at_20_std value: -0.2734671370134216 - type: nauc_ndcg_at_3_diff1 value: 17.243393543205006 - type: nauc_ndcg_at_3_max value: 6.003682896861271 - type: nauc_ndcg_at_3_std value: 0.3923628664952013 - type: nauc_ndcg_at_5_diff1 value: 15.841455870049076 - type: nauc_ndcg_at_5_max value: 6.163583363661528 - type: nauc_ndcg_at_5_std value: -1.9411356710983478 - type: nauc_precision_at_1000_diff1 value: -3.399817676017686 - type: nauc_precision_at_1000_max value: 5.575723322824422 - type: nauc_precision_at_1000_std value: 5.63779109914318 - type: nauc_precision_at_100_diff1 value: 6.1555220193892435 - type: nauc_precision_at_100_max value: 6.7977343501791045 - type: 
nauc_precision_at_100_std value: 2.026960062764128 - type: nauc_precision_at_10_diff1 value: 5.864713737249161 - type: nauc_precision_at_10_max value: 10.987539143688663 - type: nauc_precision_at_10_std value: -0.12419185225065871 - type: nauc_precision_at_1_diff1 value: 33.145174271418576 - type: nauc_precision_at_1_max value: 10.674348159869123 - type: nauc_precision_at_1_std value: 2.5718912675260843 - type: nauc_precision_at_20_diff1 value: 4.994637980783556 - type: nauc_precision_at_20_max value: 7.522690866727933 - type: nauc_precision_at_20_std value: 0.027674551460471312 - type: nauc_precision_at_3_diff1 value: 8.451342681964578 - type: nauc_precision_at_3_max value: 5.343253356927528 - type: nauc_precision_at_3_std value: 1.6495845441147832 - type: nauc_precision_at_5_diff1 value: 6.193033041556517 - type: nauc_precision_at_5_max value: 5.77635145338238 - type: nauc_precision_at_5_std value: -3.421797113444559 - type: nauc_recall_at_1000_diff1 value: 7.437110169863727 - type: nauc_recall_at_1000_max value: 9.607314782406986 - type: nauc_recall_at_1000_std value: 13.320498460741362 - type: nauc_recall_at_100_diff1 value: 13.309966057961834 - type: nauc_recall_at_100_max value: 7.748170239579637 - type: nauc_recall_at_100_std value: 2.6798857378517864 - type: nauc_recall_at_10_diff1 value: 8.674278695378167 - type: nauc_recall_at_10_max value: 8.969918415623756 - type: nauc_recall_at_10_std value: -1.4597400700986853 - type: nauc_recall_at_1_diff1 value: 33.90106432523568 - type: nauc_recall_at_1_max value: 9.289205840089235 - type: nauc_recall_at_1_std value: 2.1852128418717705 - type: nauc_recall_at_20_diff1 value: 7.663555921211413 - type: nauc_recall_at_20_max value: 7.420494129425241 - type: nauc_recall_at_20_std value: -0.39971980929980877 - type: nauc_recall_at_3_diff1 value: 10.784631081908223 - type: nauc_recall_at_3_max value: 3.815625872455824 - type: nauc_recall_at_3_std value: -1.1614434404018152 - type: nauc_recall_at_5_diff1 value: 9.60638979119831 - type: nauc_recall_at_5_max value: 5.1710882220553405 - type: nauc_recall_at_5_std value: -4.572280393094789 - type: ndcg_at_1 value: 7.2139999999999995 - type: ndcg_at_10 value: 12.145 - type: ndcg_at_100 value: 16.672 - type: ndcg_at_1000 value: 20.342 - type: ndcg_at_20 value: 13.745 - type: ndcg_at_3 value: 9.607000000000001 - type: ndcg_at_5 value: 10.712000000000002 - type: precision_at_1 value: 7.2139999999999995 - type: precision_at_10 value: 2.338 - type: precision_at_100 value: 0.5459999999999999 - type: precision_at_1000 value: 0.099 - type: precision_at_20 value: 1.6039999999999999 - type: precision_at_3 value: 4.726 - type: precision_at_5 value: 3.5319999999999996 - type: recall_at_1 value: 6.019 - type: recall_at_10 value: 18.102999999999998 - type: recall_at_100 value: 38.482 - type: recall_at_1000 value: 65.436 - type: recall_at_20 value: 23.952 - type: recall_at_3 value: 11.178 - type: recall_at_5 value: 13.877 - task: type: Retrieval dataset: name: MTEB CQADupstackPhysicsRetrieval (default) type: mteb/cqadupstack-physics config: default split: test revision: 79531abbd1fb92d06c6d6315a0cbbbf5bb247ea4 metrics: - type: main_score value: 26.667999999999996 - type: map_at_1 value: 16.822 - type: map_at_10 value: 22.476 - type: map_at_100 value: 23.69 - type: map_at_1000 value: 23.827 - type: map_at_20 value: 23.084 - type: map_at_3 value: 20.441000000000003 - type: map_at_5 value: 21.512 - type: mrr_at_1 value: 20.78922040423484 - type: mrr_at_10 value: 26.67445804115679 - type: mrr_at_100 value: 
27.67534998291947 - type: mrr_at_1000 value: 27.752906060167692 - type: mrr_at_20 value: 27.19875968774574 - type: mrr_at_3 value: 24.4947064485082 - type: mrr_at_5 value: 25.630413859480278 - type: nauc_map_at_1000_diff1 value: 40.40492447320535 - type: nauc_map_at_1000_max value: 28.548119831633194 - type: nauc_map_at_1000_std value: -0.22424233207141148 - type: nauc_map_at_100_diff1 value: 40.39875847865982 - type: nauc_map_at_100_max value: 28.500575725413096 - type: nauc_map_at_100_std value: -0.2779979908842256 - type: nauc_map_at_10_diff1 value: 40.942304749094085 - type: nauc_map_at_10_max value: 28.429772938475008 - type: nauc_map_at_10_std value: -0.8049874864329988 - type: nauc_map_at_1_diff1 value: 47.17822553627135 - type: nauc_map_at_1_max value: 31.206514215995206 - type: nauc_map_at_1_std value: -1.8984121963184788 - type: nauc_map_at_20_diff1 value: 40.4346381000311 - type: nauc_map_at_20_max value: 28.458128761837536 - type: nauc_map_at_20_std value: -0.7321703207226834 - type: nauc_map_at_3_diff1 value: 42.2424427066743 - type: nauc_map_at_3_max value: 28.16537428952111 - type: nauc_map_at_3_std value: -2.298671243793284 - type: nauc_map_at_5_diff1 value: 41.32690925538059 - type: nauc_map_at_5_max value: 28.53162210264393 - type: nauc_map_at_5_std value: -1.1738320079845177 - type: nauc_mrr_at_1000_diff1 value: 37.69693278594645 - type: nauc_mrr_at_1000_max value: 29.49690742209793 - type: nauc_mrr_at_1000_std value: 3.1815473802020544 - type: nauc_mrr_at_100_diff1 value: 37.65946389835227 - type: nauc_mrr_at_100_max value: 29.479438074437127 - type: nauc_mrr_at_100_std value: 3.166552364873761 - type: nauc_mrr_at_10_diff1 value: 38.06473613801605 - type: nauc_mrr_at_10_max value: 29.79312016758447 - type: nauc_mrr_at_10_std value: 3.111988711521923 - type: nauc_mrr_at_1_diff1 value: 43.69553072839024 - type: nauc_mrr_at_1_max value: 32.142344513289025 - type: nauc_mrr_at_1_std value: 2.696048057380709 - type: nauc_mrr_at_20_diff1 value: 37.626141249327574 - type: nauc_mrr_at_20_max value: 29.559923833552347 - type: nauc_mrr_at_20_std value: 2.9860721770618697 - type: nauc_mrr_at_3_diff1 value: 39.324715416924974 - type: nauc_mrr_at_3_max value: 29.651196356282618 - type: nauc_mrr_at_3_std value: 1.9583884507428824 - type: nauc_mrr_at_5_diff1 value: 38.36691352781637 - type: nauc_mrr_at_5_max value: 29.939763561026002 - type: nauc_mrr_at_5_std value: 2.7317703526814214 - type: nauc_ndcg_at_1000_diff1 value: 36.523136783112406 - type: nauc_ndcg_at_1000_max value: 28.684387654497584 - type: nauc_ndcg_at_1000_std value: 4.732051883634089 - type: nauc_ndcg_at_100_diff1 value: 36.16154861613736 - type: nauc_ndcg_at_100_max value: 27.921202679602143 - type: nauc_ndcg_at_100_std value: 3.560040019944456 - type: nauc_ndcg_at_10_diff1 value: 37.774474422977896 - type: nauc_ndcg_at_10_max value: 27.68147817987237 - type: nauc_ndcg_at_10_std value: 0.8327502237036594 - type: nauc_ndcg_at_1_diff1 value: 43.69553072839024 - type: nauc_ndcg_at_1_max value: 32.142344513289025 - type: nauc_ndcg_at_1_std value: 2.696048057380709 - type: nauc_ndcg_at_20_diff1 value: 36.163233644690266 - type: nauc_ndcg_at_20_max value: 27.4164968525345 - type: nauc_ndcg_at_20_std value: 0.8376631121502218 - type: nauc_ndcg_at_3_diff1 value: 39.707715661307105 - type: nauc_ndcg_at_3_max value: 28.324727845444997 - type: nauc_ndcg_at_3_std value: -0.7238153399588456 - type: nauc_ndcg_at_5_diff1 value: 38.42323115018405 - type: nauc_ndcg_at_5_max value: 28.520234702176587 - type: nauc_ndcg_at_5_std value: 
0.4337143091381524 - type: nauc_precision_at_1000_diff1 value: -1.7237517846851018 - type: nauc_precision_at_1000_max value: 16.20499296488572 - type: nauc_precision_at_1000_std value: 20.16360817424688 - type: nauc_precision_at_100_diff1 value: 7.455105305668386 - type: nauc_precision_at_100_max value: 23.35672119353681 - type: nauc_precision_at_100_std value: 18.66911905196039 - type: nauc_precision_at_10_diff1 value: 23.28265657395181 - type: nauc_precision_at_10_max value: 27.533659469131948 - type: nauc_precision_at_10_std value: 9.661356716727099 - type: nauc_precision_at_1_diff1 value: 43.69553072839024 - type: nauc_precision_at_1_max value: 32.142344513289025 - type: nauc_precision_at_1_std value: 2.696048057380709 - type: nauc_precision_at_20_diff1 value: 15.588844976640317 - type: nauc_precision_at_20_max value: 24.89373446940838 - type: nauc_precision_at_20_std value: 9.462736793529547 - type: nauc_precision_at_3_diff1 value: 31.24543977571387 - type: nauc_precision_at_3_max value: 27.88457380895888 - type: nauc_precision_at_3_std value: 3.0400582769598334 - type: nauc_precision_at_5_diff1 value: 27.621476771588156 - type: nauc_precision_at_5_max value: 29.344696084898647 - type: nauc_precision_at_5_std value: 6.279675749763937 - type: nauc_recall_at_1000_diff1 value: 20.19996493542523 - type: nauc_recall_at_1000_max value: 24.65244498292903 - type: nauc_recall_at_1000_std value: 35.312310075738125 - type: nauc_recall_at_100_diff1 value: 22.904431187357847 - type: nauc_recall_at_100_max value: 21.00955732817796 - type: nauc_recall_at_100_std value: 13.938151070174573 - type: nauc_recall_at_10_diff1 value: 30.03923096618402 - type: nauc_recall_at_10_max value: 22.353534397229048 - type: nauc_recall_at_10_std value: 1.2207088824681231 - type: nauc_recall_at_1_diff1 value: 47.17822553627135 - type: nauc_recall_at_1_max value: 31.206514215995206 - type: nauc_recall_at_1_std value: -1.8984121963184788 - type: nauc_recall_at_20_diff1 value: 24.682826207248283 - type: nauc_recall_at_20_max value: 20.777119838220408 - type: nauc_recall_at_20_std value: 1.2286788398315465 - type: nauc_recall_at_3_diff1 value: 35.715604782377035 - type: nauc_recall_at_3_max value: 23.7633639937056 - type: nauc_recall_at_3_std value: -2.868937897653619 - type: nauc_recall_at_5_diff1 value: 32.21252827575707 - type: nauc_recall_at_5_max value: 24.799142864683375 - type: nauc_recall_at_5_std value: 0.36296684299374204 - type: ndcg_at_1 value: 20.788999999999998 - type: ndcg_at_10 value: 26.667999999999996 - type: ndcg_at_100 value: 32.565 - type: ndcg_at_1000 value: 35.634 - type: ndcg_at_20 value: 28.642 - type: ndcg_at_3 value: 22.942 - type: ndcg_at_5 value: 24.514 - type: precision_at_1 value: 20.788999999999998 - type: precision_at_10 value: 4.947 - type: precision_at_100 value: 0.96 - type: precision_at_1000 value: 0.14100000000000001 - type: precision_at_20 value: 3.104 - type: precision_at_3 value: 10.748000000000001 - type: precision_at_5 value: 7.68 - type: recall_at_1 value: 16.822 - type: recall_at_10 value: 35.237 - type: recall_at_100 value: 61.219 - type: recall_at_1000 value: 82.499 - type: recall_at_20 value: 42.230000000000004 - type: recall_at_3 value: 24.524 - type: recall_at_5 value: 28.787000000000003 - task: type: Retrieval dataset: name: MTEB CQADupstackProgrammersRetrieval (default) type: mteb/cqadupstack-programmers config: default split: test revision: 6184bc1440d2dbc7612be22b50686b8826d22b32 metrics: - type: main_score value: 21.66 - type: map_at_1 value: 12.416 - type: map_at_10 
value: 17.684 - type: map_at_100 value: 18.851000000000003 - type: map_at_1000 value: 18.991 - type: map_at_20 value: 18.360000000000003 - type: map_at_3 value: 15.770999999999999 - type: map_at_5 value: 16.606 - type: mrr_at_1 value: 15.068493150684931 - type: mrr_at_10 value: 21.28823294919185 - type: mrr_at_100 value: 22.306240026063588 - type: mrr_at_1000 value: 22.395578374917164 - type: mrr_at_20 value: 21.90701850599165 - type: mrr_at_3 value: 19.273211567732123 - type: mrr_at_5 value: 20.397640791476412 - type: nauc_map_at_1000_diff1 value: 32.04680475392268 - type: nauc_map_at_1000_max value: 20.9527363509733 - type: nauc_map_at_1000_std value: 1.9775389393996066 - type: nauc_map_at_100_diff1 value: 32.05659071752874 - type: nauc_map_at_100_max value: 20.937669829415213 - type: nauc_map_at_100_std value: 1.8872130027911487 - type: nauc_map_at_10_diff1 value: 32.40493239661906 - type: nauc_map_at_10_max value: 20.24841030282171 - type: nauc_map_at_10_std value: 0.8873591420958411 - type: nauc_map_at_1_diff1 value: 39.50866679123135 - type: nauc_map_at_1_max value: 21.067083493139833 - type: nauc_map_at_1_std value: -1.255629309903365 - type: nauc_map_at_20_diff1 value: 32.06523680001786 - type: nauc_map_at_20_max value: 20.482809699946856 - type: nauc_map_at_20_std value: 1.2900775457613989 - type: nauc_map_at_3_diff1 value: 33.51328659054749 - type: nauc_map_at_3_max value: 19.351150884357097 - type: nauc_map_at_3_std value: -0.9449293271546024 - type: nauc_map_at_5_diff1 value: 32.672807388132 - type: nauc_map_at_5_max value: 19.888696407961916 - type: nauc_map_at_5_std value: -0.21370229639305732 - type: nauc_mrr_at_1000_diff1 value: 29.4702965330427 - type: nauc_mrr_at_1000_max value: 21.5485190959632 - type: nauc_mrr_at_1000_std value: 2.9474086643706716 - type: nauc_mrr_at_100_diff1 value: 29.444301031842237 - type: nauc_mrr_at_100_max value: 21.545652672940818 - type: nauc_mrr_at_100_std value: 2.930083417192537 - type: nauc_mrr_at_10_diff1 value: 29.839809988865028 - type: nauc_mrr_at_10_max value: 21.285084047773285 - type: nauc_mrr_at_10_std value: 2.3023735099948794 - type: nauc_mrr_at_1_diff1 value: 38.253685943964285 - type: nauc_mrr_at_1_max value: 23.506493457282993 - type: nauc_mrr_at_1_std value: 0.36623457899262024 - type: nauc_mrr_at_20_diff1 value: 29.359787332306013 - type: nauc_mrr_at_20_max value: 21.246732134190733 - type: nauc_mrr_at_20_std value: 2.6115784611487087 - type: nauc_mrr_at_3_diff1 value: 31.490392724228837 - type: nauc_mrr_at_3_max value: 21.643605643490904 - type: nauc_mrr_at_3_std value: 1.6756866672672965 - type: nauc_mrr_at_5_diff1 value: 30.18536933081793 - type: nauc_mrr_at_5_max value: 21.27264373907216 - type: nauc_mrr_at_5_std value: 1.7079689552978534 - type: nauc_ndcg_at_1000_diff1 value: 28.11169834333845 - type: nauc_ndcg_at_1000_max value: 22.65134504760621 - type: nauc_ndcg_at_1000_std value: 8.353986044564932 - type: nauc_ndcg_at_100_diff1 value: 28.265985165496417 - type: nauc_ndcg_at_100_max value: 22.530347672551887 - type: nauc_ndcg_at_100_std value: 6.968755339521627 - type: nauc_ndcg_at_10_diff1 value: 29.088878880551906 - type: nauc_ndcg_at_10_max value: 19.918818478137702 - type: nauc_ndcg_at_10_std value: 2.5519795248451795 - type: nauc_ndcg_at_1_diff1 value: 38.253685943964285 - type: nauc_ndcg_at_1_max value: 23.506493457282993 - type: nauc_ndcg_at_1_std value: 0.36623457899262024 - type: nauc_ndcg_at_20_diff1 value: 27.910656458566045 - type: nauc_ndcg_at_20_max value: 20.295061759944723 - type: nauc_ndcg_at_20_std 
value: 3.6145835770906833 - type: nauc_ndcg_at_3_diff1 value: 31.233680318242634 - type: nauc_ndcg_at_3_max value: 19.494683132285033 - type: nauc_ndcg_at_3_std value: 0.04355647255533374 - type: nauc_ndcg_at_5_diff1 value: 29.60761336088322 - type: nauc_ndcg_at_5_max value: 19.80719438136175 - type: nauc_ndcg_at_5_std value: 0.6195875169583498 - type: nauc_precision_at_1000_diff1 value: -4.9635863591586284 - type: nauc_precision_at_1000_max value: 10.205880001940644 - type: nauc_precision_at_1000_std value: 13.475741604004421 - type: nauc_precision_at_100_diff1 value: 7.633273326571685 - type: nauc_precision_at_100_max value: 23.151284304137622 - type: nauc_precision_at_100_std value: 20.405156194796863 - type: nauc_precision_at_10_diff1 value: 18.705937577794554 - type: nauc_precision_at_10_max value: 20.628035226019335 - type: nauc_precision_at_10_std value: 7.041902045527893 - type: nauc_precision_at_1_diff1 value: 38.253685943964285 - type: nauc_precision_at_1_max value: 23.506493457282993 - type: nauc_precision_at_1_std value: 0.36623457899262024 - type: nauc_precision_at_20_diff1 value: 14.129163643470525 - type: nauc_precision_at_20_max value: 20.39744876825584 - type: nauc_precision_at_20_std value: 10.808780160453079 - type: nauc_precision_at_3_diff1 value: 24.81724694529244 - type: nauc_precision_at_3_max value: 19.750250129235862 - type: nauc_precision_at_3_std value: 1.6383497722612925 - type: nauc_precision_at_5_diff1 value: 20.559816479129896 - type: nauc_precision_at_5_max value: 20.737938153703908 - type: nauc_precision_at_5_std value: 2.9329054609944767 - type: nauc_recall_at_1000_diff1 value: 14.657477263404504 - type: nauc_recall_at_1000_max value: 27.29789317523507 - type: nauc_recall_at_1000_std value: 41.54560242921126 - type: nauc_recall_at_100_diff1 value: 19.668816678808028 - type: nauc_recall_at_100_max value: 24.546392696829855 - type: nauc_recall_at_100_std value: 20.045457113413388 - type: nauc_recall_at_10_diff1 value: 22.57592036080691 - type: nauc_recall_at_10_max value: 17.30186041967476 - type: nauc_recall_at_10_std value: 5.75949108824036 - type: nauc_recall_at_1_diff1 value: 39.50866679123135 - type: nauc_recall_at_1_max value: 21.067083493139833 - type: nauc_recall_at_1_std value: -1.255629309903365 - type: nauc_recall_at_20_diff1 value: 18.597441888297915 - type: nauc_recall_at_20_max value: 17.76783323985467 - type: nauc_recall_at_20_std value: 7.756313900025849 - type: nauc_recall_at_3_diff1 value: 27.928359626631092 - type: nauc_recall_at_3_max value: 16.336637037641772 - type: nauc_recall_at_3_std value: -1.3417417785554366 - type: nauc_recall_at_5_diff1 value: 24.22251676423838 - type: nauc_recall_at_5_max value: 16.857422692031594 - type: nauc_recall_at_5_std value: 0.6185629064463674 - type: ndcg_at_1 value: 15.068000000000001 - type: ndcg_at_10 value: 21.66 - type: ndcg_at_100 value: 27.245 - type: ndcg_at_1000 value: 30.591 - type: ndcg_at_20 value: 23.955000000000002 - type: ndcg_at_3 value: 17.968999999999998 - type: ndcg_at_5 value: 19.352 - type: precision_at_1 value: 15.068000000000001 - type: precision_at_10 value: 4.326 - type: precision_at_100 value: 0.855 - type: precision_at_1000 value: 0.132 - type: precision_at_20 value: 2.8369999999999997 - type: precision_at_3 value: 8.713999999999999 - type: precision_at_5 value: 6.3469999999999995 - type: recall_at_1 value: 12.416 - type: recall_at_10 value: 30.008000000000003 - type: recall_at_100 value: 54.498999999999995 - type: recall_at_1000 value: 78.32000000000001 - type: recall_at_20 
value: 38.378 - type: recall_at_3 value: 19.79 - type: recall_at_5 value: 23.376 - task: type: Retrieval dataset: name: MTEB CQADupstackRetrieval (default) type: CQADupstackRetrieval_is_a_combined_dataset config: default split: test revision: CQADupstackRetrieval_is_a_combined_dataset metrics: - type: main_score value: 22.302333333333333 - type: ndcg_at_10 value: 22.302333333333333 - task: type: Retrieval dataset: name: MTEB CQADupstackStatsRetrieval (default) type: mteb/cqadupstack-stats config: default split: test revision: 65ac3a16b8e91f9cee4c9828cc7c335575432a2a metrics: - type: main_score value: 17.253 - type: map_at_1 value: 9.722999999999999 - type: map_at_10 value: 14.280999999999999 - type: map_at_100 value: 15.065000000000001 - type: map_at_1000 value: 15.154 - type: map_at_20 value: 14.704999999999998 - type: map_at_3 value: 13.004 - type: map_at_5 value: 13.626 - type: mrr_at_1 value: 11.809815950920246 - type: mrr_at_10 value: 16.383959002824028 - type: mrr_at_100 value: 17.188709691814985 - type: mrr_at_1000 value: 17.269435610183017 - type: mrr_at_20 value: 16.836972625425393 - type: mrr_at_3 value: 15.081799591002035 - type: mrr_at_5 value: 15.710633946830258 - type: nauc_map_at_1000_diff1 value: 28.431623275634156 - type: nauc_map_at_1000_max value: 14.476316695164403 - type: nauc_map_at_1000_std value: 4.607998508591043 - type: nauc_map_at_100_diff1 value: 28.42367177875125 - type: nauc_map_at_100_max value: 14.394653506060012 - type: nauc_map_at_100_std value: 4.567472357591712 - type: nauc_map_at_10_diff1 value: 28.60653023312716 - type: nauc_map_at_10_max value: 14.78157644547682 - type: nauc_map_at_10_std value: 3.94994519901673 - type: nauc_map_at_1_diff1 value: 34.36968432094878 - type: nauc_map_at_1_max value: 17.456572010137457 - type: nauc_map_at_1_std value: 4.2640515305539415 - type: nauc_map_at_20_diff1 value: 28.510596490501573 - type: nauc_map_at_20_max value: 14.318541992037401 - type: nauc_map_at_20_std value: 4.254075392620963 - type: nauc_map_at_3_diff1 value: 30.539716169861936 - type: nauc_map_at_3_max value: 16.14471431902583 - type: nauc_map_at_3_std value: 4.973502209268125 - type: nauc_map_at_5_diff1 value: 29.261684655915225 - type: nauc_map_at_5_max value: 15.372748605327446 - type: nauc_map_at_5_std value: 4.39285622535654 - type: nauc_mrr_at_1000_diff1 value: 28.972718024301447 - type: nauc_mrr_at_1000_max value: 17.826835397341046 - type: nauc_mrr_at_1000_std value: 6.917284034347911 - type: nauc_mrr_at_100_diff1 value: 28.945997371755087 - type: nauc_mrr_at_100_max value: 17.739278412823893 - type: nauc_mrr_at_100_std value: 6.899424135908487 - type: nauc_mrr_at_10_diff1 value: 29.06935519309891 - type: nauc_mrr_at_10_max value: 18.21083753088906 - type: nauc_mrr_at_10_std value: 6.518493253737144 - type: nauc_mrr_at_1_diff1 value: 35.63041619844435 - type: nauc_mrr_at_1_max value: 22.830306049699338 - type: nauc_mrr_at_1_std value: 7.826683917417351 - type: nauc_mrr_at_20_diff1 value: 29.016004511022537 - type: nauc_mrr_at_20_max value: 17.788437345787926 - type: nauc_mrr_at_20_std value: 6.652263770077456 - type: nauc_mrr_at_3_diff1 value: 30.644333070723466 - type: nauc_mrr_at_3_max value: 19.667632613725225 - type: nauc_mrr_at_3_std value: 7.743380165559918 - type: nauc_mrr_at_5_diff1 value: 29.829376205828805 - type: nauc_mrr_at_5_max value: 18.722595091544253 - type: nauc_mrr_at_5_std value: 6.818524829545593 - type: nauc_ndcg_at_1000_diff1 value: 25.62248172657835 - type: nauc_ndcg_at_1000_max value: 14.223326419511073 - type: 
nauc_ndcg_at_1000_std value: 7.495752604082028 - type: nauc_ndcg_at_100_diff1 value: 25.499428653265642 - type: nauc_ndcg_at_100_max value: 12.585064293899102 - type: nauc_ndcg_at_100_std value: 6.664889384341954 - type: nauc_ndcg_at_10_diff1 value: 25.74972755098383 - type: nauc_ndcg_at_10_max value: 13.793434874824303 - type: nauc_ndcg_at_10_std value: 3.883648047462527 - type: nauc_ndcg_at_1_diff1 value: 35.63041619844435 - type: nauc_ndcg_at_1_max value: 22.830306049699338 - type: nauc_ndcg_at_1_std value: 7.826683917417351 - type: nauc_ndcg_at_20_diff1 value: 25.334745687494443 - type: nauc_ndcg_at_20_max value: 12.305607906859144 - type: nauc_ndcg_at_20_std value: 4.7413728340444505 - type: nauc_ndcg_at_3_diff1 value: 29.45395763143249 - type: nauc_ndcg_at_3_max value: 16.23690234046979 - type: nauc_ndcg_at_3_std value: 6.142105291678576 - type: nauc_ndcg_at_5_diff1 value: 27.444736442905455 - type: nauc_ndcg_at_5_max value: 14.93362615759676 - type: nauc_ndcg_at_5_std value: 4.7342440148611225 - type: nauc_precision_at_1000_diff1 value: 16.80575206659899 - type: nauc_precision_at_1000_max value: 17.66226703408546 - type: nauc_precision_at_1000_std value: 18.77422949877631 - type: nauc_precision_at_100_diff1 value: 21.105287938477233 - type: nauc_precision_at_100_max value: 13.591179380636214 - type: nauc_precision_at_100_std value: 16.55840962012843 - type: nauc_precision_at_10_diff1 value: 21.469758913525254 - type: nauc_precision_at_10_max value: 15.320780706573464 - type: nauc_precision_at_10_std value: 6.351289997170259 - type: nauc_precision_at_1_diff1 value: 35.63041619844435 - type: nauc_precision_at_1_max value: 22.830306049699338 - type: nauc_precision_at_1_std value: 7.826683917417351 - type: nauc_precision_at_20_diff1 value: 20.438996654370953 - type: nauc_precision_at_20_max value: 11.895395539109575 - type: nauc_precision_at_20_std value: 9.227372989467945 - type: nauc_precision_at_3_diff1 value: 27.958385745280534 - type: nauc_precision_at_3_max value: 18.76663358991842 - type: nauc_precision_at_3_std value: 8.804799926813658 - type: nauc_precision_at_5_diff1 value: 25.20756412916346 - type: nauc_precision_at_5_max value: 17.16752690039525 - type: nauc_precision_at_5_std value: 7.822524248176865 - type: nauc_recall_at_1000_diff1 value: 17.093227818066353 - type: nauc_recall_at_1000_max value: 12.628515233697735 - type: nauc_recall_at_1000_std value: 16.519924218447994 - type: nauc_recall_at_100_diff1 value: 18.19732935930814 - type: nauc_recall_at_100_max value: 4.740051109026774 - type: nauc_recall_at_100_std value: 10.729043783837753 - type: nauc_recall_at_10_diff1 value: 17.84235497242283 - type: nauc_recall_at_10_max value: 7.9110522988146155 - type: nauc_recall_at_10_std value: 1.147900198002905 - type: nauc_recall_at_1_diff1 value: 34.36968432094878 - type: nauc_recall_at_1_max value: 17.456572010137457 - type: nauc_recall_at_1_std value: 4.2640515305539415 - type: nauc_recall_at_20_diff1 value: 16.692476991368853 - type: nauc_recall_at_20_max value: 3.809776817661501 - type: nauc_recall_at_20_std value: 3.6575551737685954 - type: nauc_recall_at_3_diff1 value: 25.110591985459862 - type: nauc_recall_at_3_max value: 13.681824792451245 - type: nauc_recall_at_3_std value: 5.806771643452482 - type: nauc_recall_at_5_diff1 value: 21.0191985797923 - type: nauc_recall_at_5_max value: 10.837381063643834 - type: nauc_recall_at_5_std value: 3.228418252689027 - type: ndcg_at_1 value: 11.81 - type: ndcg_at_10 value: 17.253 - type: ndcg_at_100 value: 21.404 - type: ndcg_at_1000 
value: 24.09 - type: ndcg_at_20 value: 18.801000000000002 - type: ndcg_at_3 value: 14.716999999999999 - type: ndcg_at_5 value: 15.706000000000001 - type: precision_at_1 value: 11.81 - type: precision_at_10 value: 2.9749999999999996 - type: precision_at_100 value: 0.543 - type: precision_at_1000 value: 0.084 - type: precision_at_20 value: 1.848 - type: precision_at_3 value: 6.902 - type: precision_at_5 value: 4.816 - type: recall_at_1 value: 9.722999999999999 - type: recall_at_10 value: 24.569 - type: recall_at_100 value: 43.997 - type: recall_at_1000 value: 64.44 - type: recall_at_20 value: 30.505 - type: recall_at_3 value: 17.134 - type: recall_at_5 value: 19.72 - task: type: Retrieval dataset: name: MTEB CQADupstackTexRetrieval (default) type: mteb/cqadupstack-tex config: default split: test revision: 46989137a86843e03a6195de44b09deda022eec7 metrics: - type: main_score value: 13.308 - type: map_at_1 value: 7.497 - type: map_at_10 value: 10.846 - type: map_at_100 value: 11.498999999999999 - type: map_at_1000 value: 11.618 - type: map_at_20 value: 11.161999999999999 - type: map_at_3 value: 9.658999999999999 - type: map_at_5 value: 10.298 - type: mrr_at_1 value: 9.11906400550585 - type: mrr_at_10 value: 12.993232392750626 - type: mrr_at_100 value: 13.701403675494117 - type: mrr_at_1000 value: 13.798101712770123 - type: mrr_at_20 value: 13.360764217937035 - type: mrr_at_3 value: 11.6655196145905 - type: mrr_at_5 value: 12.362353750860274 - type: nauc_map_at_1000_diff1 value: 29.030158454163164 - type: nauc_map_at_1000_max value: 15.750545094681929 - type: nauc_map_at_1000_std value: -3.0798436292807834 - type: nauc_map_at_100_diff1 value: 29.05038743174521 - type: nauc_map_at_100_max value: 15.679082682471822 - type: nauc_map_at_100_std value: -3.2003921265004855 - type: nauc_map_at_10_diff1 value: 29.680682239615308 - type: nauc_map_at_10_max value: 15.532980267877802 - type: nauc_map_at_10_std value: -3.622076099535413 - type: nauc_map_at_1_diff1 value: 37.49924172327444 - type: nauc_map_at_1_max value: 14.852898999380606 - type: nauc_map_at_1_std value: -3.8871845491808403 - type: nauc_map_at_20_diff1 value: 29.440127025124063 - type: nauc_map_at_20_max value: 15.566926763278111 - type: nauc_map_at_20_std value: -3.5118135905883445 - type: nauc_map_at_3_diff1 value: 31.87407675131833 - type: nauc_map_at_3_max value: 16.133052442782088 - type: nauc_map_at_3_std value: -3.7331459743832536 - type: nauc_map_at_5_diff1 value: 30.702048393849918 - type: nauc_map_at_5_max value: 15.7292852737471 - type: nauc_map_at_5_std value: -3.72714036461797 - type: nauc_mrr_at_1000_diff1 value: 27.069591144268795 - type: nauc_mrr_at_1000_max value: 17.335323991978157 - type: nauc_mrr_at_1000_std value: -2.1443215489774863 - type: nauc_mrr_at_100_diff1 value: 27.06995261671637 - type: nauc_mrr_at_100_max value: 17.3285570198275 - type: nauc_mrr_at_100_std value: -2.1819679734953903 - type: nauc_mrr_at_10_diff1 value: 27.57687228309106 - type: nauc_mrr_at_10_max value: 17.166971785334017 - type: nauc_mrr_at_10_std value: -2.6000743496984526 - type: nauc_mrr_at_1_diff1 value: 35.22676568917156 - type: nauc_mrr_at_1_max value: 17.007211079819626 - type: nauc_mrr_at_1_std value: -4.214696308727653 - type: nauc_mrr_at_20_diff1 value: 27.374588178560465 - type: nauc_mrr_at_20_max value: 17.23758467893531 - type: nauc_mrr_at_20_std value: -2.4124837810565603 - type: nauc_mrr_at_3_diff1 value: 29.722577971696918 - type: nauc_mrr_at_3_max value: 18.07384167733403 - type: nauc_mrr_at_3_std value: -3.003414797443647 - 
type: nauc_mrr_at_5_diff1 value: 28.45980370469956 - type: nauc_mrr_at_5_max value: 17.511976658495847 - type: nauc_mrr_at_5_std value: -2.5924858663986745 - type: nauc_ndcg_at_1000_diff1 value: 23.077231893052307 - type: nauc_ndcg_at_1000_max value: 16.93593483664181 - type: nauc_ndcg_at_1000_std value: 1.2092406562986315 - type: nauc_ndcg_at_100_diff1 value: 23.549727836162358 - type: nauc_ndcg_at_100_max value: 15.750436011474273 - type: nauc_ndcg_at_100_std value: -0.9019324316165611 - type: nauc_ndcg_at_10_diff1 value: 26.053761788639434 - type: nauc_ndcg_at_10_max value: 15.3669306793647 - type: nauc_ndcg_at_10_std value: -3.193779292269917 - type: nauc_ndcg_at_1_diff1 value: 35.22676568917156 - type: nauc_ndcg_at_1_max value: 17.007211079819626 - type: nauc_ndcg_at_1_std value: -4.214696308727653 - type: nauc_ndcg_at_20_diff1 value: 25.425326574435168 - type: nauc_ndcg_at_20_max value: 15.385189154016906 - type: nauc_ndcg_at_20_std value: -2.7870454259014545 - type: nauc_ndcg_at_3_diff1 value: 29.685264931512716 - type: nauc_ndcg_at_3_max value: 17.07409526298788 - type: nauc_ndcg_at_3_std value: -3.4063850629923293 - type: nauc_ndcg_at_5_diff1 value: 27.89860104840894 - type: nauc_ndcg_at_5_max value: 15.996740122854927 - type: nauc_ndcg_at_5_std value: -3.3146899970251873 - type: nauc_precision_at_1000_diff1 value: 6.214195083416471 - type: nauc_precision_at_1000_max value: 24.273670809985404 - type: nauc_precision_at_1000_std value: 17.553556491344104 - type: nauc_precision_at_100_diff1 value: 11.6615588663656 - type: nauc_precision_at_100_max value: 20.59244105372682 - type: nauc_precision_at_100_std value: 8.072189089366798 - type: nauc_precision_at_10_diff1 value: 18.279161444567706 - type: nauc_precision_at_10_max value: 17.664508142320727 - type: nauc_precision_at_10_std value: -1.0218966605840407 - type: nauc_precision_at_1_diff1 value: 35.22676568917156 - type: nauc_precision_at_1_max value: 17.007211079819626 - type: nauc_precision_at_1_std value: -4.214696308727653 - type: nauc_precision_at_20_diff1 value: 16.855549347544613 - type: nauc_precision_at_20_max value: 18.640589054149743 - type: nauc_precision_at_20_std value: 0.7553558754796067 - type: nauc_precision_at_3_diff1 value: 25.61293747306704 - type: nauc_precision_at_3_max value: 20.254901193584562 - type: nauc_precision_at_3_std value: -2.9517852127763153 - type: nauc_precision_at_5_diff1 value: 22.32451285561962 - type: nauc_precision_at_5_max value: 18.709490300571886 - type: nauc_precision_at_5_std value: -2.0702847848899615 - type: nauc_recall_at_1000_diff1 value: 8.102081393478185 - type: nauc_recall_at_1000_max value: 17.111395305264892 - type: nauc_recall_at_1000_std value: 14.340291614611578 - type: nauc_recall_at_100_diff1 value: 12.480368811829736 - type: nauc_recall_at_100_max value: 12.879220685006636 - type: nauc_recall_at_100_std value: 3.650162252310097 - type: nauc_recall_at_10_diff1 value: 19.461318204968205 - type: nauc_recall_at_10_max value: 12.823289358103562 - type: nauc_recall_at_10_std value: -3.1960264321653895 - type: nauc_recall_at_1_diff1 value: 37.49924172327444 - type: nauc_recall_at_1_max value: 14.852898999380606 - type: nauc_recall_at_1_std value: -3.8871845491808403 - type: nauc_recall_at_20_diff1 value: 17.698352862902524 - type: nauc_recall_at_20_max value: 12.409413309293047 - type: nauc_recall_at_20_std value: -2.0913697847507136 - type: nauc_recall_at_3_diff1 value: 26.236763474946116 - type: nauc_recall_at_3_max value: 15.89287407458128 - type: nauc_recall_at_3_std value: 
-3.776018275852628 - type: nauc_recall_at_5_diff1 value: 23.10472386873395 - type: nauc_recall_at_5_max value: 14.09706657151941 - type: nauc_recall_at_5_std value: -3.7053105237887296 - type: ndcg_at_1 value: 9.119 - type: ndcg_at_10 value: 13.308 - type: ndcg_at_100 value: 16.98 - type: ndcg_at_1000 value: 20.488 - type: ndcg_at_20 value: 14.455000000000002 - type: ndcg_at_3 value: 10.982 - type: ndcg_at_5 value: 12.003 - type: precision_at_1 value: 9.119 - type: precision_at_10 value: 2.4979999999999998 - type: precision_at_100 value: 0.519 - type: precision_at_1000 value: 0.099 - type: precision_at_20 value: 1.5779999999999998 - type: precision_at_3 value: 5.288 - type: precision_at_5 value: 3.8890000000000002 - type: recall_at_1 value: 7.497 - type: recall_at_10 value: 18.817999999999998 - type: recall_at_100 value: 35.893 - type: recall_at_1000 value: 61.966 - type: recall_at_20 value: 23.017000000000003 - type: recall_at_3 value: 12.199 - type: recall_at_5 value: 14.87 - task: type: Retrieval dataset: name: MTEB CQADupstackUnixRetrieval (default) type: mteb/cqadupstack-unix config: default split: test revision: 6c6430d3a6d36f8d2a829195bc5dc94d7e063e53 metrics: - type: main_score value: 20.061999999999998 - type: map_at_1 value: 11.856 - type: map_at_10 value: 16.685 - type: map_at_100 value: 17.433 - type: map_at_1000 value: 17.558 - type: map_at_20 value: 17.041999999999998 - type: map_at_3 value: 15.021 - type: map_at_5 value: 15.931999999999999 - type: mrr_at_1 value: 14.17910447761194 - type: mrr_at_10 value: 19.398468964700307 - type: mrr_at_100 value: 20.153361230634783 - type: mrr_at_1000 value: 20.25140420668968 - type: mrr_at_20 value: 19.79354704809282 - type: mrr_at_3 value: 17.63059701492538 - type: mrr_at_5 value: 18.516791044776127 - type: nauc_map_at_1000_diff1 value: 39.29033459612684 - type: nauc_map_at_1000_max value: 27.17416795511821 - type: nauc_map_at_1000_std value: -6.92127611795475 - type: nauc_map_at_100_diff1 value: 39.32396099754708 - type: nauc_map_at_100_max value: 27.09334212594238 - type: nauc_map_at_100_std value: -7.039062385443858 - type: nauc_map_at_10_diff1 value: 39.94340086930468 - type: nauc_map_at_10_max value: 27.423789336152417 - type: nauc_map_at_10_std value: -7.508495669216843 - type: nauc_map_at_1_diff1 value: 47.64613699501138 - type: nauc_map_at_1_max value: 31.632492599268748 - type: nauc_map_at_1_std value: -7.883784832592304 - type: nauc_map_at_20_diff1 value: 39.45107288329592 - type: nauc_map_at_20_max value: 27.15650902645131 - type: nauc_map_at_20_std value: -7.301916707077087 - type: nauc_map_at_3_diff1 value: 41.801336320148984 - type: nauc_map_at_3_max value: 28.342684341392683 - type: nauc_map_at_3_std value: -8.213654438632787 - type: nauc_map_at_5_diff1 value: 40.973958128612786 - type: nauc_map_at_5_max value: 28.355847958983126 - type: nauc_map_at_5_std value: -7.204454459764011 - type: nauc_mrr_at_1000_diff1 value: 39.68737143543835 - type: nauc_mrr_at_1000_max value: 28.74366308891808 - type: nauc_mrr_at_1000_std value: -5.74519909264754 - type: nauc_mrr_at_100_diff1 value: 39.696965050178875 - type: nauc_mrr_at_100_max value: 28.71065540406762 - type: nauc_mrr_at_100_std value: -5.8117683155682895 - type: nauc_mrr_at_10_diff1 value: 40.22891666712493 - type: nauc_mrr_at_10_max value: 28.97882832718155 - type: nauc_mrr_at_10_std value: -6.167061574555064 - type: nauc_mrr_at_1_diff1 value: 48.39795549312159 - type: nauc_mrr_at_1_max value: 33.31270433423697 - type: nauc_mrr_at_1_std value: -5.8264509798445925 - type: 
nauc_mrr_at_20_diff1 value: 39.75516014377185 - type: nauc_mrr_at_20_max value: 28.762238070807676 - type: nauc_mrr_at_20_std value: -6.015233094372284 - type: nauc_mrr_at_3_diff1 value: 42.39647678330573 - type: nauc_mrr_at_3_max value: 29.854246402890674 - type: nauc_mrr_at_3_std value: -6.989062488249666 - type: nauc_mrr_at_5_diff1 value: 41.32547115377251 - type: nauc_mrr_at_5_max value: 29.756253662694554 - type: nauc_mrr_at_5_std value: -5.989324088608618 - type: nauc_ndcg_at_1000_diff1 value: 33.24611188020779 - type: nauc_ndcg_at_1000_max value: 25.5685050419863 - type: nauc_ndcg_at_1000_std value: -2.1838171971216838 - type: nauc_ndcg_at_100_diff1 value: 34.12429897480726 - type: nauc_ndcg_at_100_max value: 24.386449655174115 - type: nauc_ndcg_at_100_std value: -4.463092158837694 - type: nauc_ndcg_at_10_diff1 value: 36.7514146310574 - type: nauc_ndcg_at_10_max value: 25.816604124438165 - type: nauc_ndcg_at_10_std value: -6.864047505974296 - type: nauc_ndcg_at_1_diff1 value: 48.39795549312159 - type: nauc_ndcg_at_1_max value: 33.31270433423697 - type: nauc_ndcg_at_1_std value: -5.8264509798445925 - type: nauc_ndcg_at_20_diff1 value: 35.19768360191347 - type: nauc_ndcg_at_20_max value: 25.02001675750392 - type: nauc_ndcg_at_20_std value: -6.20782733166831 - type: nauc_ndcg_at_3_diff1 value: 40.154344522643925 - type: nauc_ndcg_at_3_max value: 27.955302837392672 - type: nauc_ndcg_at_3_std value: -7.6328532886404235 - type: nauc_ndcg_at_5_diff1 value: 38.743591122825606 - type: nauc_ndcg_at_5_max value: 27.72241812814964 - type: nauc_ndcg_at_5_std value: -6.257812072012101 - type: nauc_precision_at_1000_diff1 value: -3.9866748764702096 - type: nauc_precision_at_1000_max value: 14.72470736881832 - type: nauc_precision_at_1000_std value: 15.962534584653012 - type: nauc_precision_at_100_diff1 value: 14.40948301991166 - type: nauc_precision_at_100_max value: 16.61733733078467 - type: nauc_precision_at_100_std value: 6.847882296599798 - type: nauc_precision_at_10_diff1 value: 27.51873293006865 - type: nauc_precision_at_10_max value: 22.893866555907746 - type: nauc_precision_at_10_std value: -3.030805589162383 - type: nauc_precision_at_1_diff1 value: 48.39795549312159 - type: nauc_precision_at_1_max value: 33.31270433423697 - type: nauc_precision_at_1_std value: -5.8264509798445925 - type: nauc_precision_at_20_diff1 value: 22.56834807636722 - type: nauc_precision_at_20_max value: 20.490661671424448 - type: nauc_precision_at_20_std value: -0.660069645072748 - type: nauc_precision_at_3_diff1 value: 36.978184171791156 - type: nauc_precision_at_3_max value: 26.478381926029265 - type: nauc_precision_at_3_std value: -6.091960417034656 - type: nauc_precision_at_5_diff1 value: 33.58525371051779 - type: nauc_precision_at_5_max value: 26.334754741578593 - type: nauc_precision_at_5_std value: -3.154368502496007 - type: nauc_recall_at_1000_diff1 value: 5.958742292353638 - type: nauc_recall_at_1000_max value: 15.864543076240528 - type: nauc_recall_at_1000_std value: 21.86695402215286 - type: nauc_recall_at_100_diff1 value: 17.82865358233198 - type: nauc_recall_at_100_max value: 13.118309558968022 - type: nauc_recall_at_100_std value: 2.3032751559115114 - type: nauc_recall_at_10_diff1 value: 27.980644115353996 - type: nauc_recall_at_10_max value: 19.39950863468228 - type: nauc_recall_at_10_std value: -6.36618746193429 - type: nauc_recall_at_1_diff1 value: 47.64613699501138 - type: nauc_recall_at_1_max value: 31.632492599268748 - type: nauc_recall_at_1_std value: -7.883784832592304 - type: 
nauc_recall_at_20_diff1 value: 22.967595804626253 - type: nauc_recall_at_20_max value: 16.693327271336244 - type: nauc_recall_at_20_std value: -4.559238353011102 - type: nauc_recall_at_3_diff1 value: 35.41022087124811 - type: nauc_recall_at_3_max value: 24.543890488663166 - type: nauc_recall_at_3_std value: -8.200059552235023 - type: nauc_recall_at_5_diff1 value: 32.09822917090586 - type: nauc_recall_at_5_max value: 23.82588196783892 - type: nauc_recall_at_5_std value: -4.932704288647733 - type: ndcg_at_1 value: 14.179 - type: ndcg_at_10 value: 20.061999999999998 - type: ndcg_at_100 value: 24.149 - type: ndcg_at_1000 value: 27.644999999999996 - type: ndcg_at_20 value: 21.387999999999998 - type: ndcg_at_3 value: 16.794 - type: ndcg_at_5 value: 18.224 - type: precision_at_1 value: 14.179 - type: precision_at_10 value: 3.582 - type: precision_at_100 value: 0.623 - type: precision_at_1000 value: 0.105 - type: precision_at_20 value: 2.1319999999999997 - type: precision_at_3 value: 7.774 - type: precision_at_5 value: 5.5969999999999995 - type: recall_at_1 value: 11.856 - type: recall_at_10 value: 27.778999999999996 - type: recall_at_100 value: 46.733000000000004 - type: recall_at_1000 value: 72.481 - type: recall_at_20 value: 32.737 - type: recall_at_3 value: 18.859 - type: recall_at_5 value: 22.435 - task: type: Retrieval dataset: name: MTEB CQADupstackWebmastersRetrieval (default) type: mteb/cqadupstack-webmasters config: default split: test revision: 160c094312a0e1facb97e55eeddb698c0abe3571 metrics: - type: main_score value: 23.735999999999997 - type: map_at_1 value: 13.164000000000001 - type: map_at_10 value: 19.317999999999998 - type: map_at_100 value: 20.463 - type: map_at_1000 value: 20.646 - type: map_at_20 value: 19.808 - type: map_at_3 value: 17.126 - type: map_at_5 value: 18.056 - type: mrr_at_1 value: 16.600790513833992 - type: mrr_at_10 value: 22.620067130936693 - type: mrr_at_100 value: 23.601448756772193 - type: mrr_at_1000 value: 23.675507750087586 - type: mrr_at_20 value: 23.09510872850641 - type: mrr_at_3 value: 20.685111989459816 - type: mrr_at_5 value: 21.46574440052701 - type: nauc_map_at_1000_diff1 value: 38.04966249247377 - type: nauc_map_at_1000_max value: 16.252263992463384 - type: nauc_map_at_1000_std value: -1.7460502582062356 - type: nauc_map_at_100_diff1 value: 38.014610979412474 - type: nauc_map_at_100_max value: 16.21534617931594 - type: nauc_map_at_100_std value: -1.862936037740923 - type: nauc_map_at_10_diff1 value: 37.85306201039408 - type: nauc_map_at_10_max value: 16.316152483605283 - type: nauc_map_at_10_std value: -1.9300768321014996 - type: nauc_map_at_1_diff1 value: 46.32670783118563 - type: nauc_map_at_1_max value: 19.162748070034993 - type: nauc_map_at_1_std value: -7.2143378209361435 - type: nauc_map_at_20_diff1 value: 37.76015277914087 - type: nauc_map_at_20_max value: 16.402558719060888 - type: nauc_map_at_20_std value: -2.065612538672495 - type: nauc_map_at_3_diff1 value: 39.76679931113434 - type: nauc_map_at_3_max value: 16.834290630961544 - type: nauc_map_at_3_std value: -3.9003170439130335 - type: nauc_map_at_5_diff1 value: 39.03208154755538 - type: nauc_map_at_5_max value: 16.225900244095133 - type: nauc_map_at_5_std value: -2.4557998742917273 - type: nauc_mrr_at_1000_diff1 value: 37.458213267102465 - type: nauc_mrr_at_1000_max value: 16.263132423271077 - type: nauc_mrr_at_1000_std value: -0.6455583895471498 - type: nauc_mrr_at_100_diff1 value: 37.45543984270519 - type: nauc_mrr_at_100_max value: 16.185738866185893 - type: nauc_mrr_at_100_std 
value: -0.6962640945779722 - type: nauc_mrr_at_10_diff1 value: 37.16827089026705 - type: nauc_mrr_at_10_max value: 15.901025716349201 - type: nauc_mrr_at_10_std value: -0.6599647334904797 - type: nauc_mrr_at_1_diff1 value: 44.322572770568456 - type: nauc_mrr_at_1_max value: 19.02126117731051 - type: nauc_mrr_at_1_std value: -5.8998188281784625 - type: nauc_mrr_at_20_diff1 value: 37.24551389599038 - type: nauc_mrr_at_20_max value: 16.113728443160127 - type: nauc_mrr_at_20_std value: -0.8856480048238807 - type: nauc_mrr_at_3_diff1 value: 38.800389636963004 - type: nauc_mrr_at_3_max value: 16.691447775512863 - type: nauc_mrr_at_3_std value: -2.2008701696190474 - type: nauc_mrr_at_5_diff1 value: 38.17066041754819 - type: nauc_mrr_at_5_max value: 15.854986493430074 - type: nauc_mrr_at_5_std value: -1.3419132385788708 - type: nauc_ndcg_at_1000_diff1 value: 36.500354605077305 - type: nauc_ndcg_at_1000_max value: 18.158853474546227 - type: nauc_ndcg_at_1000_std value: 3.7042707188045783 - type: nauc_ndcg_at_100_diff1 value: 35.68797486655767 - type: nauc_ndcg_at_100_max value: 15.949868116992763 - type: nauc_ndcg_at_100_std value: 1.8743757496922573 - type: nauc_ndcg_at_10_diff1 value: 34.44579459042251 - type: nauc_ndcg_at_10_max value: 14.976928472341097 - type: nauc_ndcg_at_10_std value: 0.668632426387858 - type: nauc_ndcg_at_1_diff1 value: 44.322572770568456 - type: nauc_ndcg_at_1_max value: 19.02126117731051 - type: nauc_ndcg_at_1_std value: -5.8998188281784625 - type: nauc_ndcg_at_20_diff1 value: 34.47554348325645 - type: nauc_ndcg_at_20_max value: 15.617518114283014 - type: nauc_ndcg_at_20_std value: 0.23279335295578624 - type: nauc_ndcg_at_3_diff1 value: 37.34865309502302 - type: nauc_ndcg_at_3_max value: 15.6035028610235 - type: nauc_ndcg_at_3_std value: -2.042290469888462 - type: nauc_ndcg_at_5_diff1 value: 36.710946337067 - type: nauc_ndcg_at_5_max value: 14.502265833101022 - type: nauc_ndcg_at_5_std value: -0.26386753108907807 - type: nauc_precision_at_1000_diff1 value: 3.5611970722748056 - type: nauc_precision_at_1000_max value: 6.9688736574296275 - type: nauc_precision_at_1000_std value: 7.291986774352235 - type: nauc_precision_at_100_diff1 value: 18.866491470530185 - type: nauc_precision_at_100_max value: 3.0721103361408497 - type: nauc_precision_at_100_std value: 4.384934503700695 - type: nauc_precision_at_10_diff1 value: 20.850504784204883 - type: nauc_precision_at_10_max value: 10.633189141801425 - type: nauc_precision_at_10_std value: 5.014926409884033 - type: nauc_precision_at_1_diff1 value: 44.322572770568456 - type: nauc_precision_at_1_max value: 19.02126117731051 - type: nauc_precision_at_1_std value: -5.8998188281784625 - type: nauc_precision_at_20_diff1 value: 20.309109922155518 - type: nauc_precision_at_20_max value: 9.029797084048417 - type: nauc_precision_at_20_std value: 2.758218391395686 - type: nauc_precision_at_3_diff1 value: 30.196789766812422 - type: nauc_precision_at_3_max value: 13.456577178909065 - type: nauc_precision_at_3_std value: 0.49917879030090373 - type: nauc_precision_at_5_diff1 value: 27.706537485425653 - type: nauc_precision_at_5_max value: 9.849229139569182 - type: nauc_precision_at_5_std value: 3.685125093555483 - type: nauc_recall_at_1000_diff1 value: 33.96229420221514 - type: nauc_recall_at_1000_max value: 37.16052892689619 - type: nauc_recall_at_1000_std value: 36.18222346361014 - type: nauc_recall_at_100_diff1 value: 27.657710979013174 - type: nauc_recall_at_100_max value: 15.352705013529967 - type: nauc_recall_at_100_std value: 
11.850919034123116 - type: nauc_recall_at_10_diff1 value: 25.46843551212912 - type: nauc_recall_at_10_max value: 12.024769591895815 - type: nauc_recall_at_10_std value: 5.710557786436904 - type: nauc_recall_at_1_diff1 value: 46.32670783118563 - type: nauc_recall_at_1_max value: 19.162748070034993 - type: nauc_recall_at_1_std value: -7.2143378209361435 - type: nauc_recall_at_20_diff1 value: 24.950754303786603 - type: nauc_recall_at_20_max value: 13.779914894639022 - type: nauc_recall_at_20_std value: 4.337235880676669 - type: nauc_recall_at_3_diff1 value: 33.979943512337485 - type: nauc_recall_at_3_max value: 14.35407227008922 - type: nauc_recall_at_3_std value: -0.5408111812033761 - type: nauc_recall_at_5_diff1 value: 31.887819659716687 - type: nauc_recall_at_5_max value: 12.266354466300289 - type: nauc_recall_at_5_std value: 3.67855636796736 - type: ndcg_at_1 value: 16.601 - type: ndcg_at_10 value: 23.735999999999997 - type: ndcg_at_100 value: 29.047 - type: ndcg_at_1000 value: 32.323 - type: ndcg_at_20 value: 25.222 - type: ndcg_at_3 value: 20.013 - type: ndcg_at_5 value: 21.165 - type: precision_at_1 value: 16.601 - type: precision_at_10 value: 4.7829999999999995 - type: precision_at_100 value: 1.077 - type: precision_at_1000 value: 0.197 - type: precision_at_20 value: 3.0429999999999997 - type: precision_at_3 value: 9.881 - type: precision_at_5 value: 7.074999999999999 - type: recall_at_1 value: 13.164000000000001 - type: recall_at_10 value: 33.041 - type: recall_at_100 value: 57.907 - type: recall_at_1000 value: 79.887 - type: recall_at_20 value: 38.833 - type: recall_at_3 value: 21.397 - type: recall_at_5 value: 24.863 - task: type: Retrieval dataset: name: MTEB CQADupstackWordpressRetrieval (default) type: mteb/cqadupstack-wordpress config: default split: test revision: 4ffe81d471b1924886b33c7567bfb200e9eec5c4 metrics: - type: main_score value: 16.794999999999998 - type: map_at_1 value: 10.08 - type: map_at_10 value: 14.069 - type: map_at_100 value: 14.860000000000001 - type: map_at_1000 value: 14.968 - type: map_at_20 value: 14.46 - type: map_at_3 value: 12.498 - type: map_at_5 value: 13.324 - type: mrr_at_1 value: 10.905730129390019 - type: mrr_at_10 value: 15.199146201918854 - type: mrr_at_100 value: 16.00264496872985 - type: mrr_at_1000 value: 16.09501918722929 - type: mrr_at_20 value: 15.633768523540942 - type: mrr_at_3 value: 13.493530499075785 - type: mrr_at_5 value: 14.36229205175601 - type: nauc_map_at_1000_diff1 value: 22.950167181074935 - type: nauc_map_at_1000_max value: 18.717980764527866 - type: nauc_map_at_1000_std value: -6.25267811740101 - type: nauc_map_at_100_diff1 value: 22.94728125565202 - type: nauc_map_at_100_max value: 18.719770177431155 - type: nauc_map_at_100_std value: -6.323089529332934 - type: nauc_map_at_10_diff1 value: 22.346430545898126 - type: nauc_map_at_10_max value: 18.80938448630523 - type: nauc_map_at_10_std value: -7.0008855212089065 - type: nauc_map_at_1_diff1 value: 31.95272198051361 - type: nauc_map_at_1_max value: 22.895259623649785 - type: nauc_map_at_1_std value: -9.582498979740272 - type: nauc_map_at_20_diff1 value: 22.86393142972787 - type: nauc_map_at_20_max value: 18.86264577450788 - type: nauc_map_at_20_std value: -6.45412214287895 - type: nauc_map_at_3_diff1 value: 24.099754234032194 - type: nauc_map_at_3_max value: 18.478412248275664 - type: nauc_map_at_3_std value: -7.165377931835313 - type: nauc_map_at_5_diff1 value: 23.19897817392842 - type: nauc_map_at_5_max value: 18.92826540423832 - type: nauc_map_at_5_std value: 
-6.707296227198584 - type: nauc_mrr_at_1000_diff1 value: 23.213771617115064 - type: nauc_mrr_at_1000_max value: 19.46803843401541 - type: nauc_mrr_at_1000_std value: -6.593116817917101 - type: nauc_mrr_at_100_diff1 value: 23.231343638867212 - type: nauc_mrr_at_100_max value: 19.452575181351783 - type: nauc_mrr_at_100_std value: -6.626683471900298 - type: nauc_mrr_at_10_diff1 value: 22.605547224050298 - type: nauc_mrr_at_10_max value: 19.467230968891098 - type: nauc_mrr_at_10_std value: -7.304335909859951 - type: nauc_mrr_at_1_diff1 value: 32.21591155654977 - type: nauc_mrr_at_1_max value: 23.898168032566968 - type: nauc_mrr_at_1_std value: -10.113298227732622 - type: nauc_mrr_at_20_diff1 value: 23.17788912060599 - type: nauc_mrr_at_20_max value: 19.681138842631395 - type: nauc_mrr_at_20_std value: -6.668117181278914 - type: nauc_mrr_at_3_diff1 value: 24.324685622276508 - type: nauc_mrr_at_3_max value: 19.28094175953585 - type: nauc_mrr_at_3_std value: -7.896612175052549 - type: nauc_mrr_at_5_diff1 value: 23.56101870977645 - type: nauc_mrr_at_5_max value: 19.830915115983956 - type: nauc_mrr_at_5_std value: -7.247689969483312 - type: nauc_ndcg_at_1000_diff1 value: 21.101486527699198 - type: nauc_ndcg_at_1000_max value: 17.661660378409593 - type: nauc_ndcg_at_1000_std value: -1.627651235714167 - type: nauc_ndcg_at_100_diff1 value: 21.24378422898819 - type: nauc_ndcg_at_100_max value: 17.493044854580774 - type: nauc_ndcg_at_100_std value: -3.419151472965354 - type: nauc_ndcg_at_10_diff1 value: 18.656346406751783 - type: nauc_ndcg_at_10_max value: 17.884063161669054 - type: nauc_ndcg_at_10_std value: -6.3304637473674985 - type: nauc_ndcg_at_1_diff1 value: 32.21591155654977 - type: nauc_ndcg_at_1_max value: 23.898168032566968 - type: nauc_ndcg_at_1_std value: -10.113298227732622 - type: nauc_ndcg_at_20_diff1 value: 20.517191848764295 - type: nauc_ndcg_at_20_max value: 18.302766567740257 - type: nauc_ndcg_at_20_std value: -4.676348966303663 - type: nauc_ndcg_at_3_diff1 value: 22.229860548618376 - type: nauc_ndcg_at_3_max value: 17.700425344082685 - type: nauc_ndcg_at_3_std value: -6.599851166419227 - type: nauc_ndcg_at_5_diff1 value: 20.760917715244236 - type: nauc_ndcg_at_5_max value: 18.320361121073617 - type: nauc_ndcg_at_5_std value: -5.968352306934327 - type: nauc_precision_at_1000_diff1 value: 6.111781725558282 - type: nauc_precision_at_1000_max value: 4.893420377600338 - type: nauc_precision_at_1000_std value: 13.552656007673166 - type: nauc_precision_at_100_diff1 value: 16.174564725391278 - type: nauc_precision_at_100_max value: 14.759102996929807 - type: nauc_precision_at_100_std value: 6.644858850147021 - type: nauc_precision_at_10_diff1 value: 8.889821893924042 - type: nauc_precision_at_10_max value: 15.574473888576575 - type: nauc_precision_at_10_std value: -2.6115731810417366 - type: nauc_precision_at_1_diff1 value: 32.21591155654977 - type: nauc_precision_at_1_max value: 23.898168032566968 - type: nauc_precision_at_1_std value: -10.113298227732622 - type: nauc_precision_at_20_diff1 value: 14.776717379922587 - type: nauc_precision_at_20_max value: 19.55219664568408 - type: nauc_precision_at_20_std value: 2.8624434397265373 - type: nauc_precision_at_3_diff1 value: 17.24181833195652 - type: nauc_precision_at_3_max value: 15.310985601785825 - type: nauc_precision_at_3_std value: -5.815145792096017 - type: nauc_precision_at_5_diff1 value: 14.568702652383378 - type: nauc_precision_at_5_max value: 16.90398092807837 - type: nauc_precision_at_5_std value: -4.884555559489991 - type: 
nauc_recall_at_1000_diff1 value: 17.718608305964434 - type: nauc_recall_at_1000_max value: 13.402668234081721 - type: nauc_recall_at_1000_std value: 21.623779371422756 - type: nauc_recall_at_100_diff1 value: 18.932841874380454 - type: nauc_recall_at_100_max value: 13.254799775623564 - type: nauc_recall_at_100_std value: 4.592397886568707 - type: nauc_recall_at_10_diff1 value: 10.256753131266485 - type: nauc_recall_at_10_max value: 15.34274332609289 - type: nauc_recall_at_10_std value: -5.019100394026518 - type: nauc_recall_at_1_diff1 value: 31.95272198051361 - type: nauc_recall_at_1_max value: 22.895259623649785 - type: nauc_recall_at_1_std value: -9.582498979740272 - type: nauc_recall_at_20_diff1 value: 16.098225999062155 - type: nauc_recall_at_20_max value: 16.11919310391389 - type: nauc_recall_at_20_std value: -0.981856820033547 - type: nauc_recall_at_3_diff1 value: 16.896414167717293 - type: nauc_recall_at_3_max value: 14.67655178851271 - type: nauc_recall_at_3_std value: -4.885403738918622 - type: nauc_recall_at_5_diff1 value: 15.074392597620905 - type: nauc_recall_at_5_max value: 16.457162195748644 - type: nauc_recall_at_5_std value: -3.6534367499331046 - type: ndcg_at_1 value: 10.906 - type: ndcg_at_10 value: 16.794999999999998 - type: ndcg_at_100 value: 21.434 - type: ndcg_at_1000 value: 24.743000000000002 - type: ndcg_at_20 value: 18.275 - type: ndcg_at_3 value: 13.507 - type: ndcg_at_5 value: 14.953 - type: precision_at_1 value: 10.906 - type: precision_at_10 value: 2.791 - type: precision_at_100 value: 0.5559999999999999 - type: precision_at_1000 value: 0.091 - type: precision_at_20 value: 1.738 - type: precision_at_3 value: 5.545 - type: precision_at_5 value: 4.14 - type: recall_at_1 value: 10.08 - type: recall_at_10 value: 24.184 - type: recall_at_100 value: 46.967999999999996 - type: recall_at_1000 value: 72.92999999999999 - type: recall_at_20 value: 29.852 - type: recall_at_3 value: 15.440999999999999 - type: recall_at_5 value: 18.829 - task: type: Retrieval dataset: name: MTEB ClimateFEVER (default) type: mteb/climate-fever config: default split: test revision: 47f2ac6acb640fc46020b02a5b59fdda04d39380 metrics: - type: main_score value: 17.288999999999998 - type: map_at_1 value: 6.537 - type: map_at_10 value: 11.465 - type: map_at_100 value: 12.851 - type: map_at_1000 value: 13.045000000000002 - type: map_at_20 value: 12.174 - type: map_at_3 value: 9.369 - type: map_at_5 value: 10.331 - type: mrr_at_1 value: 15.2442996742671 - type: mrr_at_10 value: 23.59306654257793 - type: mrr_at_100 value: 24.771529453769823 - type: mrr_at_1000 value: 24.838895119526256 - type: mrr_at_20 value: 24.34915881726873 - type: mrr_at_3 value: 20.466883821932676 - type: mrr_at_5 value: 22.027144408251875 - type: nauc_map_at_1000_diff1 value: 21.34422077879759 - type: nauc_map_at_1000_max value: 22.628208123980382 - type: nauc_map_at_1000_std value: 15.80771024789922 - type: nauc_map_at_100_diff1 value: 21.373352148960333 - type: nauc_map_at_100_max value: 22.445247482460697 - type: nauc_map_at_100_std value: 15.551345921669244 - type: nauc_map_at_10_diff1 value: 22.093245216727393 - type: nauc_map_at_10_max value: 20.71848879842843 - type: nauc_map_at_10_std value: 13.073037988129768 - type: nauc_map_at_1_diff1 value: 32.56507685691908 - type: nauc_map_at_1_max value: 19.299512363814912 - type: nauc_map_at_1_std value: 7.980883065948159 - type: nauc_map_at_20_diff1 value: 21.612469499988222 - type: nauc_map_at_20_max value: 21.70315933461587 - type: nauc_map_at_20_std value: 14.51324386963804 - 
type: nauc_map_at_3_diff1 value: 22.671417020380986 - type: nauc_map_at_3_max value: 18.10374651349345 - type: nauc_map_at_3_std value: 9.73448791948781 - type: nauc_map_at_5_diff1 value: 22.034988196838064 - type: nauc_map_at_5_max value: 18.490696961140145 - type: nauc_map_at_5_std value: 11.001958112977931 - type: nauc_mrr_at_1000_diff1 value: 17.997877765827052 - type: nauc_mrr_at_1000_max value: 23.761191320854795 - type: nauc_mrr_at_1000_std value: 17.086288520129283 - type: nauc_mrr_at_100_diff1 value: 17.99589491236679 - type: nauc_mrr_at_100_max value: 23.76386777696214 - type: nauc_mrr_at_100_std value: 17.114923252433908 - type: nauc_mrr_at_10_diff1 value: 17.95028052166577 - type: nauc_mrr_at_10_max value: 23.313446785613046 - type: nauc_mrr_at_10_std value: 16.289313792057893 - type: nauc_mrr_at_1_diff1 value: 25.00794012521374 - type: nauc_mrr_at_1_max value: 20.934023514536086 - type: nauc_mrr_at_1_std value: 10.326842252115775 - type: nauc_mrr_at_20_diff1 value: 17.977173189525192 - type: nauc_mrr_at_20_max value: 23.858084437038833 - type: nauc_mrr_at_20_std value: 17.177629596269224 - type: nauc_mrr_at_3_diff1 value: 18.049118818264052 - type: nauc_mrr_at_3_max value: 21.812245650122605 - type: nauc_mrr_at_3_std value: 14.048078149579718 - type: nauc_mrr_at_5_diff1 value: 18.028877069283745 - type: nauc_mrr_at_5_max value: 21.88620019054395 - type: nauc_mrr_at_5_std value: 14.787661645971001 - type: nauc_ndcg_at_1000_diff1 value: 16.72726980659064 - type: nauc_ndcg_at_1000_max value: 30.043672363788087 - type: nauc_ndcg_at_1000_std value: 26.833584730455268 - type: nauc_ndcg_at_100_diff1 value: 17.16473243031922 - type: nauc_ndcg_at_100_max value: 28.239622016125566 - type: nauc_ndcg_at_100_std value: 24.469002695895977 - type: nauc_ndcg_at_10_diff1 value: 18.655890597433427 - type: nauc_ndcg_at_10_max value: 23.63136724071696 - type: nauc_ndcg_at_10_std value: 17.29295589103389 - type: nauc_ndcg_at_1_diff1 value: 25.00794012521374 - type: nauc_ndcg_at_1_max value: 20.934023514536086 - type: nauc_ndcg_at_1_std value: 10.326842252115775 - type: nauc_ndcg_at_20_diff1 value: 17.762757204969244 - type: nauc_ndcg_at_20_max value: 25.946755000541476 - type: nauc_ndcg_at_20_std value: 20.9523075152757 - type: nauc_ndcg_at_3_diff1 value: 18.258615831392746 - type: nauc_ndcg_at_3_max value: 20.21498568651181 - type: nauc_ndcg_at_3_std value: 12.588112301185989 - type: nauc_ndcg_at_5_diff1 value: 18.575198873459577 - type: nauc_ndcg_at_5_max value: 19.821485190942443 - type: nauc_ndcg_at_5_std value: 13.559611377687455 - type: nauc_precision_at_1000_diff1 value: -1.3591333339360123 - type: nauc_precision_at_1000_max value: 33.01866225202323 - type: nauc_precision_at_1000_std value: 38.26072433720804 - type: nauc_precision_at_100_diff1 value: 4.534183759090849 - type: nauc_precision_at_100_max value: 35.499433595656335 - type: nauc_precision_at_100_std value: 37.765227934597114 - type: nauc_precision_at_10_diff1 value: 11.369511250136568 - type: nauc_precision_at_10_max value: 30.281092515358527 - type: nauc_precision_at_10_std value: 26.690470077530847 - type: nauc_precision_at_1_diff1 value: 25.00794012521374 - type: nauc_precision_at_1_max value: 20.934023514536086 - type: nauc_precision_at_1_std value: 10.326842252115775 - type: nauc_precision_at_20_diff1 value: 8.133211694372351 - type: nauc_precision_at_20_max value: 34.161055315782775 - type: nauc_precision_at_20_std value: 33.33055010570849 - type: nauc_precision_at_3_diff1 value: 10.5682193001728 - type: 
nauc_precision_at_3_max value: 22.786982248944767 - type: nauc_precision_at_3_std value: 17.92766896610086 - type: nauc_precision_at_5_diff1 value: 10.940535871177055 - type: nauc_precision_at_5_max value: 23.197073410356037 - type: nauc_precision_at_5_std value: 20.612896217277573 - type: nauc_recall_at_1000_diff1 value: 5.540983045337761 - type: nauc_recall_at_1000_max value: 37.3394645787145 - type: nauc_recall_at_1000_std value: 43.905340993951555 - type: nauc_recall_at_100_diff1 value: 8.725053205627061 - type: nauc_recall_at_100_max value: 29.46589116376182 - type: nauc_recall_at_100_std value: 32.76739728784572 - type: nauc_recall_at_10_diff1 value: 13.519133005869758 - type: nauc_recall_at_10_max value: 23.66746585259265 - type: nauc_recall_at_10_std value: 19.744857128981092 - type: nauc_recall_at_1_diff1 value: 32.56507685691908 - type: nauc_recall_at_1_max value: 19.299512363814912 - type: nauc_recall_at_1_std value: 7.980883065948159 - type: nauc_recall_at_20_diff1 value: 10.866077600352101 - type: nauc_recall_at_20_max value: 26.726876720649262 - type: nauc_recall_at_20_std value: 26.28100368153264 - type: nauc_recall_at_3_diff1 value: 15.295338383488533 - type: nauc_recall_at_3_max value: 18.013167170259173 - type: nauc_recall_at_3_std value: 11.569701886642754 - type: nauc_recall_at_5_diff1 value: 14.214598780846863 - type: nauc_recall_at_5_max value: 17.96550333772466 - type: nauc_recall_at_5_std value: 13.720834673116972 - type: ndcg_at_1 value: 15.244 - type: ndcg_at_10 value: 17.288999999999998 - type: ndcg_at_100 value: 23.757 - type: ndcg_at_1000 value: 27.725 - type: ndcg_at_20 value: 19.686999999999998 - type: ndcg_at_3 value: 13.245000000000001 - type: ndcg_at_5 value: 14.485000000000001 - type: precision_at_1 value: 15.244 - type: precision_at_10 value: 5.733 - type: precision_at_100 value: 1.264 - type: precision_at_1000 value: 0.199 - type: precision_at_20 value: 3.85 - type: precision_at_3 value: 10.054 - type: precision_at_5 value: 7.9350000000000005 - type: recall_at_1 value: 6.537 - type: recall_at_10 value: 22.046 - type: recall_at_100 value: 44.818000000000005 - type: recall_at_1000 value: 67.676 - type: recall_at_20 value: 28.974 - type: recall_at_3 value: 12.232 - type: recall_at_5 value: 15.540999999999999 - task: type: Retrieval dataset: name: MTEB DBPedia (default) type: mteb/dbpedia config: default split: test revision: c0f706b76e590d620bd6618b3ca8efdd34e2d659 metrics: - type: main_score value: 24.235 - type: map_at_1 value: 4.304 - type: map_at_10 value: 9.944 - type: map_at_100 value: 14.113000000000001 - type: map_at_1000 value: 15.085 - type: map_at_20 value: 11.594 - type: map_at_3 value: 7.228999999999999 - type: map_at_5 value: 8.368 - type: mrr_at_1 value: 43.0 - type: mrr_at_10 value: 53.30376984126983 - type: mrr_at_100 value: 53.97910163622114 - type: mrr_at_1000 value: 54.005267473599304 - type: mrr_at_20 value: 53.740161512249365 - type: mrr_at_3 value: 50.54166666666667 - type: mrr_at_5 value: 52.154166666666654 - type: nauc_map_at_1000_diff1 value: 26.809585057496545 - type: nauc_map_at_1000_max value: 27.599866660752987 - type: nauc_map_at_1000_std value: 31.459439584000094 - type: nauc_map_at_100_diff1 value: 27.049487336011836 - type: nauc_map_at_100_max value: 25.112936840752 - type: nauc_map_at_100_std value: 28.400137100413364 - type: nauc_map_at_10_diff1 value: 32.105246040146554 - type: nauc_map_at_10_max value: 9.658311385867774 - type: nauc_map_at_10_std value: 12.006591313970928 - type: nauc_map_at_1_diff1 value: 
45.66826032911575 - type: nauc_map_at_1_max value: 1.1005171486965344 - type: nauc_map_at_1_std value: 3.2500050585955558 - type: nauc_map_at_20_diff1 value: 30.73734552740125 - type: nauc_map_at_20_max value: 14.994971393610829 - type: nauc_map_at_20_std value: 18.029603402042753 - type: nauc_map_at_3_diff1 value: 36.77585294977933 - type: nauc_map_at_3_max value: 2.0123666749907034 - type: nauc_map_at_3_std value: 3.1886056493854906 - type: nauc_map_at_5_diff1 value: 34.910885252980414 - type: nauc_map_at_5_max value: 4.606898880177816 - type: nauc_map_at_5_std value: 5.897472990222533 - type: nauc_mrr_at_1000_diff1 value: 32.8408203164654 - type: nauc_mrr_at_1000_max value: 44.57916824429895 - type: nauc_mrr_at_1000_std value: 25.76632603800019 - type: nauc_mrr_at_100_diff1 value: 32.83381181877902 - type: nauc_mrr_at_100_max value: 44.57742098993615 - type: nauc_mrr_at_100_std value: 25.763980866882193 - type: nauc_mrr_at_10_diff1 value: 32.85879447148161 - type: nauc_mrr_at_10_max value: 44.587973042043814 - type: nauc_mrr_at_10_std value: 25.548766798683893 - type: nauc_mrr_at_1_diff1 value: 36.064038704139605 - type: nauc_mrr_at_1_max value: 43.188409566789346 - type: nauc_mrr_at_1_std value: 24.26421817898062 - type: nauc_mrr_at_20_diff1 value: 32.752896264184685 - type: nauc_mrr_at_20_max value: 44.56787283356919 - type: nauc_mrr_at_20_std value: 25.763763879915313 - type: nauc_mrr_at_3_diff1 value: 33.265925003418126 - type: nauc_mrr_at_3_max value: 43.98236209085194 - type: nauc_mrr_at_3_std value: 24.811433062956347 - type: nauc_mrr_at_5_diff1 value: 33.02692454410134 - type: nauc_mrr_at_5_max value: 44.02150946107612 - type: nauc_mrr_at_5_std value: 24.414392179240878 - type: nauc_ndcg_at_1000_diff1 value: 29.071114816059023 - type: nauc_ndcg_at_1000_max value: 38.90222092060964 - type: nauc_ndcg_at_1000_std value: 44.44820451621514 - type: nauc_ndcg_at_100_diff1 value: 29.1316364198098 - type: nauc_ndcg_at_100_max value: 31.558894971415064 - type: nauc_ndcg_at_100_std value: 35.45395514581182 - type: nauc_ndcg_at_10_diff1 value: 29.303783217647744 - type: nauc_ndcg_at_10_max value: 31.009718153863414 - type: nauc_ndcg_at_10_std value: 27.49477754545124 - type: nauc_ndcg_at_1_diff1 value: 35.43480922848642 - type: nauc_ndcg_at_1_max value: 30.475722281046714 - type: nauc_ndcg_at_1_std value: 17.626646786380547 - type: nauc_ndcg_at_20_diff1 value: 29.30769894815147 - type: nauc_ndcg_at_20_max value: 27.870710525324107 - type: nauc_ndcg_at_20_std value: 28.334513734492532 - type: nauc_ndcg_at_3_diff1 value: 30.7536730308035 - type: nauc_ndcg_at_3_max value: 32.32457811814772 - type: nauc_ndcg_at_3_std value: 21.676427426548152 - type: nauc_ndcg_at_5_diff1 value: 29.96943892323901 - type: nauc_ndcg_at_5_max value: 31.493512707920964 - type: nauc_ndcg_at_5_std value: 24.0956693770445 - type: nauc_precision_at_1000_diff1 value: -5.720318672455256 - type: nauc_precision_at_1000_max value: 28.08646209634404 - type: nauc_precision_at_1000_std value: 29.34422238786186 - type: nauc_precision_at_100_diff1 value: 0.84607162708279 - type: nauc_precision_at_100_max value: 47.97391409332498 - type: nauc_precision_at_100_std value: 44.619521382937286 - type: nauc_precision_at_10_diff1 value: 9.622029967680373 - type: nauc_precision_at_10_max value: 45.89203900455004 - type: nauc_precision_at_10_std value: 38.276273021326745 - type: nauc_precision_at_1_diff1 value: 36.064038704139605 - type: nauc_precision_at_1_max value: 43.188409566789346 - type: nauc_precision_at_1_std value: 
24.26421817898062 - type: nauc_precision_at_20_diff1 value: 6.709711811715244 - type: nauc_precision_at_20_max value: 47.47318907005896 - type: nauc_precision_at_20_std value: 42.595576770275095 - type: nauc_precision_at_3_diff1 value: 19.233575308317054 - type: nauc_precision_at_3_max value: 43.02563765159987 - type: nauc_precision_at_3_std value: 27.334254446564454 - type: nauc_precision_at_5_diff1 value: 14.298477498830673 - type: nauc_precision_at_5_max value: 42.72631241492758 - type: nauc_precision_at_5_std value: 32.14763584000337 - type: nauc_recall_at_1000_diff1 value: 18.551929022070503 - type: nauc_recall_at_1000_max value: 25.99572596347025 - type: nauc_recall_at_1000_std value: 49.479321187111644 - type: nauc_recall_at_100_diff1 value: 16.24655246342188 - type: nauc_recall_at_100_max value: 19.193014693852824 - type: nauc_recall_at_100_std value: 31.691642773148754 - type: nauc_recall_at_10_diff1 value: 21.181166055890365 - type: nauc_recall_at_10_max value: -0.020533885799737757 - type: nauc_recall_at_10_std value: 7.266191592314226 - type: nauc_recall_at_1_diff1 value: 45.66826032911575 - type: nauc_recall_at_1_max value: 1.1005171486965344 - type: nauc_recall_at_1_std value: 3.2500050585955558 - type: nauc_recall_at_20_diff1 value: 19.153797037751836 - type: nauc_recall_at_20_max value: 3.9385573002743057 - type: nauc_recall_at_20_std value: 14.048512138776442 - type: nauc_recall_at_3_diff1 value: 30.240078354763085 - type: nauc_recall_at_3_max value: -4.0841121814480195 - type: nauc_recall_at_3_std value: -2.3759344889809264 - type: nauc_recall_at_5_diff1 value: 26.22489817092464 - type: nauc_recall_at_5_max value: -3.2396073154699256 - type: nauc_recall_at_5_std value: -0.1327990827712389 - type: ndcg_at_1 value: 31.5 - type: ndcg_at_10 value: 24.235 - type: ndcg_at_100 value: 28.01 - type: ndcg_at_1000 value: 34.724 - type: ndcg_at_20 value: 24.265 - type: ndcg_at_3 value: 26.682 - type: ndcg_at_5 value: 25.249 - type: precision_at_1 value: 43.0 - type: precision_at_10 value: 21.65 - type: precision_at_100 value: 6.97 - type: precision_at_1000 value: 1.4449999999999998 - type: precision_at_20 value: 16.6 - type: precision_at_3 value: 32.25 - type: precision_at_5 value: 27.250000000000004 - type: recall_at_1 value: 4.304 - type: recall_at_10 value: 15.014 - type: recall_at_100 value: 35.115 - type: recall_at_1000 value: 58.52 - type: recall_at_20 value: 20.817 - type: recall_at_3 value: 8.698 - type: recall_at_5 value: 11.052 - task: type: Classification dataset: name: MTEB EmotionClassification (default) type: mteb/emotion config: default split: test revision: 4f58c6b202a23cf9a4da393831edf4f9183cad37 metrics: - type: accuracy value: 45.09 - type: f1 value: 41.3731018097549 - type: f1_weighted value: 47.129694558751545 - type: main_score value: 45.09 - task: type: Retrieval dataset: name: MTEB FEVER (default) type: mteb/fever config: default split: test revision: bea83ef9e8fb933d90a2f1d5515737465d613e12 metrics: - type: main_score value: 30.267 - type: map_at_1 value: 16.349 - type: map_at_10 value: 24.917 - type: map_at_100 value: 26.003 - type: map_at_1000 value: 26.072 - type: map_at_20 value: 25.558999999999997 - type: map_at_3 value: 22.067999999999998 - type: map_at_5 value: 23.610999999999997 - type: mrr_at_1 value: 17.416741674167415 - type: mrr_at_10 value: 26.439929707256365 - type: mrr_at_100 value: 27.508820939687954 - type: mrr_at_1000 value: 27.570352489203128 - type: mrr_at_20 value: 27.08319436248233 - type: mrr_at_3 value: 23.422342234223358 - type: 
mrr_at_5 value: 25.06350635063509 - type: nauc_map_at_1000_diff1 value: 21.773223671090857 - type: nauc_map_at_1000_max value: 6.412897130218669 - type: nauc_map_at_1000_std value: -6.3221009008493745 - type: nauc_map_at_100_diff1 value: 21.76483868507978 - type: nauc_map_at_100_max value: 6.404365200549758 - type: nauc_map_at_100_std value: -6.342840969370927 - type: nauc_map_at_10_diff1 value: 21.669481996014238 - type: nauc_map_at_10_max value: 6.019531738681224 - type: nauc_map_at_10_std value: -6.941777440293395 - type: nauc_map_at_1_diff1 value: 27.706382248361393 - type: nauc_map_at_1_max value: 4.030610814398596 - type: nauc_map_at_1_std value: -9.782554832619702 - type: nauc_map_at_20_diff1 value: 21.80535156700929 - type: nauc_map_at_20_max value: 6.361714278006344 - type: nauc_map_at_20_std value: -6.513790702798104 - type: nauc_map_at_3_diff1 value: 23.017059605983857 - type: nauc_map_at_3_max value: 5.110304244032051 - type: nauc_map_at_3_std value: -8.069547854658104 - type: nauc_map_at_5_diff1 value: 21.927491204194766 - type: nauc_map_at_5_max value: 5.462525780765053 - type: nauc_map_at_5_std value: -7.474340804858998 - type: nauc_mrr_at_1000_diff1 value: 21.61235920652557 - type: nauc_mrr_at_1000_max value: 6.6996553488043915 - type: nauc_mrr_at_1000_std value: -6.520954496784069 - type: nauc_mrr_at_100_diff1 value: 21.597831485534126 - type: nauc_mrr_at_100_max value: 6.705135295195408 - type: nauc_mrr_at_100_std value: -6.521597409657566 - type: nauc_mrr_at_10_diff1 value: 21.404259600861597 - type: nauc_mrr_at_10_max value: 6.348078634441438 - type: nauc_mrr_at_10_std value: -7.012906818443071 - type: nauc_mrr_at_1_diff1 value: 27.231264207663248 - type: nauc_mrr_at_1_max value: 4.04888129901842 - type: nauc_mrr_at_1_std value: -9.998368133129015 - type: nauc_mrr_at_20_diff1 value: 21.57543681953314 - type: nauc_mrr_at_20_max value: 6.670007051575425 - type: nauc_mrr_at_20_std value: -6.636382948186316 - type: nauc_mrr_at_3_diff1 value: 22.771758514181627 - type: nauc_mrr_at_3_max value: 5.389600538667887 - type: nauc_mrr_at_3_std value: -8.189661361743667 - type: nauc_mrr_at_5_diff1 value: 21.689397986510446 - type: nauc_mrr_at_5_max value: 5.765658649049543 - type: nauc_mrr_at_5_std value: -7.590205788150704 - type: nauc_ndcg_at_1000_diff1 value: 19.780729881850963 - type: nauc_ndcg_at_1000_max value: 8.968522119658385 - type: nauc_ndcg_at_1000_std value: -2.425269449284083 - type: nauc_ndcg_at_100_diff1 value: 19.46657224380776 - type: nauc_ndcg_at_100_max value: 9.05883201318058 - type: nauc_ndcg_at_100_std value: -2.5565659351523293 - type: nauc_ndcg_at_10_diff1 value: 19.29152253186839 - type: nauc_ndcg_at_10_max value: 7.499062048205841 - type: nauc_ndcg_at_10_std value: -5.2482566392088685 - type: nauc_ndcg_at_1_diff1 value: 27.231264207663248 - type: nauc_ndcg_at_1_max value: 4.04888129901842 - type: nauc_ndcg_at_1_std value: -9.998368133129015 - type: nauc_ndcg_at_20_diff1 value: 19.71545443537324 - type: nauc_ndcg_at_20_max value: 8.64504551388718 - type: nauc_ndcg_at_20_std value: -3.7667113417348976 - type: nauc_ndcg_at_3_diff1 value: 21.745216173844803 - type: nauc_ndcg_at_3_max value: 5.650727598972489 - type: nauc_ndcg_at_3_std value: -7.481336986244201 - type: nauc_ndcg_at_5_diff1 value: 19.936133837204203 - type: nauc_ndcg_at_5_max value: 6.259916537058443 - type: nauc_ndcg_at_5_std value: -6.484388158971839 - type: nauc_precision_at_1000_diff1 value: 1.471146535072958 - type: nauc_precision_at_1000_max value: 20.630906784097483 - type: 
nauc_precision_at_1000_std value: 21.9773366010731 - type: nauc_precision_at_100_diff1 value: 7.533964401054148 - type: nauc_precision_at_100_max value: 19.925643661900423 - type: nauc_precision_at_100_std value: 15.336729247195924 - type: nauc_precision_at_10_diff1 value: 12.150440335935734 - type: nauc_precision_at_10_max value: 11.983854268540387 - type: nauc_precision_at_10_std value: -0.37221151434129196 - type: nauc_precision_at_1_diff1 value: 27.231264207663248 - type: nauc_precision_at_1_max value: 4.04888129901842 - type: nauc_precision_at_1_std value: -9.998368133129015 - type: nauc_precision_at_20_diff1 value: 12.630450311503752 - type: nauc_precision_at_20_max value: 16.05605149278296 - type: nauc_precision_at_20_std value: 5.3999355877921165 - type: nauc_precision_at_3_diff1 value: 18.359563527526568 - type: nauc_precision_at_3_max value: 7.050702808245418 - type: nauc_precision_at_3_std value: -6.012052050420314 - type: nauc_precision_at_5_diff1 value: 14.398743831406193 - type: nauc_precision_at_5_max value: 8.47645601614165 - type: nauc_precision_at_5_std value: -4.017240645221931 - type: nauc_recall_at_1000_diff1 value: 7.839541590866944 - type: nauc_recall_at_1000_max value: 23.309619602703478 - type: nauc_recall_at_1000_std value: 27.804864458508405 - type: nauc_recall_at_100_diff1 value: 9.97691215791031 - type: nauc_recall_at_100_max value: 18.819153599870717 - type: nauc_recall_at_100_std value: 14.458117071228108 - type: nauc_recall_at_10_diff1 value: 12.810432997078946 - type: nauc_recall_at_10_max value: 10.766544057766287 - type: nauc_recall_at_10_std value: -0.5969028921503585 - type: nauc_recall_at_1_diff1 value: 27.706382248361393 - type: nauc_recall_at_1_max value: 4.030610814398596 - type: nauc_recall_at_1_std value: -9.782554832619702 - type: nauc_recall_at_20_diff1 value: 13.595110328407126 - type: nauc_recall_at_20_max value: 14.757809231376443 - type: nauc_recall_at_20_std value: 4.9020894617594575 - type: nauc_recall_at_3_diff1 value: 18.603105066886183 - type: nauc_recall_at_3_max value: 6.695351132956627 - type: nauc_recall_at_3_std value: -5.761401766506087 - type: nauc_recall_at_5_diff1 value: 14.770731919705574 - type: nauc_recall_at_5_max value: 7.754748009508286 - type: nauc_recall_at_5_std value: -3.7961358195332773 - type: ndcg_at_1 value: 17.416999999999998 - type: ndcg_at_10 value: 30.267 - type: ndcg_at_100 value: 35.650999999999996 - type: ndcg_at_1000 value: 37.57 - type: ndcg_at_20 value: 32.574 - type: ndcg_at_3 value: 24.303 - type: ndcg_at_5 value: 27.099 - type: precision_at_1 value: 17.416999999999998 - type: precision_at_10 value: 4.9590000000000005 - type: precision_at_100 value: 0.7799999999999999 - type: precision_at_1000 value: 0.096 - type: precision_at_20 value: 2.9819999999999998 - type: precision_at_3 value: 10.536 - type: precision_at_5 value: 7.807 - type: recall_at_1 value: 16.349 - type: recall_at_10 value: 45.678999999999995 - type: recall_at_100 value: 70.541 - type: recall_at_1000 value: 85.36500000000001 - type: recall_at_20 value: 54.541 - type: recall_at_3 value: 29.42 - type: recall_at_5 value: 36.112 - task: type: Retrieval dataset: name: MTEB FiQA2018 (default) type: mteb/fiqa config: default split: test revision: 27a168819829fe9bcd655c2df245fb19452e8e06 metrics: - type: main_score value: 16.619 - type: map_at_1 value: 7.478999999999999 - type: map_at_10 value: 11.933 - type: map_at_100 value: 13.078000000000001 - type: map_at_1000 value: 13.267999999999999 - type: map_at_20 value: 12.465 - type: map_at_3 value: 
9.975000000000001 - type: map_at_5 value: 10.928 - type: mrr_at_1 value: 14.660493827160495 - type: mrr_at_10 value: 20.737250146972368 - type: mrr_at_100 value: 21.718558761167632 - type: mrr_at_1000 value: 21.808600465854973 - type: mrr_at_20 value: 21.221196101889976 - type: mrr_at_3 value: 18.569958847736622 - type: mrr_at_5 value: 19.557613168724284 - type: nauc_map_at_1000_diff1 value: 21.51431734644358 - type: nauc_map_at_1000_max value: 4.931074809601008 - type: nauc_map_at_1000_std value: -3.3303160557020033 - type: nauc_map_at_100_diff1 value: 21.38249575770264 - type: nauc_map_at_100_max value: 4.725930298940441 - type: nauc_map_at_100_std value: -3.4448477852279473 - type: nauc_map_at_10_diff1 value: 21.195172969735484 - type: nauc_map_at_10_max value: 4.412691847045547 - type: nauc_map_at_10_std value: -4.350074377307911 - type: nauc_map_at_1_diff1 value: 28.103238263092063 - type: nauc_map_at_1_max value: 6.669837188399256 - type: nauc_map_at_1_std value: -4.3658897905036405 - type: nauc_map_at_20_diff1 value: 21.489132375885042 - type: nauc_map_at_20_max value: 4.303022314751493 - type: nauc_map_at_20_std value: -4.17992541434375 - type: nauc_map_at_3_diff1 value: 22.237087711122065 - type: nauc_map_at_3_max value: 4.533442194144081 - type: nauc_map_at_3_std value: -5.4916480142821635 - type: nauc_map_at_5_diff1 value: 21.876772694300065 - type: nauc_map_at_5_max value: 4.511112176374985 - type: nauc_map_at_5_std value: -5.176150118472554 - type: nauc_mrr_at_1000_diff1 value: 22.783625924297894 - type: nauc_mrr_at_1000_max value: 5.601679998803955 - type: nauc_mrr_at_1000_std value: -7.3878080622090865 - type: nauc_mrr_at_100_diff1 value: 22.729460521696915 - type: nauc_mrr_at_100_max value: 5.57805664833725 - type: nauc_mrr_at_100_std value: -7.3741470356357945 - type: nauc_mrr_at_10_diff1 value: 22.92977199129734 - type: nauc_mrr_at_10_max value: 5.36088601159652 - type: nauc_mrr_at_10_std value: -7.875413563795927 - type: nauc_mrr_at_1_diff1 value: 28.31095482042955 - type: nauc_mrr_at_1_max value: 7.815000197077026 - type: nauc_mrr_at_1_std value: -7.957538731368522 - type: nauc_mrr_at_20_diff1 value: 22.946584920142406 - type: nauc_mrr_at_20_max value: 5.384498887828733 - type: nauc_mrr_at_20_std value: -7.633579657779428 - type: nauc_mrr_at_3_diff1 value: 23.46361356498147 - type: nauc_mrr_at_3_max value: 4.50117125788086 - type: nauc_mrr_at_3_std value: -8.902224452227653 - type: nauc_mrr_at_5_diff1 value: 23.331352654582094 - type: nauc_mrr_at_5_max value: 4.978873752458006 - type: nauc_mrr_at_5_std value: -8.93749978655238 - type: nauc_ndcg_at_1000_diff1 value: 19.87039469365751 - type: nauc_ndcg_at_1000_max value: 8.696714614408632 - type: nauc_ndcg_at_1000_std value: 1.9681923697039077 - type: nauc_ndcg_at_100_diff1 value: 18.868322837780532 - type: nauc_ndcg_at_100_max value: 6.0333062132177675 - type: nauc_ndcg_at_100_std value: 0.44045929715801535 - type: nauc_ndcg_at_10_diff1 value: 19.727068370792786 - type: nauc_ndcg_at_10_max value: 4.277512828410901 - type: nauc_ndcg_at_10_std value: -4.086859790177703 - type: nauc_ndcg_at_1_diff1 value: 28.31095482042955 - type: nauc_ndcg_at_1_max value: 7.815000197077026 - type: nauc_ndcg_at_1_std value: -7.957538731368522 - type: nauc_ndcg_at_20_diff1 value: 20.29147215834196 - type: nauc_ndcg_at_20_max value: 4.095649235859702 - type: nauc_ndcg_at_20_std value: -3.35870597862009 - type: nauc_ndcg_at_3_diff1 value: 21.821928240162936 - type: nauc_ndcg_at_3_max value: 4.480256449572136 - type: nauc_ndcg_at_3_std value: 
-7.852741840584263 - type: nauc_ndcg_at_5_diff1 value: 21.15156996884851 - type: nauc_ndcg_at_5_max value: 4.290200639355712 - type: nauc_ndcg_at_5_std value: -6.820305338379054 - type: nauc_precision_at_1000_diff1 value: 8.075302805866599 - type: nauc_precision_at_1000_max value: 19.944406193476624 - type: nauc_precision_at_1000_std value: 7.381890177301082 - type: nauc_precision_at_100_diff1 value: 11.601078456057651 - type: nauc_precision_at_100_max value: 13.628171798745194 - type: nauc_precision_at_100_std value: 5.64401780985023 - type: nauc_precision_at_10_diff1 value: 16.653551040271243 - type: nauc_precision_at_10_max value: 6.546264597330201 - type: nauc_precision_at_10_std value: -4.71713361654603 - type: nauc_precision_at_1_diff1 value: 28.31095482042955 - type: nauc_precision_at_1_max value: 7.815000197077026 - type: nauc_precision_at_1_std value: -7.957538731368522 - type: nauc_precision_at_20_diff1 value: 17.066402720849883 - type: nauc_precision_at_20_max value: 6.178677607606832 - type: nauc_precision_at_20_std value: -3.987829586084965 - type: nauc_precision_at_3_diff1 value: 18.358060169256518 - type: nauc_precision_at_3_max value: 3.326657304001109 - type: nauc_precision_at_3_std value: -10.729398884603352 - type: nauc_precision_at_5_diff1 value: 19.41722339541596 - type: nauc_precision_at_5_max value: 5.714829813319856 - type: nauc_precision_at_5_std value: -8.915414021584194 - type: nauc_recall_at_1000_diff1 value: 9.365082280755011 - type: nauc_recall_at_1000_max value: 15.829818126823215 - type: nauc_recall_at_1000_std value: 27.360808820832666 - type: nauc_recall_at_100_diff1 value: 8.05391879951721 - type: nauc_recall_at_100_max value: 5.285477600522065 - type: nauc_recall_at_100_std value: 13.239431098719457 - type: nauc_recall_at_10_diff1 value: 13.288596558862537 - type: nauc_recall_at_10_max value: 1.9512189235666242 - type: nauc_recall_at_10_std value: 0.08420098367582614 - type: nauc_recall_at_1_diff1 value: 28.103238263092063 - type: nauc_recall_at_1_max value: 6.669837188399256 - type: nauc_recall_at_1_std value: -4.3658897905036405 - type: nauc_recall_at_20_diff1 value: 14.781087409113736 - type: nauc_recall_at_20_max value: 1.6715579437911525 - type: nauc_recall_at_20_std value: 1.4885011649849296 - type: nauc_recall_at_3_diff1 value: 16.904223069103445 - type: nauc_recall_at_3_max value: 1.2031021965601998 - type: nauc_recall_at_3_std value: -5.7358517453558395 - type: nauc_recall_at_5_diff1 value: 15.560583779980208 - type: nauc_recall_at_5_max value: 1.268944483676161 - type: nauc_recall_at_5_std value: -5.114882384179444 - type: ndcg_at_1 value: 14.66 - type: ndcg_at_10 value: 16.619 - type: ndcg_at_100 value: 22.467000000000002 - type: ndcg_at_1000 value: 26.745 - type: ndcg_at_20 value: 18.356 - type: ndcg_at_3 value: 13.547 - type: ndcg_at_5 value: 14.466999999999999 - type: precision_at_1 value: 14.66 - type: precision_at_10 value: 4.8149999999999995 - type: precision_at_100 value: 1.0619999999999998 - type: precision_at_1000 value: 0.182 - type: precision_at_20 value: 3.071 - type: precision_at_3 value: 9.002 - type: precision_at_5 value: 6.79 - type: recall_at_1 value: 7.478999999999999 - type: recall_at_10 value: 21.884 - type: recall_at_100 value: 45.545 - type: recall_at_1000 value: 71.887 - type: recall_at_20 value: 27.567999999999998 - type: recall_at_3 value: 12.485 - type: recall_at_5 value: 15.862000000000002 - task: type: Retrieval dataset: name: MTEB HotpotQA (default) type: mteb/hotpotqa config: default split: test revision: 
ab518f4d6fcca38d87c25209f94beba119d02014 metrics: - type: main_score value: 36.217 - type: map_at_1 value: 20.628 - type: map_at_10 value: 28.559 - type: map_at_100 value: 29.5 - type: map_at_1000 value: 29.601 - type: map_at_20 value: 29.069 - type: map_at_3 value: 26.429000000000002 - type: map_at_5 value: 27.589000000000002 - type: mrr_at_1 value: 41.2559081701553 - type: mrr_at_10 value: 48.84337052399182 - type: mrr_at_100 value: 49.523346087979284 - type: mrr_at_1000 value: 49.56958885341236 - type: mrr_at_20 value: 49.24793448550151 - type: mrr_at_3 value: 46.893990546927924 - type: mrr_at_5 value: 48.02430790006756 - type: nauc_map_at_1000_diff1 value: 47.360168970984724 - type: nauc_map_at_1000_max value: 24.614881662381816 - type: nauc_map_at_1000_std value: 7.361001821254585 - type: nauc_map_at_100_diff1 value: 47.364333667549126 - type: nauc_map_at_100_max value: 24.59919582686935 - type: nauc_map_at_100_std value: 7.30629187742088 - type: nauc_map_at_10_diff1 value: 47.72981170600924 - type: nauc_map_at_10_max value: 24.438913671717863 - type: nauc_map_at_10_std value: 6.344771843030873 - type: nauc_map_at_1_diff1 value: 60.38112885477367 - type: nauc_map_at_1_max value: 25.9097175050165 - type: nauc_map_at_1_std value: 1.6564371988429167 - type: nauc_map_at_20_diff1 value: 47.57684884180127 - type: nauc_map_at_20_max value: 24.499763513475443 - type: nauc_map_at_20_std value: 6.846169751546589 - type: nauc_map_at_3_diff1 value: 49.86374782865936 - type: nauc_map_at_3_max value: 24.885292020762233 - type: nauc_map_at_3_std value: 4.8258321037343075 - type: nauc_map_at_5_diff1 value: 48.41433187485084 - type: nauc_map_at_5_max value: 24.439622781310288 - type: nauc_map_at_5_std value: 5.664110533938225 - type: nauc_mrr_at_1000_diff1 value: 56.730426912840926 - type: nauc_mrr_at_1000_max value: 25.303184184778832 - type: nauc_mrr_at_1000_std value: 4.096788282752593 - type: nauc_mrr_at_100_diff1 value: 56.72217642846328 - type: nauc_mrr_at_100_max value: 25.302090289174313 - type: nauc_mrr_at_100_std value: 4.108586907297719 - type: nauc_mrr_at_10_diff1 value: 56.738023427066885 - type: nauc_mrr_at_10_max value: 25.271616491844455 - type: nauc_mrr_at_10_std value: 3.824908381559653 - type: nauc_mrr_at_1_diff1 value: 60.38112885477367 - type: nauc_mrr_at_1_max value: 25.9097175050165 - type: nauc_mrr_at_1_std value: 1.6564371988429167 - type: nauc_mrr_at_20_diff1 value: 56.70644340159845 - type: nauc_mrr_at_20_max value: 25.27993872890672 - type: nauc_mrr_at_20_std value: 4.0064390570846875 - type: nauc_mrr_at_3_diff1 value: 57.245840183280194 - type: nauc_mrr_at_3_max value: 25.33525251108163 - type: nauc_mrr_at_3_std value: 2.9291934957523584 - type: nauc_mrr_at_5_diff1 value: 56.755596718387125 - type: nauc_mrr_at_5_max value: 25.22311364368114 - type: nauc_mrr_at_5_std value: 3.5613271952141865 - type: nauc_ndcg_at_1000_diff1 value: 46.553394894195456 - type: nauc_ndcg_at_1000_max value: 24.938550469205936 - type: nauc_ndcg_at_1000_std value: 11.539278224453703 - type: nauc_ndcg_at_100_diff1 value: 46.60518292153804 - type: nauc_ndcg_at_100_max value: 24.724969691359487 - type: nauc_ndcg_at_100_std value: 10.73834721703669 - type: nauc_ndcg_at_10_diff1 value: 48.12092181292035 - type: nauc_ndcg_at_10_max value: 24.2791002435645 - type: nauc_ndcg_at_10_std value: 7.153695707296072 - type: nauc_ndcg_at_1_diff1 value: 60.38112885477367 - type: nauc_ndcg_at_1_max value: 25.9097175050165 - type: nauc_ndcg_at_1_std value: 1.6564371988429167 - type: nauc_ndcg_at_20_diff1 value: 
47.65117800859018 - type: nauc_ndcg_at_20_max value: 24.357451369693482 - type: nauc_ndcg_at_20_std value: 8.469581027730795 - type: nauc_ndcg_at_3_diff1 value: 51.08303103543016 - type: nauc_ndcg_at_3_max value: 24.799424583706255 - type: nauc_ndcg_at_3_std value: 4.63909501741516 - type: nauc_ndcg_at_5_diff1 value: 49.136821889915225 - type: nauc_ndcg_at_5_max value: 24.243099266851612 - type: nauc_ndcg_at_5_std value: 5.961841495442629 - type: nauc_precision_at_1000_diff1 value: 14.823992446535481 - type: nauc_precision_at_1000_max value: 17.957974549199044 - type: nauc_precision_at_1000_std value: 31.79928156519854 - type: nauc_precision_at_100_diff1 value: 23.121894912525356 - type: nauc_precision_at_100_max value: 19.166436915427486 - type: nauc_precision_at_100_std value: 23.79964191034748 - type: nauc_precision_at_10_diff1 value: 35.6440151764581 - type: nauc_precision_at_10_max value: 21.022400502868223 - type: nauc_precision_at_10_std value: 11.461152130387351 - type: nauc_precision_at_1_diff1 value: 60.38112885477367 - type: nauc_precision_at_1_max value: 25.9097175050165 - type: nauc_precision_at_1_std value: 1.6564371988429167 - type: nauc_precision_at_20_diff1 value: 31.893138428309527 - type: nauc_precision_at_20_max value: 19.961827091439737 - type: nauc_precision_at_20_std value: 15.056260461619232 - type: nauc_precision_at_3_diff1 value: 45.06971180999361 - type: nauc_precision_at_3_max value: 23.635891515921788 - type: nauc_precision_at_3_std value: 6.198234444102806 - type: nauc_precision_at_5_diff1 value: 39.43842818627394 - type: nauc_precision_at_5_max value: 21.623592109687603 - type: nauc_precision_at_5_std value: 8.718348302717638 - type: nauc_recall_at_1000_diff1 value: 14.823992446535502 - type: nauc_recall_at_1000_max value: 17.95797454919907 - type: nauc_recall_at_1000_std value: 31.799281565198577 - type: nauc_recall_at_100_diff1 value: 23.121894912525338 - type: nauc_recall_at_100_max value: 19.16643691542745 - type: nauc_recall_at_100_std value: 23.799641910347454 - type: nauc_recall_at_10_diff1 value: 35.64401517645808 - type: nauc_recall_at_10_max value: 21.022400502868223 - type: nauc_recall_at_10_std value: 11.461152130387346 - type: nauc_recall_at_1_diff1 value: 60.38112885477367 - type: nauc_recall_at_1_max value: 25.9097175050165 - type: nauc_recall_at_1_std value: 1.6564371988429167 - type: nauc_recall_at_20_diff1 value: 31.89313842830953 - type: nauc_recall_at_20_max value: 19.961827091439776 - type: nauc_recall_at_20_std value: 15.05626046161922 - type: nauc_recall_at_3_diff1 value: 45.06971180999365 - type: nauc_recall_at_3_max value: 23.6358915159218 - type: nauc_recall_at_3_std value: 6.198234444102802 - type: nauc_recall_at_5_diff1 value: 39.43842818627392 - type: nauc_recall_at_5_max value: 21.623592109687596 - type: nauc_recall_at_5_std value: 8.71834830271761 - type: ndcg_at_1 value: 41.256 - type: ndcg_at_10 value: 36.217 - type: ndcg_at_100 value: 40.422000000000004 - type: ndcg_at_1000 value: 42.762 - type: ndcg_at_20 value: 37.801 - type: ndcg_at_3 value: 32.275999999999996 - type: ndcg_at_5 value: 34.184 - type: precision_at_1 value: 41.256 - type: precision_at_10 value: 7.838000000000001 - type: precision_at_100 value: 1.119 - type: precision_at_1000 value: 0.14300000000000002 - type: precision_at_20 value: 4.429 - type: precision_at_3 value: 20.207 - type: precision_at_5 value: 13.636999999999999 - type: recall_at_1 value: 20.628 - type: recall_at_10 value: 39.190000000000005 - type: recall_at_100 value: 55.962 - type: recall_at_1000 
value: 71.56700000000001 - type: recall_at_20 value: 44.288 - type: recall_at_3 value: 30.311 - type: recall_at_5 value: 34.092 - task: type: Classification dataset: name: MTEB ImdbClassification (default) type: mteb/imdb config: default split: test revision: 3d86128a09e091d6018b6d26cad27f2739fc2db7 metrics: - type: accuracy value: 70.78 - type: ap value: 65.09281598781793 - type: ap_weighted value: 65.09281598781793 - type: f1 value: 70.56498155979408 - type: f1_weighted value: 70.56498155979408 - type: main_score value: 70.78 - task: type: Retrieval dataset: name: MTEB MSMARCO (default) type: mteb/msmarco config: default split: test revision: c5a29a104738b98a9e76336939199e264163d4a0 metrics: - type: main_score value: 34.981 - type: map_at_1 value: 0.9369999999999999 - type: map_at_10 value: 6.105 - type: map_at_100 value: 16.573 - type: map_at_1000 value: 20.952 - type: map_at_20 value: 9.495000000000001 - type: map_at_3 value: 2.429 - type: map_at_5 value: 3.7199999999999998 - type: mrr_at_1 value: 55.81395348837209 - type: mrr_at_10 value: 68.06201550387597 - type: mrr_at_100 value: 68.1915571731129 - type: mrr_at_1000 value: 68.20171255038517 - type: mrr_at_20 value: 68.06201550387597 - type: mrr_at_3 value: 65.89147286821705 - type: mrr_at_5 value: 67.05426356589147 - type: nauc_map_at_1000_diff1 value: 18.395978949265306 - type: nauc_map_at_1000_max value: 65.4845955483722 - type: nauc_map_at_1000_std value: 60.01425674651855 - type: nauc_map_at_100_diff1 value: 17.66459171040137 - type: nauc_map_at_100_max value: 56.91214775388199 - type: nauc_map_at_100_std value: 51.26999006986676 - type: nauc_map_at_10_diff1 value: 16.954292128521953 - type: nauc_map_at_10_max value: 29.470502786246144 - type: nauc_map_at_10_std value: 26.609751637393327 - type: nauc_map_at_1_diff1 value: 10.947697022780028 - type: nauc_map_at_1_max value: 11.333211449460881 - type: nauc_map_at_1_std value: 19.475048420924633 - type: nauc_map_at_20_diff1 value: 13.788525799384063 - type: nauc_map_at_20_max value: 36.86668066777578 - type: nauc_map_at_20_std value: 31.64971965701265 - type: nauc_map_at_3_diff1 value: 17.859630126844696 - type: nauc_map_at_3_max value: 21.46834280704547 - type: nauc_map_at_3_std value: 21.076387895251823 - type: nauc_map_at_5_diff1 value: 20.17441650295119 - type: nauc_map_at_5_max value: 24.878188082696866 - type: nauc_map_at_5_std value: 25.307502719861176 - type: nauc_mrr_at_1000_diff1 value: 14.192749126463891 - type: nauc_mrr_at_1000_max value: 52.54526357757101 - type: nauc_mrr_at_1000_std value: 44.496694053499596 - type: nauc_mrr_at_100_diff1 value: 14.215939043892334 - type: nauc_mrr_at_100_max value: 52.564251294672225 - type: nauc_mrr_at_100_std value: 44.51890218594217 - type: nauc_mrr_at_10_diff1 value: 14.433120969285195 - type: nauc_mrr_at_10_max value: 52.78365722715205 - type: nauc_mrr_at_10_std value: 44.72011559301776 - type: nauc_mrr_at_1_diff1 value: 4.7355957804700415 - type: nauc_mrr_at_1_max value: 39.93352486009351 - type: nauc_mrr_at_1_std value: 39.55801119967461 - type: nauc_mrr_at_20_diff1 value: 14.433120969285195 - type: nauc_mrr_at_20_max value: 52.78365722715205 - type: nauc_mrr_at_20_std value: 44.72011559301776 - type: nauc_mrr_at_3_diff1 value: 13.11183382637074 - type: nauc_mrr_at_3_max value: 51.12370908328734 - type: nauc_mrr_at_3_std value: 40.238401804460075 - type: nauc_mrr_at_5_diff1 value: 13.179254658692855 - type: nauc_mrr_at_5_max value: 53.38265101836388 - type: nauc_mrr_at_5_std value: 44.541370972177624 - type: 
nauc_ndcg_at_1000_diff1 value: 21.69587945916941 - type: nauc_ndcg_at_1000_max value: 63.37066645313249 - type: nauc_ndcg_at_1000_std value: 62.97303091219909 - type: nauc_ndcg_at_100_diff1 value: 14.796314010328851 - type: nauc_ndcg_at_100_max value: 58.71101997436683 - type: nauc_ndcg_at_100_std value: 56.81420228421644 - type: nauc_ndcg_at_10_diff1 value: 3.194403093296008 - type: nauc_ndcg_at_10_max value: 48.55754387196878 - type: nauc_ndcg_at_10_std value: 47.48615570741263 - type: nauc_ndcg_at_1_diff1 value: -6.148169734658873 - type: nauc_ndcg_at_1_max value: 25.556355503841665 - type: nauc_ndcg_at_1_std value: 21.48805389151005 - type: nauc_ndcg_at_20_diff1 value: 4.461683170351035 - type: nauc_ndcg_at_20_max value: 56.88294190421313 - type: nauc_ndcg_at_20_std value: 51.93821404537562 - type: nauc_ndcg_at_3_diff1 value: -2.861880240597804 - type: nauc_ndcg_at_3_max value: 41.33450475096539 - type: nauc_ndcg_at_3_std value: 37.27470370159716 - type: nauc_ndcg_at_5_diff1 value: 0.08149020695323854 - type: nauc_ndcg_at_5_max value: 46.722954751612264 - type: nauc_ndcg_at_5_std value: 44.665247293303416 - type: nauc_precision_at_1000_diff1 value: 6.514642381748156 - type: nauc_precision_at_1000_max value: 54.61143553569596 - type: nauc_precision_at_1000_std value: 51.84636945565138 - type: nauc_precision_at_100_diff1 value: 9.181266993927007 - type: nauc_precision_at_100_max value: 63.29553111429812 - type: nauc_precision_at_100_std value: 59.013060721871035 - type: nauc_precision_at_10_diff1 value: 16.062673027273505 - type: nauc_precision_at_10_max value: 64.85826828536602 - type: nauc_precision_at_10_std value: 58.476222375984 - type: nauc_precision_at_1_diff1 value: 4.7355957804700415 - type: nauc_precision_at_1_max value: 39.93352486009351 - type: nauc_precision_at_1_std value: 39.55801119967461 - type: nauc_precision_at_20_diff1 value: 12.061096674017728 - type: nauc_precision_at_20_max value: 66.81322466200473 - type: nauc_precision_at_20_std value: 58.18606533749746 - type: nauc_precision_at_3_diff1 value: 9.10289433878097 - type: nauc_precision_at_3_max value: 61.00901833818042 - type: nauc_precision_at_3_std value: 52.94626237786338 - type: nauc_precision_at_5_diff1 value: 13.765083369324818 - type: nauc_precision_at_5_max value: 67.0735717931603 - type: nauc_precision_at_5_std value: 60.160759158192334 - type: nauc_recall_at_1000_diff1 value: 33.378885488094184 - type: nauc_recall_at_1000_max value: 58.97167459966026 - type: nauc_recall_at_1000_std value: 59.59218645358476 - type: nauc_recall_at_100_diff1 value: 25.1307767949282 - type: nauc_recall_at_100_max value: 48.29698220976826 - type: nauc_recall_at_100_std value: 44.76527467601765 - type: nauc_recall_at_10_diff1 value: 21.012536607264714 - type: nauc_recall_at_10_max value: 21.719714919287135 - type: nauc_recall_at_10_std value: 18.503987452436643 - type: nauc_recall_at_1_diff1 value: 10.947697022780028 - type: nauc_recall_at_1_max value: 11.333211449460881 - type: nauc_recall_at_1_std value: 19.475048420924633 - type: nauc_recall_at_20_diff1 value: 14.221666924930961 - type: nauc_recall_at_20_max value: 30.83326629354958 - type: nauc_recall_at_20_std value: 25.419400751031635 - type: nauc_recall_at_3_diff1 value: 19.488515137385438 - type: nauc_recall_at_3_max value: 18.682366339227507 - type: nauc_recall_at_3_std value: 14.801487977327957 - type: nauc_recall_at_5_diff1 value: 21.493404372645262 - type: nauc_recall_at_5_max value: 22.470910257369972 - type: nauc_recall_at_5_std value: 20.91789333035049 - type: 
ndcg_at_1 value: 36.047000000000004 - type: ndcg_at_10 value: 34.981 - type: ndcg_at_100 value: 33.928000000000004 - type: ndcg_at_1000 value: 42.553999999999995 - type: ndcg_at_20 value: 33.768 - type: ndcg_at_3 value: 35.477 - type: ndcg_at_5 value: 35.54 - type: precision_at_1 value: 55.814 - type: precision_at_10 value: 46.744 - type: precision_at_100 value: 22.721 - type: precision_at_1000 value: 4.781 - type: precision_at_20 value: 40.465 - type: precision_at_3 value: 52.713 - type: precision_at_5 value: 51.163000000000004 - type: recall_at_1 value: 0.9369999999999999 - type: recall_at_10 value: 7.921 - type: recall_at_100 value: 28.903000000000002 - type: recall_at_1000 value: 53.691 - type: recall_at_20 value: 12.745000000000001 - type: recall_at_3 value: 2.8240000000000003 - type: recall_at_5 value: 4.476999999999999 - task: type: Classification dataset: name: MTEB MTOPDomainClassification (en) type: mteb/mtop_domain config: en split: test revision: d80d48c1eb48d3562165c59d59d0034df9fff0bf metrics: - type: accuracy value: 88.95576835385319 - type: f1 value: 88.06364678376042 - type: f1_weighted value: 89.00721562093213 - type: main_score value: 88.95576835385319 - task: type: Classification dataset: name: MTEB MTOPIntentClassification (en) type: mteb/mtop_intent config: en split: test revision: ae001d0e6b1228650b7bd1c2c65fb50ad11a8aba metrics: - type: accuracy value: 56.99726402188783 - type: f1 value: 38.19916053247397 - type: f1_weighted value: 59.96788951671549 - type: main_score value: 56.99726402188783 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (en) type: mteb/amazon_massive_intent config: en split: test revision: 4672e20407010da34463acc759c162ca9734bca6 metrics: - type: accuracy value: 63.79287155346336 - type: f1 value: 61.634629394462934 - type: f1_weighted value: 62.567311481126055 - type: main_score value: 63.79287155346336 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (en) type: mteb/amazon_massive_scenario config: en split: test revision: fad2c6e8459f9e1c45d9315f4953d921437d70f8 metrics: - type: accuracy value: 70.30934767989241 - type: f1 value: 68.77914761769517 - type: f1_weighted value: 70.1128179307388 - type: main_score value: 70.30934767989241 - task: type: Clustering dataset: name: MTEB MedrxivClusteringP2P (default) type: mteb/medrxiv-clustering-p2p config: default split: test revision: e7a26af6f3ae46b30dde8737f02c07b1505bcc73 metrics: - type: main_score value: 27.61734940907637 - type: v_measure value: 27.61734940907637 - type: v_measure_std value: 1.2248100208316097 - task: type: Clustering dataset: name: MTEB MedrxivClusteringS2S (default) type: mteb/medrxiv-clustering-s2s config: default split: test revision: 35191c8c0dca72d8ff3efcd72aa802307d469663 metrics: - type: main_score value: 23.802943866708308 - type: v_measure value: 23.802943866708308 - type: v_measure_std value: 1.4975518910969763 - task: type: Reranking dataset: name: MTEB MindSmallReranking (default) type: mteb/mind_small config: default split: test revision: 59042f120c80e8afa9cdbb224f67076cec0fc9a7 metrics: - type: main_score value: 29.431722284942175 - type: map value: 29.431722284942175 - type: mrr value: 30.207239990924332 - type: nAUC_map_diff1 value: 8.996546748314882 - type: nAUC_map_max value: -23.177815249478726 - type: nAUC_map_std value: -8.953694065964015 - type: nAUC_mrr_diff1 value: 9.247690774332192 - type: nAUC_mrr_max value: -17.42779158552557 - type: nAUC_mrr_std value: -5.997215692334967 - task: type: 
Retrieval dataset: name: MTEB NFCorpus (default) type: mteb/nfcorpus config: default split: test revision: ec0fa4fe99da2ff19ca1214b7966684033a58814 metrics: - type: main_score value: 24.267 - type: map_at_1 value: 3.479 - type: map_at_10 value: 7.603 - type: map_at_100 value: 9.725999999999999 - type: map_at_1000 value: 10.84 - type: map_at_20 value: 8.458 - type: map_at_3 value: 5.844 - type: map_at_5 value: 6.732 - type: mrr_at_1 value: 33.746130030959755 - type: mrr_at_10 value: 43.515897587105016 - type: mrr_at_100 value: 44.1900925310943 - type: mrr_at_1000 value: 44.248355412773655 - type: mrr_at_20 value: 43.868459509915866 - type: mrr_at_3 value: 41.74406604747161 - type: mrr_at_5 value: 42.82765737874097 - type: nauc_map_at_1000_diff1 value: 34.88971488841416 - type: nauc_map_at_1000_max value: 31.233839968277195 - type: nauc_map_at_1000_std value: 17.992857492799814 - type: nauc_map_at_100_diff1 value: 36.76693324709909 - type: nauc_map_at_100_max value: 29.86086979425915 - type: nauc_map_at_100_std value: 13.839419605590217 - type: nauc_map_at_10_diff1 value: 41.84259867098214 - type: nauc_map_at_10_max value: 25.879197474145045 - type: nauc_map_at_10_std value: 5.172621372587683 - type: nauc_map_at_1_diff1 value: 59.30631217950276 - type: nauc_map_at_1_max value: 20.33548433428363 - type: nauc_map_at_1_std value: -1.8217254079917093 - type: nauc_map_at_20_diff1 value: 38.95414455683049 - type: nauc_map_at_20_max value: 26.987123257006363 - type: nauc_map_at_20_std value: 8.70109669516395 - type: nauc_map_at_3_diff1 value: 47.18504542973307 - type: nauc_map_at_3_max value: 21.706151469833202 - type: nauc_map_at_3_std value: 0.8205050181794802 - type: nauc_map_at_5_diff1 value: 45.415931092144476 - type: nauc_map_at_5_max value: 23.366427326413234 - type: nauc_map_at_5_std value: 2.036343948136038 - type: nauc_mrr_at_1000_diff1 value: 34.09352814360173 - type: nauc_mrr_at_1000_max value: 36.57744406738573 - type: nauc_mrr_at_1000_std value: 18.874642200828255 - type: nauc_mrr_at_100_diff1 value: 34.07606233752646 - type: nauc_mrr_at_100_max value: 36.570920987632604 - type: nauc_mrr_at_100_std value: 18.90704866545748 - type: nauc_mrr_at_10_diff1 value: 33.86749261732675 - type: nauc_mrr_at_10_max value: 36.53445713485045 - type: nauc_mrr_at_10_std value: 18.72635222657426 - type: nauc_mrr_at_1_diff1 value: 38.310753456104415 - type: nauc_mrr_at_1_max value: 32.080433604684444 - type: nauc_mrr_at_1_std value: 10.76705379557832 - type: nauc_mrr_at_20_diff1 value: 34.05889362360272 - type: nauc_mrr_at_20_max value: 36.539902847898894 - type: nauc_mrr_at_20_std value: 18.829170969376136 - type: nauc_mrr_at_3_diff1 value: 34.661230693226 - type: nauc_mrr_at_3_max value: 35.27494037957078 - type: nauc_mrr_at_3_std value: 16.799715396839538 - type: nauc_mrr_at_5_diff1 value: 34.30568391918026 - type: nauc_mrr_at_5_max value: 36.31513238612551 - type: nauc_mrr_at_5_std value: 18.248879043938977 - type: nauc_ndcg_at_1000_diff1 value: 28.625594076978317 - type: nauc_ndcg_at_1000_max value: 39.10317925519372 - type: nauc_ndcg_at_1000_std value: 28.285055860454257 - type: nauc_ndcg_at_100_diff1 value: 27.620568325357986 - type: nauc_ndcg_at_100_max value: 34.32867733567831 - type: nauc_ndcg_at_100_std value: 25.103257804738867 - type: nauc_ndcg_at_10_diff1 value: 24.527566945282576 - type: nauc_ndcg_at_10_max value: 32.19051221282665 - type: nauc_ndcg_at_10_std value: 25.403501921327432 - type: nauc_ndcg_at_1_diff1 value: 38.95386802348185 - type: nauc_ndcg_at_1_max value: 
30.134605059752644 - type: nauc_ndcg_at_1_std value: 11.904644683131 - type: nauc_ndcg_at_20_diff1 value: 25.422544698266798 - type: nauc_ndcg_at_20_max value: 31.85394200124836 - type: nauc_ndcg_at_20_std value: 26.925279769256523 - type: nauc_ndcg_at_3_diff1 value: 27.968874988258573 - type: nauc_ndcg_at_3_max value: 30.93696431950224 - type: nauc_ndcg_at_3_std value: 18.551823245893114 - type: nauc_ndcg_at_5_diff1 value: 25.722349682774233 - type: nauc_ndcg_at_5_max value: 32.29294830500251 - type: nauc_ndcg_at_5_std value: 21.309663190563718 - type: nauc_precision_at_1000_diff1 value: -7.466934392543785 - type: nauc_precision_at_1000_max value: 17.534662065944236 - type: nauc_precision_at_1000_std value: 43.86335465977071 - type: nauc_precision_at_100_diff1 value: -2.073530455550674 - type: nauc_precision_at_100_max value: 26.51626141328235 - type: nauc_precision_at_100_std value: 47.02741717034574 - type: nauc_precision_at_10_diff1 value: 6.717006995188633 - type: nauc_precision_at_10_max value: 32.738691529253494 - type: nauc_precision_at_10_std value: 35.80103442917034 - type: nauc_precision_at_1_diff1 value: 38.310753456104415 - type: nauc_precision_at_1_max value: 32.080433604684444 - type: nauc_precision_at_1_std value: 10.76705379557832 - type: nauc_precision_at_20_diff1 value: 2.745832502363386 - type: nauc_precision_at_20_max value: 30.954145690157688 - type: nauc_precision_at_20_std value: 41.74795596694651 - type: nauc_precision_at_3_diff1 value: 20.04271494210498 - type: nauc_precision_at_3_max value: 32.49798591360355 - type: nauc_precision_at_3_std value: 22.433174666547337 - type: nauc_precision_at_5_diff1 value: 13.559244763754297 - type: nauc_precision_at_5_max value: 34.29174467545541 - type: nauc_precision_at_5_std value: 27.67088510253159 - type: nauc_recall_at_1000_diff1 value: 14.406899781864585 - type: nauc_recall_at_1000_max value: 18.63293041982341 - type: nauc_recall_at_1000_std value: 14.873113563587054 - type: nauc_recall_at_100_diff1 value: 20.276630820341023 - type: nauc_recall_at_100_max value: 20.74130868375551 - type: nauc_recall_at_100_std value: 14.253807947296465 - type: nauc_recall_at_10_diff1 value: 32.131322772361194 - type: nauc_recall_at_10_max value: 21.834619003317645 - type: nauc_recall_at_10_std value: 5.111047982154726 - type: nauc_recall_at_1_diff1 value: 59.30631217950276 - type: nauc_recall_at_1_max value: 20.33548433428363 - type: nauc_recall_at_1_std value: -1.8217254079917093 - type: nauc_recall_at_20_diff1 value: 29.009526186873646 - type: nauc_recall_at_20_max value: 19.222693262075214 - type: nauc_recall_at_20_std value: 8.263428180065297 - type: nauc_recall_at_3_diff1 value: 38.428506196942266 - type: nauc_recall_at_3_max value: 18.92885903756039 - type: nauc_recall_at_3_std value: 2.2767688747391106 - type: nauc_recall_at_5_diff1 value: 35.93597428489607 - type: nauc_recall_at_5_max value: 19.591607144107787 - type: nauc_recall_at_5_std value: 2.110828447844176 - type: ndcg_at_1 value: 31.424000000000003 - type: ndcg_at_10 value: 24.267 - type: ndcg_at_100 value: 22.416 - type: ndcg_at_1000 value: 31.165 - type: ndcg_at_20 value: 22.698 - type: ndcg_at_3 value: 28.349999999999998 - type: ndcg_at_5 value: 26.596999999999998 - type: precision_at_1 value: 33.745999999999995 - type: precision_at_10 value: 18.173000000000002 - type: precision_at_100 value: 6.142 - type: precision_at_1000 value: 1.856 - type: precision_at_20 value: 13.808000000000002 - type: precision_at_3 value: 27.141 - type: precision_at_5 value: 22.91 - type: 
recall_at_1 value: 3.479 - type: recall_at_10 value: 10.838000000000001 - type: recall_at_100 value: 23.817 - type: recall_at_1000 value: 54.910000000000004 - type: recall_at_20 value: 14.201 - type: recall_at_3 value: 7.236 - type: recall_at_5 value: 9.003 - task: type: Retrieval dataset: name: MTEB NQ (default) type: mteb/nq config: default split: test revision: b774495ed302d8c44a3a7ea25c90dbce03968f31 metrics: - type: main_score value: 19.543 - type: map_at_1 value: 8.413 - type: map_at_10 value: 15.137 - type: map_at_100 value: 16.393 - type: map_at_1000 value: 16.492 - type: map_at_20 value: 15.827 - type: map_at_3 value: 12.584999999999999 - type: map_at_5 value: 13.963000000000001 - type: mrr_at_1 value: 9.73348783314021 - type: mrr_at_10 value: 16.79895712630359 - type: mrr_at_100 value: 17.96527488497497 - type: mrr_at_1000 value: 18.049284621380956 - type: mrr_at_20 value: 17.456541969883244 - type: mrr_at_3 value: 14.2429509463113 - type: mrr_at_5 value: 15.636346079567373 - type: nauc_map_at_1000_diff1 value: 18.819971639310904 - type: nauc_map_at_1000_max value: 13.814947350680912 - type: nauc_map_at_1000_std value: 2.521914759184715 - type: nauc_map_at_100_diff1 value: 18.814255883152295 - type: nauc_map_at_100_max value: 13.784098474987728 - type: nauc_map_at_100_std value: 2.463386644603925 - type: nauc_map_at_10_diff1 value: 18.859741700546 - type: nauc_map_at_10_max value: 13.200112454161522 - type: nauc_map_at_10_std value: 1.2838729142015952 - type: nauc_map_at_1_diff1 value: 22.792911666175435 - type: nauc_map_at_1_max value: 9.420966909430586 - type: nauc_map_at_1_std value: -2.177707391834426 - type: nauc_map_at_20_diff1 value: 18.857585870077603 - type: nauc_map_at_20_max value: 13.494371000020585 - type: nauc_map_at_20_std value: 1.7987081767888724 - type: nauc_map_at_3_diff1 value: 20.3919043114244 - type: nauc_map_at_3_max value: 11.229233328712159 - type: nauc_map_at_3_std value: -0.38627708043707826 - type: nauc_map_at_5_diff1 value: 19.354241266183816 - type: nauc_map_at_5_max value: 12.050995012138287 - type: nauc_map_at_5_std value: 0.4619900683963445 - type: nauc_mrr_at_1000_diff1 value: 17.44597143162577 - type: nauc_mrr_at_1000_max value: 12.99325734801233 - type: nauc_mrr_at_1000_std value: 3.843471729334042 - type: nauc_mrr_at_100_diff1 value: 17.435646674940784 - type: nauc_mrr_at_100_max value: 12.977733602157626 - type: nauc_mrr_at_100_std value: 3.819688827654704 - type: nauc_mrr_at_10_diff1 value: 17.366258247556274 - type: nauc_mrr_at_10_max value: 12.525863095955028 - type: nauc_mrr_at_10_std value: 2.9586217333067033 - type: nauc_mrr_at_1_diff1 value: 21.181200992092933 - type: nauc_mrr_at_1_max value: 9.071174422547715 - type: nauc_mrr_at_1_std value: 0.37666341313223156 - type: nauc_mrr_at_20_diff1 value: 17.47842029246494 - type: nauc_mrr_at_20_max value: 12.782728137865854 - type: nauc_mrr_at_20_std value: 3.335207400639897 - type: nauc_mrr_at_3_diff1 value: 18.51145002403263 - type: nauc_mrr_at_3_max value: 10.835289485126742 - type: nauc_mrr_at_3_std value: 1.9317890085586098 - type: nauc_mrr_at_5_diff1 value: 17.85072852768249 - type: nauc_mrr_at_5_max value: 11.48513938150474 - type: nauc_mrr_at_5_std value: 2.42459300983239 - type: nauc_ndcg_at_1000_diff1 value: 16.90906471124972 - type: nauc_ndcg_at_1000_max value: 18.10309890125217 - type: nauc_ndcg_at_1000_std value: 9.531587494208333 - type: nauc_ndcg_at_100_diff1 value: 16.794610031459452 - type: nauc_ndcg_at_100_max value: 17.320423121617587 - type: nauc_ndcg_at_100_std value: 
8.36089871892644 - type: nauc_ndcg_at_10_diff1 value: 16.9238328483549 - type: nauc_ndcg_at_10_max value: 15.003898384476175 - type: nauc_ndcg_at_10_std value: 3.220068514580869 - type: nauc_ndcg_at_1_diff1 value: 21.181200992092933 - type: nauc_ndcg_at_1_max value: 9.071174422547715 - type: nauc_ndcg_at_1_std value: 0.37666341313223156 - type: nauc_ndcg_at_20_diff1 value: 17.122783032672636 - type: nauc_ndcg_at_20_max value: 15.811529036192868 - type: nauc_ndcg_at_20_std value: 4.638881062044276 - type: nauc_ndcg_at_3_diff1 value: 19.397651629456085 - type: nauc_ndcg_at_3_max value: 11.519185092964664 - type: nauc_ndcg_at_3_std value: 0.5852664941054009 - type: nauc_ndcg_at_5_diff1 value: 17.836092374281833 - type: nauc_ndcg_at_5_max value: 12.692159310256345 - type: nauc_ndcg_at_5_std value: 1.7356004993081944 - type: nauc_precision_at_1000_diff1 value: 3.073453832047264 - type: nauc_precision_at_1000_max value: 23.790855697865958 - type: nauc_precision_at_1000_std value: 32.57511127212919 - type: nauc_precision_at_100_diff1 value: 9.127444700503846 - type: nauc_precision_at_100_max value: 22.71156118580008 - type: nauc_precision_at_100_std value: 24.63648530454141 - type: nauc_precision_at_10_diff1 value: 13.02401021030829 - type: nauc_precision_at_10_max value: 18.85263386483255 - type: nauc_precision_at_10_std value: 8.373513612599647 - type: nauc_precision_at_1_diff1 value: 21.181200992092933 - type: nauc_precision_at_1_max value: 9.071174422547715 - type: nauc_precision_at_1_std value: 0.37666341313223156 - type: nauc_precision_at_20_diff1 value: 12.975989332948448 - type: nauc_precision_at_20_max value: 20.296858370304385 - type: nauc_precision_at_20_std value: 12.119876359299383 - type: nauc_precision_at_3_diff1 value: 17.130641156396027 - type: nauc_precision_at_3_max value: 12.010571872098485 - type: nauc_precision_at_3_std value: 2.637465881798806 - type: nauc_precision_at_5_diff1 value: 14.960326184287629 - type: nauc_precision_at_5_max value: 14.264819044499205 - type: nauc_precision_at_5_std value: 4.5445140864787215 - type: nauc_recall_at_1000_diff1 value: 11.322486975456016 - type: nauc_recall_at_1000_max value: 42.74305283200241 - type: nauc_recall_at_1000_std value: 47.78794764298061 - type: nauc_recall_at_100_diff1 value: 12.242221079259041 - type: nauc_recall_at_100_max value: 26.918744103646013 - type: nauc_recall_at_100_std value: 24.541980019505186 - type: nauc_recall_at_10_diff1 value: 13.38045827515169 - type: nauc_recall_at_10_max value: 18.545456163809533 - type: nauc_recall_at_10_std value: 5.734945625849404 - type: nauc_recall_at_1_diff1 value: 22.792911666175435 - type: nauc_recall_at_1_max value: 9.420966909430586 - type: nauc_recall_at_1_std value: -2.177707391834426 - type: nauc_recall_at_20_diff1 value: 14.133329746281683 - type: nauc_recall_at_20_max value: 20.394153554260118 - type: nauc_recall_at_20_std value: 9.229321407977622 - type: nauc_recall_at_3_diff1 value: 18.230047011254864 - type: nauc_recall_at_3_max value: 12.217461047044784 - type: nauc_recall_at_3_std value: 1.0395060720237228 - type: nauc_recall_at_5_diff1 value: 14.947190921163273 - type: nauc_recall_at_5_max value: 13.844816353548604 - type: nauc_recall_at_5_std value: 2.9621844586841086 - type: ndcg_at_1 value: 9.733 - type: ndcg_at_10 value: 19.543 - type: ndcg_at_100 value: 25.965 - type: ndcg_at_1000 value: 28.663 - type: ndcg_at_20 value: 21.985 - type: ndcg_at_3 value: 14.308000000000002 - type: ndcg_at_5 value: 16.771 - type: precision_at_1 value: 9.733 - type: precision_at_10 
value: 3.7249999999999996 - type: precision_at_100 value: 0.739 - type: precision_at_1000 value: 0.1 - type: precision_at_20 value: 2.4330000000000003 - type: precision_at_3 value: 6.856 - type: precision_at_5 value: 5.475 - type: recall_at_1 value: 8.413 - type: recall_at_10 value: 31.668000000000003 - type: recall_at_100 value: 61.551 - type: recall_at_1000 value: 82.228 - type: recall_at_20 value: 40.888999999999996 - type: recall_at_3 value: 17.669 - type: recall_at_5 value: 23.488999999999997 - task: type: Retrieval dataset: name: MTEB QuoraRetrieval (default) type: mteb/quora config: default split: test revision: e4e08e0b7dbe3c8700f0daef558ff32256715259 metrics: - type: main_score value: 80.598 - type: map_at_1 value: 63.532 - type: map_at_10 value: 76.07300000000001 - type: map_at_100 value: 76.863 - type: map_at_1000 value: 76.896 - type: map_at_20 value: 76.575 - type: map_at_3 value: 73.075 - type: map_at_5 value: 74.888 - type: mrr_at_1 value: 73.11 - type: mrr_at_10 value: 80.13760714285678 - type: mrr_at_100 value: 80.40676931635143 - type: mrr_at_1000 value: 80.413857041773 - type: mrr_at_20 value: 80.33569450368124 - type: mrr_at_3 value: 78.73166666666627 - type: mrr_at_5 value: 79.60316666666607 - type: nauc_map_at_1000_diff1 value: 71.76748518946404 - type: nauc_map_at_1000_max value: 37.52091562623074 - type: nauc_map_at_1000_std value: -19.886772833711106 - type: nauc_map_at_100_diff1 value: 71.77392469494623 - type: nauc_map_at_100_max value: 37.51305402355471 - type: nauc_map_at_100_std value: -19.90950133564633 - type: nauc_map_at_10_diff1 value: 71.78435718469383 - type: nauc_map_at_10_max value: 37.12859151143304 - type: nauc_map_at_10_std value: -20.6727975668906 - type: nauc_map_at_1_diff1 value: 74.16329762399023 - type: nauc_map_at_1_max value: 30.710315707498864 - type: nauc_map_at_1_std value: -19.3193474040897 - type: nauc_map_at_20_diff1 value: 71.8048608565351 - type: nauc_map_at_20_max value: 37.437936254957336 - type: nauc_map_at_20_std value: -20.256332267213164 - type: nauc_map_at_3_diff1 value: 72.15934361454754 - type: nauc_map_at_3_max value: 35.34630080626579 - type: nauc_map_at_3_std value: -22.03571060362441 - type: nauc_map_at_5_diff1 value: 71.83699898564598 - type: nauc_map_at_5_max value: 36.479498983192975 - type: nauc_map_at_5_std value: -21.231304270451062 - type: nauc_mrr_at_1000_diff1 value: 72.88897169606878 - type: nauc_mrr_at_1000_max value: 40.200221349285634 - type: nauc_mrr_at_1000_std value: -17.633375591506123 - type: nauc_mrr_at_100_diff1 value: 72.88918562563104 - type: nauc_mrr_at_100_max value: 40.20508375617468 - type: nauc_mrr_at_100_std value: -17.62754237516005 - type: nauc_mrr_at_10_diff1 value: 72.78722143722388 - type: nauc_mrr_at_10_max value: 40.26493516347653 - type: nauc_mrr_at_10_std value: -17.591516046092213 - type: nauc_mrr_at_1_diff1 value: 74.20323111992924 - type: nauc_mrr_at_1_max value: 39.1888925247388 - type: nauc_mrr_at_1_std value: -17.041083591080856 - type: nauc_mrr_at_20_diff1 value: 72.87614719969847 - type: nauc_mrr_at_20_max value: 40.25187245577547 - type: nauc_mrr_at_20_std value: -17.623643078270213 - type: nauc_mrr_at_3_diff1 value: 72.70424133205663 - type: nauc_mrr_at_3_max value: 40.015103745774944 - type: nauc_mrr_at_3_std value: -18.296912082298693 - type: nauc_mrr_at_5_diff1 value: 72.6695462203408 - type: nauc_mrr_at_5_max value: 40.166677547198724 - type: nauc_mrr_at_5_std value: -17.836669429879553 - type: nauc_ndcg_at_1000_diff1 value: 71.7014600627096 - type: nauc_ndcg_at_1000_max 
value: 39.17528447849729 - type: nauc_ndcg_at_1000_std value: -18.169144412803025 - type: nauc_ndcg_at_100_diff1 value: 71.72812292491562 - type: nauc_ndcg_at_100_max value: 39.178065817466866 - type: nauc_ndcg_at_100_std value: -17.98857148420824 - type: nauc_ndcg_at_10_diff1 value: 71.22490342106018 - type: nauc_ndcg_at_10_max value: 38.58976910658222 - type: nauc_ndcg_at_10_std value: -19.3807889122846 - type: nauc_ndcg_at_1_diff1 value: 74.20323111992924 - type: nauc_ndcg_at_1_max value: 39.18366557965937 - type: nauc_ndcg_at_1_std value: -16.979563433712343 - type: nauc_ndcg_at_20_diff1 value: 71.59416957115776 - type: nauc_ndcg_at_20_max value: 39.11048553178983 - type: nauc_ndcg_at_20_std value: -18.913452979338476 - type: nauc_ndcg_at_3_diff1 value: 71.15596154191027 - type: nauc_ndcg_at_3_max value: 37.36564154714553 - type: nauc_ndcg_at_3_std value: -20.721815190390565 - type: nauc_ndcg_at_5_diff1 value: 71.0047395584928 - type: nauc_ndcg_at_5_max value: 37.95479899642812 - type: nauc_ndcg_at_5_std value: -20.008045920279887 - type: nauc_precision_at_1000_diff1 value: -36.79287717727177 - type: nauc_precision_at_1000_max value: -4.853042765778535 - type: nauc_precision_at_1000_std value: 21.89700327903914 - type: nauc_precision_at_100_diff1 value: -33.803566917391024 - type: nauc_precision_at_100_max value: -2.343501157957199 - type: nauc_precision_at_100_std value: 21.03134251148425 - type: nauc_precision_at_10_diff1 value: -19.647078935128047 - type: nauc_precision_at_10_max value: 7.646163968592671 - type: nauc_precision_at_10_std value: 11.425640109742039 - type: nauc_precision_at_1_diff1 value: 74.20323111992924 - type: nauc_precision_at_1_max value: 39.18366557965937 - type: nauc_precision_at_1_std value: -16.979563433712343 - type: nauc_precision_at_20_diff1 value: -26.95360783576433 - type: nauc_precision_at_20_max value: 3.534889652498316 - type: nauc_precision_at_20_std value: 16.011941126119197 - type: nauc_precision_at_3_diff1 value: 7.80806721613657 - type: nauc_precision_at_3_max value: 18.93471456458755 - type: nauc_precision_at_3_std value: -2.3471793824170493 - type: nauc_precision_at_5_diff1 value: -7.187077136844068 - type: nauc_precision_at_5_max value: 13.710196203710806 - type: nauc_precision_at_5_std value: 5.029517000064198 - type: nauc_recall_at_1000_diff1 value: 55.29138658386572 - type: nauc_recall_at_1000_max value: 57.58368141138265 - type: nauc_recall_at_1000_std value: 33.353499745829765 - type: nauc_recall_at_100_diff1 value: 65.98407378542676 - type: nauc_recall_at_100_max value: 43.3437006049648 - type: nauc_recall_at_100_std value: 3.7556643837275345 - type: nauc_recall_at_10_diff1 value: 64.73552843826317 - type: nauc_recall_at_10_max value: 37.93061567923699 - type: nauc_recall_at_10_std value: -19.1098323242707 - type: nauc_recall_at_1_diff1 value: 74.16329762399023 - type: nauc_recall_at_1_max value: 30.710315707498864 - type: nauc_recall_at_1_std value: -19.3193474040897 - type: nauc_recall_at_20_diff1 value: 64.4507396763554 - type: nauc_recall_at_20_max value: 40.62914458603293 - type: nauc_recall_at_20_std value: -15.040711675139082 - type: nauc_recall_at_3_diff1 value: 67.8143518137102 - type: nauc_recall_at_3_max value: 33.649275891159945 - type: nauc_recall_at_3_std value: -24.400275123272163 - type: nauc_recall_at_5_diff1 value: 65.9405683463817 - type: nauc_recall_at_5_max value: 35.64051201738537 - type: nauc_recall_at_5_std value: -22.06335424061329 - type: ndcg_at_1 value: 73.11 - type: ndcg_at_10 value: 80.598 - type: 
ndcg_at_100 value: 82.75200000000001 - type: ndcg_at_1000 value: 83.145 - type: ndcg_at_20 value: 81.71300000000001 - type: ndcg_at_3 value: 77.025 - type: ndcg_at_5 value: 78.85 - type: precision_at_1 value: 73.11 - type: precision_at_10 value: 12.206999999999999 - type: precision_at_100 value: 1.459 - type: precision_at_1000 value: 0.155 - type: precision_at_20 value: 6.579 - type: precision_at_3 value: 33.36 - type: precision_at_5 value: 22.09 - type: recall_at_1 value: 63.532 - type: recall_at_10 value: 89.32600000000001 - type: recall_at_100 value: 97.35000000000001 - type: recall_at_1000 value: 99.613 - type: recall_at_20 value: 93.151 - type: recall_at_3 value: 79.074 - type: recall_at_5 value: 84.143 - task: type: Clustering dataset: name: MTEB RedditClustering (default) type: mteb/reddit-clustering config: default split: test revision: 24640382cdbf8abc73003fb0fa6d111a705499eb metrics: - type: main_score value: 39.5465127563479 - type: v_measure value: 39.5465127563479 - type: v_measure_std value: 5.038703300031419 - task: type: Clustering dataset: name: MTEB RedditClusteringP2P (default) type: mteb/reddit-clustering-p2p config: default split: test revision: 385e3cb46b4cfa89021f56c4380204149d0efe33 metrics: - type: main_score value: 47.07911795189491 - type: v_measure value: 47.07911795189491 - type: v_measure_std value: 11.546436135362846 - task: type: Retrieval dataset: name: MTEB SCIDOCS (default) type: mteb/scidocs config: default split: test revision: f8c2fcf00f625baaa80f62ec5bd9e1fff3b8ae88 metrics: - type: main_score value: 12.386999999999999 - type: map_at_1 value: 3.053 - type: map_at_10 value: 6.912999999999999 - type: map_at_100 value: 8.261000000000001 - type: map_at_1000 value: 8.530999999999999 - type: map_at_20 value: 7.566000000000001 - type: map_at_3 value: 5.094 - type: map_at_5 value: 5.997 - type: mrr_at_1 value: 15.0 - type: mrr_at_10 value: 22.795357142857135 - type: mrr_at_100 value: 24.007787966055577 - type: mrr_at_1000 value: 24.09964360060081 - type: mrr_at_20 value: 23.466190383404 - type: mrr_at_3 value: 20.100000000000012 - type: mrr_at_5 value: 21.685000000000006 - type: nauc_map_at_1000_diff1 value: 11.73412101608325 - type: nauc_map_at_1000_max value: 14.330449150895694 - type: nauc_map_at_1000_std value: 15.742095990011743 - type: nauc_map_at_100_diff1 value: 11.777038848684697 - type: nauc_map_at_100_max value: 14.104140826193404 - type: nauc_map_at_100_std value: 15.155771699462264 - type: nauc_map_at_10_diff1 value: 12.374060330916672 - type: nauc_map_at_10_max value: 11.856630361520313 - type: nauc_map_at_10_std value: 11.753665232073269 - type: nauc_map_at_1_diff1 value: 16.986085327339335 - type: nauc_map_at_1_max value: 12.246255844992572 - type: nauc_map_at_1_std value: 7.863450169503143 - type: nauc_map_at_20_diff1 value: 11.634858111388464 - type: nauc_map_at_20_max value: 13.108008262696513 - type: nauc_map_at_20_std value: 13.423455469499999 - type: nauc_map_at_3_diff1 value: 14.889445454705324 - type: nauc_map_at_3_max value: 11.572110481390013 - type: nauc_map_at_3_std value: 8.556136010622351 - type: nauc_map_at_5_diff1 value: 12.907309838627985 - type: nauc_map_at_5_max value: 11.000220583694968 - type: nauc_map_at_5_std value: 10.111376166991917 - type: nauc_mrr_at_1000_diff1 value: 14.963874100415397 - type: nauc_mrr_at_1000_max value: 13.495160823256164 - type: nauc_mrr_at_1000_std value: 11.28815345444998 - type: nauc_mrr_at_100_diff1 value: 14.97621893176082 - type: nauc_mrr_at_100_max value: 13.464936280105155 - type: 
nauc_mrr_at_100_std value: 11.305521958378108 - type: nauc_mrr_at_10_diff1 value: 14.956869421525884 - type: nauc_mrr_at_10_max value: 13.425685629657924 - type: nauc_mrr_at_10_std value: 10.767260180262618 - type: nauc_mrr_at_1_diff1 value: 16.83378691664147 - type: nauc_mrr_at_1_max value: 12.112287067835906 - type: nauc_mrr_at_1_std value: 8.418304606390475 - type: nauc_mrr_at_20_diff1 value: 14.917032940839656 - type: nauc_mrr_at_20_max value: 13.41755983642966 - type: nauc_mrr_at_20_std value: 11.11458079038555 - type: nauc_mrr_at_3_diff1 value: 15.214496970273089 - type: nauc_mrr_at_3_max value: 12.165871395179483 - type: nauc_mrr_at_3_std value: 9.980162064503286 - type: nauc_mrr_at_5_diff1 value: 14.835204244776087 - type: nauc_mrr_at_5_max value: 12.524956858818742 - type: nauc_mrr_at_5_std value: 10.099655249800849 - type: nauc_ndcg_at_1000_diff1 value: 10.764737128236437 - type: nauc_ndcg_at_1000_max value: 18.3469700109834 - type: nauc_ndcg_at_1000_std value: 23.22837765426608 - type: nauc_ndcg_at_100_diff1 value: 11.606245579895573 - type: nauc_ndcg_at_100_max value: 17.167157579603412 - type: nauc_ndcg_at_100_std value: 20.347909657378473 - type: nauc_ndcg_at_10_diff1 value: 12.394040285590439 - type: nauc_ndcg_at_10_max value: 13.388439287974505 - type: nauc_ndcg_at_10_std value: 13.188024533529397 - type: nauc_ndcg_at_1_diff1 value: 16.83378691664147 - type: nauc_ndcg_at_1_max value: 12.112287067835906 - type: nauc_ndcg_at_1_std value: 8.418304606390475 - type: nauc_ndcg_at_20_diff1 value: 11.212784095325706 - type: nauc_ndcg_at_20_max value: 15.185332617097233 - type: nauc_ndcg_at_20_std value: 16.087050160363443 - type: nauc_ndcg_at_3_diff1 value: 14.708471591387005 - type: nauc_ndcg_at_3_max value: 11.70756510699363 - type: nauc_ndcg_at_3_std value: 9.658612404132116 - type: nauc_ndcg_at_5_diff1 value: 13.123868466784149 - type: nauc_ndcg_at_5_max value: 11.60382600862464 - type: nauc_ndcg_at_5_std value: 10.625775061954277 - type: nauc_precision_at_1000_diff1 value: 3.608251418490512 - type: nauc_precision_at_1000_max value: 20.501537930519582 - type: nauc_precision_at_1000_std value: 34.4770607840569 - type: nauc_precision_at_100_diff1 value: 7.864853652134883 - type: nauc_precision_at_100_max value: 19.894334894038547 - type: nauc_precision_at_100_std value: 28.711783183330663 - type: nauc_precision_at_10_diff1 value: 9.605214553552692 - type: nauc_precision_at_10_max value: 14.347596155123817 - type: nauc_precision_at_10_std value: 16.242794843380032 - type: nauc_precision_at_1_diff1 value: 16.83378691664147 - type: nauc_precision_at_1_max value: 12.112287067835906 - type: nauc_precision_at_1_std value: 8.418304606390475 - type: nauc_precision_at_20_diff1 value: 6.9964985542924545 - type: nauc_precision_at_20_max value: 17.275243538199216 - type: nauc_precision_at_20_std value: 20.986245055691036 - type: nauc_precision_at_3_diff1 value: 13.995705983866177 - type: nauc_precision_at_3_max value: 11.391320470301181 - type: nauc_precision_at_3_std value: 10.151716783634907 - type: nauc_precision_at_5_diff1 value: 11.064867165700008 - type: nauc_precision_at_5_max value: 10.965289810519257 - type: nauc_precision_at_5_std value: 11.837752544253021 - type: nauc_recall_at_1000_diff1 value: 3.4118402840027118 - type: nauc_recall_at_1000_max value: 21.505334337938027 - type: nauc_recall_at_1000_std value: 34.87205826061254 - type: nauc_recall_at_100_diff1 value: 7.793188645900735 - type: nauc_recall_at_100_max value: 20.09269964020807 - type: nauc_recall_at_100_std value: 
28.838050639358375 - type: nauc_recall_at_10_diff1 value: 10.010288074812564 - type: nauc_recall_at_10_max value: 14.470333599080465 - type: nauc_recall_at_10_std value: 16.106977670704044 - type: nauc_recall_at_1_diff1 value: 16.986085327339335 - type: nauc_recall_at_1_max value: 12.246255844992572 - type: nauc_recall_at_1_std value: 7.863450169503143 - type: nauc_recall_at_20_diff1 value: 7.248991485381231 - type: nauc_recall_at_20_max value: 17.357162157871585 - type: nauc_recall_at_20_std value: 20.916649810908385 - type: nauc_recall_at_3_diff1 value: 14.190312777099356 - type: nauc_recall_at_3_max value: 11.494013846579504 - type: nauc_recall_at_3_std value: 9.871734511413411 - type: nauc_recall_at_5_diff1 value: 11.369318015463497 - type: nauc_recall_at_5_max value: 11.0867249382338 - type: nauc_recall_at_5_std value: 11.565786080587733 - type: ndcg_at_1 value: 15.0 - type: ndcg_at_10 value: 12.386999999999999 - type: ndcg_at_100 value: 18.533 - type: ndcg_at_1000 value: 23.955000000000002 - type: ndcg_at_20 value: 14.459 - type: ndcg_at_3 value: 11.75 - type: ndcg_at_5 value: 10.285 - type: precision_at_1 value: 15.0 - type: precision_at_10 value: 6.36 - type: precision_at_100 value: 1.528 - type: precision_at_1000 value: 0.28300000000000003 - type: precision_at_20 value: 4.375 - type: precision_at_3 value: 10.767 - type: precision_at_5 value: 8.9 - type: recall_at_1 value: 3.053 - type: recall_at_10 value: 12.873000000000001 - type: recall_at_100 value: 30.982 - type: recall_at_1000 value: 57.489999999999995 - type: recall_at_20 value: 17.718 - type: recall_at_3 value: 6.553000000000001 - type: recall_at_5 value: 9.013 - task: type: STS dataset: name: MTEB SICK-R (default) type: mteb/sickr-sts config: default split: test revision: 20a6d6f312dd54037fe07a32d58e5e168867909d metrics: - type: cosine_pearson value: 75.67336823619708 - type: cosine_spearman value: 64.6753400763881 - type: euclidean_pearson value: 69.13481550039579 - type: euclidean_spearman value: 64.6752133161514 - type: main_score value: 64.6753400763881 - type: manhattan_pearson value: 69.01619023671678 - type: manhattan_spearman value: 64.8728231074179 - type: pearson value: 75.67336823619708 - type: spearman value: 64.6753400763881 - task: type: STS dataset: name: MTEB STS12 (default) type: mteb/sts12-sts config: default split: test revision: a0d554a64d88156834ff5ae9920b964011b16384 metrics: - type: cosine_pearson value: 72.06681927996405 - type: cosine_spearman value: 62.248985055530525 - type: euclidean_pearson value: 68.05815981894538 - type: euclidean_spearman value: 62.248985055530525 - type: main_score value: 62.248985055530525 - type: manhattan_pearson value: 66.68543185400786 - type: manhattan_spearman value: 61.43850654925033 - type: pearson value: 72.06681927996405 - type: spearman value: 62.248985055530525 - task: type: STS dataset: name: MTEB STS13 (default) type: mteb/sts13-sts config: default split: test revision: 7e90230a92c190f1bf69ae9002b8cea547a64cca metrics: - type: cosine_pearson value: 76.53983680018591 - type: cosine_spearman value: 77.27600787572996 - type: euclidean_pearson value: 76.77960647262235 - type: euclidean_spearman value: 77.27600787572996 - type: main_score value: 77.27600787572996 - type: manhattan_pearson value: 76.37651436440808 - type: manhattan_spearman value: 76.85568457177312 - type: pearson value: 76.53983680018591 - type: spearman value: 77.27600787572996 - task: type: STS dataset: name: MTEB STS14 (default) type: mteb/sts14-sts config: default split: test revision: 
6031580fec1f6af667f0bd2da0a551cf4f0b2375 metrics: - type: cosine_pearson value: 76.20854411766629 - type: cosine_spearman value: 71.914099628002 - type: euclidean_pearson value: 74.5273047891339 - type: euclidean_spearman value: 71.914099628002 - type: main_score value: 71.914099628002 - type: manhattan_pearson value: 74.53275458017302 - type: manhattan_spearman value: 71.9720930787841 - type: pearson value: 76.20854411766629 - type: spearman value: 71.914099628002 - task: type: STS dataset: name: MTEB STS15 (default) type: mteb/sts15-sts config: default split: test revision: ae752c7c21bf194d8b67fd573edf7ae58183cbe3 metrics: - type: cosine_pearson value: 79.24273419832653 - type: cosine_spearman value: 79.75345871163103 - type: euclidean_pearson value: 79.31395801169265 - type: euclidean_spearman value: 79.75345871163103 - type: main_score value: 79.75345871163103 - type: manhattan_pearson value: 79.24199238927697 - type: manhattan_spearman value: 79.64058599210834 - type: pearson value: 79.24273419832653 - type: spearman value: 79.75345871163103 - task: type: STS dataset: name: MTEB STS16 (default) type: mteb/sts16-sts config: default split: test revision: 4d8694f8f0e0100860b497b999b3dbed754a0513 metrics: - type: cosine_pearson value: 75.64452330127995 - type: cosine_spearman value: 76.26343823222666 - type: euclidean_pearson value: 75.64112047932008 - type: euclidean_spearman value: 76.26343823222666 - type: main_score value: 76.26343823222666 - type: manhattan_pearson value: 75.32718809126764 - type: manhattan_spearman value: 75.9420892784719 - type: pearson value: 75.64452330127995 - type: spearman value: 76.26343823222666 - task: type: STS dataset: name: MTEB STS17 (es-en) type: mteb/sts17-crosslingual-sts config: es-en split: test revision: faeb762787bd10488a50c8b5be4a3b82e411949c metrics: - type: cosine_pearson value: 17.52217310066287 - type: cosine_spearman value: 14.729958484232528 - type: euclidean_pearson value: 17.507234354096582 - type: euclidean_spearman value: 14.729958484232528 - type: main_score value: 14.729958484232528 - type: manhattan_pearson value: 15.286020788097272 - type: manhattan_spearman value: 11.320242312589713 - type: pearson value: 17.52217310066287 - type: spearman value: 14.729958484232528 - task: type: STS dataset: name: MTEB STS17 (en-en) type: mteb/sts17-crosslingual-sts config: en-en split: test revision: faeb762787bd10488a50c8b5be4a3b82e411949c metrics: - type: cosine_pearson value: 84.67406984717113 - type: cosine_spearman value: 85.96709815630739 - type: euclidean_pearson value: 84.7186375682207 - type: euclidean_spearman value: 85.96709815630739 - type: main_score value: 85.96709815630739 - type: manhattan_pearson value: 85.07894758059129 - type: manhattan_spearman value: 86.57110045700985 - type: pearson value: 84.67406984717113 - type: spearman value: 85.96709815630739 - task: type: STS dataset: name: MTEB STS17 (fr-en) type: mteb/sts17-crosslingual-sts config: fr-en split: test revision: faeb762787bd10488a50c8b5be4a3b82e411949c metrics: - type: cosine_pearson value: 36.02331692863771 - type: cosine_spearman value: 34.28540470062557 - type: euclidean_pearson value: 35.996881386631514 - type: euclidean_spearman value: 34.28540470062557 - type: main_score value: 34.28540470062557 - type: manhattan_pearson value: 35.47246063445784 - type: manhattan_spearman value: 34.83247787211397 - type: pearson value: 36.02331692863771 - type: spearman value: 34.28540470062557 - task: type: STS dataset: name: MTEB STS17 (en-tr) type: mteb/sts17-crosslingual-sts 
config: en-tr split: test revision: faeb762787bd10488a50c8b5be4a3b82e411949c metrics: - type: cosine_pearson value: 13.925983981770388 - type: cosine_spearman value: 11.193291331109325 - type: euclidean_pearson value: 13.9151651239108 - type: euclidean_spearman value: 11.193291331109325 - type: main_score value: 11.193291331109325 - type: manhattan_pearson value: 12.652407957594654 - type: manhattan_spearman value: 9.888358907769014 - type: pearson value: 13.925983981770388 - type: spearman value: 11.193291331109325 - task: type: STS dataset: name: MTEB STS17 (en-de) type: mteb/sts17-crosslingual-sts config: en-de split: test revision: faeb762787bd10488a50c8b5be4a3b82e411949c metrics: - type: cosine_pearson value: 26.77839285232968 - type: cosine_spearman value: 23.010015986939717 - type: euclidean_pearson value: 27.13668235790385 - type: euclidean_spearman value: 23.010015986939717 - type: main_score value: 23.010015986939717 - type: manhattan_pearson value: 27.02698710744775 - type: manhattan_spearman value: 23.038730409304936 - type: pearson value: 26.77839285232968 - type: spearman value: 23.010015986939717 - task: type: STS dataset: name: MTEB STS17 (it-en) type: mteb/sts17-crosslingual-sts config: it-en split: test revision: faeb762787bd10488a50c8b5be4a3b82e411949c metrics: - type: cosine_pearson value: 25.330935194314364 - type: cosine_spearman value: 23.143555348782797 - type: euclidean_pearson value: 24.670147594978143 - type: euclidean_spearman value: 23.143555348782797 - type: main_score value: 23.143555348782797 - type: manhattan_pearson value: 24.879695698914418 - type: manhattan_spearman value: 25.916630507885134 - type: pearson value: 25.330935194314364 - type: spearman value: 23.143555348782797 - task: type: STS dataset: name: MTEB STS17 (en-ar) type: mteb/sts17-crosslingual-sts config: en-ar split: test revision: faeb762787bd10488a50c8b5be4a3b82e411949c metrics: - type: cosine_pearson value: 6.61651078645899 - type: cosine_spearman value: 5.415104433010482 - type: euclidean_pearson value: 6.791575957480809 - type: euclidean_spearman value: 5.415104433010482 - type: main_score value: 5.415104433010482 - type: manhattan_pearson value: 3.6585407382250987 - type: manhattan_spearman value: 4.566044103659472 - type: pearson value: 6.61651078645899 - type: spearman value: 5.415104433010482 - task: type: STS dataset: name: MTEB STS17 (nl-en) type: mteb/sts17-crosslingual-sts config: nl-en split: test revision: faeb762787bd10488a50c8b5be4a3b82e411949c metrics: - type: cosine_pearson value: 32.718045784523184 - type: cosine_spearman value: 27.52844368619317 - type: euclidean_pearson value: 32.98978359596458 - type: euclidean_spearman value: 27.52844368619317 - type: main_score value: 27.52844368619317 - type: manhattan_pearson value: 35.57923949366344 - type: manhattan_spearman value: 34.27137422651138 - type: pearson value: 32.718045784523184 - type: spearman value: 27.52844368619317 - task: type: STS dataset: name: MTEB STS22 (es-en) type: mteb/sts22-crosslingual-sts config: es-en split: test revision: de9d86b3b84231dc21f76c7b7af1f28e2f57f6e3 metrics: - type: cosine_pearson value: 9.98410299881163 - type: cosine_spearman value: 10.98684405086525 - type: euclidean_pearson value: 9.461680781495218 - type: euclidean_spearman value: 10.9925413190658 - type: main_score value: 10.98684405086525 - type: manhattan_pearson value: 9.442055271895944 - type: manhattan_spearman value: 11.226101908391069 - type: pearson value: 9.98410299881163 - type: spearman value: 10.98684405086525 - task: 
type: STS dataset: name: MTEB STS22 (en) type: mteb/sts22-crosslingual-sts config: en split: test revision: de9d86b3b84231dc21f76c7b7af1f28e2f57f6e3 metrics: - type: cosine_pearson value: 59.3180680265132 - type: cosine_spearman value: 63.07956002739231 - type: euclidean_pearson value: 62.46424835000928 - type: euclidean_spearman value: 63.07956002739231 - type: main_score value: 63.07956002739231 - type: manhattan_pearson value: 62.048137683643766 - type: manhattan_spearman value: 61.83898606879604 - type: pearson value: 59.3180680265132 - type: spearman value: 63.07956002739231 - task: type: STS dataset: name: MTEB STS22 (de-en) type: mteb/sts22-crosslingual-sts config: de-en split: test revision: de9d86b3b84231dc21f76c7b7af1f28e2f57f6e3 metrics: - type: cosine_pearson value: 29.061215770374826 - type: cosine_spearman value: 36.21441725938738 - type: euclidean_pearson value: 28.44045530150387 - type: euclidean_spearman value: 36.21441725938738 - type: main_score value: 36.21441725938738 - type: manhattan_pearson value: 29.32403221599612 - type: manhattan_spearman value: 38.914481153396494 - type: pearson value: 29.061215770374826 - type: spearman value: 36.21441725938738 - task: type: STS dataset: name: MTEB STS22 (zh-en) type: mteb/sts22-crosslingual-sts config: zh-en split: test revision: de9d86b3b84231dc21f76c7b7af1f28e2f57f6e3 metrics: - type: cosine_pearson value: 11.266385865086239 - type: cosine_spearman value: 17.291293843893733 - type: euclidean_pearson value: 10.045897285683115 - type: euclidean_spearman value: 17.321323804048646 - type: main_score value: 17.291293843893733 - type: manhattan_pearson value: 15.333482209624194 - type: manhattan_spearman value: 20.399166731513915 - type: pearson value: 11.266385865086239 - type: spearman value: 17.291293843893733 - task: type: STS dataset: name: MTEB STS22 (pl-en) type: mteb/sts22-crosslingual-sts config: pl-en split: test revision: de9d86b3b84231dc21f76c7b7af1f28e2f57f6e3 metrics: - type: cosine_pearson value: 9.647587208410648 - type: cosine_spearman value: 21.33739699413266 - type: euclidean_pearson value: 7.451981822243237 - type: euclidean_spearman value: 21.33739699413266 - type: main_score value: 21.33739699413266 - type: manhattan_pearson value: 10.05280275870948 - type: manhattan_spearman value: 22.233400969472218 - type: pearson value: 9.647587208410648 - type: spearman value: 21.33739699413266 - task: type: STS dataset: name: MTEB STSBenchmark (default) type: mteb/stsbenchmark-sts config: default split: test revision: b0fddb56ed78048fa8b90373c8a3cfc37b684831 metrics: - type: cosine_pearson value: 77.2598255013409 - type: cosine_spearman value: 75.40519061413276 - type: euclidean_pearson value: 77.19878276657876 - type: euclidean_spearman value: 75.40519061413276 - type: main_score value: 75.40519061413276 - type: manhattan_pearson value: 77.04099640594512 - type: manhattan_spearman value: 75.32219501493076 - type: pearson value: 77.2598255013409 - type: spearman value: 75.40519061413276 - task: type: Reranking dataset: name: MTEB SciDocsRR (default) type: mteb/scidocs-reranking config: default split: test revision: d3c5e1fc0b855ab6097bf1cda04dd73947d7caab metrics: - type: main_score value: 72.10127087089839 - type: map value: 72.10127087089839 - type: mrr value: 90.62288020621355 - type: nAUC_map_diff1 value: 8.726677558277695 - type: nAUC_map_max value: 54.59636736704295 - type: nAUC_map_std value: 67.36367052533402 - type: nAUC_mrr_diff1 value: 47.77588337162405 - type: nAUC_mrr_max value: 74.90946175462605 - type: 
nAUC_mrr_std value: 71.81332269641806 - task: type: Retrieval dataset: name: MTEB SciFact (default) type: mteb/scifact config: default split: test revision: 0228b52cf27578f30900b9e5271d331663a030d7 metrics: - type: main_score value: 50.63999999999999 - type: map_at_1 value: 35.5 - type: map_at_10 value: 45.238 - type: map_at_100 value: 46.135999999999996 - type: map_at_1000 value: 46.181 - type: map_at_20 value: 45.767 - type: map_at_3 value: 42.329 - type: map_at_5 value: 44.054 - type: mrr_at_1 value: 37.666666666666664 - type: mrr_at_10 value: 46.6611111111111 - type: mrr_at_100 value: 47.37819687814183 - type: mrr_at_1000 value: 47.417644921595766 - type: mrr_at_20 value: 47.06856780130773 - type: mrr_at_3 value: 43.94444444444443 - type: mrr_at_5 value: 45.52777777777777 - type: nauc_map_at_1000_diff1 value: 52.83081390161976 - type: nauc_map_at_1000_max value: 37.21621852995913 - type: nauc_map_at_1000_std value: -3.416369626271914 - type: nauc_map_at_100_diff1 value: 52.823502489139884 - type: nauc_map_at_100_max value: 37.2435733087758 - type: nauc_map_at_100_std value: -3.376708460074628 - type: nauc_map_at_10_diff1 value: 52.495695868970785 - type: nauc_map_at_10_max value: 36.79244353087952 - type: nauc_map_at_10_std value: -3.998841918813238 - type: nauc_map_at_1_diff1 value: 55.20714819661926 - type: nauc_map_at_1_max value: 33.68583272500883 - type: nauc_map_at_1_std value: -7.806502386166579 - type: nauc_map_at_20_diff1 value: 52.82557233788675 - type: nauc_map_at_20_max value: 37.02532534485883 - type: nauc_map_at_20_std value: -3.6962702134516126 - type: nauc_map_at_3_diff1 value: 53.005833884053054 - type: nauc_map_at_3_max value: 35.102473883265056 - type: nauc_map_at_3_std value: -6.237364868462919 - type: nauc_map_at_5_diff1 value: 52.67151253564545 - type: nauc_map_at_5_max value: 36.083416260083574 - type: nauc_map_at_5_std value: -4.7023113318143785 - type: nauc_mrr_at_1000_diff1 value: 52.938698102997094 - type: nauc_mrr_at_1000_max value: 39.46705187537523 - type: nauc_mrr_at_1000_std value: 0.6163818152860598 - type: nauc_mrr_at_100_diff1 value: 52.93491193041612 - type: nauc_mrr_at_100_max value: 39.490426719059165 - type: nauc_mrr_at_100_std value: 0.6662007971949842 - type: nauc_mrr_at_10_diff1 value: 52.70216069864656 - type: nauc_mrr_at_10_max value: 39.52193808791504 - type: nauc_mrr_at_10_std value: 0.536595037291294 - type: nauc_mrr_at_1_diff1 value: 55.77100806609076 - type: nauc_mrr_at_1_max value: 37.966164940491446 - type: nauc_mrr_at_1_std value: -2.1074234936282537 - type: nauc_mrr_at_20_diff1 value: 52.942136130524986 - type: nauc_mrr_at_20_max value: 39.42716448302782 - type: nauc_mrr_at_20_std value: 0.5472281187662744 - type: nauc_mrr_at_3_diff1 value: 53.144295072591206 - type: nauc_mrr_at_3_max value: 38.05294316134295 - type: nauc_mrr_at_3_std value: -1.2360608664776096 - type: nauc_mrr_at_5_diff1 value: 52.789220500594205 - type: nauc_mrr_at_5_max value: 38.83395427252616 - type: nauc_mrr_at_5_std value: -0.09099470685601964 - type: nauc_ndcg_at_1000_diff1 value: 52.16867590195915 - type: nauc_ndcg_at_1000_max value: 39.70115643730131 - type: nauc_ndcg_at_1000_std value: 0.904258507053096 - type: nauc_ndcg_at_100_diff1 value: 51.87328245345757 - type: nauc_ndcg_at_100_max value: 40.59055338026654 - type: nauc_ndcg_at_100_std value: 2.554356951645788 - type: nauc_ndcg_at_10_diff1 value: 50.809281234563805 - type: nauc_ndcg_at_10_max value: 39.085094925973245 - type: nauc_ndcg_at_10_std value: -0.23387754671232033 - type: nauc_ndcg_at_1_diff1 
value: 55.77100806609076 - type: nauc_ndcg_at_1_max value: 37.966164940491446 - type: nauc_ndcg_at_1_std value: -2.1074234936282537 - type: nauc_ndcg_at_20_diff1 value: 51.74864887078553 - type: nauc_ndcg_at_20_max value: 39.32033115509482 - type: nauc_ndcg_at_20_std value: 0.4346356935494506 - type: nauc_ndcg_at_3_diff1 value: 51.9909705702443 - type: nauc_ndcg_at_3_max value: 36.078476037019094 - type: nauc_ndcg_at_3_std value: -4.014502363911228 - type: nauc_ndcg_at_5_diff1 value: 51.312788955634325 - type: nauc_ndcg_at_5_max value: 37.54290824294073 - type: nauc_ndcg_at_5_std value: -1.8169251273098448 - type: nauc_precision_at_1000_diff1 value: 1.4596703970072096 - type: nauc_precision_at_1000_max value: 36.408552907408 - type: nauc_precision_at_1000_std value: 53.892991905053776 - type: nauc_precision_at_100_diff1 value: 17.90829681479967 - type: nauc_precision_at_100_max value: 50.02058762977557 - type: nauc_precision_at_100_std value: 50.95242296795188 - type: nauc_precision_at_10_diff1 value: 33.69533492770854 - type: nauc_precision_at_10_max value: 47.554637845938025 - type: nauc_precision_at_10_std value: 21.812883074791838 - type: nauc_precision_at_1_diff1 value: 55.77100806609076 - type: nauc_precision_at_1_max value: 37.966164940491446 - type: nauc_precision_at_1_std value: -2.1074234936282537 - type: nauc_precision_at_20_diff1 value: 31.797703948512723 - type: nauc_precision_at_20_max value: 46.94077230822751 - type: nauc_precision_at_20_std value: 29.525569664289396 - type: nauc_precision_at_3_diff1 value: 41.753151429999456 - type: nauc_precision_at_3_max value: 38.30163209243931 - type: nauc_precision_at_3_std value: 6.19935377482869 - type: nauc_precision_at_5_diff1 value: 38.479320931912575 - type: nauc_precision_at_5_max value: 41.576866734894516 - type: nauc_precision_at_5_std value: 13.327714566652604 - type: nauc_recall_at_1000_diff1 value: 50.28923446773287 - type: nauc_recall_at_1000_max value: 68.29528746364413 - type: nauc_recall_at_1000_std value: 48.2313231806132 - type: nauc_recall_at_100_diff1 value: 46.22085619290839 - type: nauc_recall_at_100_max value: 61.60933703216747 - type: nauc_recall_at_100_std value: 42.210649980610896 - type: nauc_recall_at_10_diff1 value: 43.10485234893865 - type: nauc_recall_at_10_max value: 43.06779802776641 - type: nauc_recall_at_10_std value: 8.272818985431385 - type: nauc_recall_at_1_diff1 value: 55.20714819661926 - type: nauc_recall_at_1_max value: 33.68583272500883 - type: nauc_recall_at_1_std value: -7.806502386166579 - type: nauc_recall_at_20_diff1 value: 46.850902149595036 - type: nauc_recall_at_20_max value: 44.58623368637416 - type: nauc_recall_at_20_std value: 11.890054420031708 - type: nauc_recall_at_3_diff1 value: 48.80301236823221 - type: nauc_recall_at_3_max value: 34.177890037375 - type: nauc_recall_at_3_std value: -3.852215004054359 - type: nauc_recall_at_5_diff1 value: 46.206941308622056 - type: nauc_recall_at_5_max value: 38.61994260176494 - type: nauc_recall_at_5_std value: 2.735469769782116 - type: ndcg_at_1 value: 37.667 - type: ndcg_at_10 value: 50.63999999999999 - type: ndcg_at_100 value: 54.885 - type: ndcg_at_1000 value: 56.274 - type: ndcg_at_20 value: 52.349000000000004 - type: ndcg_at_3 value: 44.891999999999996 - type: ndcg_at_5 value: 47.788000000000004 - type: precision_at_1 value: 37.667 - type: precision_at_10 value: 7.3 - type: precision_at_100 value: 0.97 - type: precision_at_1000 value: 0.11 - type: precision_at_20 value: 4.067 - type: precision_at_3 value: 18.333 - type: precision_at_5 
value: 12.6 - type: recall_at_1 value: 35.5 - type: recall_at_10 value: 66.178 - type: recall_at_100 value: 85.9 - type: recall_at_1000 value: 97.1 - type: recall_at_20 value: 72.60600000000001 - type: recall_at_3 value: 50.306 - type: recall_at_5 value: 57.443999999999996 - task: type: PairClassification dataset: name: MTEB SprintDuplicateQuestions (default) type: mteb/sprintduplicatequestions-pairclassification config: default split: test revision: d66bd1f72af766a5cc4b0ca5e00c162f89e8cc46 metrics: - type: cosine_accuracy value: 99.71386138613862 - type: cosine_accuracy_threshold value: 78.56961662426235 - type: cosine_ap value: 90.20131927652946 - type: cosine_f1 value: 84.7749114820435 - type: cosine_f1_threshold value: 75.7768544371973 - type: cosine_precision value: 85.7727737973388 - type: cosine_recall value: 83.8 - type: dot_accuracy value: 99.71386138613862 - type: dot_accuracy_threshold value: 78.56961780669964 - type: dot_ap value: 90.20131927652946 - type: dot_f1 value: 84.7749114820435 - type: dot_f1_threshold value: 75.77685228378391 - type: dot_precision value: 85.7727737973388 - type: dot_recall value: 83.8 - type: euclidean_accuracy value: 99.71386138613862 - type: euclidean_accuracy_threshold value: 65.46813529720524 - type: euclidean_ap value: 90.20131927652946 - type: euclidean_f1 value: 84.7749114820435 - type: euclidean_f1_threshold value: 69.60336608830053 - type: euclidean_precision value: 85.7727737973388 - type: euclidean_recall value: 83.8 - type: main_score value: 90.20131927652946 - type: manhattan_accuracy value: 99.7059405940594 - type: manhattan_accuracy_threshold value: 804.8100425289704 - type: manhattan_ap value: 90.00682250828237 - type: manhattan_f1 value: 84.44211629125196 - type: manhattan_f1_threshold value: 828.8486447498144 - type: manhattan_precision value: 88.66886688668868 - type: manhattan_recall value: 80.60000000000001 - type: max_accuracy value: 99.71386138613862 - type: max_ap value: 90.20131927652946 - type: max_f1 value: 84.7749114820435 - type: max_precision value: 88.66886688668868 - type: max_recall value: 83.8 - type: similarity_accuracy value: 99.71386138613862 - type: similarity_accuracy_threshold value: 78.56961662426235 - type: similarity_ap value: 90.20131927652946 - type: similarity_f1 value: 84.7749114820435 - type: similarity_f1_threshold value: 75.7768544371973 - type: similarity_precision value: 85.7727737973388 - type: similarity_recall value: 83.8 - task: type: Clustering dataset: name: MTEB StackExchangeClustering (default) type: mteb/stackexchange-clustering config: default split: test revision: 6cbc1f7b2bc0622f2e39d2c77fa502909748c259 metrics: - type: main_score value: 48.18939518021159 - type: v_measure value: 48.18939518021159 - type: v_measure_std value: 4.6189444340187995 - task: type: Clustering dataset: name: MTEB StackExchangeClusteringP2P (default) type: mteb/stackexchange-clustering-p2p config: default split: test revision: 815ca46b2622cec33ccafc3735d572c266efdb44 metrics: - type: main_score value: 30.743938802421265 - type: v_measure value: 30.743938802421265 - type: v_measure_std value: 1.4645401677053824 - task: type: Reranking dataset: name: MTEB StackOverflowDupQuestions (default) type: mteb/stackoverflowdupquestions-reranking config: default split: test revision: e185fbe320c72810689fc5848eb6114e1ef5ec69 metrics: - type: main_score value: 43.254152892780986 - type: map value: 43.254152892780986 - type: mrr value: 43.70483989050165 - type: nAUC_map_diff1 value: 33.22453777168869 - type: nAUC_map_max value: 
13.175366935671228 - type: nAUC_map_std value: 3.718253924398536 - type: nAUC_mrr_diff1 value: 32.58818809467491 - type: nAUC_mrr_max value: 14.093758435205075 - type: nAUC_mrr_std value: 4.198791420159734 - task: type: Summarization dataset: name: MTEB SummEval (default) type: mteb/summeval config: default split: test revision: cda12ad7615edc362dbf25a00fdd61d3b1eaf93c metrics: - type: cosine_pearson value: 29.88360050203766 - type: cosine_spearman value: 29.275185932109494 - type: dot_pearson value: 29.883597746108975 - type: dot_spearman value: 29.28377974870949 - type: main_score value: 29.275185932109494 - type: pearson value: 29.88360050203766 - type: spearman value: 29.275185932109494 - task: type: Retrieval dataset: name: MTEB TRECCOVID (default) type: mteb/trec-covid config: default split: test revision: bb9466bac8153a0349341eb1b22e06409e78ef4e metrics: - type: main_score value: 45.747 - type: map_at_1 value: 0.148 - type: map_at_10 value: 0.972 - type: map_at_100 value: 4.652 - type: map_at_1000 value: 11.511000000000001 - type: map_at_20 value: 1.643 - type: map_at_3 value: 0.369 - type: map_at_5 value: 0.561 - type: mrr_at_1 value: 62.0 - type: mrr_at_10 value: 70.06904761904761 - type: mrr_at_100 value: 70.45500059672992 - type: mrr_at_1000 value: 70.45500059672992 - type: mrr_at_20 value: 70.31716791979949 - type: mrr_at_3 value: 68.0 - type: mrr_at_5 value: 69.19999999999999 - type: nauc_map_at_1000_diff1 value: -0.8266899821302324 - type: nauc_map_at_1000_max value: 34.62914536640893 - type: nauc_map_at_1000_std value: 57.177693387251615 - type: nauc_map_at_100_diff1 value: -3.3097934383165613 - type: nauc_map_at_100_max value: 22.052336613600293 - type: nauc_map_at_100_std value: 29.905360060478188 - type: nauc_map_at_10_diff1 value: 6.057035481050755 - type: nauc_map_at_10_max value: 22.742824418774667 - type: nauc_map_at_10_std value: 5.649441588476496 - type: nauc_map_at_1_diff1 value: 10.469485578180873 - type: nauc_map_at_1_max value: 4.582098501050435 - type: nauc_map_at_1_std value: -10.47482550446343 - type: nauc_map_at_20_diff1 value: 1.5813367839245727 - type: nauc_map_at_20_max value: 25.09380802651507 - type: nauc_map_at_20_std value: 11.733045886140895 - type: nauc_map_at_3_diff1 value: -0.4174848325628528 - type: nauc_map_at_3_max value: 16.54291715633098 - type: nauc_map_at_3_std value: -6.315368365719176 - type: nauc_map_at_5_diff1 value: 1.6439114449809122 - type: nauc_map_at_5_max value: 18.119472468345634 - type: nauc_map_at_5_std value: -1.4642215840068935 - type: nauc_mrr_at_1000_diff1 value: 19.962304210632194 - type: nauc_mrr_at_1000_max value: 28.66281052259736 - type: nauc_mrr_at_1000_std value: 14.4833499197582 - type: nauc_mrr_at_100_diff1 value: 19.962304210632194 - type: nauc_mrr_at_100_max value: 28.66281052259736 - type: nauc_mrr_at_100_std value: 14.4833499197582 - type: nauc_mrr_at_10_diff1 value: 19.79498540271038 - type: nauc_mrr_at_10_max value: 28.07551011390951 - type: nauc_mrr_at_10_std value: 13.820791565247939 - type: nauc_mrr_at_1_diff1 value: 23.72088730271045 - type: nauc_mrr_at_1_max value: 29.338830261821947 - type: nauc_mrr_at_1_std value: 10.463649509276033 - type: nauc_mrr_at_20_diff1 value: 20.06776286940325 - type: nauc_mrr_at_20_max value: 28.69272909781133 - type: nauc_mrr_at_20_std value: 14.560673636667628 - type: nauc_mrr_at_3_diff1 value: 18.71166001912622 - type: nauc_mrr_at_3_max value: 30.645161290322555 - type: nauc_mrr_at_3_std value: 16.37394164159257 - type: nauc_mrr_at_5_diff1 value: 15.791374902745353 - type: 
nauc_mrr_at_5_max value: 28.51602708149093 - type: nauc_mrr_at_5_std value: 15.246386476651619 - type: nauc_ndcg_at_1000_diff1 value: -5.179304837164554 - type: nauc_ndcg_at_1000_max value: 27.27301986190763 - type: nauc_ndcg_at_1000_std value: 49.239144813886654 - type: nauc_ndcg_at_100_diff1 value: 7.283019925558149 - type: nauc_ndcg_at_100_max value: 29.80340187562149 - type: nauc_ndcg_at_100_std value: 47.60799676958296 - type: nauc_ndcg_at_10_diff1 value: 11.621471677557253 - type: nauc_ndcg_at_10_max value: 31.78727749460396 - type: nauc_ndcg_at_10_std value: 26.339328462146177 - type: nauc_ndcg_at_1_diff1 value: 26.896384303421446 - type: nauc_ndcg_at_1_max value: 28.727080596332872 - type: nauc_ndcg_at_1_std value: 12.10515793682523 - type: nauc_ndcg_at_20_diff1 value: 7.253524538786647 - type: nauc_ndcg_at_20_max value: 33.412855576178295 - type: nauc_ndcg_at_20_std value: 34.10895211064073 - type: nauc_ndcg_at_3_diff1 value: 11.303112239393863 - type: nauc_ndcg_at_3_max value: 35.0880605283756 - type: nauc_ndcg_at_3_std value: 18.514877130637803 - type: nauc_ndcg_at_5_diff1 value: 8.537541001217583 - type: nauc_ndcg_at_5_max value: 32.24796400964019 - type: nauc_ndcg_at_5_std value: 21.65596013895985 - type: nauc_precision_at_1000_diff1 value: 5.217123572202896 - type: nauc_precision_at_1000_max value: 31.954154167309177 - type: nauc_precision_at_1000_std value: 60.51613061301686 - type: nauc_precision_at_100_diff1 value: 5.748688865778208 - type: nauc_precision_at_100_max value: 28.503515028630567 - type: nauc_precision_at_100_std value: 52.8175811950368 - type: nauc_precision_at_10_diff1 value: 9.634424129349284 - type: nauc_precision_at_10_max value: 33.90210630229416 - type: nauc_precision_at_10_std value: 30.197787312348073 - type: nauc_precision_at_1_diff1 value: 23.72088730271045 - type: nauc_precision_at_1_max value: 29.338830261821947 - type: nauc_precision_at_1_std value: 10.463649509276033 - type: nauc_precision_at_20_diff1 value: 2.6440820197838923 - type: nauc_precision_at_20_max value: 36.6927642980172 - type: nauc_precision_at_20_std value: 40.53918258763216 - type: nauc_precision_at_3_diff1 value: 2.9773659425793695 - type: nauc_precision_at_3_max value: 35.63522203655881 - type: nauc_precision_at_3_std value: 17.365942579371055 - type: nauc_precision_at_5_diff1 value: 3.883249981522982 - type: nauc_precision_at_5_max value: 34.19785174053362 - type: nauc_precision_at_5_std value: 25.391096548495977 - type: nauc_recall_at_1000_diff1 value: -10.977265624215267 - type: nauc_recall_at_1000_max value: 22.349720150932985 - type: nauc_recall_at_1000_std value: 47.14118127199015 - type: nauc_recall_at_100_diff1 value: -10.566105105889243 - type: nauc_recall_at_100_max value: 13.59897332326766 - type: nauc_recall_at_100_std value: 25.1260269383207 - type: nauc_recall_at_10_diff1 value: 3.9418824014124514 - type: nauc_recall_at_10_max value: 18.87305117920693 - type: nauc_recall_at_10_std value: 4.227456274746917 - type: nauc_recall_at_1_diff1 value: 10.469485578180873 - type: nauc_recall_at_1_max value: 4.582098501050435 - type: nauc_recall_at_1_std value: -10.47482550446343 - type: nauc_recall_at_20_diff1 value: -3.663384950691917 - type: nauc_recall_at_20_max value: 20.838703493064635 - type: nauc_recall_at_20_std value: 10.729793670370862 - type: nauc_recall_at_3_diff1 value: -1.1850402683856456 - type: nauc_recall_at_3_max value: 16.033671610288522 - type: nauc_recall_at_3_std value: -6.953520529126048 - type: nauc_recall_at_5_diff1 value: -0.5156927662191768 - type: 
nauc_recall_at_5_max value: 15.556954479927315 - type: nauc_recall_at_5_std value: -2.965229848389009 - type: ndcg_at_1 value: 56.00000000000001 - type: ndcg_at_10 value: 45.747 - type: ndcg_at_100 value: 32.761 - type: ndcg_at_1000 value: 29.633 - type: ndcg_at_20 value: 42.905 - type: ndcg_at_3 value: 50.641999999999996 - type: ndcg_at_5 value: 48.231 - type: precision_at_1 value: 62.0 - type: precision_at_10 value: 47.8 - type: precision_at_100 value: 33.72 - type: precision_at_1000 value: 14.238000000000001 - type: precision_at_20 value: 45.2 - type: precision_at_3 value: 54.0 - type: precision_at_5 value: 50.8 - type: recall_at_1 value: 0.148 - type: recall_at_10 value: 1.143 - type: recall_at_100 value: 7.219 - type: recall_at_1000 value: 28.294999999999998 - type: recall_at_20 value: 2.083 - type: recall_at_3 value: 0.395 - type: recall_at_5 value: 0.628 - task: type: Retrieval dataset: name: MTEB Touche2020 (default) type: mteb/touche2020 config: default split: test revision: a34f9a33db75fa0cbb21bb5cfc3dae8dc8bec93f metrics: - type: main_score value: 18.618000000000002 - type: map_at_1 value: 1.22 - type: map_at_10 value: 6.635000000000001 - type: map_at_100 value: 10.873 - type: map_at_1000 value: 12.415 - type: map_at_20 value: 8.334 - type: map_at_3 value: 2.8240000000000003 - type: map_at_5 value: 4.111 - type: mrr_at_1 value: 14.285714285714285 - type: mrr_at_10 value: 31.959831551668284 - type: mrr_at_100 value: 33.15059576942869 - type: mrr_at_1000 value: 33.15059576942869 - type: mrr_at_20 value: 32.685999641281754 - type: mrr_at_3 value: 25.850340136054424 - type: mrr_at_5 value: 29.31972789115646 - type: nauc_map_at_1000_diff1 value: 8.820920087157313 - type: nauc_map_at_1000_max value: -33.58280072902863 - type: nauc_map_at_1000_std value: -22.730292551065183 - type: nauc_map_at_100_diff1 value: 9.741008911531535 - type: nauc_map_at_100_max value: -33.6532837418042 - type: nauc_map_at_100_std value: -28.3444309192652 - type: nauc_map_at_10_diff1 value: 7.657150877271815 - type: nauc_map_at_10_max value: -41.7412362957407 - type: nauc_map_at_10_std value: -35.66062824513052 - type: nauc_map_at_1_diff1 value: 7.593190069621649 - type: nauc_map_at_1_max value: -39.58442010649443 - type: nauc_map_at_1_std value: -22.564719811889777 - type: nauc_map_at_20_diff1 value: 7.245303325270055 - type: nauc_map_at_20_max value: -37.804327180430946 - type: nauc_map_at_20_std value: -32.702756826489846 - type: nauc_map_at_3_diff1 value: 6.742365189818029 - type: nauc_map_at_3_max value: -41.7228290771728 - type: nauc_map_at_3_std value: -30.230168338925107 - type: nauc_map_at_5_diff1 value: 11.935913888588882 - type: nauc_map_at_5_max value: -41.39335754887243 - type: nauc_map_at_5_std value: -33.780157609546535 - type: nauc_mrr_at_1000_diff1 value: -1.6708159098532442 - type: nauc_mrr_at_1000_max value: -36.55890935351506 - type: nauc_mrr_at_1000_std value: -24.27343264470873 - type: nauc_mrr_at_100_diff1 value: -1.6708159098532442 - type: nauc_mrr_at_100_max value: -36.55890935351506 - type: nauc_mrr_at_100_std value: -24.27343264470873 - type: nauc_mrr_at_10_diff1 value: -0.42650070974468685 - type: nauc_mrr_at_10_max value: -37.09244916127389 - type: nauc_mrr_at_10_std value: -24.66093983608399 - type: nauc_mrr_at_1_diff1 value: -5.630573652147252 - type: nauc_mrr_at_1_max value: -33.616658797870684 - type: nauc_mrr_at_1_std value: -23.601564115907 - type: nauc_mrr_at_20_diff1 value: -1.832519847770416 - type: nauc_mrr_at_20_max value: -37.12461848720876 - type: nauc_mrr_at_20_std 
value: -24.697864546344437 - type: nauc_mrr_at_3_diff1 value: -0.005683436651441496 - type: nauc_mrr_at_3_max value: -32.50516010446863 - type: nauc_mrr_at_3_std value: -21.544877233050823 - type: nauc_mrr_at_5_diff1 value: -2.354001730958692 - type: nauc_mrr_at_5_max value: -32.51899298268129 - type: nauc_mrr_at_5_std value: -23.68035252143919 - type: nauc_ndcg_at_1000_diff1 value: 14.007950932108976 - type: nauc_ndcg_at_1000_max value: -31.274257790464837 - type: nauc_ndcg_at_1000_std value: 3.658749568249879 - type: nauc_ndcg_at_100_diff1 value: 13.626007116136158 - type: nauc_ndcg_at_100_max value: -35.59107319590088 - type: nauc_ndcg_at_100_std value: -18.874707006492024 - type: nauc_ndcg_at_10_diff1 value: 9.82558048538336 - type: nauc_ndcg_at_10_max value: -39.51461465840459 - type: nauc_ndcg_at_10_std value: -30.33405672804229 - type: nauc_ndcg_at_1_diff1 value: -1.598770159246464 - type: nauc_ndcg_at_1_max value: -31.975857803575675 - type: nauc_ndcg_at_1_std value: -18.993368614347663 - type: nauc_ndcg_at_20_diff1 value: 11.616460882964375 - type: nauc_ndcg_at_20_max value: -36.68867443298684 - type: nauc_ndcg_at_20_std value: -27.831158282067598 - type: nauc_ndcg_at_3_diff1 value: 3.6760483719742556 - type: nauc_ndcg_at_3_max value: -30.935030030092992 - type: nauc_ndcg_at_3_std value: -18.717891674270643 - type: nauc_ndcg_at_5_diff1 value: 10.773599917143413 - type: nauc_ndcg_at_5_max value: -31.08451038101287 - type: nauc_ndcg_at_5_std value: -25.478457258577336 - type: nauc_precision_at_1000_diff1 value: -6.780225586359699 - type: nauc_precision_at_1000_max value: 38.71975790762798 - type: nauc_precision_at_1000_std value: 57.8083677042306 - type: nauc_precision_at_100_diff1 value: 2.959136061872892 - type: nauc_precision_at_100_max value: -8.27764507575222 - type: nauc_precision_at_100_std value: 5.742410187313611 - type: nauc_precision_at_10_diff1 value: 9.882789695687109 - type: nauc_precision_at_10_max value: -31.486245698037102 - type: nauc_precision_at_10_std value: -29.081919554833874 - type: nauc_precision_at_1_diff1 value: -5.630573652147252 - type: nauc_precision_at_1_max value: -33.616658797870684 - type: nauc_precision_at_1_std value: -23.601564115907 - type: nauc_precision_at_20_diff1 value: 5.165999913921455 - type: nauc_precision_at_20_max value: -19.322665087378923 - type: nauc_precision_at_20_std value: -19.841805142598865 - type: nauc_precision_at_3_diff1 value: 2.846740832419061 - type: nauc_precision_at_3_max value: -30.76562032864513 - type: nauc_precision_at_3_std value: -23.610192672373636 - type: nauc_precision_at_5_diff1 value: 13.83881140180208 - type: nauc_precision_at_5_max value: -23.40672207825652 - type: nauc_precision_at_5_std value: -26.803291207458884 - type: nauc_recall_at_1000_diff1 value: 5.989093134294799 - type: nauc_recall_at_1000_max value: -23.01810906637643 - type: nauc_recall_at_1000_std value: 51.72967782759332 - type: nauc_recall_at_100_diff1 value: 9.279568158025599 - type: nauc_recall_at_100_max value: -32.49225165397591 - type: nauc_recall_at_100_std value: -14.266931753931292 - type: nauc_recall_at_10_diff1 value: 8.789441102892894 - type: nauc_recall_at_10_max value: -41.575759675933185 - type: nauc_recall_at_10_std value: -36.066608504981836 - type: nauc_recall_at_1_diff1 value: 7.593190069621649 - type: nauc_recall_at_1_max value: -39.58442010649443 - type: nauc_recall_at_1_std value: -22.564719811889777 - type: nauc_recall_at_20_diff1 value: 7.288095720364289 - type: nauc_recall_at_20_max value: -34.19747470428325 - type: 
nauc_recall_at_20_std value: -29.334755464530023 - type: nauc_recall_at_3_diff1 value: 7.541743741210702 - type: nauc_recall_at_3_max value: -38.357726279072416 - type: nauc_recall_at_3_std value: -29.877869977138204 - type: nauc_recall_at_5_diff1 value: 11.512545675995455 - type: nauc_recall_at_5_max value: -37.366204857623586 - type: nauc_recall_at_5_std value: -33.58926486109219 - type: ndcg_at_1 value: 12.245000000000001 - type: ndcg_at_10 value: 18.618000000000002 - type: ndcg_at_100 value: 28.488000000000003 - type: ndcg_at_1000 value: 41.208 - type: ndcg_at_20 value: 19.536 - type: ndcg_at_3 value: 15.045 - type: ndcg_at_5 value: 16.359 - type: precision_at_1 value: 14.285999999999998 - type: precision_at_10 value: 19.796 - type: precision_at_100 value: 6.5920000000000005 - type: precision_at_1000 value: 1.471 - type: precision_at_20 value: 15.204 - type: precision_at_3 value: 18.367 - type: precision_at_5 value: 18.776 - type: recall_at_1 value: 1.22 - type: recall_at_10 value: 13.763 - type: recall_at_100 value: 40.107 - type: recall_at_1000 value: 79.06800000000001 - type: recall_at_20 value: 20.049 - type: recall_at_3 value: 4.2540000000000004 - type: recall_at_5 value: 7.142999999999999 - task: type: Classification dataset: name: MTEB ToxicConversationsClassification (default) type: mteb/toxic_conversations_50k config: default split: test revision: edfaf9da55d3dd50d43143d90c1ac476895ae6de metrics: - type: accuracy value: 69.0625 - type: ap value: 12.429057046174089 - type: ap_weighted value: 12.429057046174089 - type: f1 value: 52.366056859622454 - type: f1_weighted value: 75.91632061778698 - type: main_score value: 69.0625 - task: type: Classification dataset: name: MTEB TweetSentimentExtractionClassification (default) type: mteb/tweet_sentiment_extraction config: default split: test revision: d604517c81ca91fe16a244d1248fc021f9ecee7a metrics: - type: accuracy value: 55.387662705149964 - type: f1 value: 55.62292803889264 - type: f1_weighted value: 55.01561915660653 - type: main_score value: 55.387662705149964 - task: type: Clustering dataset: name: MTEB TwentyNewsgroupsClustering (default) type: mteb/twentynewsgroups-clustering config: default split: test revision: 6125ec4e24fa026cec8a478383ee943acfbd5449 metrics: - type: main_score value: 33.535908963951435 - type: v_measure value: 33.535908963951435 - type: v_measure_std value: 1.8862804680454297 - task: type: PairClassification dataset: name: MTEB TwitterSemEval2015 (default) type: mteb/twittersemeval2015-pairclassification config: default split: test revision: 70970daeab8776df92f5ea462b6173c0b46fd2d1 metrics: - type: cosine_accuracy value: 81.57000655659535 - type: cosine_accuracy_threshold value: 76.01186428039885 - type: cosine_ap value: 57.187252502171674 - type: cosine_f1 value: 54.94480738905159 - type: cosine_f1_threshold value: 63.27845286960887 - type: cosine_precision value: 47.93632075471698 - type: cosine_recall value: 64.35356200527704 - type: dot_accuracy value: 81.57000655659535 - type: dot_accuracy_threshold value: 76.01186510638954 - type: dot_ap value: 57.1872568788409 - type: dot_f1 value: 54.94480738905159 - type: dot_f1_threshold value: 63.27845437266042 - type: dot_precision value: 47.93632075471698 - type: dot_recall value: 64.35356200527704 - type: euclidean_accuracy value: 81.57000655659535 - type: euclidean_accuracy_threshold value: 69.2649048666448 - type: euclidean_ap value: 57.18724194735979 - type: euclidean_f1 value: 54.94480738905159 - type: euclidean_f1_threshold value: 85.69894748780587 - 
type: euclidean_precision value: 47.93632075471698 - type: euclidean_recall value: 64.35356200527704 - type: main_score value: 57.516050924090266 - type: manhattan_accuracy value: 81.71902008702389 - type: manhattan_accuracy_threshold value: 856.8997862166725 - type: manhattan_ap value: 57.516050924090266 - type: manhattan_f1 value: 55.16339869281046 - type: manhattan_f1_threshold value: 1035.858379830097 - type: manhattan_precision value: 50.18378378378379 - type: manhattan_recall value: 61.24010554089709 - type: max_accuracy value: 81.71902008702389 - type: max_ap value: 57.516050924090266 - type: max_f1 value: 55.16339869281046 - type: max_precision value: 50.18378378378379 - type: max_recall value: 64.35356200527704 - type: similarity_accuracy value: 81.57000655659535 - type: similarity_accuracy_threshold value: 76.01186428039885 - type: similarity_ap value: 57.187252502171674 - type: similarity_f1 value: 54.94480738905159 - type: similarity_f1_threshold value: 63.27845286960887 - type: similarity_precision value: 47.93632075471698 - type: similarity_recall value: 64.35356200527704 - task: type: PairClassification dataset: name: MTEB TwitterURLCorpus (default) type: mteb/twitterurlcorpus-pairclassification config: default split: test revision: 8b6510b0b1fa4e4c4f879467980e9be563ec1cdf metrics: - type: cosine_accuracy value: 87.09977878682035 - type: cosine_accuracy_threshold value: 63.00089389314832 - type: cosine_ap value: 81.9487582699938 - type: cosine_f1 value: 74.04089724292375 - type: cosine_f1_threshold value: 56.35024835869245 - type: cosine_precision value: 70.7599466704091 - type: cosine_recall value: 77.64089929165382 - type: dot_accuracy value: 87.09977878682035 - type: dot_accuracy_threshold value: 63.00089560728222 - type: dot_ap value: 81.94879514546079 - type: dot_f1 value: 74.04089724292375 - type: dot_f1_threshold value: 56.350250341728405 - type: dot_precision value: 70.7599466704091 - type: dot_recall value: 77.64089929165382 - type: euclidean_accuracy value: 87.09977878682035 - type: euclidean_accuracy_threshold value: 86.02221469735642 - type: euclidean_ap value: 81.94875892553148 - type: euclidean_f1 value: 74.04089724292375 - type: euclidean_f1_threshold value: 93.43420484744681 - type: euclidean_precision value: 70.7599466704091 - type: euclidean_recall value: 77.64089929165382 - type: main_score value: 82.13756947863085 - type: manhattan_accuracy value: 87.19292117825125 - type: manhattan_accuracy_threshold value: 1076.0586285257887 - type: manhattan_ap value: 82.13756947863085 - type: manhattan_f1 value: 74.36426623424485 - type: manhattan_f1_threshold value: 1148.366796662276 - type: manhattan_precision value: 71.32051463311183 - type: manhattan_recall value: 77.6793963658762 - type: max_accuracy value: 87.19292117825125 - type: max_ap value: 82.13756947863085 - type: max_f1 value: 74.36426623424485 - type: max_precision value: 71.32051463311183 - type: max_recall value: 77.6793963658762 - type: similarity_accuracy value: 87.09977878682035 - type: similarity_accuracy_threshold value: 63.00089389314832 - type: similarity_ap value: 81.9487582699938 - type: similarity_f1 value: 74.04089724292375 - type: similarity_f1_threshold value: 56.35024835869245 - type: similarity_precision value: 70.7599466704091 - type: similarity_recall value: 77.64089929165382 --- # potion-base-8M Model Card <div align="center"> <img width="35%" alt="Model2Vec logo" src="https://raw.githubusercontent.com/MinishLab/model2vec/main/assets/images/logo_v2.png"> </div> This 
[Model2Vec](https://github.com/MinishLab/model2vec) model is pre-trained using [Tokenlearn](https://github.com/MinishLab/tokenlearn). It is a distilled version of the [baai/bge-base-en-v1.5](https://huggingface.co/baai/bge-base-en-v1.5) Sentence Transformer. It uses static embeddings, allowing text embeddings to be computed orders of magnitude faster on both GPU and CPU. It is designed for applications where computational resources are limited or where real-time performance is critical.

## Installation

Install model2vec using pip:

```
pip install model2vec
```

## Usage

Load this model using the `from_pretrained` method:

```python
from model2vec import StaticModel

# Load a pretrained Model2Vec model
model = StaticModel.from_pretrained("minishlab/potion-base-8M")

# Compute text embeddings
embeddings = model.encode(["Example sentence"])
```

## How it works

Model2Vec creates a small, static model that outperforms other static embedding models by a large margin on all tasks on [MTEB](https://huggingface.co/spaces/mteb/leaderboard). This model is pre-trained using [Tokenlearn](https://github.com/MinishLab/tokenlearn). It's created using the following steps:

- Distillation: first, a model is distilled from a sentence transformer model using Model2Vec (a minimal sketch of this step appears at the end of this card).
- Training data creation: the sentence transformer model is used to create training data by computing mean output embeddings on a large corpus.
- Training: the distilled model is trained on the training data using Tokenlearn.
- Post-training re-regularization: after training, the model is re-regularized by weighting the tokens based on their frequency, applying PCA, and finally applying [SIF weighting](https://openreview.net/pdf?id=SyK00v5xx).

The results for this model can be found on the [Model2Vec results page](https://github.com/MinishLab/model2vec/blob/main/results/README.md).

## Additional Resources

- [All Model2Vec models on the hub](https://huggingface.co/models?library=model2vec)
- [Model2Vec Repo](https://github.com/MinishLab/model2vec)
- [Tokenlearn repo](https://github.com/MinishLab/tokenlearn)
- [Model2Vec Results](https://github.com/MinishLab/model2vec/blob/main/results/README.md)
- [Model2Vec Tutorials](https://github.com/MinishLab/model2vec/tree/main/tutorials)

## Library Authors

Model2Vec was developed by the [Minish Lab](https://github.com/MinishLab) team consisting of [Stephan Tulkens](https://github.com/stephantul) and [Thomas van Dongen](https://github.com/Pringled).

## Citation

Please cite the [Model2Vec repository](https://github.com/MinishLab/model2vec) if you use this model in your work.

```
@software{minishlab2024model2vec,
  authors = {Stephan Tulkens, Thomas van Dongen},
  title = {Model2Vec: Turn any Sentence Transformer into a Small Fast Model},
  year = {2024},
  url = {https://github.com/MinishLab/model2vec},
}
```
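As a rough illustration of the distillation step described under "How it works", the sketch below uses the `distill` helper from the model2vec package and then encodes a few sentences with the resulting static model. The teacher model, the `pca_dims` value, and the output path are illustrative assumptions, not the exact recipe used to build potion-base-8M, which additionally involves Tokenlearn training and post-training re-regularization.

```python
# A minimal sketch of plain Model2Vec distillation (assumed model2vec API;
# distillation may require the extra: pip install "model2vec[distill]").
# This is NOT the full potion-base-8M recipe: it omits Tokenlearn training
# and post-training re-regularization.
import numpy as np

from model2vec import StaticModel
from model2vec.distill import distill

# Distill static token embeddings from a Sentence Transformer teacher.
# Teacher name and pca_dims are illustrative choices, not the official ones.
m2v_model = distill(model_name="BAAI/bge-base-en-v1.5", pca_dims=256)

# Save and reload like any other Model2Vec model.
m2v_model.save_pretrained("my-distilled-model")
model = StaticModel.from_pretrained("my-distilled-model")

# Encode sentences and compare them with cosine similarity.
sentences = [
    "It's a lovely day",
    "The weather is nice today",
    "Quarterly earnings report",
]
embeddings = model.encode(sentences)

# Normalize rows, then take dot products to get cosine similarities.
normed = embeddings / np.linalg.norm(embeddings, axis=1, keepdims=True)
similarity = normed @ normed.T
print(similarity.round(3))  # related sentences should score higher than unrelated ones
```

Because the embeddings are plain NumPy vectors produced by a static lookup, any downstream similarity, clustering, or retrieval code can consume them directly; no transformer forward pass is needed at query time, which is where the speed advantage claimed above comes from.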
[ "BIOSSES", "SCIFACT" ]
NovaSearch/stella_en_400M_v5
NovaSearch
sentence-similarity
[ "sentence-transformers", "pytorch", "safetensors", "new", "feature-extraction", "mteb", "transformers", "sentence-similarity", "custom_code", "arxiv:2412.19048", "arxiv:2205.13147", "license:mit", "model-index", "autotrain_compatible", "text-embeddings-inference", "endpoints_compatible", "region:us" ]
"2024-07-12T15:52:33Z"
2025-03-05T13:57:29+00:00
315,187
192
--- license: mit tags: - mteb - sentence-transformers - transformers - sentence-similarity model-index: - name: stella_en_400M_v5 results: - task: type: Classification dataset: name: MTEB AmazonCounterfactualClassification (en) type: mteb/amazon_counterfactual config: en split: test revision: e8379541af4e31359cca9fbcf4b00f2671dba205 metrics: - type: accuracy value: 92.35820895522387 - type: ap value: 70.81322736988783 - type: ap_weighted value: 70.81322736988783 - type: f1 value: 88.9505466159595 - type: f1_weighted value: 92.68630932872613 - type: main_score value: 92.35820895522387 - task: type: Classification dataset: name: MTEB AmazonPolarityClassification type: mteb/amazon_polarity config: default split: test revision: e2d317d38cd51312af73b3d32a06d1a08b442046 metrics: - type: accuracy value: 97.1945 - type: ap value: 96.08192192244094 - type: ap_weighted value: 96.08192192244094 - type: f1 value: 97.1936887167346 - type: f1_weighted value: 97.1936887167346 - type: main_score value: 97.1945 - task: type: Classification dataset: name: MTEB AmazonReviewsClassification (en) type: mteb/amazon_reviews_multi config: en split: test revision: 1399c76144fd37290681b995c656ef9b2e06e26d metrics: - type: accuracy value: 59.528000000000006 - type: f1 value: 59.21016819840188 - type: f1_weighted value: 59.21016819840188 - type: main_score value: 59.528000000000006 - task: type: Retrieval dataset: name: MTEB ArguAna type: mteb/arguana config: default split: test revision: c22ab2a51041ffd869aaddef7af8d8215647e41a metrics: - type: main_score value: 64.24 - type: map_at_1 value: 40.398 - type: map_at_10 value: 56.215 - type: map_at_100 value: 56.833999999999996 - type: map_at_1000 value: 56.835 - type: map_at_20 value: 56.747 - type: map_at_3 value: 52.181 - type: map_at_5 value: 54.628 - type: mrr_at_1 value: 41.25177809388336 - type: mrr_at_10 value: 56.570762491815216 - type: mrr_at_100 value: 57.17548614361504 - type: mrr_at_1000 value: 57.176650626377466 - type: mrr_at_20 value: 57.08916253512566 - type: mrr_at_3 value: 52.47747747747754 - type: mrr_at_5 value: 54.94547178757718 - type: nauc_map_at_1000_diff1 value: 22.408086887100158 - type: nauc_map_at_1000_max value: -8.730419096847543 - type: nauc_map_at_1000_std value: -17.789262741255737 - type: nauc_map_at_100_diff1 value: 22.407371684274025 - type: nauc_map_at_100_max value: -8.732263549026266 - type: nauc_map_at_100_std value: -17.79550515579994 - type: nauc_map_at_10_diff1 value: 21.925005073301246 - type: nauc_map_at_10_max value: -8.990323944492134 - type: nauc_map_at_10_std value: -18.199246301671458 - type: nauc_map_at_1_diff1 value: 26.23276644969203 - type: nauc_map_at_1_max value: -12.376511389571245 - type: nauc_map_at_1_std value: -18.11411715207284 - type: nauc_map_at_20_diff1 value: 22.32455790850922 - type: nauc_map_at_20_max value: -8.664671547236034 - type: nauc_map_at_20_std value: -17.8290016125137 - type: nauc_map_at_3_diff1 value: 22.395462147465064 - type: nauc_map_at_3_max value: -8.206580750918844 - type: nauc_map_at_3_std value: -17.604490446911484 - type: nauc_map_at_5_diff1 value: 21.95307379904799 - type: nauc_map_at_5_max value: -8.03958102978443 - type: nauc_map_at_5_std value: -17.36578866595004 - type: nauc_mrr_at_1000_diff1 value: 20.124236798365587 - type: nauc_mrr_at_1000_max value: -9.587376069575898 - type: nauc_mrr_at_1000_std value: -17.79191612151833 - type: nauc_mrr_at_100_diff1 value: 20.123612603474033 - type: nauc_mrr_at_100_max value: -9.589187218607831 - type: nauc_mrr_at_100_std value: 
-17.7981617777748 - type: nauc_mrr_at_10_diff1 value: 19.723683875738075 - type: nauc_mrr_at_10_max value: -9.774151729178815 - type: nauc_mrr_at_10_std value: -18.168668675495162 - type: nauc_mrr_at_1_diff1 value: 23.945332059908132 - type: nauc_mrr_at_1_max value: -12.260461466152819 - type: nauc_mrr_at_1_std value: -18.007194922921148 - type: nauc_mrr_at_20_diff1 value: 20.04819461810257 - type: nauc_mrr_at_20_max value: -9.518368283588936 - type: nauc_mrr_at_20_std value: -17.831608149836136 - type: nauc_mrr_at_3_diff1 value: 19.8571785245832 - type: nauc_mrr_at_3_max value: -9.464375021240478 - type: nauc_mrr_at_3_std value: -17.728533927330453 - type: nauc_mrr_at_5_diff1 value: 19.670313652167827 - type: nauc_mrr_at_5_max value: -8.966372585728434 - type: nauc_mrr_at_5_std value: -17.468955834324817 - type: nauc_ndcg_at_1000_diff1 value: 21.863049281767417 - type: nauc_ndcg_at_1000_max value: -8.18698520924057 - type: nauc_ndcg_at_1000_std value: -17.634483364794804 - type: nauc_ndcg_at_100_diff1 value: 21.849924385738586 - type: nauc_ndcg_at_100_max value: -8.226437560889345 - type: nauc_ndcg_at_100_std value: -17.774648478087002 - type: nauc_ndcg_at_10_diff1 value: 19.888395590413573 - type: nauc_ndcg_at_10_max value: -8.968706085632382 - type: nauc_ndcg_at_10_std value: -19.31386964628115 - type: nauc_ndcg_at_1_diff1 value: 26.23276644969203 - type: nauc_ndcg_at_1_max value: -12.376511389571245 - type: nauc_ndcg_at_1_std value: -18.11411715207284 - type: nauc_ndcg_at_20_diff1 value: 21.38413342416933 - type: nauc_ndcg_at_20_max value: -7.636238194084164 - type: nauc_ndcg_at_20_std value: -17.946390844693028 - type: nauc_ndcg_at_3_diff1 value: 21.29169165029195 - type: nauc_ndcg_at_3_max value: -6.793840499730093 - type: nauc_ndcg_at_3_std value: -17.52359001586737 - type: nauc_ndcg_at_5_diff1 value: 20.238297656671364 - type: nauc_ndcg_at_5_max value: -6.424992706950072 - type: nauc_ndcg_at_5_std value: -17.082391132291356 - type: nauc_precision_at_1000_diff1 value: -7.05195108528572 - type: nauc_precision_at_1000_max value: 34.439879624882145 - type: nauc_precision_at_1000_std value: 68.72436351659353 - type: nauc_precision_at_100_diff1 value: -2.769464113932605 - type: nauc_precision_at_100_max value: 9.89562961226698 - type: nauc_precision_at_100_std value: -0.5880967482224028 - type: nauc_precision_at_10_diff1 value: 2.1371544726832323 - type: nauc_precision_at_10_max value: -11.93051325147756 - type: nauc_precision_at_10_std value: -30.83144187392059 - type: nauc_precision_at_1_diff1 value: 26.23276644969203 - type: nauc_precision_at_1_max value: -12.376511389571245 - type: nauc_precision_at_1_std value: -18.11411715207284 - type: nauc_precision_at_20_diff1 value: 3.780146814257504 - type: nauc_precision_at_20_max value: 17.06527540214615 - type: nauc_precision_at_20_std value: -20.36832563035565 - type: nauc_precision_at_3_diff1 value: 17.63894384012077 - type: nauc_precision_at_3_max value: -2.0220490624638887 - type: nauc_precision_at_3_std value: -17.285601413493918 - type: nauc_precision_at_5_diff1 value: 12.557855071944601 - type: nauc_precision_at_5_max value: 0.5840236463956658 - type: nauc_precision_at_5_std value: -15.827224420217846 - type: nauc_recall_at_1000_diff1 value: -7.051951085286463 - type: nauc_recall_at_1000_max value: 34.43987962487738 - type: nauc_recall_at_1000_std value: 68.724363516591 - type: nauc_recall_at_100_diff1 value: -2.769464113930314 - type: nauc_recall_at_100_max value: 9.895629612270017 - type: nauc_recall_at_100_std value: 
-0.58809674821745 - type: nauc_recall_at_10_diff1 value: 2.1371544726834495 - type: nauc_recall_at_10_max value: -11.930513251477253 - type: nauc_recall_at_10_std value: -30.83144187392047 - type: nauc_recall_at_1_diff1 value: 26.23276644969203 - type: nauc_recall_at_1_max value: -12.376511389571245 - type: nauc_recall_at_1_std value: -18.11411715207284 - type: nauc_recall_at_20_diff1 value: 3.7801468142575922 - type: nauc_recall_at_20_max value: 17.0652754021456 - type: nauc_recall_at_20_std value: -20.36832563035559 - type: nauc_recall_at_3_diff1 value: 17.63894384012074 - type: nauc_recall_at_3_max value: -2.02204906246383 - type: nauc_recall_at_3_std value: -17.28560141349386 - type: nauc_recall_at_5_diff1 value: 12.55785507194463 - type: nauc_recall_at_5_max value: 0.5840236463957296 - type: nauc_recall_at_5_std value: -15.827224420217856 - type: ndcg_at_1 value: 40.398 - type: ndcg_at_10 value: 64.24 - type: ndcg_at_100 value: 66.631 - type: ndcg_at_1000 value: 66.65100000000001 - type: ndcg_at_20 value: 66.086 - type: ndcg_at_3 value: 55.938 - type: ndcg_at_5 value: 60.370000000000005 - type: precision_at_1 value: 40.398 - type: precision_at_10 value: 8.962 - type: precision_at_100 value: 0.9950000000000001 - type: precision_at_1000 value: 0.1 - type: precision_at_20 value: 4.836 - type: precision_at_3 value: 22.262 - type: precision_at_5 value: 15.519 - type: recall_at_1 value: 40.398 - type: recall_at_10 value: 89.616 - type: recall_at_100 value: 99.502 - type: recall_at_1000 value: 99.644 - type: recall_at_20 value: 96.72800000000001 - type: recall_at_3 value: 66.78500000000001 - type: recall_at_5 value: 77.596 - task: type: Clustering dataset: name: MTEB ArxivClusteringP2P type: mteb/arxiv-clustering-p2p config: default split: test revision: a122ad7f3f0291bf49cc6f4d32aa80929df69d5d metrics: - type: main_score value: 55.1564333205451 - type: v_measure value: 55.1564333205451 - type: v_measure_std value: 14.696883012214512 - task: type: Clustering dataset: name: MTEB ArxivClusteringS2S type: mteb/arxiv-clustering-s2s config: default split: test revision: f910caf1a6075f7329cdf8c1a6135696f37dbd53 metrics: - type: main_score value: 49.823698316694795 - type: v_measure value: 49.823698316694795 - type: v_measure_std value: 14.951660654298186 - task: type: Reranking dataset: name: MTEB AskUbuntuDupQuestions type: mteb/askubuntudupquestions-reranking config: default split: test revision: 2000358ca161889fa9c082cb41daa8dcfb161a54 metrics: - type: main_score value: 66.15294503553424 - type: map value: 66.15294503553424 - type: mrr value: 78.53438420612935 - type: nAUC_map_diff1 value: 12.569697092717997 - type: nAUC_map_max value: 21.50670312412572 - type: nAUC_map_std value: 16.943786429229064 - type: nAUC_mrr_diff1 value: 15.590272897361238 - type: nAUC_mrr_max value: 34.96072022474653 - type: nAUC_mrr_std value: 21.649217605241045 - task: type: STS dataset: name: MTEB BIOSSES type: mteb/biosses-sts config: default split: test revision: d3fb88f8f02e40887cd149695127462bbcf29b4a metrics: - type: cosine_pearson value: 85.7824546319275 - type: cosine_spearman value: 83.29587385660628 - type: euclidean_pearson value: 84.58764190565167 - type: euclidean_spearman value: 83.30069324352772 - type: main_score value: 83.29587385660628 - type: manhattan_pearson value: 84.95996839947179 - type: manhattan_spearman value: 83.87480271054358 - type: pearson value: 85.7824546319275 - type: spearman value: 83.29587385660628 - task: type: Classification dataset: name: MTEB Banking77Classification type: 
mteb/banking77 config: default split: test revision: 0fd18e25b25c072e09e0d92ab615fda904d66300 metrics: - type: accuracy value: 89.30194805194806 - type: f1 value: 89.26182507266391 - type: f1_weighted value: 89.26182507266391 - type: main_score value: 89.30194805194806 - task: type: Clustering dataset: name: MTEB BiorxivClusteringP2P type: mteb/biorxiv-clustering-p2p config: default split: test revision: 65b79d1d13f80053f67aca9498d9402c2d9f1f40 metrics: - type: main_score value: 50.67972171889736 - type: v_measure value: 50.67972171889736 - type: v_measure_std value: 0.7687409980036303 - task: type: Clustering dataset: name: MTEB BiorxivClusteringS2S type: mteb/biorxiv-clustering-s2s config: default split: test revision: 258694dd0231531bc1fd9de6ceb52a0853c6d908 metrics: - type: main_score value: 45.80539715556144 - type: v_measure value: 45.80539715556144 - type: v_measure_std value: 0.9601346216579142 - task: type: Retrieval dataset: name: MTEB CQADupstackRetrieval type: mteb/cqadupstack config: default split: test revision: 4ffe81d471b1924886b33c7567bfb200e9eec5c4 metrics: - type: main_score value: 44.361250000000005 - type: map_at_1 value: 28.304499999999997 - type: map_at_10 value: 38.54841666666666 - type: map_at_100 value: 39.83141666666667 - type: map_at_1000 value: 39.944750000000006 - type: map_at_20 value: 39.25341666666667 - type: map_at_3 value: 35.406749999999995 - type: map_at_5 value: 37.15558333333333 - type: mrr_at_1 value: 34.09077232860122 - type: mrr_at_10 value: 43.15445393211421 - type: mrr_at_100 value: 43.98645286848257 - type: mrr_at_1000 value: 44.037631313469404 - type: mrr_at_20 value: 43.64045813249614 - type: mrr_at_3 value: 40.674138648480486 - type: mrr_at_5 value: 42.106251182620255 - type: nauc_map_at_1000_diff1 value: 46.250011739434996 - type: nauc_map_at_1000_max value: 30.13664446260598 - type: nauc_map_at_1000_std value: 5.422301791618935 - type: nauc_map_at_100_diff1 value: 46.253631351999395 - type: nauc_map_at_100_max value: 30.12612918885181 - type: nauc_map_at_100_std value: 5.367077019987172 - type: nauc_map_at_10_diff1 value: 46.328171341741346 - type: nauc_map_at_10_max value: 29.80274612581464 - type: nauc_map_at_10_std value: 4.62996685176396 - type: nauc_map_at_1_diff1 value: 51.56118117729493 - type: nauc_map_at_1_max value: 27.94885243863768 - type: nauc_map_at_1_std value: 1.700366508927356 - type: nauc_map_at_20_diff1 value: 46.286750260299094 - type: nauc_map_at_20_max value: 29.979205290353278 - type: nauc_map_at_20_std value: 5.010588412441873 - type: nauc_map_at_3_diff1 value: 47.10018183619064 - type: nauc_map_at_3_max value: 29.062318206078753 - type: nauc_map_at_3_std value: 3.2235696254694197 - type: nauc_map_at_5_diff1 value: 46.41971733050039 - type: nauc_map_at_5_max value: 29.456798617695657 - type: nauc_map_at_5_std value: 4.0921691023077145 - type: nauc_mrr_at_1000_diff1 value: 45.88888977975723 - type: nauc_mrr_at_1000_max value: 32.162138978089544 - type: nauc_mrr_at_1000_std value: 6.2811943424217915 - type: nauc_mrr_at_100_diff1 value: 45.87480433011124 - type: nauc_mrr_at_100_max value: 32.16011334212834 - type: nauc_mrr_at_100_std value: 6.2865717772421785 - type: nauc_mrr_at_10_diff1 value: 45.849652904658825 - type: nauc_mrr_at_10_max value: 32.13847916232293 - type: nauc_mrr_at_10_std value: 6.105718728141999 - type: nauc_mrr_at_1_diff1 value: 51.013730325062156 - type: nauc_mrr_at_1_max value: 32.77457396492779 - type: nauc_mrr_at_1_std value: 4.415684893471724 - type: nauc_mrr_at_20_diff1 value: 
45.86663046255274 - type: nauc_mrr_at_20_max value: 32.15219360697865 - type: nauc_mrr_at_20_std value: 6.19603046412763 - type: nauc_mrr_at_3_diff1 value: 46.522376582423185 - type: nauc_mrr_at_3_max value: 32.18259009733714 - type: nauc_mrr_at_3_std value: 5.288000648220897 - type: nauc_mrr_at_5_diff1 value: 45.86611481369745 - type: nauc_mrr_at_5_max value: 32.14261639054921 - type: nauc_mrr_at_5_std value: 5.8811238177073735 - type: nauc_ndcg_at_1000_diff1 value: 44.5055097547565 - type: nauc_ndcg_at_1000_max value: 31.149682057975458 - type: nauc_ndcg_at_1000_std value: 8.157937194901333 - type: nauc_ndcg_at_100_diff1 value: 44.12398363638596 - type: nauc_ndcg_at_100_max value: 30.878064321409994 - type: nauc_ndcg_at_100_std value: 8.40493441452808 - type: nauc_ndcg_at_10_diff1 value: 44.200093505221474 - type: nauc_ndcg_at_10_max value: 30.15267107733158 - type: nauc_ndcg_at_10_std value: 6.407495361566107 - type: nauc_ndcg_at_1_diff1 value: 51.013730325062156 - type: nauc_ndcg_at_1_max value: 32.77457396492779 - type: nauc_ndcg_at_1_std value: 4.415684893471724 - type: nauc_ndcg_at_20_diff1 value: 44.16988321564116 - type: nauc_ndcg_at_20_max value: 30.333532500651213 - type: nauc_ndcg_at_20_std value: 7.10024701386895 - type: nauc_ndcg_at_3_diff1 value: 45.35982873879988 - type: nauc_ndcg_at_3_max value: 30.288312457948702 - type: nauc_ndcg_at_3_std value: 4.653900898293395 - type: nauc_ndcg_at_5_diff1 value: 44.324558115380185 - type: nauc_ndcg_at_5_max value: 30.048149698941373 - type: nauc_ndcg_at_5_std value: 5.6684459618413205 - type: nauc_precision_at_1000_diff1 value: -7.282175798304458 - type: nauc_precision_at_1000_max value: 7.820142031765352 - type: nauc_precision_at_1000_std value: 11.736131836431172 - type: nauc_precision_at_100_diff1 value: 1.0222940256506976 - type: nauc_precision_at_100_max value: 16.12346497070298 - type: nauc_precision_at_100_std value: 18.202607395247874 - type: nauc_precision_at_10_diff1 value: 18.289439185857837 - type: nauc_precision_at_10_max value: 26.116517399154375 - type: nauc_precision_at_10_std value: 13.921214069982302 - type: nauc_precision_at_1_diff1 value: 51.013730325062156 - type: nauc_precision_at_1_max value: 32.77457396492779 - type: nauc_precision_at_1_std value: 4.415684893471724 - type: nauc_precision_at_20_diff1 value: 12.365165405210886 - type: nauc_precision_at_20_max value: 22.946297258937367 - type: nauc_precision_at_20_std value: 16.13862870358933 - type: nauc_precision_at_3_diff1 value: 32.063423642849685 - type: nauc_precision_at_3_max value: 30.140965811989407 - type: nauc_precision_at_3_std value: 8.501746262550146 - type: nauc_precision_at_5_diff1 value: 24.777203357717948 - type: nauc_precision_at_5_max value: 28.401579566848472 - type: nauc_precision_at_5_std value: 11.643246774390914 - type: nauc_recall_at_1000_diff1 value: 30.04216463401409 - type: nauc_recall_at_1000_max value: 34.98067760563842 - type: nauc_recall_at_1000_std value: 48.01453905250591 - type: nauc_recall_at_100_diff1 value: 31.193415507513972 - type: nauc_recall_at_100_max value: 28.69740149270981 - type: nauc_recall_at_100_std value: 25.20960758920368 - type: nauc_recall_at_10_diff1 value: 36.18870823636506 - type: nauc_recall_at_10_max value: 26.005625231341238 - type: nauc_recall_at_10_std value: 8.891983977041376 - type: nauc_recall_at_1_diff1 value: 51.56118117729493 - type: nauc_recall_at_1_max value: 27.94885243863768 - type: nauc_recall_at_1_std value: 1.700366508927356 - type: nauc_recall_at_20_diff1 value: 34.93996118564803 - type: 
nauc_recall_at_20_max value: 26.149961715956138 - type: nauc_recall_at_20_std value: 12.0657502367633 - type: nauc_recall_at_3_diff1 value: 40.80743946709512 - type: nauc_recall_at_3_max value: 26.443127773025783 - type: nauc_recall_at_3_std value: 3.7011448604241477 - type: nauc_recall_at_5_diff1 value: 37.608535157055776 - type: nauc_recall_at_5_max value: 26.168016189725822 - type: nauc_recall_at_5_std value: 6.344191564595316 - type: ndcg_at_1 value: 34.09083333333333 - type: ndcg_at_10 value: 44.361250000000005 - type: ndcg_at_100 value: 49.586166666666664 - type: ndcg_at_1000 value: 51.623583333333336 - type: ndcg_at_20 value: 46.40158333333333 - type: ndcg_at_3 value: 39.27733333333333 - type: ndcg_at_5 value: 41.662333333333336 - type: precision_at_1 value: 34.09083333333333 - type: precision_at_10 value: 7.957000000000002 - type: precision_at_100 value: 1.2521666666666669 - type: precision_at_1000 value: 0.16125 - type: precision_at_20 value: 4.6755 - type: precision_at_3 value: 18.402083333333334 - type: precision_at_5 value: 13.104333333333335 - type: recall_at_1 value: 28.304499999999997 - type: recall_at_10 value: 56.80666666666667 - type: recall_at_100 value: 79.66208333333334 - type: recall_at_1000 value: 93.6455 - type: recall_at_20 value: 64.2495 - type: recall_at_3 value: 42.431333333333335 - type: recall_at_5 value: 48.665416666666665 - task: type: Retrieval dataset: name: MTEB ClimateFEVER type: mteb/climate-fever config: default split: test revision: 47f2ac6acb640fc46020b02a5b59fdda04d39380 metrics: - type: main_score value: 43.525999999999996 - type: map_at_1 value: 19.291 - type: map_at_10 value: 33.471000000000004 - type: map_at_100 value: 35.388999999999996 - type: map_at_1000 value: 35.568 - type: map_at_20 value: 34.496 - type: map_at_3 value: 28.713 - type: map_at_5 value: 31.384 - type: mrr_at_1 value: 43.77850162866449 - type: mrr_at_10 value: 56.28576598934912 - type: mrr_at_100 value: 56.8588518168194 - type: mrr_at_1000 value: 56.878236725973544 - type: mrr_at_20 value: 56.6409328120183 - type: mrr_at_3 value: 53.56134636264935 - type: mrr_at_5 value: 55.27795874049956 - type: nauc_map_at_1000_diff1 value: 27.262513153363876 - type: nauc_map_at_1000_max value: 40.099398684385584 - type: nauc_map_at_1000_std value: 18.847812394005512 - type: nauc_map_at_100_diff1 value: 27.238993503030745 - type: nauc_map_at_100_max value: 40.07730434492169 - type: nauc_map_at_100_std value: 18.795349250833684 - type: nauc_map_at_10_diff1 value: 27.70929180366227 - type: nauc_map_at_10_max value: 39.55987024970173 - type: nauc_map_at_10_std value: 17.214881544648996 - type: nauc_map_at_1_diff1 value: 43.34155892182403 - type: nauc_map_at_1_max value: 38.23324890148018 - type: nauc_map_at_1_std value: 6.0781444393516075 - type: nauc_map_at_20_diff1 value: 27.311577477800103 - type: nauc_map_at_20_max value: 39.624414083413456 - type: nauc_map_at_20_std value: 18.149811054163287 - type: nauc_map_at_3_diff1 value: 30.475965062734367 - type: nauc_map_at_3_max value: 38.49324825043695 - type: nauc_map_at_3_std value: 13.357656038648487 - type: nauc_map_at_5_diff1 value: 28.425110095017747 - type: nauc_map_at_5_max value: 39.017894870747796 - type: nauc_map_at_5_std value: 15.543817194122564 - type: nauc_mrr_at_1000_diff1 value: 33.16689354701644 - type: nauc_mrr_at_1000_max value: 41.70755363247148 - type: nauc_mrr_at_1000_std value: 24.61667417463176 - type: nauc_mrr_at_100_diff1 value: 33.147229262917506 - type: nauc_mrr_at_100_max value: 41.712455697170725 - type: 
nauc_mrr_at_100_std value: 24.6418922043652 - type: nauc_mrr_at_10_diff1 value: 32.94185191112572 - type: nauc_mrr_at_10_max value: 41.64272730141954 - type: nauc_mrr_at_10_std value: 24.663391015702707 - type: nauc_mrr_at_1_diff1 value: 39.571969559016395 - type: nauc_mrr_at_1_max value: 39.396249211263495 - type: nauc_mrr_at_1_std value: 16.984149923258357 - type: nauc_mrr_at_20_diff1 value: 33.10040770334742 - type: nauc_mrr_at_20_max value: 41.807565560083034 - type: nauc_mrr_at_20_std value: 24.8064180365271 - type: nauc_mrr_at_3_diff1 value: 33.065406161485704 - type: nauc_mrr_at_3_max value: 41.049510969934694 - type: nauc_mrr_at_3_std value: 23.18371458928609 - type: nauc_mrr_at_5_diff1 value: 33.2389593543916 - type: nauc_mrr_at_5_max value: 41.629486918949915 - type: nauc_mrr_at_5_std value: 24.5777253036149 - type: nauc_ndcg_at_1000_diff1 value: 25.868840609197637 - type: nauc_ndcg_at_1000_max value: 42.79564910784761 - type: nauc_ndcg_at_1000_std value: 27.035091271680113 - type: nauc_ndcg_at_100_diff1 value: 25.019789319579942 - type: nauc_ndcg_at_100_max value: 42.482345143533735 - type: nauc_ndcg_at_100_std value: 26.76872010731345 - type: nauc_ndcg_at_10_diff1 value: 25.949464660653238 - type: nauc_ndcg_at_10_max value: 40.79769544643906 - type: nauc_ndcg_at_10_std value: 22.486116508973204 - type: nauc_ndcg_at_1_diff1 value: 39.571969559016395 - type: nauc_ndcg_at_1_max value: 39.396249211263495 - type: nauc_ndcg_at_1_std value: 16.984149923258357 - type: nauc_ndcg_at_20_diff1 value: 25.173455685962214 - type: nauc_ndcg_at_20_max value: 40.88873540662413 - type: nauc_ndcg_at_20_std value: 24.4451041955519 - type: nauc_ndcg_at_3_diff1 value: 28.185416070726333 - type: nauc_ndcg_at_3_max value: 39.10600031163912 - type: nauc_ndcg_at_3_std value: 18.42694044215541 - type: nauc_ndcg_at_5_diff1 value: 27.112647584005583 - type: nauc_ndcg_at_5_max value: 40.154045682322526 - type: nauc_ndcg_at_5_std value: 20.26822517176828 - type: nauc_precision_at_1000_diff1 value: -16.42087927044017 - type: nauc_precision_at_1000_max value: 3.5326295053913 - type: nauc_precision_at_1000_std value: 24.406810708493197 - type: nauc_precision_at_100_diff1 value: -12.17648135724982 - type: nauc_precision_at_100_max value: 15.895489260126183 - type: nauc_precision_at_100_std value: 32.48346122610907 - type: nauc_precision_at_10_diff1 value: -1.2493131347748072 - type: nauc_precision_at_10_max value: 26.409459305604376 - type: nauc_precision_at_10_std value: 31.115432019300016 - type: nauc_precision_at_1_diff1 value: 39.571969559016395 - type: nauc_precision_at_1_max value: 39.396249211263495 - type: nauc_precision_at_1_std value: 16.984149923258357 - type: nauc_precision_at_20_diff1 value: -6.597509397240593 - type: nauc_precision_at_20_max value: 21.461984620659695 - type: nauc_precision_at_20_std value: 32.9450259748889 - type: nauc_precision_at_3_diff1 value: 9.46378764865453 - type: nauc_precision_at_3_max value: 32.03650819375425 - type: nauc_precision_at_3_std value: 26.489382638510765 - type: nauc_precision_at_5_diff1 value: 3.5987036728169537 - type: nauc_precision_at_5_max value: 30.633955978579703 - type: nauc_precision_at_5_std value: 30.532430088014443 - type: nauc_recall_at_1000_diff1 value: 10.714633106872254 - type: nauc_recall_at_1000_max value: 43.94958623961 - type: nauc_recall_at_1000_std value: 51.78914468954123 - type: nauc_recall_at_100_diff1 value: 9.63781472255557 - type: nauc_recall_at_100_max value: 38.50917465255336 - type: nauc_recall_at_100_std value: 37.78623984642377 
- type: nauc_recall_at_10_diff1 value: 16.480342820841688 - type: nauc_recall_at_10_max value: 35.982566867357406 - type: nauc_recall_at_10_std value: 23.30688188788895 - type: nauc_recall_at_1_diff1 value: 43.34155892182403 - type: nauc_recall_at_1_max value: 38.23324890148018 - type: nauc_recall_at_1_std value: 6.0781444393516075 - type: nauc_recall_at_20_diff1 value: 13.521048985146367 - type: nauc_recall_at_20_max value: 34.62462209239834 - type: nauc_recall_at_20_std value: 27.85924191501618 - type: nauc_recall_at_3_diff1 value: 23.57032748533523 - type: nauc_recall_at_3_max value: 36.32703197635613 - type: nauc_recall_at_3_std value: 15.730238734014337 - type: nauc_recall_at_5_diff1 value: 19.61387036368584 - type: nauc_recall_at_5_max value: 36.22030835529556 - type: nauc_recall_at_5_std value: 19.76310648649897 - type: ndcg_at_1 value: 43.779 - type: ndcg_at_10 value: 43.525999999999996 - type: ndcg_at_100 value: 50.138000000000005 - type: ndcg_at_1000 value: 52.991 - type: ndcg_at_20 value: 46.083 - type: ndcg_at_3 value: 38.002 - type: ndcg_at_5 value: 39.842 - type: precision_at_1 value: 43.779 - type: precision_at_10 value: 13.205 - type: precision_at_100 value: 2.051 - type: precision_at_1000 value: 0.259 - type: precision_at_20 value: 7.722999999999999 - type: precision_at_3 value: 28.903000000000002 - type: precision_at_5 value: 21.368000000000002 - type: recall_at_1 value: 19.291 - type: recall_at_10 value: 48.754 - type: recall_at_100 value: 70.97200000000001 - type: recall_at_1000 value: 86.611 - type: recall_at_20 value: 55.884 - type: recall_at_3 value: 34.101 - type: recall_at_5 value: 40.784 - task: type: Retrieval dataset: name: MTEB DBPedia type: mteb/dbpedia config: default split: test revision: c0f706b76e590d620bd6618b3ca8efdd34e2d659 metrics: - type: main_score value: 49.884 - type: map_at_1 value: 9.913 - type: map_at_10 value: 23.186999999999998 - type: map_at_100 value: 34.207 - type: map_at_1000 value: 36.318 - type: map_at_20 value: 27.419 - type: map_at_3 value: 15.656 - type: map_at_5 value: 18.945999999999998 - type: mrr_at_1 value: 75.75 - type: mrr_at_10 value: 82.16279761904761 - type: mrr_at_100 value: 82.48445635330299 - type: mrr_at_1000 value: 82.4870246719901 - type: mrr_at_20 value: 82.36203632968338 - type: mrr_at_3 value: 81.29166666666666 - type: mrr_at_5 value: 82.02916666666667 - type: nauc_map_at_1000_diff1 value: 17.0739966990996 - type: nauc_map_at_1000_max value: 28.440065298437133 - type: nauc_map_at_1000_std value: 20.83498154003865 - type: nauc_map_at_100_diff1 value: 17.75982086107111 - type: nauc_map_at_100_max value: 26.87850835673573 - type: nauc_map_at_100_std value: 18.350282298599275 - type: nauc_map_at_10_diff1 value: 17.15984258564116 - type: nauc_map_at_10_max value: 10.846179132675553 - type: nauc_map_at_10_std value: -6.263534464094614 - type: nauc_map_at_1_diff1 value: 24.014897777973694 - type: nauc_map_at_1_max value: -4.556638938723358 - type: nauc_map_at_1_std value: -22.7844467526989 - type: nauc_map_at_20_diff1 value: 16.3179372493187 - type: nauc_map_at_20_max value: 17.176378915498915 - type: nauc_map_at_20_std value: 1.9378637630340372 - type: nauc_map_at_3_diff1 value: 19.12786794046792 - type: nauc_map_at_3_max value: 0.09063919305677291 - type: nauc_map_at_3_std value: -16.713143158330492 - type: nauc_map_at_5_diff1 value: 18.76504725420023 - type: nauc_map_at_5_max value: 5.040867712207419 - type: nauc_map_at_5_std value: -12.382578318931165 - type: nauc_mrr_at_1000_diff1 value: 54.61266255011247 - type: 
nauc_mrr_at_1000_max value: 60.83961280977112 - type: nauc_mrr_at_1000_std value: 32.70429260443016 - type: nauc_mrr_at_100_diff1 value: 54.61346236538542 - type: nauc_mrr_at_100_max value: 60.8407974416647 - type: nauc_mrr_at_100_std value: 32.69272843993462 - type: nauc_mrr_at_10_diff1 value: 54.74633685810871 - type: nauc_mrr_at_10_max value: 61.084525933097865 - type: nauc_mrr_at_10_std value: 33.001220210025565 - type: nauc_mrr_at_1_diff1 value: 56.12708423835806 - type: nauc_mrr_at_1_max value: 58.9314540998289 - type: nauc_mrr_at_1_std value: 27.39422607651012 - type: nauc_mrr_at_20_diff1 value: 54.58896150245695 - type: nauc_mrr_at_20_max value: 60.890929983464815 - type: nauc_mrr_at_20_std value: 32.65559641276393 - type: nauc_mrr_at_3_diff1 value: 54.38229071443791 - type: nauc_mrr_at_3_max value: 59.987849044098596 - type: nauc_mrr_at_3_std value: 33.439813880719974 - type: nauc_mrr_at_5_diff1 value: 54.961790262449824 - type: nauc_mrr_at_5_max value: 61.17705173908951 - type: nauc_mrr_at_5_std value: 33.30939850734856 - type: nauc_ndcg_at_1000_diff1 value: 29.27465932507067 - type: nauc_ndcg_at_1000_max value: 47.952543312315214 - type: nauc_ndcg_at_1000_std value: 36.17132236391485 - type: nauc_ndcg_at_100_diff1 value: 28.63072328980134 - type: nauc_ndcg_at_100_max value: 41.460833419186564 - type: nauc_ndcg_at_100_std value: 27.157100358988135 - type: nauc_ndcg_at_10_diff1 value: 23.41488013023301 - type: nauc_ndcg_at_10_max value: 39.27798133072349 - type: nauc_ndcg_at_10_std value: 21.979241438928312 - type: nauc_ndcg_at_1_diff1 value: 46.12120543657642 - type: nauc_ndcg_at_1_max value: 47.28452124039853 - type: nauc_ndcg_at_1_std value: 19.799884708952543 - type: nauc_ndcg_at_20_diff1 value: 23.627669045115574 - type: nauc_ndcg_at_20_max value: 35.88225062457673 - type: nauc_ndcg_at_20_std value: 18.218628030529498 - type: nauc_ndcg_at_3_diff1 value: 25.37309228946118 - type: nauc_ndcg_at_3_max value: 40.64426332992231 - type: nauc_ndcg_at_3_std value: 24.608330645901482 - type: nauc_ndcg_at_5_diff1 value: 24.055798594999654 - type: nauc_ndcg_at_5_max value: 41.16180524175431 - type: nauc_ndcg_at_5_std value: 24.048305528761315 - type: nauc_precision_at_1000_diff1 value: -18.234943251015576 - type: nauc_precision_at_1000_max value: 0.48708502364659184 - type: nauc_precision_at_1000_std value: 2.4473601543134027 - type: nauc_precision_at_100_diff1 value: -3.0077810947381227 - type: nauc_precision_at_100_max value: 25.27249321108913 - type: nauc_precision_at_100_std value: 37.36575792126928 - type: nauc_precision_at_10_diff1 value: -0.2393778190297635 - type: nauc_precision_at_10_max value: 36.40513293547299 - type: nauc_precision_at_10_std value: 37.4827885766009 - type: nauc_precision_at_1_diff1 value: 56.12708423835806 - type: nauc_precision_at_1_max value: 58.9314540998289 - type: nauc_precision_at_1_std value: 27.39422607651012 - type: nauc_precision_at_20_diff1 value: -1.2010133229402933 - type: nauc_precision_at_20_max value: 34.117541814385966 - type: nauc_precision_at_20_std value: 39.13273254177449 - type: nauc_precision_at_3_diff1 value: 11.757378092198486 - type: nauc_precision_at_3_max value: 42.637962482588875 - type: nauc_precision_at_3_std value: 37.42465077352342 - type: nauc_precision_at_5_diff1 value: 7.233177203405101 - type: nauc_precision_at_5_max value: 43.1663582897407 - type: nauc_precision_at_5_std value: 38.848449220750055 - type: nauc_recall_at_1000_diff1 value: 27.33938551969145 - type: nauc_recall_at_1000_max value: 45.5614254479334 - type: 
nauc_recall_at_1000_std value: 50.58528916250458 - type: nauc_recall_at_100_diff1 value: 23.610383761920097 - type: nauc_recall_at_100_max value: 31.422168485847184 - type: nauc_recall_at_100_std value: 25.58649926458304 - type: nauc_recall_at_10_diff1 value: 14.62495111808408 - type: nauc_recall_at_10_max value: 7.4295041277681095 - type: nauc_recall_at_10_std value: -9.32297089600654 - type: nauc_recall_at_1_diff1 value: 24.014897777973694 - type: nauc_recall_at_1_max value: -4.556638938723358 - type: nauc_recall_at_1_std value: -22.7844467526989 - type: nauc_recall_at_20_diff1 value: 14.027862330014662 - type: nauc_recall_at_20_max value: 12.437478731690844 - type: nauc_recall_at_20_std value: -3.0740743798103676 - type: nauc_recall_at_3_diff1 value: 16.354018356566712 - type: nauc_recall_at_3_max value: -2.9812231240997917 - type: nauc_recall_at_3_std value: -18.27746460743442 - type: nauc_recall_at_5_diff1 value: 16.81486583473587 - type: nauc_recall_at_5_max value: 2.420128513974744 - type: nauc_recall_at_5_std value: -14.441820321214108 - type: ndcg_at_1 value: 63.87500000000001 - type: ndcg_at_10 value: 49.884 - type: ndcg_at_100 value: 54.738 - type: ndcg_at_1000 value: 61.635 - type: ndcg_at_20 value: 48.894999999999996 - type: ndcg_at_3 value: 54.287 - type: ndcg_at_5 value: 52.40899999999999 - type: precision_at_1 value: 75.75 - type: precision_at_10 value: 40.9 - type: precision_at_100 value: 13.139999999999999 - type: precision_at_1000 value: 2.533 - type: precision_at_20 value: 30.8 - type: precision_at_3 value: 57.667 - type: precision_at_5 value: 51.05 - type: recall_at_1 value: 9.913 - type: recall_at_10 value: 28.591 - type: recall_at_100 value: 61.017999999999994 - type: recall_at_1000 value: 83.383 - type: recall_at_20 value: 37.834 - type: recall_at_3 value: 17.049 - type: recall_at_5 value: 21.685 - task: type: Classification dataset: name: MTEB EmotionClassification type: mteb/emotion config: default split: test revision: 4f58c6b202a23cf9a4da393831edf4f9183cad37 metrics: - type: accuracy value: 78.77499999999999 - type: f1 value: 73.74058240799386 - type: f1_weighted value: 79.78804377638227 - type: main_score value: 78.77499999999999 - task: type: Retrieval dataset: name: MTEB FEVER type: mteb/fever config: default split: test revision: bea83ef9e8fb933d90a2f1d5515737465d613e12 metrics: - type: main_score value: 90.986 - type: map_at_1 value: 81.601 - type: map_at_10 value: 88.242 - type: map_at_100 value: 88.46000000000001 - type: map_at_1000 value: 88.472 - type: map_at_20 value: 88.375 - type: map_at_3 value: 87.237 - type: map_at_5 value: 87.85300000000001 - type: mrr_at_1 value: 87.81878187818782 - type: mrr_at_10 value: 92.20301196786335 - type: mrr_at_100 value: 92.24884236673292 - type: mrr_at_1000 value: 92.2496338899362 - type: mrr_at_20 value: 92.23112073283473 - type: mrr_at_3 value: 91.77417741774165 - type: mrr_at_5 value: 92.03970397039689 - type: nauc_map_at_1000_diff1 value: 56.54670664910505 - type: nauc_map_at_1000_max value: 33.08375749975477 - type: nauc_map_at_1000_std value: 2.7491595418252865 - type: nauc_map_at_100_diff1 value: 56.50887688686924 - type: nauc_map_at_100_max value: 33.075487189958494 - type: nauc_map_at_100_std value: 2.7675869969253375 - type: nauc_map_at_10_diff1 value: 56.08080806610569 - type: nauc_map_at_10_max value: 32.776972098819066 - type: nauc_map_at_10_std value: 2.5904846711290097 - type: nauc_map_at_1_diff1 value: 60.645344065853145 - type: nauc_map_at_1_max value: 31.232776777514797 - type: nauc_map_at_1_std 
value: -1.1946138176109171 - type: nauc_map_at_20_diff1 value: 56.28378454162355 - type: nauc_map_at_20_max value: 32.98207150385811 - type: nauc_map_at_20_std value: 2.8469814040214025 - type: nauc_map_at_3_diff1 value: 55.81958007095375 - type: nauc_map_at_3_max value: 31.602707711038313 - type: nauc_map_at_3_std value: 0.8117019292273401 - type: nauc_map_at_5_diff1 value: 55.706025752316535 - type: nauc_map_at_5_max value: 32.16032683604737 - type: nauc_map_at_5_std value: 1.8853201503498669 - type: nauc_mrr_at_1000_diff1 value: 75.4997173366251 - type: nauc_mrr_at_1000_max value: 41.49117135484116 - type: nauc_mrr_at_1000_std value: -2.0636172883680852 - type: nauc_mrr_at_100_diff1 value: 75.50118860648519 - type: nauc_mrr_at_100_max value: 41.49490161517194 - type: nauc_mrr_at_100_std value: -2.057024385178682 - type: nauc_mrr_at_10_diff1 value: 75.47295153099428 - type: nauc_mrr_at_10_max value: 41.55003304042536 - type: nauc_mrr_at_10_std value: -2.0353663198929253 - type: nauc_mrr_at_1_diff1 value: 76.632058433229 - type: nauc_mrr_at_1_max value: 39.754483718891656 - type: nauc_mrr_at_1_std value: -2.962241058101701 - type: nauc_mrr_at_20_diff1 value: 75.47221882396194 - type: nauc_mrr_at_20_max value: 41.50779280480839 - type: nauc_mrr_at_20_std value: -1.9620212266426307 - type: nauc_mrr_at_3_diff1 value: 75.5682297897137 - type: nauc_mrr_at_3_max value: 41.53543801506081 - type: nauc_mrr_at_3_std value: -3.391681195945978 - type: nauc_mrr_at_5_diff1 value: 75.37562775183947 - type: nauc_mrr_at_5_max value: 41.42028509006753 - type: nauc_mrr_at_5_std value: -2.418698675622726 - type: nauc_ndcg_at_1000_diff1 value: 59.364557011624 - type: nauc_ndcg_at_1000_max value: 35.4112238125149 - type: nauc_ndcg_at_1000_std value: 3.717516193303376 - type: nauc_ndcg_at_100_diff1 value: 58.55706703023122 - type: nauc_ndcg_at_100_max value: 35.352285999934594 - type: nauc_ndcg_at_100_std value: 4.273437944266781 - type: nauc_ndcg_at_10_diff1 value: 56.77422701267037 - type: nauc_ndcg_at_10_max value: 34.24909893882957 - type: nauc_ndcg_at_10_std value: 4.178151434006727 - type: nauc_ndcg_at_1_diff1 value: 76.632058433229 - type: nauc_ndcg_at_1_max value: 39.754483718891656 - type: nauc_ndcg_at_1_std value: -2.962241058101701 - type: nauc_ndcg_at_20_diff1 value: 57.27343398231262 - type: nauc_ndcg_at_20_max value: 34.7416626740278 - type: nauc_ndcg_at_20_std value: 4.955858766014002 - type: nauc_ndcg_at_3_diff1 value: 57.69267803121093 - type: nauc_ndcg_at_3_max value: 33.13744317023105 - type: nauc_ndcg_at_3_std value: 0.40380284030057023 - type: nauc_ndcg_at_5_diff1 value: 56.57461019113917 - type: nauc_ndcg_at_5_max value: 33.244657840804386 - type: nauc_ndcg_at_5_std value: 2.5121440827702046 - type: nauc_precision_at_1000_diff1 value: -14.54492513449718 - type: nauc_precision_at_1000_max value: -5.94552147573623 - type: nauc_precision_at_1000_std value: 1.2446209816057374 - type: nauc_precision_at_100_diff1 value: -15.452676132568344 - type: nauc_precision_at_100_max value: -3.760241749847617 - type: nauc_precision_at_100_std value: 4.623534605290865 - type: nauc_precision_at_10_diff1 value: -12.712908026086176 - type: nauc_precision_at_10_max value: 0.45241316994816805 - type: nauc_precision_at_10_std value: 7.849478570138391 - type: nauc_precision_at_1_diff1 value: 76.632058433229 - type: nauc_precision_at_1_max value: 39.754483718891656 - type: nauc_precision_at_1_std value: -2.962241058101701 - type: nauc_precision_at_20_diff1 value: -14.514618673172041 - type: nauc_precision_at_20_max 
value: -1.113635490621818 - type: nauc_precision_at_20_std value: 8.599811730457576 - type: nauc_precision_at_3_diff1 value: 6.1367799850003815 - type: nauc_precision_at_3_max value: 8.466271950897857 - type: nauc_precision_at_3_std value: 1.7458051543195068 - type: nauc_precision_at_5_diff1 value: -5.804548945783379 - type: nauc_precision_at_5_max value: 3.4060251839074818 - type: nauc_precision_at_5_std value: 5.583410511782371 - type: nauc_recall_at_1000_diff1 value: 19.329432953574095 - type: nauc_recall_at_1000_max value: 43.260442595158736 - type: nauc_recall_at_1000_std value: 53.89644660661804 - type: nauc_recall_at_100_diff1 value: 21.265326296051235 - type: nauc_recall_at_100_max value: 38.573000195373695 - type: nauc_recall_at_100_std value: 42.169391082152785 - type: nauc_recall_at_10_diff1 value: 29.785129558987432 - type: nauc_recall_at_10_max value: 28.379657867558034 - type: nauc_recall_at_10_std value: 21.132574624091973 - type: nauc_recall_at_1_diff1 value: 60.645344065853145 - type: nauc_recall_at_1_max value: 31.232776777514797 - type: nauc_recall_at_1_std value: -1.1946138176109171 - type: nauc_recall_at_20_diff1 value: 25.88845612373954 - type: nauc_recall_at_20_max value: 30.24785945821152 - type: nauc_recall_at_20_std value: 31.73911437468067 - type: nauc_recall_at_3_diff1 value: 42.2968464797395 - type: nauc_recall_at_3_max value: 26.494318009870018 - type: nauc_recall_at_3_std value: 2.6045977160467544 - type: nauc_recall_at_5_diff1 value: 35.81340094401374 - type: nauc_recall_at_5_max value: 25.91082947510634 - type: nauc_recall_at_5_std value: 9.759404930864779 - type: ndcg_at_1 value: 87.819 - type: ndcg_at_10 value: 90.986 - type: ndcg_at_100 value: 91.69 - type: ndcg_at_1000 value: 91.863 - type: ndcg_at_20 value: 91.293 - type: ndcg_at_3 value: 89.621 - type: ndcg_at_5 value: 90.333 - type: precision_at_1 value: 87.819 - type: precision_at_10 value: 10.753 - type: precision_at_100 value: 1.138 - type: precision_at_1000 value: 0.117 - type: precision_at_20 value: 5.4879999999999995 - type: precision_at_3 value: 33.703 - type: precision_at_5 value: 20.831 - type: recall_at_1 value: 81.601 - type: recall_at_10 value: 95.44200000000001 - type: recall_at_100 value: 98.14399999999999 - type: recall_at_1000 value: 99.157 - type: recall_at_20 value: 96.43 - type: recall_at_3 value: 91.729 - type: recall_at_5 value: 93.552 - task: type: Retrieval dataset: name: MTEB FiQA2018 type: mteb/fiqa config: default split: test revision: 27a168819829fe9bcd655c2df245fb19452e8e06 metrics: - type: main_score value: 56.056 - type: map_at_1 value: 28.666000000000004 - type: map_at_10 value: 47.437000000000005 - type: map_at_100 value: 49.537 - type: map_at_1000 value: 49.665 - type: map_at_20 value: 48.618 - type: map_at_3 value: 41.355 - type: map_at_5 value: 44.525 - type: mrr_at_1 value: 55.55555555555556 - type: mrr_at_10 value: 63.705173427395614 - type: mrr_at_100 value: 64.25449940779741 - type: mrr_at_1000 value: 64.27635581092147 - type: mrr_at_20 value: 64.03796029079103 - type: mrr_at_3 value: 61.49691358024688 - type: mrr_at_5 value: 62.73148148148143 - type: nauc_map_at_1000_diff1 value: 43.24282910397747 - type: nauc_map_at_1000_max value: 28.506093180265644 - type: nauc_map_at_1000_std value: -13.040508386155054 - type: nauc_map_at_100_diff1 value: 43.23650442904607 - type: nauc_map_at_100_max value: 28.470565635459156 - type: nauc_map_at_100_std value: -12.988098780714935 - type: nauc_map_at_10_diff1 value: 43.393840733087686 - type: nauc_map_at_10_max value: 
26.637302062720153 - type: nauc_map_at_10_std value: -14.47500292113762 - type: nauc_map_at_1_diff1 value: 47.705150227211725 - type: nauc_map_at_1_max value: 15.354189686550129 - type: nauc_map_at_1_std value: -14.559819859039067 - type: nauc_map_at_20_diff1 value: 43.14121075706104 - type: nauc_map_at_20_max value: 27.811170590408395 - type: nauc_map_at_20_std value: -13.459413585283583 - type: nauc_map_at_3_diff1 value: 44.33938667720801 - type: nauc_map_at_3_max value: 21.785619884549398 - type: nauc_map_at_3_std value: -15.569980103071593 - type: nauc_map_at_5_diff1 value: 43.39280905665027 - type: nauc_map_at_5_max value: 25.021492190645017 - type: nauc_map_at_5_std value: -14.48856622187443 - type: nauc_mrr_at_1000_diff1 value: 52.971563939946286 - type: nauc_mrr_at_1000_max value: 38.88019486172324 - type: nauc_mrr_at_1000_std value: -12.412991642381616 - type: nauc_mrr_at_100_diff1 value: 52.978468139876945 - type: nauc_mrr_at_100_max value: 38.89751787948751 - type: nauc_mrr_at_100_std value: -12.3677876252269 - type: nauc_mrr_at_10_diff1 value: 52.78507148048174 - type: nauc_mrr_at_10_max value: 38.55079809310022 - type: nauc_mrr_at_10_std value: -12.944127025078755 - type: nauc_mrr_at_1_diff1 value: 55.52626805861546 - type: nauc_mrr_at_1_max value: 40.49306809164979 - type: nauc_mrr_at_1_std value: -12.886607701317681 - type: nauc_mrr_at_20_diff1 value: 52.9592152665678 - type: nauc_mrr_at_20_max value: 38.88514014589964 - type: nauc_mrr_at_20_std value: -12.434464359819444 - type: nauc_mrr_at_3_diff1 value: 52.73696844091174 - type: nauc_mrr_at_3_max value: 38.61018727252859 - type: nauc_mrr_at_3_std value: -13.123989867364166 - type: nauc_mrr_at_5_diff1 value: 53.037110010188 - type: nauc_mrr_at_5_max value: 38.44770729849151 - type: nauc_mrr_at_5_std value: -13.49318771828972 - type: nauc_ndcg_at_1000_diff1 value: 44.73813840091289 - type: nauc_ndcg_at_1000_max value: 33.70113904685389 - type: nauc_ndcg_at_1000_std value: -10.328687058192742 - type: nauc_ndcg_at_100_diff1 value: 44.595174119928835 - type: nauc_ndcg_at_100_max value: 33.4788285112467 - type: nauc_ndcg_at_100_std value: -8.695355259716946 - type: nauc_ndcg_at_10_diff1 value: 44.39837225263 - type: nauc_ndcg_at_10_max value: 29.188289725593393 - type: nauc_ndcg_at_10_std value: -13.67608323673103 - type: nauc_ndcg_at_1_diff1 value: 55.52626805861546 - type: nauc_ndcg_at_1_max value: 40.49306809164979 - type: nauc_ndcg_at_1_std value: -12.886607701317681 - type: nauc_ndcg_at_20_diff1 value: 44.24661739902305 - type: nauc_ndcg_at_20_max value: 31.667868318249965 - type: nauc_ndcg_at_20_std value: -10.65470780066342 - type: nauc_ndcg_at_3_diff1 value: 43.39857166975522 - type: nauc_ndcg_at_3_max value: 31.764668313577495 - type: nauc_ndcg_at_3_std value: -14.494866954678152 - type: nauc_ndcg_at_5_diff1 value: 43.16976647347281 - type: nauc_ndcg_at_5_max value: 29.878329062643143 - type: nauc_ndcg_at_5_std value: -13.987689089179739 - type: nauc_precision_at_1000_diff1 value: -9.807973252625484 - type: nauc_precision_at_1000_max value: 26.6279603849494 - type: nauc_precision_at_1000_std value: 7.113187103520632 - type: nauc_precision_at_100_diff1 value: -4.777149603323976 - type: nauc_precision_at_100_max value: 31.03410463692187 - type: nauc_precision_at_100_std value: 10.463144150275435 - type: nauc_precision_at_10_diff1 value: 8.691528703215962 - type: nauc_precision_at_10_max value: 33.329579434123374 - type: nauc_precision_at_10_std value: -0.8002015226329403 - type: nauc_precision_at_1_diff1 value: 
55.52626805861546 - type: nauc_precision_at_1_max value: 40.49306809164979 - type: nauc_precision_at_1_std value: -12.886607701317681 - type: nauc_precision_at_20_diff1 value: 3.4564653474184284 - type: nauc_precision_at_20_max value: 34.401070158471136 - type: nauc_precision_at_20_std value: 5.813431200164549 - type: nauc_precision_at_3_diff1 value: 22.463219705462187 - type: nauc_precision_at_3_max value: 34.77413976546924 - type: nauc_precision_at_3_std value: -7.083890789741479 - type: nauc_precision_at_5_diff1 value: 14.011006004883154 - type: nauc_precision_at_5_max value: 35.73655466853702 - type: nauc_precision_at_5_std value: -2.8395172077771598 - type: nauc_recall_at_1000_diff1 value: 16.478046357391555 - type: nauc_recall_at_1000_max value: 43.231704288282344 - type: nauc_recall_at_1000_std value: 38.430684937573645 - type: nauc_recall_at_100_diff1 value: 30.764718344602436 - type: nauc_recall_at_100_max value: 31.769050487166655 - type: nauc_recall_at_100_std value: 23.48468311677149 - type: nauc_recall_at_10_diff1 value: 34.47339565324045 - type: nauc_recall_at_10_max value: 19.054212335800454 - type: nauc_recall_at_10_std value: -11.039734015330437 - type: nauc_recall_at_1_diff1 value: 47.705150227211725 - type: nauc_recall_at_1_max value: 15.354189686550129 - type: nauc_recall_at_1_std value: -14.559819859039067 - type: nauc_recall_at_20_diff1 value: 32.1011474016873 - type: nauc_recall_at_20_max value: 25.546372988304423 - type: nauc_recall_at_20_std value: -0.007233471152482897 - type: nauc_recall_at_3_diff1 value: 37.5708138019065 - type: nauc_recall_at_3_max value: 16.66410785756736 - type: nauc_recall_at_3_std value: -15.404817020108966 - type: nauc_recall_at_5_diff1 value: 35.714519648479595 - type: nauc_recall_at_5_max value: 19.02075233009296 - type: nauc_recall_at_5_std value: -13.180963359760725 - type: ndcg_at_1 value: 55.556000000000004 - type: ndcg_at_10 value: 56.056 - type: ndcg_at_100 value: 62.44 - type: ndcg_at_1000 value: 64.263 - type: ndcg_at_20 value: 58.638999999999996 - type: ndcg_at_3 value: 51.722 - type: ndcg_at_5 value: 52.701 - type: precision_at_1 value: 55.556000000000004 - type: precision_at_10 value: 15.679000000000002 - type: precision_at_100 value: 2.252 - type: precision_at_1000 value: 0.257 - type: precision_at_20 value: 9.02 - type: precision_at_3 value: 34.619 - type: precision_at_5 value: 25.093 - type: recall_at_1 value: 28.666000000000004 - type: recall_at_10 value: 63.717999999999996 - type: recall_at_100 value: 86.938 - type: recall_at_1000 value: 97.603 - type: recall_at_20 value: 71.649 - type: recall_at_3 value: 46.663 - type: recall_at_5 value: 53.313 - task: type: Retrieval dataset: name: MTEB HotpotQA type: mteb/hotpotqa config: default split: test revision: ab518f4d6fcca38d87c25209f94beba119d02014 metrics: - type: main_score value: 71.74199999999999 - type: map_at_1 value: 41.729 - type: map_at_10 value: 63.168 - type: map_at_100 value: 64.132 - type: map_at_1000 value: 64.199 - type: map_at_20 value: 63.736000000000004 - type: map_at_3 value: 59.826 - type: map_at_5 value: 61.882000000000005 - type: mrr_at_1 value: 83.45712356515868 - type: mrr_at_10 value: 87.850342432719 - type: mrr_at_100 value: 88.0016320691113 - type: mrr_at_1000 value: 88.00576596968136 - type: mrr_at_20 value: 87.94463253190389 - type: mrr_at_3 value: 87.13706954760278 - type: mrr_at_5 value: 87.59419311276136 - type: nauc_map_at_1000_diff1 value: 13.635446621095054 - type: nauc_map_at_1000_max value: 18.670632529445633 - type: nauc_map_at_1000_std 
value: 10.444842636150575 - type: nauc_map_at_100_diff1 value: 13.599262398010783 - type: nauc_map_at_100_max value: 18.636389405484806 - type: nauc_map_at_100_std value: 10.460027483576043 - type: nauc_map_at_10_diff1 value: 13.235053919323942 - type: nauc_map_at_10_max value: 18.252140477080047 - type: nauc_map_at_10_std value: 9.9075337042203 - type: nauc_map_at_1_diff1 value: 76.51940497836482 - type: nauc_map_at_1_max value: 51.251419487235474 - type: nauc_map_at_1_std value: 0.16714896857146574 - type: nauc_map_at_20_diff1 value: 13.4178245722222 - type: nauc_map_at_20_max value: 18.40988771210718 - type: nauc_map_at_20_std value: 10.216685163366282 - type: nauc_map_at_3_diff1 value: 13.38370761663418 - type: nauc_map_at_3_max value: 17.760962555456537 - type: nauc_map_at_3_std value: 7.15741965624388 - type: nauc_map_at_5_diff1 value: 13.138133309724855 - type: nauc_map_at_5_max value: 17.871761295251044 - type: nauc_map_at_5_std value: 8.475147426940074 - type: nauc_mrr_at_1000_diff1 value: 75.82650818891959 - type: nauc_mrr_at_1000_max value: 53.6736100668434 - type: nauc_mrr_at_1000_std value: 1.8025016349213916 - type: nauc_mrr_at_100_diff1 value: 75.82530574210111 - type: nauc_mrr_at_100_max value: 53.68067545829002 - type: nauc_mrr_at_100_std value: 1.8147470536495791 - type: nauc_mrr_at_10_diff1 value: 75.8330135686799 - type: nauc_mrr_at_10_max value: 53.78626885349077 - type: nauc_mrr_at_10_std value: 1.7975782717226636 - type: nauc_mrr_at_1_diff1 value: 76.51940497836482 - type: nauc_mrr_at_1_max value: 51.251419487235474 - type: nauc_mrr_at_1_std value: 0.16714896857146574 - type: nauc_mrr_at_20_diff1 value: 75.82783382464166 - type: nauc_mrr_at_20_max value: 53.68364567043885 - type: nauc_mrr_at_20_std value: 1.742037904463963 - type: nauc_mrr_at_3_diff1 value: 75.6944609768663 - type: nauc_mrr_at_3_max value: 53.803941340341666 - type: nauc_mrr_at_3_std value: 1.1849945458077804 - type: nauc_mrr_at_5_diff1 value: 75.73006960604903 - type: nauc_mrr_at_5_max value: 53.62223096420106 - type: nauc_mrr_at_5_std value: 1.6144067563410909 - type: nauc_ndcg_at_1000_diff1 value: 21.58025241642726 - type: nauc_ndcg_at_1000_max value: 24.675747527001153 - type: nauc_ndcg_at_1000_std value: 13.075943547492718 - type: nauc_ndcg_at_100_diff1 value: 20.30260137544846 - type: nauc_ndcg_at_100_max value: 23.757528813872018 - type: nauc_ndcg_at_100_std value: 13.648994687574062 - type: nauc_ndcg_at_10_diff1 value: 18.995052360997818 - type: nauc_ndcg_at_10_max value: 22.254260808196037 - type: nauc_ndcg_at_10_std value: 11.27212390633054 - type: nauc_ndcg_at_1_diff1 value: 76.51940497836482 - type: nauc_ndcg_at_1_max value: 51.251419487235474 - type: nauc_ndcg_at_1_std value: 0.16714896857146574 - type: nauc_ndcg_at_20_diff1 value: 19.333742380695757 - type: nauc_ndcg_at_20_max value: 22.527779834633364 - type: nauc_ndcg_at_20_std value: 12.161009000707917 - type: nauc_ndcg_at_3_diff1 value: 20.013329040965534 - type: nauc_ndcg_at_3_max value: 21.99692460311921 - type: nauc_ndcg_at_3_std value: 6.8076290638386165 - type: nauc_ndcg_at_5_diff1 value: 19.08226315942471 - type: nauc_ndcg_at_5_max value: 21.71185964294168 - type: nauc_ndcg_at_5_std value: 8.671911269518214 - type: nauc_precision_at_1000_diff1 value: 2.4462475489446764 - type: nauc_precision_at_1000_max value: 29.145662064268578 - type: nauc_precision_at_1000_std value: 49.20704909525856 - type: nauc_precision_at_100_diff1 value: 0.11271196725540299 - type: nauc_precision_at_100_max value: 17.37584606388067 - type: 
nauc_precision_at_100_std value: 34.66099346244071 - type: nauc_precision_at_10_diff1 value: 2.9923183951227825 - type: nauc_precision_at_10_max value: 14.261884731124264 - type: nauc_precision_at_10_std value: 18.084188795498378 - type: nauc_precision_at_1_diff1 value: 76.51940497836482 - type: nauc_precision_at_1_max value: 51.251419487235474 - type: nauc_precision_at_1_std value: 0.16714896857146574 - type: nauc_precision_at_20_diff1 value: 1.9180293008303761 - type: nauc_precision_at_20_max value: 13.832269193468512 - type: nauc_precision_at_20_std value: 21.65284406055607 - type: nauc_precision_at_3_diff1 value: 7.226609484731811 - type: nauc_precision_at_3_max value: 15.162908526977272 - type: nauc_precision_at_3_std value: 8.451859972962776 - type: nauc_precision_at_5_diff1 value: 4.705236845538159 - type: nauc_precision_at_5_max value: 14.022910843582666 - type: nauc_precision_at_5_std value: 11.777269322821605 - type: nauc_recall_at_1000_diff1 value: 2.446247548945172 - type: nauc_recall_at_1000_max value: 29.14566206426889 - type: nauc_recall_at_1000_std value: 49.20704909525879 - type: nauc_recall_at_100_diff1 value: 0.1127119672553316 - type: nauc_recall_at_100_max value: 17.37584606388062 - type: nauc_recall_at_100_std value: 34.660993462440686 - type: nauc_recall_at_10_diff1 value: 2.9923183951227927 - type: nauc_recall_at_10_max value: 14.261884731124299 - type: nauc_recall_at_10_std value: 18.08418879549837 - type: nauc_recall_at_1_diff1 value: 76.51940497836482 - type: nauc_recall_at_1_max value: 51.251419487235474 - type: nauc_recall_at_1_std value: 0.16714896857146574 - type: nauc_recall_at_20_diff1 value: 1.918029300830432 - type: nauc_recall_at_20_max value: 13.832269193468566 - type: nauc_recall_at_20_std value: 21.65284406055605 - type: nauc_recall_at_3_diff1 value: 7.226609484731802 - type: nauc_recall_at_3_max value: 15.162908526977182 - type: nauc_recall_at_3_std value: 8.451859972962634 - type: nauc_recall_at_5_diff1 value: 4.705236845538197 - type: nauc_recall_at_5_max value: 14.02291084358265 - type: nauc_recall_at_5_std value: 11.777269322821638 - type: ndcg_at_1 value: 83.45700000000001 - type: ndcg_at_10 value: 71.74199999999999 - type: ndcg_at_100 value: 75.008 - type: ndcg_at_1000 value: 76.242 - type: ndcg_at_20 value: 73.114 - type: ndcg_at_3 value: 67.128 - type: ndcg_at_5 value: 69.645 - type: precision_at_1 value: 83.45700000000001 - type: precision_at_10 value: 14.747 - type: precision_at_100 value: 1.73 - type: precision_at_1000 value: 0.189 - type: precision_at_20 value: 7.8149999999999995 - type: precision_at_3 value: 42.323 - type: precision_at_5 value: 27.381 - type: recall_at_1 value: 41.729 - type: recall_at_10 value: 73.734 - type: recall_at_100 value: 86.502 - type: recall_at_1000 value: 94.60499999999999 - type: recall_at_20 value: 78.14999999999999 - type: recall_at_3 value: 63.483999999999995 - type: recall_at_5 value: 68.45400000000001 - task: type: Classification dataset: name: MTEB ImdbClassification type: mteb/imdb config: default split: test revision: 3d86128a09e091d6018b6d26cad27f2739fc2db7 metrics: - type: accuracy value: 96.4904 - type: ap value: 94.85481918794709 - type: ap_weighted value: 94.85481918794709 - type: f1 value: 96.4898592305707 - type: f1_weighted value: 96.4898592305707 - type: main_score value: 96.4904 - task: type: Retrieval dataset: name: MTEB MSMARCO type: mteb/msmarco config: default split: dev revision: c5a29a104738b98a9e76336939199e264163d4a0 metrics: - type: main_score value: 43.692 - type: map_at_1 value: 
23.751 - type: map_at_10 value: 36.553999999999995 - type: map_at_100 value: 37.721 - type: map_at_1000 value: 37.763999999999996 - type: map_at_20 value: 37.289 - type: map_at_3 value: 32.643 - type: map_at_5 value: 34.851 - type: mrr_at_1 value: 24.455587392550143 - type: mrr_at_10 value: 37.18388706963206 - type: mrr_at_100 value: 38.28330737932916 - type: mrr_at_1000 value: 38.32054399710817 - type: mrr_at_20 value: 37.8818001216278 - type: mrr_at_3 value: 33.35721107927405 - type: mrr_at_5 value: 35.52483285577843 - type: nauc_map_at_1000_diff1 value: 36.3576177260684 - type: nauc_map_at_1000_max value: 7.854511605962703 - type: nauc_map_at_1000_std value: -17.701121059746878 - type: nauc_map_at_100_diff1 value: 36.356075649230505 - type: nauc_map_at_100_max value: 7.862168042999533 - type: nauc_map_at_100_std value: -17.670102459097233 - type: nauc_map_at_10_diff1 value: 36.22122978875574 - type: nauc_map_at_10_max value: 7.80848606967416 - type: nauc_map_at_10_std value: -18.3265151386167 - type: nauc_map_at_1_diff1 value: 39.28605466408357 - type: nauc_map_at_1_max value: 6.20202977590459 - type: nauc_map_at_1_std value: -15.734334090045026 - type: nauc_map_at_20_diff1 value: 36.33637880909657 - type: nauc_map_at_20_max value: 7.843437969476022 - type: nauc_map_at_20_std value: -17.917533363025996 - type: nauc_map_at_3_diff1 value: 36.24864976076741 - type: nauc_map_at_3_max value: 7.420345251835957 - type: nauc_map_at_3_std value: -18.71678497722944 - type: nauc_map_at_5_diff1 value: 36.0789619291824 - type: nauc_map_at_5_max value: 7.7314285669514495 - type: nauc_map_at_5_std value: -18.748688764538706 - type: nauc_mrr_at_1000_diff1 value: 36.23912675623378 - type: nauc_mrr_at_1000_max value: 7.690553436255147 - type: nauc_mrr_at_1000_std value: -17.609526070212304 - type: nauc_mrr_at_100_diff1 value: 36.23782651189002 - type: nauc_mrr_at_100_max value: 7.70075095171647 - type: nauc_mrr_at_100_std value: -17.575714144960184 - type: nauc_mrr_at_10_diff1 value: 36.125229472534215 - type: nauc_mrr_at_10_max value: 7.635472248755658 - type: nauc_mrr_at_10_std value: -18.208166616511086 - type: nauc_mrr_at_1_diff1 value: 39.20986875554532 - type: nauc_mrr_at_1_max value: 6.062668487561363 - type: nauc_mrr_at_1_std value: -16.04130340817602 - type: nauc_mrr_at_20_diff1 value: 36.21207088739667 - type: nauc_mrr_at_20_max value: 7.699610250145951 - type: nauc_mrr_at_20_std value: -17.778245221724028 - type: nauc_mrr_at_3_diff1 value: 36.03957583885305 - type: nauc_mrr_at_3_max value: 7.225515576504581 - type: nauc_mrr_at_3_std value: -18.74478742943741 - type: nauc_mrr_at_5_diff1 value: 35.969152496648974 - type: nauc_mrr_at_5_max value: 7.584059789018233 - type: nauc_mrr_at_5_std value: -18.569374723129332 - type: nauc_ndcg_at_1000_diff1 value: 35.894655529841806 - type: nauc_ndcg_at_1000_max value: 8.579327424366236 - type: nauc_ndcg_at_1000_std value: -16.359677367747896 - type: nauc_ndcg_at_100_diff1 value: 35.89861902483983 - type: nauc_ndcg_at_100_max value: 8.830873623962242 - type: nauc_ndcg_at_100_std value: -15.173125564722978 - type: nauc_ndcg_at_10_diff1 value: 35.36499811105169 - type: nauc_ndcg_at_10_max value: 8.449267180956992 - type: nauc_ndcg_at_10_std value: -18.41978802362402 - type: nauc_ndcg_at_1_diff1 value: 39.15422481210622 - type: nauc_ndcg_at_1_max value: 6.055515791928331 - type: nauc_ndcg_at_1_std value: -16.042779610876252 - type: nauc_ndcg_at_20_diff1 value: 35.73402868264468 - type: nauc_ndcg_at_20_max value: 8.695705518210847 - type: nauc_ndcg_at_20_std 
value: -16.7735829470466 - type: nauc_ndcg_at_3_diff1 value: 35.31358242856231 - type: nauc_ndcg_at_3_max value: 7.645692789058997 - type: nauc_ndcg_at_3_std value: -19.460003734786874 - type: nauc_ndcg_at_5_diff1 value: 35.05216588927143 - type: nauc_ndcg_at_5_max value: 8.216690520604715 - type: nauc_ndcg_at_5_std value: -19.3982054492159 - type: nauc_precision_at_1000_diff1 value: -4.440002625111349 - type: nauc_precision_at_1000_max value: 7.886988951901723 - type: nauc_precision_at_1000_std value: 9.88111187048247 - type: nauc_precision_at_100_diff1 value: 15.728286119463325 - type: nauc_precision_at_100_max value: 13.218650824470654 - type: nauc_precision_at_100_std value: 16.113245895522553 - type: nauc_precision_at_10_diff1 value: 29.51218489610567 - type: nauc_precision_at_10_max value: 10.197432401942912 - type: nauc_precision_at_10_std value: -16.950603431359493 - type: nauc_precision_at_1_diff1 value: 39.15422481210622 - type: nauc_precision_at_1_max value: 6.055515791928331 - type: nauc_precision_at_1_std value: -16.042779610876252 - type: nauc_precision_at_20_diff1 value: 27.825993070397338 - type: nauc_precision_at_20_max value: 11.437632287846007 - type: nauc_precision_at_20_std value: -7.450353566405601 - type: nauc_precision_at_3_diff1 value: 32.14135556796588 - type: nauc_precision_at_3_max value: 7.989252443574163 - type: nauc_precision_at_3_std value: -21.566254595671055 - type: nauc_precision_at_5_diff1 value: 30.68778685307082 - type: nauc_precision_at_5_max value: 9.332160758499892 - type: nauc_precision_at_5_std value: -20.928554713448914 - type: nauc_recall_at_1000_diff1 value: 25.00810478716878 - type: nauc_recall_at_1000_max value: 46.518165765201644 - type: nauc_recall_at_1000_std value: 61.4734635576085 - type: nauc_recall_at_100_diff1 value: 33.895581318261726 - type: nauc_recall_at_100_max value: 20.10706035872801 - type: nauc_recall_at_100_std value: 24.204226584457047 - type: nauc_recall_at_10_diff1 value: 32.363127359576296 - type: nauc_recall_at_10_max value: 10.729923804989545 - type: nauc_recall_at_10_std value: -18.1335370184202 - type: nauc_recall_at_1_diff1 value: 39.28605466408357 - type: nauc_recall_at_1_max value: 6.20202977590459 - type: nauc_recall_at_1_std value: -15.734334090045026 - type: nauc_recall_at_20_diff1 value: 33.47804003169795 - type: nauc_recall_at_20_max value: 12.781494765263382 - type: nauc_recall_at_20_std value: -9.263970132202658 - type: nauc_recall_at_3_diff1 value: 32.71001429428999 - type: nauc_recall_at_3_max value: 8.353439197382693 - type: nauc_recall_at_3_std value: -21.235097744366954 - type: nauc_recall_at_5_diff1 value: 31.87451464963415 - type: nauc_recall_at_5_max value: 9.635051450907305 - type: nauc_recall_at_5_std value: -21.113235357132794 - type: ndcg_at_1 value: 24.47 - type: ndcg_at_10 value: 43.692 - type: ndcg_at_100 value: 49.211 - type: ndcg_at_1000 value: 50.244 - type: ndcg_at_20 value: 46.278000000000006 - type: ndcg_at_3 value: 35.719 - type: ndcg_at_5 value: 39.652 - type: precision_at_1 value: 24.47 - type: precision_at_10 value: 6.857 - type: precision_at_100 value: 0.9610000000000001 - type: precision_at_1000 value: 0.105 - type: precision_at_20 value: 3.968 - type: precision_at_3 value: 15.181000000000001 - type: precision_at_5 value: 11.117 - type: recall_at_1 value: 23.751 - type: recall_at_10 value: 65.64 - type: recall_at_100 value: 90.967 - type: recall_at_1000 value: 98.738 - type: recall_at_20 value: 75.639 - type: recall_at_3 value: 43.927 - type: recall_at_5 value: 53.366 - task: 
type: Classification dataset: name: MTEB MTOPDomainClassification (en) type: mteb/mtop_domain config: en split: test revision: d80d48c1eb48d3562165c59d59d0034df9fff0bf metrics: - type: accuracy value: 98.82580939352485 - type: f1 value: 98.75201754333801 - type: f1_weighted value: 98.82795205108245 - type: main_score value: 98.82580939352485 - task: type: Classification dataset: name: MTEB MTOPIntentClassification (en) type: mteb/mtop_intent config: en split: test revision: ae001d0e6b1228650b7bd1c2c65fb50ad11a8aba metrics: - type: accuracy value: 92.29822161422709 - type: f1 value: 77.75210224871594 - type: f1_weighted value: 93.58661422540348 - type: main_score value: 92.29822161422709 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (en) type: mteb/amazon_massive_intent config: en split: test revision: 4672e20407010da34463acc759c162ca9734bca6 metrics: - type: accuracy value: 85.17484868863484 - type: f1 value: 81.94484244487094 - type: f1_weighted value: 85.21022593423332 - type: main_score value: 85.17484868863484 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (en) type: mteb/amazon_massive_scenario config: en split: test revision: fad2c6e8459f9e1c45d9315f4953d921437d70f8 metrics: - type: accuracy value: 89.61667787491594 - type: f1 value: 89.02701927621264 - type: f1_weighted value: 89.56306982022801 - type: main_score value: 89.61667787491594 - task: type: Clustering dataset: name: MTEB MedrxivClusteringP2P type: mteb/medrxiv-clustering-p2p config: default split: test revision: e7a26af6f3ae46b30dde8737f02c07b1505bcc73 metrics: - type: main_score value: 46.318282423948574 - type: v_measure value: 46.318282423948574 - type: v_measure_std value: 0.9729055662461538 - task: type: Clustering dataset: name: MTEB MedrxivClusteringS2S type: mteb/medrxiv-clustering-s2s config: default split: test revision: 35191c8c0dca72d8ff3efcd72aa802307d469663 metrics: - type: main_score value: 44.29033625273981 - type: v_measure value: 44.29033625273981 - type: v_measure_std value: 1.0596383629128594 - task: type: Reranking dataset: name: MTEB MindSmallReranking type: mteb/mind_small config: default split: test revision: 59042f120c80e8afa9cdbb224f67076cec0fc9a7 metrics: - type: main_score value: 33.0526129239962 - type: map value: 33.0526129239962 - type: mrr value: 34.29260046890935 - type: nAUC_map_diff1 value: 12.579738077238032 - type: nAUC_map_max value: -20.936629344962 - type: nAUC_map_std value: -1.6096805784945216 - type: nAUC_mrr_diff1 value: 11.597584463580807 - type: nAUC_mrr_max value: -15.723702838537504 - type: nAUC_mrr_std value: 0.2719172965777737 - task: type: Retrieval dataset: name: MTEB NFCorpus type: mteb/nfcorpus config: default split: test revision: ec0fa4fe99da2ff19ca1214b7966684033a58814 metrics: - type: main_score value: 41.486000000000004 - type: map_at_1 value: 6.866 - type: map_at_10 value: 15.895999999999999 - type: map_at_100 value: 21.093 - type: map_at_1000 value: 23.067 - type: map_at_20 value: 18.125 - type: map_at_3 value: 11.421000000000001 - type: map_at_5 value: 13.415 - type: mrr_at_1 value: 52.63157894736842 - type: mrr_at_10 value: 61.486805248415166 - type: mrr_at_100 value: 62.08211009182091 - type: mrr_at_1000 value: 62.10828701365016 - type: mrr_at_20 value: 61.904411187915784 - type: mrr_at_3 value: 59.90712074303407 - type: mrr_at_5 value: 60.91331269349847 - type: nauc_map_at_1000_diff1 value: 25.484625278529403 - type: nauc_map_at_1000_max value: 31.206600396418853 - type: nauc_map_at_1000_std 
value: 15.569448072357156 - type: nauc_map_at_100_diff1 value: 27.636750226316764 - type: nauc_map_at_100_max value: 29.66992681250722 - type: nauc_map_at_100_std value: 10.570600484002671 - type: nauc_map_at_10_diff1 value: 32.76642525548697 - type: nauc_map_at_10_max value: 21.459225397237663 - type: nauc_map_at_10_std value: -3.546494734209264 - type: nauc_map_at_1_diff1 value: 48.8002894871328 - type: nauc_map_at_1_max value: 5.7236722609868815 - type: nauc_map_at_1_std value: -13.283554044471352 - type: nauc_map_at_20_diff1 value: 30.57169701502308 - type: nauc_map_at_20_max value: 25.79666139518404 - type: nauc_map_at_20_std value: 1.781732492989651 - type: nauc_map_at_3_diff1 value: 40.076315947201095 - type: nauc_map_at_3_max value: 12.862524429140054 - type: nauc_map_at_3_std value: -9.188349777126817 - type: nauc_map_at_5_diff1 value: 36.9918718052938 - type: nauc_map_at_5_max value: 16.74234374361876 - type: nauc_map_at_5_std value: -7.818523349307494 - type: nauc_mrr_at_1000_diff1 value: 26.88183002609805 - type: nauc_mrr_at_1000_max value: 47.10209348428658 - type: nauc_mrr_at_1000_std value: 32.067825924992924 - type: nauc_mrr_at_100_diff1 value: 26.871482491566745 - type: nauc_mrr_at_100_max value: 47.11303868498556 - type: nauc_mrr_at_100_std value: 32.08961428818868 - type: nauc_mrr_at_10_diff1 value: 26.6356914977722 - type: nauc_mrr_at_10_max value: 47.091624558810366 - type: nauc_mrr_at_10_std value: 31.942424120660164 - type: nauc_mrr_at_1_diff1 value: 28.19774198483673 - type: nauc_mrr_at_1_max value: 41.44380927834253 - type: nauc_mrr_at_1_std value: 25.18222691885917 - type: nauc_mrr_at_20_diff1 value: 26.86487347109452 - type: nauc_mrr_at_20_max value: 47.1987778214726 - type: nauc_mrr_at_20_std value: 32.143517921610034 - type: nauc_mrr_at_3_diff1 value: 27.34340373236422 - type: nauc_mrr_at_3_max value: 46.358726506276646 - type: nauc_mrr_at_3_std value: 31.74924155572593 - type: nauc_mrr_at_5_diff1 value: 27.209667205060672 - type: nauc_mrr_at_5_max value: 46.79883369072009 - type: nauc_mrr_at_5_std value: 31.655605306670758 - type: nauc_ndcg_at_1000_diff1 value: 18.940195769769687 - type: nauc_ndcg_at_1000_max value: 46.48551313937331 - type: nauc_ndcg_at_1000_std value: 33.64819502089232 - type: nauc_ndcg_at_100_diff1 value: 19.50885253809146 - type: nauc_ndcg_at_100_max value: 40.53174462354878 - type: nauc_ndcg_at_100_std value: 28.516152877751118 - type: nauc_ndcg_at_10_diff1 value: 16.01699218096564 - type: nauc_ndcg_at_10_max value: 41.17322878314514 - type: nauc_ndcg_at_10_std value: 29.002233224832196 - type: nauc_ndcg_at_1_diff1 value: 27.443547710102205 - type: nauc_ndcg_at_1_max value: 40.66529763309582 - type: nauc_ndcg_at_1_std value: 24.15016766225869 - type: nauc_ndcg_at_20_diff1 value: 17.541197675685062 - type: nauc_ndcg_at_20_max value: 40.53231266973844 - type: nauc_ndcg_at_20_std value: 29.54096347876548 - type: nauc_ndcg_at_3_diff1 value: 18.649628357473716 - type: nauc_ndcg_at_3_max value: 41.18603570171764 - type: nauc_ndcg_at_3_std value: 27.125524188420396 - type: nauc_ndcg_at_5_diff1 value: 17.519593751448483 - type: nauc_ndcg_at_5_max value: 42.715997890377345 - type: nauc_ndcg_at_5_std value: 27.902627839899868 - type: nauc_precision_at_1000_diff1 value: -15.528797630565155 - type: nauc_precision_at_1000_max value: 13.741640921778671 - type: nauc_precision_at_1000_std value: 44.50896053788372 - type: nauc_precision_at_100_diff1 value: -14.491464489721887 - type: nauc_precision_at_100_max value: 23.136434418999457 - type: 
nauc_precision_at_100_std value: 49.73145147863128 - type: nauc_precision_at_10_diff1 value: -4.829188942994277 - type: nauc_precision_at_10_max value: 40.327612559528866 - type: nauc_precision_at_10_std value: 39.34919529635044 - type: nauc_precision_at_1_diff1 value: 28.19774198483673 - type: nauc_precision_at_1_max value: 41.44380927834253 - type: nauc_precision_at_1_std value: 25.18222691885917 - type: nauc_precision_at_20_diff1 value: -7.210726293112847 - type: nauc_precision_at_20_max value: 37.195679576636984 - type: nauc_precision_at_20_std value: 45.4597096418357 - type: nauc_precision_at_3_diff1 value: 7.578219537774854 - type: nauc_precision_at_3_max value: 41.59775233475654 - type: nauc_precision_at_3_std value: 30.764584790895118 - type: nauc_precision_at_5_diff1 value: 1.655451789039598 - type: nauc_precision_at_5_max value: 43.435739407610455 - type: nauc_precision_at_5_std value: 33.42552263325999 - type: nauc_recall_at_1000_diff1 value: 5.030705700690516 - type: nauc_recall_at_1000_max value: 19.108072570815583 - type: nauc_recall_at_1000_std value: 14.697734974217308 - type: nauc_recall_at_100_diff1 value: 14.746540318132407 - type: nauc_recall_at_100_max value: 21.798705033854795 - type: nauc_recall_at_100_std value: 11.416195108842587 - type: nauc_recall_at_10_diff1 value: 25.548642427860486 - type: nauc_recall_at_10_max value: 18.711677681987474 - type: nauc_recall_at_10_std value: -5.988904818971677 - type: nauc_recall_at_1_diff1 value: 48.8002894871328 - type: nauc_recall_at_1_max value: 5.7236722609868815 - type: nauc_recall_at_1_std value: -13.283554044471352 - type: nauc_recall_at_20_diff1 value: 23.39140739154809 - type: nauc_recall_at_20_max value: 19.351150636155474 - type: nauc_recall_at_20_std value: -2.757280266915132 - type: nauc_recall_at_3_diff1 value: 38.17453576012812 - type: nauc_recall_at_3_max value: 13.47003839643972 - type: nauc_recall_at_3_std value: -8.75780163862688 - type: nauc_recall_at_5_diff1 value: 33.02812855226899 - type: nauc_recall_at_5_max value: 15.477626408978477 - type: nauc_recall_at_5_std value: -9.072206441070708 - type: ndcg_at_1 value: 50.773999999999994 - type: ndcg_at_10 value: 41.486000000000004 - type: ndcg_at_100 value: 39.051 - type: ndcg_at_1000 value: 48.106 - type: ndcg_at_20 value: 39.432 - type: ndcg_at_3 value: 47.428 - type: ndcg_at_5 value: 45.227000000000004 - type: precision_at_1 value: 52.632 - type: precision_at_10 value: 31.146 - type: precision_at_100 value: 10.328 - type: precision_at_1000 value: 2.432 - type: precision_at_20 value: 23.793 - type: precision_at_3 value: 45.201 - type: precision_at_5 value: 39.876 - type: recall_at_1 value: 6.866 - type: recall_at_10 value: 20.447000000000003 - type: recall_at_100 value: 40.607 - type: recall_at_1000 value: 73.411 - type: recall_at_20 value: 26.082 - type: recall_at_3 value: 12.484 - type: recall_at_5 value: 15.847 - task: type: Retrieval dataset: name: MTEB NQ type: mteb/nq config: default split: test revision: b774495ed302d8c44a3a7ea25c90dbce03968f31 metrics: - type: main_score value: 69.072 - type: map_at_1 value: 45.483000000000004 - type: map_at_10 value: 62.050000000000004 - type: map_at_100 value: 62.693 - type: map_at_1000 value: 62.702999999999996 - type: map_at_20 value: 62.498 - type: map_at_3 value: 58.285 - type: map_at_5 value: 60.711000000000006 - type: mrr_at_1 value: 50.840092699884124 - type: mrr_at_10 value: 64.54635224116673 - type: mrr_at_100 value: 64.9526548702289 - type: mrr_at_1000 value: 64.95908460752281 - type: mrr_at_20 value: 
64.82949565799959 - type: mrr_at_3 value: 61.89165701042856 - type: mrr_at_5 value: 63.632676709154026 - type: nauc_map_at_1000_diff1 value: 43.187285304185224 - type: nauc_map_at_1000_max value: 32.39921659632756 - type: nauc_map_at_1000_std value: -5.780901333066553 - type: nauc_map_at_100_diff1 value: 43.184487221204456 - type: nauc_map_at_100_max value: 32.41176116347982 - type: nauc_map_at_100_std value: -5.76422606662383 - type: nauc_map_at_10_diff1 value: 42.967066814031746 - type: nauc_map_at_10_max value: 32.489617364418514 - type: nauc_map_at_10_std value: -6.029045531102664 - type: nauc_map_at_1_diff1 value: 46.16376563218624 - type: nauc_map_at_1_max value: 26.342624776802232 - type: nauc_map_at_1_std value: -7.142171388751972 - type: nauc_map_at_20_diff1 value: 43.15894358608328 - type: nauc_map_at_20_max value: 32.46492198956245 - type: nauc_map_at_20_std value: -5.788373305449195 - type: nauc_map_at_3_diff1 value: 43.231752344608545 - type: nauc_map_at_3_max value: 31.68003009949564 - type: nauc_map_at_3_std value: -8.015235132765458 - type: nauc_map_at_5_diff1 value: 42.86197608819917 - type: nauc_map_at_5_max value: 32.363857571094485 - type: nauc_map_at_5_std value: -6.780487416387977 - type: nauc_mrr_at_1000_diff1 value: 43.40542912045782 - type: nauc_mrr_at_1000_max value: 32.8461770324533 - type: nauc_mrr_at_1000_std value: -3.6505425530008204 - type: nauc_mrr_at_100_diff1 value: 43.40233508014468 - type: nauc_mrr_at_100_max value: 32.85598538385942 - type: nauc_mrr_at_100_std value: -3.637477352635459 - type: nauc_mrr_at_10_diff1 value: 43.260179162806054 - type: nauc_mrr_at_10_max value: 32.942643527040474 - type: nauc_mrr_at_10_std value: -3.712052825320437 - type: nauc_mrr_at_1_diff1 value: 46.354919460881206 - type: nauc_mrr_at_1_max value: 29.1760258591106 - type: nauc_mrr_at_1_std value: -4.107225031227406 - type: nauc_mrr_at_20_diff1 value: 43.37092385434311 - type: nauc_mrr_at_20_max value: 32.93390254712846 - type: nauc_mrr_at_20_std value: -3.5719056112132006 - type: nauc_mrr_at_3_diff1 value: 43.1744474040527 - type: nauc_mrr_at_3_max value: 32.741290559777994 - type: nauc_mrr_at_3_std value: -4.72677925120697 - type: nauc_mrr_at_5_diff1 value: 43.108396819975674 - type: nauc_mrr_at_5_max value: 32.970519514893084 - type: nauc_mrr_at_5_std value: -4.090906158975974 - type: nauc_ndcg_at_1000_diff1 value: 42.786664193638714 - type: nauc_ndcg_at_1000_max value: 33.65554095609296 - type: nauc_ndcg_at_1000_std value: -4.024030130584482 - type: nauc_ndcg_at_100_diff1 value: 42.691246775210814 - type: nauc_ndcg_at_100_max value: 34.063232335110875 - type: nauc_ndcg_at_100_std value: -3.477813807415248 - type: nauc_ndcg_at_10_diff1 value: 41.90988990571757 - type: nauc_ndcg_at_10_max value: 34.58934812881633 - type: nauc_ndcg_at_10_std value: -4.3295110195497655 - type: nauc_ndcg_at_1_diff1 value: 46.354919460881206 - type: nauc_ndcg_at_1_max value: 29.1760258591106 - type: nauc_ndcg_at_1_std value: -4.107225031227406 - type: nauc_ndcg_at_20_diff1 value: 42.493206675867114 - type: nauc_ndcg_at_20_max value: 34.562441307459544 - type: nauc_ndcg_at_20_std value: -3.4456116866749107 - type: nauc_ndcg_at_3_diff1 value: 42.24180336502808 - type: nauc_ndcg_at_3_max value: 33.064267018100594 - type: nauc_ndcg_at_3_std value: -7.786248093572142 - type: nauc_ndcg_at_5_diff1 value: 41.692714787779565 - type: nauc_ndcg_at_5_max value: 34.20502498949156 - type: nauc_ndcg_at_5_std value: -5.979557859282785 - type: nauc_precision_at_1000_diff1 value: -13.779832506640702 - type: 
nauc_precision_at_1000_max value: 1.243001688631421 - type: nauc_precision_at_1000_std value: 17.351623398622323 - type: nauc_precision_at_100_diff1 value: -11.310526816290297 - type: nauc_precision_at_100_max value: 5.771669506192959 - type: nauc_precision_at_100_std value: 19.917795079540113 - type: nauc_precision_at_10_diff1 value: 2.163699384635286 - type: nauc_precision_at_10_max value: 19.66440698458386 - type: nauc_precision_at_10_std value: 13.689876348315726 - type: nauc_precision_at_1_diff1 value: 46.354919460881206 - type: nauc_precision_at_1_max value: 29.1760258591106 - type: nauc_precision_at_1_std value: -4.107225031227406 - type: nauc_precision_at_20_diff1 value: -3.038735879584471 - type: nauc_precision_at_20_max value: 14.132968299701695 - type: nauc_precision_at_20_std value: 17.78069734664346 - type: nauc_precision_at_3_diff1 value: 21.783760758070095 - type: nauc_precision_at_3_max value: 30.244127986404497 - type: nauc_precision_at_3_std value: -0.12411163467738723 - type: nauc_precision_at_5_diff1 value: 10.980635723302418 - type: nauc_precision_at_5_max value: 25.302293738975575 - type: nauc_precision_at_5_std value: 6.4740817488722024 - type: nauc_recall_at_1000_diff1 value: 34.10343772356593 - type: nauc_recall_at_1000_max value: 80.72497340357538 - type: nauc_recall_at_1000_std value: 69.54564103264093 - type: nauc_recall_at_100_diff1 value: 33.427719956774126 - type: nauc_recall_at_100_max value: 71.54086768335449 - type: nauc_recall_at_100_std value: 49.66157377654885 - type: nauc_recall_at_10_diff1 value: 33.70139560054039 - type: nauc_recall_at_10_max value: 45.47878072860151 - type: nauc_recall_at_10_std value: 1.4188516615716378 - type: nauc_recall_at_1_diff1 value: 46.16376563218624 - type: nauc_recall_at_1_max value: 26.342624776802232 - type: nauc_recall_at_1_std value: -7.142171388751972 - type: nauc_recall_at_20_diff1 value: 35.805379874970086 - type: nauc_recall_at_20_max value: 51.80479822253392 - type: nauc_recall_at_20_std value: 13.531467576460143 - type: nauc_recall_at_3_diff1 value: 37.288500141631616 - type: nauc_recall_at_3_max value: 35.07078243516728 - type: nauc_recall_at_3_std value: -10.452926441410405 - type: nauc_recall_at_5_diff1 value: 34.83186104526897 - type: nauc_recall_at_5_max value: 39.58488976496973 - type: nauc_recall_at_5_std value: -6.3049292065708835 - type: ndcg_at_1 value: 50.839999999999996 - type: ndcg_at_10 value: 69.072 - type: ndcg_at_100 value: 71.538 - type: ndcg_at_1000 value: 71.77799999999999 - type: ndcg_at_20 value: 70.41 - type: ndcg_at_3 value: 62.544999999999995 - type: ndcg_at_5 value: 66.33099999999999 - type: precision_at_1 value: 50.839999999999996 - type: precision_at_10 value: 10.495000000000001 - type: precision_at_100 value: 1.1900000000000002 - type: precision_at_1000 value: 0.121 - type: precision_at_20 value: 5.5809999999999995 - type: precision_at_3 value: 27.636 - type: precision_at_5 value: 18.864 - type: recall_at_1 value: 45.483000000000004 - type: recall_at_10 value: 87.483 - type: recall_at_100 value: 97.844 - type: recall_at_1000 value: 99.66199999999999 - type: recall_at_20 value: 92.294 - type: recall_at_3 value: 71.2 - type: recall_at_5 value: 79.753 - task: type: Retrieval dataset: name: MTEB QuoraRetrieval type: mteb/quora config: default split: test revision: e4e08e0b7dbe3c8700f0daef558ff32256715259 metrics: - type: main_score value: 89.58 - type: map_at_1 value: 71.819 - type: map_at_10 value: 86.04899999999999 - type: map_at_100 value: 86.648 - type: map_at_1000 value: 
86.66199999999999 - type: map_at_20 value: 86.441 - type: map_at_3 value: 83.114 - type: map_at_5 value: 84.981 - type: mrr_at_1 value: 82.62 - type: mrr_at_10 value: 88.62899999999979 - type: mrr_at_100 value: 88.70918591324215 - type: mrr_at_1000 value: 88.70973091492397 - type: mrr_at_20 value: 88.68914765317221 - type: mrr_at_3 value: 87.74999999999979 - type: mrr_at_5 value: 88.36799999999974 - type: nauc_map_at_1000_diff1 value: 77.89207709760448 - type: nauc_map_at_1000_max value: 29.63371361495422 - type: nauc_map_at_1000_std value: -48.628180385874344 - type: nauc_map_at_100_diff1 value: 77.89592179104915 - type: nauc_map_at_100_max value: 29.617171506130756 - type: nauc_map_at_100_std value: -48.66057170774648 - type: nauc_map_at_10_diff1 value: 78.0618161228185 - type: nauc_map_at_10_max value: 29.178490609366737 - type: nauc_map_at_10_std value: -50.74755004592002 - type: nauc_map_at_1_diff1 value: 81.64335579973574 - type: nauc_map_at_1_max value: 21.813832226652174 - type: nauc_map_at_1_std value: -42.57570978190876 - type: nauc_map_at_20_diff1 value: 77.9299081005938 - type: nauc_map_at_20_max value: 29.458718470003888 - type: nauc_map_at_20_std value: -49.63337236763102 - type: nauc_map_at_3_diff1 value: 78.72941448509229 - type: nauc_map_at_3_max value: 26.600997896960056 - type: nauc_map_at_3_std value: -51.889002227479885 - type: nauc_map_at_5_diff1 value: 78.31466610917171 - type: nauc_map_at_5_max value: 28.09863984582896 - type: nauc_map_at_5_std value: -52.14058096096497 - type: nauc_mrr_at_1000_diff1 value: 78.42667263739992 - type: nauc_mrr_at_1000_max value: 31.98996235127974 - type: nauc_mrr_at_1000_std value: -44.380439148429296 - type: nauc_mrr_at_100_diff1 value: 78.42661032698115 - type: nauc_mrr_at_100_max value: 31.991652631740102 - type: nauc_mrr_at_100_std value: -44.37854108460535 - type: nauc_mrr_at_10_diff1 value: 78.39126022544136 - type: nauc_mrr_at_10_max value: 32.02023484451197 - type: nauc_mrr_at_10_std value: -44.561252349176954 - type: nauc_mrr_at_1_diff1 value: 79.21630894647448 - type: nauc_mrr_at_1_max value: 31.526303156060177 - type: nauc_mrr_at_1_std value: -41.887504422443136 - type: nauc_mrr_at_20_diff1 value: 78.42548039170424 - type: nauc_mrr_at_20_max value: 31.99588275070137 - type: nauc_mrr_at_20_std value: -44.44957722627042 - type: nauc_mrr_at_3_diff1 value: 78.26165151833735 - type: nauc_mrr_at_3_max value: 32.18028826126801 - type: nauc_mrr_at_3_std value: -44.6998237213182 - type: nauc_mrr_at_5_diff1 value: 78.34786430903962 - type: nauc_mrr_at_5_max value: 32.168476272879566 - type: nauc_mrr_at_5_std value: -44.7915919956712 - type: nauc_ndcg_at_1000_diff1 value: 77.79198355957816 - type: nauc_ndcg_at_1000_max value: 31.14363511518406 - type: nauc_ndcg_at_1000_std value: -46.69335151274275 - type: nauc_ndcg_at_100_diff1 value: 77.79898090286419 - type: nauc_ndcg_at_100_max value: 31.115103811629215 - type: nauc_ndcg_at_100_std value: -46.73078913421965 - type: nauc_ndcg_at_10_diff1 value: 77.74856635461343 - type: nauc_ndcg_at_10_max value: 30.279584686212747 - type: nauc_ndcg_at_10_std value: -50.23514662356807 - type: nauc_ndcg_at_1_diff1 value: 79.17833000040999 - type: nauc_ndcg_at_1_max value: 31.703788144510746 - type: nauc_ndcg_at_1_std value: -41.854817402870715 - type: nauc_ndcg_at_20_diff1 value: 77.7380353804671 - type: nauc_ndcg_at_20_max value: 30.622294129001553 - type: nauc_ndcg_at_20_std value: -49.035794761065254 - type: nauc_ndcg_at_3_diff1 value: 77.41476880573593 - type: nauc_ndcg_at_3_max value: 
29.015949978243032 - type: nauc_ndcg_at_3_std value: -49.78627087622648 - type: nauc_ndcg_at_5_diff1 value: 77.64439137502896 - type: nauc_ndcg_at_5_max value: 29.444684897492206 - type: nauc_ndcg_at_5_std value: -51.21908400252501 - type: nauc_precision_at_1000_diff1 value: -44.92396459446822 - type: nauc_precision_at_1000_max value: -3.674153720989045 - type: nauc_precision_at_1000_std value: 39.56552468277785 - type: nauc_precision_at_100_diff1 value: -44.75143023259094 - type: nauc_precision_at_100_max value: -3.705280025140011 - type: nauc_precision_at_100_std value: 39.433619999113326 - type: nauc_precision_at_10_diff1 value: -41.0651074726579 - type: nauc_precision_at_10_max value: -0.21097985601783667 - type: nauc_precision_at_10_std value: 26.24652824589493 - type: nauc_precision_at_1_diff1 value: 79.17833000040999 - type: nauc_precision_at_1_max value: 31.703788144510746 - type: nauc_precision_at_1_std value: -41.854817402870715 - type: nauc_precision_at_20_diff1 value: -43.368001340920294 - type: nauc_precision_at_20_max value: -2.036990010399129 - type: nauc_precision_at_20_std value: 32.37747041406297 - type: nauc_precision_at_3_diff1 value: -22.089307548346877 - type: nauc_precision_at_3_max value: 6.2280973175296 - type: nauc_precision_at_3_std value: 5.323992514036145 - type: nauc_precision_at_5_diff1 value: -34.07115055244003 - type: nauc_precision_at_5_max value: 2.5955315789198834 - type: nauc_precision_at_5_std value: 16.26096689407332 - type: nauc_recall_at_1000_diff1 value: 58.27703860947467 - type: nauc_recall_at_1000_max value: 68.59835835315768 - type: nauc_recall_at_1000_std value: 77.96687006056064 - type: nauc_recall_at_100_diff1 value: 73.24371223081737 - type: nauc_recall_at_100_max value: 39.55925344664591 - type: nauc_recall_at_100_std value: -32.25605030215798 - type: nauc_recall_at_10_diff1 value: 73.41261201339202 - type: nauc_recall_at_10_max value: 26.822979434062926 - type: nauc_recall_at_10_std value: -74.2909332592806 - type: nauc_recall_at_1_diff1 value: 81.64335579973574 - type: nauc_recall_at_1_max value: 21.813832226652174 - type: nauc_recall_at_1_std value: -42.57570978190876 - type: nauc_recall_at_20_diff1 value: 72.7621297920656 - type: nauc_recall_at_20_max value: 26.02492304096079 - type: nauc_recall_at_20_std value: -77.8724532438279 - type: nauc_recall_at_3_diff1 value: 75.25149312810714 - type: nauc_recall_at_3_max value: 23.20545662481487 - type: nauc_recall_at_3_std value: -59.69689982140521 - type: nauc_recall_at_5_diff1 value: 73.69807273001406 - type: nauc_recall_at_5_max value: 24.073666798066057 - type: nauc_recall_at_5_std value: -67.91121268130719 - type: ndcg_at_1 value: 82.64 - type: ndcg_at_10 value: 89.58 - type: ndcg_at_100 value: 90.606 - type: ndcg_at_1000 value: 90.676 - type: ndcg_at_20 value: 90.132 - type: ndcg_at_3 value: 86.88 - type: ndcg_at_5 value: 88.40299999999999 - type: precision_at_1 value: 82.64 - type: precision_at_10 value: 13.604 - type: precision_at_100 value: 1.539 - type: precision_at_1000 value: 0.157 - type: precision_at_20 value: 7.188 - type: precision_at_3 value: 38.083 - type: precision_at_5 value: 25.018 - type: recall_at_1 value: 71.819 - type: recall_at_10 value: 96.34700000000001 - type: recall_at_100 value: 99.715 - type: recall_at_1000 value: 99.995 - type: recall_at_20 value: 98.073 - type: recall_at_3 value: 88.57300000000001 - type: recall_at_5 value: 92.908 - task: type: Clustering dataset: name: MTEB RedditClustering type: mteb/reddit-clustering config: default split: test revision: 
24640382cdbf8abc73003fb0fa6d111a705499eb metrics: - type: main_score value: 71.18966762070158 - type: v_measure value: 71.18966762070158 - type: v_measure_std value: 2.7498969054457048 - task: type: Clustering dataset: name: MTEB RedditClusteringP2P type: mteb/reddit-clustering-p2p config: default split: test revision: 385e3cb46b4cfa89021f56c4380204149d0efe33 metrics: - type: main_score value: 74.42014716862516 - type: v_measure value: 74.42014716862516 - type: v_measure_std value: 9.909739891410648 - task: type: Retrieval dataset: name: MTEB SCIDOCS type: mteb/scidocs config: default split: test revision: f8c2fcf00f625baaa80f62ec5bd9e1fff3b8ae88 metrics: - type: main_score value: 25.041999999999998 - type: map_at_1 value: 5.893000000000001 - type: map_at_10 value: 15.260000000000002 - type: map_at_100 value: 18.084 - type: map_at_1000 value: 18.467 - type: map_at_20 value: 16.675 - type: map_at_3 value: 10.526 - type: map_at_5 value: 12.775 - type: mrr_at_1 value: 28.999999999999996 - type: mrr_at_10 value: 41.03575396825395 - type: mrr_at_100 value: 42.136771862785835 - type: mrr_at_1000 value: 42.16698555415099 - type: mrr_at_20 value: 41.707493696104315 - type: mrr_at_3 value: 37.34999999999998 - type: mrr_at_5 value: 39.59999999999995 - type: nauc_map_at_1000_diff1 value: 12.080002654911883 - type: nauc_map_at_1000_max value: 29.813563682286276 - type: nauc_map_at_1000_std value: 20.36659817908673 - type: nauc_map_at_100_diff1 value: 12.108735517749706 - type: nauc_map_at_100_max value: 29.76830671710955 - type: nauc_map_at_100_std value: 20.3433621032846 - type: nauc_map_at_10_diff1 value: 12.91575031185637 - type: nauc_map_at_10_max value: 29.427600958386318 - type: nauc_map_at_10_std value: 16.89867275177153 - type: nauc_map_at_1_diff1 value: 19.353069488987916 - type: nauc_map_at_1_max value: 17.093914951159693 - type: nauc_map_at_1_std value: 8.19886078055046 - type: nauc_map_at_20_diff1 value: 11.977233457943113 - type: nauc_map_at_20_max value: 29.171812822948805 - type: nauc_map_at_20_std value: 18.780517506173965 - type: nauc_map_at_3_diff1 value: 14.453129464176092 - type: nauc_map_at_3_max value: 25.801958649112077 - type: nauc_map_at_3_std value: 11.572823684429643 - type: nauc_map_at_5_diff1 value: 13.167155808104997 - type: nauc_map_at_5_max value: 27.355626948365792 - type: nauc_map_at_5_std value: 14.414151839192183 - type: nauc_mrr_at_1000_diff1 value: 17.262104643988636 - type: nauc_mrr_at_1000_max value: 23.991373837217058 - type: nauc_mrr_at_1000_std value: 12.44755488671623 - type: nauc_mrr_at_100_diff1 value: 17.267280132318703 - type: nauc_mrr_at_100_max value: 24.022189287889294 - type: nauc_mrr_at_100_std value: 12.480695500214788 - type: nauc_mrr_at_10_diff1 value: 17.012383998246268 - type: nauc_mrr_at_10_max value: 24.192637911171722 - type: nauc_mrr_at_10_std value: 12.524608847408917 - type: nauc_mrr_at_1_diff1 value: 19.43518811038007 - type: nauc_mrr_at_1_max value: 17.747482933395602 - type: nauc_mrr_at_1_std value: 8.410779775558684 - type: nauc_mrr_at_20_diff1 value: 17.202663281407446 - type: nauc_mrr_at_20_max value: 24.091991130543118 - type: nauc_mrr_at_20_std value: 12.503814263019908 - type: nauc_mrr_at_3_diff1 value: 17.52733013432995 - type: nauc_mrr_at_3_max value: 23.569459518780214 - type: nauc_mrr_at_3_std value: 11.770846827520726 - type: nauc_mrr_at_5_diff1 value: 17.10817561975543 - type: nauc_mrr_at_5_max value: 23.945141435234678 - type: nauc_mrr_at_5_std value: 12.034468615317719 - type: nauc_ndcg_at_1000_diff1 value: 
12.317811393346936 - type: nauc_ndcg_at_1000_max value: 30.809991350156103 - type: nauc_ndcg_at_1000_std value: 24.517501065205067 - type: nauc_ndcg_at_100_diff1 value: 12.824804203182936 - type: nauc_ndcg_at_100_max value: 30.895499817010748 - type: nauc_ndcg_at_100_std value: 25.424376279745402 - type: nauc_ndcg_at_10_diff1 value: 13.32724552457439 - type: nauc_ndcg_at_10_max value: 30.409088666807456 - type: nauc_ndcg_at_10_std value: 18.216330475714113 - type: nauc_ndcg_at_1_diff1 value: 19.43518811038007 - type: nauc_ndcg_at_1_max value: 17.747482933395602 - type: nauc_ndcg_at_1_std value: 8.410779775558684 - type: nauc_ndcg_at_20_diff1 value: 12.224399111852902 - type: nauc_ndcg_at_20_max value: 29.86352330445272 - type: nauc_ndcg_at_20_std value: 21.196937851331807 - type: nauc_ndcg_at_3_diff1 value: 15.367489533734027 - type: nauc_ndcg_at_3_max value: 26.76486390741532 - type: nauc_ndcg_at_3_std value: 12.606077508789923 - type: nauc_ndcg_at_5_diff1 value: 13.831157482390935 - type: nauc_ndcg_at_5_max value: 28.070226983968904 - type: nauc_ndcg_at_5_std value: 15.236787943125435 - type: nauc_precision_at_1000_diff1 value: 0.016122957101357048 - type: nauc_precision_at_1000_max value: 24.380929903557334 - type: nauc_precision_at_1000_std value: 34.54045112720052 - type: nauc_precision_at_100_diff1 value: 7.255224788507301 - type: nauc_precision_at_100_max value: 27.98453788447542 - type: nauc_precision_at_100_std value: 35.38999555441665 - type: nauc_precision_at_10_diff1 value: 9.69185099834181 - type: nauc_precision_at_10_max value: 32.532315522580454 - type: nauc_precision_at_10_std value: 21.48948348473612 - type: nauc_precision_at_1_diff1 value: 19.43518811038007 - type: nauc_precision_at_1_max value: 17.747482933395602 - type: nauc_precision_at_1_std value: 8.410779775558684 - type: nauc_precision_at_20_diff1 value: 6.964076536695672 - type: nauc_precision_at_20_max value: 29.30087236410044 - type: nauc_precision_at_20_std value: 26.413625895571986 - type: nauc_precision_at_3_diff1 value: 14.145134359925155 - type: nauc_precision_at_3_max value: 29.915650960808303 - type: nauc_precision_at_3_std value: 14.095370019867797 - type: nauc_precision_at_5_diff1 value: 11.043933558522692 - type: nauc_precision_at_5_max value: 30.93016505807111 - type: nauc_precision_at_5_std value: 17.749256196062603 - type: nauc_recall_at_1000_diff1 value: -0.7776817772090345 - type: nauc_recall_at_1000_max value: 23.094717340324518 - type: nauc_recall_at_1000_std value: 37.189908681396425 - type: nauc_recall_at_100_diff1 value: 6.887748742013364 - type: nauc_recall_at_100_max value: 27.00798435230277 - type: nauc_recall_at_100_std value: 35.908147807345344 - type: nauc_recall_at_10_diff1 value: 9.605632017480751 - type: nauc_recall_at_10_max value: 31.845202901168655 - type: nauc_recall_at_10_std value: 21.497414586634683 - type: nauc_recall_at_1_diff1 value: 19.353069488987916 - type: nauc_recall_at_1_max value: 17.093914951159693 - type: nauc_recall_at_1_std value: 8.19886078055046 - type: nauc_recall_at_20_diff1 value: 6.927503731844782 - type: nauc_recall_at_20_max value: 28.611698183338202 - type: nauc_recall_at_20_std value: 26.69018660149911 - type: nauc_recall_at_3_diff1 value: 14.043724087062268 - type: nauc_recall_at_3_max value: 29.269835821380465 - type: nauc_recall_at_3_std value: 14.104419605998094 - type: nauc_recall_at_5_diff1 value: 11.017319452873336 - type: nauc_recall_at_5_max value: 30.295720628306228 - type: nauc_recall_at_5_std value: 17.758048545573825 - type: ndcg_at_1 
value: 28.999999999999996 - type: ndcg_at_10 value: 25.041999999999998 - type: ndcg_at_100 value: 35.045 - type: ndcg_at_1000 value: 40.803 - type: ndcg_at_20 value: 28.584 - type: ndcg_at_3 value: 23.249 - type: ndcg_at_5 value: 20.533 - type: precision_at_1 value: 28.999999999999996 - type: precision_at_10 value: 13.120000000000001 - type: precision_at_100 value: 2.7470000000000003 - type: precision_at_1000 value: 0.41200000000000003 - type: precision_at_20 value: 8.584999999999999 - type: precision_at_3 value: 21.633 - type: precision_at_5 value: 18.099999999999998 - type: recall_at_1 value: 5.893000000000001 - type: recall_at_10 value: 26.567 - type: recall_at_100 value: 55.800000000000004 - type: recall_at_1000 value: 83.608 - type: recall_at_20 value: 34.86 - type: recall_at_3 value: 13.153 - type: recall_at_5 value: 18.323 - task: type: STS dataset: name: MTEB SICK-R type: mteb/sickr-sts config: default split: test revision: 20a6d6f312dd54037fe07a32d58e5e168867909d metrics: - type: cosine_pearson value: 86.57284584320382 - type: cosine_spearman value: 82.20531642680812 - type: euclidean_pearson value: 83.94261758556554 - type: euclidean_spearman value: 82.20721497738559 - type: main_score value: 82.20531642680812 - type: manhattan_pearson value: 84.15902154703083 - type: manhattan_spearman value: 82.19506027155957 - type: pearson value: 86.57284584320382 - type: spearman value: 82.20531642680812 - task: type: STS dataset: name: MTEB STS12 type: mteb/sts12-sts config: default split: test revision: a0d554a64d88156834ff5ae9920b964011b16384 metrics: - type: cosine_pearson value: 86.28047602146931 - type: cosine_spearman value: 79.51504881448884 - type: euclidean_pearson value: 83.10545189967856 - type: euclidean_spearman value: 79.50586960492797 - type: main_score value: 79.51504881448884 - type: manhattan_pearson value: 83.44244457500889 - type: manhattan_spearman value: 79.730303339846 - type: pearson value: 86.28047602146931 - type: spearman value: 79.51504881448884 - task: type: STS dataset: name: MTEB STS13 type: mteb/sts13-sts config: default split: test revision: 7e90230a92c190f1bf69ae9002b8cea547a64cca metrics: - type: cosine_pearson value: 88.74723553048702 - type: cosine_spearman value: 89.18936052329725 - type: euclidean_pearson value: 88.90400878928668 - type: euclidean_spearman value: 89.19174821431281 - type: main_score value: 89.18936052329725 - type: manhattan_pearson value: 88.81504628424054 - type: manhattan_spearman value: 89.18063294142597 - type: pearson value: 88.74723553048702 - type: spearman value: 89.18936052329725 - task: type: STS dataset: name: MTEB STS14 type: mteb/sts14-sts config: default split: test revision: 6031580fec1f6af667f0bd2da0a551cf4f0b2375 metrics: - type: cosine_pearson value: 86.45403437836023 - type: cosine_spearman value: 85.14654611519086 - type: euclidean_pearson value: 85.87509624462743 - type: euclidean_spearman value: 85.1391108856681 - type: main_score value: 85.14654611519086 - type: manhattan_pearson value: 85.96635794953866 - type: manhattan_spearman value: 85.3271371527667 - type: pearson value: 86.45403437836023 - type: spearman value: 85.14654611519086 - task: type: STS dataset: name: MTEB STS15 type: mteb/sts15-sts config: default split: test revision: ae752c7c21bf194d8b67fd573edf7ae58183cbe3 metrics: - type: cosine_pearson value: 87.84742260009705 - type: cosine_spearman value: 89.10215217191254 - type: euclidean_pearson value: 88.97393286325477 - type: euclidean_spearman value: 89.1014105509662 - type: main_score value: 
89.10215217191254 - type: manhattan_pearson value: 89.31698781090151 - type: manhattan_spearman value: 89.53000001764433 - type: pearson value: 87.84742260009705 - type: spearman value: 89.10215217191254 - task: type: STS dataset: name: MTEB STS16 type: mteb/sts16-sts config: default split: test revision: 4d8694f8f0e0100860b497b999b3dbed754a0513 metrics: - type: cosine_pearson value: 85.22397535461835 - type: cosine_spearman value: 87.14066355879785 - type: euclidean_pearson value: 86.31393364087295 - type: euclidean_spearman value: 87.14018892702765 - type: main_score value: 87.14066355879785 - type: manhattan_pearson value: 86.36366855248434 - type: manhattan_spearman value: 87.20858630423012 - type: pearson value: 85.22397535461835 - type: spearman value: 87.14066355879785 - task: type: STS dataset: name: MTEB STS17 (en-en) type: mteb/sts17-crosslingual-sts config: en-en split: test revision: faeb762787bd10488a50c8b5be4a3b82e411949c metrics: - type: cosine_pearson value: 90.66131612061355 - type: cosine_spearman value: 90.97082650129164 - type: euclidean_pearson value: 90.98181906744969 - type: euclidean_spearman value: 90.99008476850047 - type: main_score value: 90.97082650129164 - type: manhattan_pearson value: 90.75245040709021 - type: manhattan_spearman value: 90.6199877691265 - type: pearson value: 90.66131612061355 - type: spearman value: 90.97082650129164 - task: type: STS dataset: name: MTEB STS22 (en) type: mteb/sts22-crosslingual-sts config: en split: test revision: de9d86b3b84231dc21f76c7b7af1f28e2f57f6e3 metrics: - type: cosine_pearson value: 67.270656447085 - type: cosine_spearman value: 67.82870469746828 - type: euclidean_pearson value: 69.03857775285664 - type: euclidean_spearman value: 67.74455108773341 - type: main_score value: 67.82870469746828 - type: manhattan_pearson value: 69.25304172245812 - type: manhattan_spearman value: 68.00987097916055 - type: pearson value: 67.270656447085 - type: spearman value: 67.82870469746828 - task: type: STS dataset: name: MTEB STSBenchmark type: mteb/stsbenchmark-sts config: default split: test revision: b0fddb56ed78048fa8b90373c8a3cfc37b684831 metrics: - type: cosine_pearson value: 87.17245205384889 - type: cosine_spearman value: 87.7360146030987 - type: euclidean_pearson value: 87.48919412794656 - type: euclidean_spearman value: 87.7312047878383 - type: main_score value: 87.7360146030987 - type: manhattan_pearson value: 87.61476224354806 - type: manhattan_spearman value: 87.95220889254693 - type: pearson value: 87.17245205384889 - type: spearman value: 87.7360146030987 - task: type: Reranking dataset: name: MTEB SciDocsRR type: mteb/scidocs-reranking config: default split: test revision: d3c5e1fc0b855ab6097bf1cda04dd73947d7caab metrics: - type: main_score value: 88.43547871921146 - type: map value: 88.43547871921146 - type: mrr value: 96.5564473652709 - type: nAUC_map_diff1 value: -13.66029392579231 - type: nAUC_map_max value: 50.325613574053506 - type: nAUC_map_std value: 60.02986231275796 - type: nAUC_mrr_diff1 value: 23.83821476411125 - type: nAUC_mrr_max value: 86.72643311769906 - type: nAUC_mrr_std value: 72.12741063469213 - task: type: Retrieval dataset: name: MTEB SciFact type: mteb/scifact config: default split: test revision: 0228b52cf27578f30900b9e5271d331663a030d7 metrics: - type: main_score value: 78.233 - type: map_at_1 value: 61.49400000000001 - type: map_at_10 value: 73.30600000000001 - type: map_at_100 value: 73.719 - type: map_at_1000 value: 73.724 - type: map_at_20 value: 73.611 - type: map_at_3 value: 70.626 - 
type: map_at_5 value: 72.417 - type: mrr_at_1 value: 64.66666666666666 - type: mrr_at_10 value: 74.30357142857143 - type: mrr_at_100 value: 74.56950898079988 - type: mrr_at_1000 value: 74.57295833098681 - type: mrr_at_20 value: 74.46165223665226 - type: mrr_at_3 value: 72.3888888888889 - type: mrr_at_5 value: 73.60555555555557 - type: nauc_map_at_1000_diff1 value: 76.51524604780636 - type: nauc_map_at_1000_max value: 53.48521938401881 - type: nauc_map_at_1000_std value: -7.347799382158861 - type: nauc_map_at_100_diff1 value: 76.5122888096236 - type: nauc_map_at_100_max value: 53.49221847471618 - type: nauc_map_at_100_std value: -7.329683735681086 - type: nauc_map_at_10_diff1 value: 76.30928630674504 - type: nauc_map_at_10_max value: 53.00102977185941 - type: nauc_map_at_10_std value: -7.7467740085108705 - type: nauc_map_at_1_diff1 value: 79.54189281784247 - type: nauc_map_at_1_max value: 46.630071622109526 - type: nauc_map_at_1_std value: -14.395943134644112 - type: nauc_map_at_20_diff1 value: 76.41604361947962 - type: nauc_map_at_20_max value: 53.578883876146875 - type: nauc_map_at_20_std value: -7.403103451288041 - type: nauc_map_at_3_diff1 value: 76.25911617571941 - type: nauc_map_at_3_max value: 49.140287380513605 - type: nauc_map_at_3_std value: -11.35992449218983 - type: nauc_map_at_5_diff1 value: 76.35122077770336 - type: nauc_map_at_5_max value: 52.1744367901208 - type: nauc_map_at_5_std value: -7.85753955055384 - type: nauc_mrr_at_1000_diff1 value: 76.97223309515867 - type: nauc_mrr_at_1000_max value: 57.263787498613326 - type: nauc_mrr_at_1000_std value: -4.884090708840035 - type: nauc_mrr_at_100_diff1 value: 76.97312970894603 - type: nauc_mrr_at_100_max value: 57.26850730446478 - type: nauc_mrr_at_100_std value: -4.875200894216617 - type: nauc_mrr_at_10_diff1 value: 76.65927674223613 - type: nauc_mrr_at_10_max value: 57.30979763941454 - type: nauc_mrr_at_10_std value: -4.863331094022142 - type: nauc_mrr_at_1_diff1 value: 80.0454932568644 - type: nauc_mrr_at_1_max value: 56.76038421319305 - type: nauc_mrr_at_1_std value: -4.101939392632653 - type: nauc_mrr_at_20_diff1 value: 76.87237970440503 - type: nauc_mrr_at_20_max value: 57.33843605225869 - type: nauc_mrr_at_20_std value: -4.96248984417978 - type: nauc_mrr_at_3_diff1 value: 76.74130186666727 - type: nauc_mrr_at_3_max value: 56.19313244846155 - type: nauc_mrr_at_3_std value: -5.684365934009136 - type: nauc_mrr_at_5_diff1 value: 76.66406918799962 - type: nauc_mrr_at_5_max value: 57.56110093228628 - type: nauc_mrr_at_5_std value: -3.7464413085588073 - type: nauc_ndcg_at_1000_diff1 value: 76.19194173971773 - type: nauc_ndcg_at_1000_max value: 55.57464600170693 - type: nauc_ndcg_at_1000_std value: -6.0761689532372625 - type: nauc_ndcg_at_100_diff1 value: 76.14631273843654 - type: nauc_ndcg_at_100_max value: 55.72246565373382 - type: nauc_ndcg_at_100_std value: -5.595160698860595 - type: nauc_ndcg_at_10_diff1 value: 75.0108223611192 - type: nauc_ndcg_at_10_max value: 55.27894212877493 - type: nauc_ndcg_at_10_std value: -6.968331740214591 - type: nauc_ndcg_at_1_diff1 value: 80.0454932568644 - type: nauc_ndcg_at_1_max value: 56.76038421319305 - type: nauc_ndcg_at_1_std value: -4.101939392632653 - type: nauc_ndcg_at_20_diff1 value: 75.54887755702472 - type: nauc_ndcg_at_20_max value: 56.406879417251496 - type: nauc_ndcg_at_20_std value: -6.495231061329629 - type: nauc_ndcg_at_3_diff1 value: 75.03620356688509 - type: nauc_ndcg_at_3_max value: 52.147381077773424 - type: nauc_ndcg_at_3_std value: -8.448005688956199 - type: 
nauc_ndcg_at_5_diff1 value: 75.1195898074229 - type: nauc_ndcg_at_5_max value: 54.2321033861173 - type: nauc_ndcg_at_5_std value: -5.882690780895338 - type: nauc_precision_at_1000_diff1 value: -28.081979732100532 - type: nauc_precision_at_1000_max value: 35.055348014832916 - type: nauc_precision_at_1000_std value: 59.61280468927384 - type: nauc_precision_at_100_diff1 value: -25.112740730587458 - type: nauc_precision_at_100_max value: 38.26331300116496 - type: nauc_precision_at_100_std value: 62.46316222328831 - type: nauc_precision_at_10_diff1 value: -2.6766206473658833 - type: nauc_precision_at_10_max value: 45.95321867204845 - type: nauc_precision_at_10_std value: 45.07212468670564 - type: nauc_precision_at_1_diff1 value: 80.0454932568644 - type: nauc_precision_at_1_max value: 56.76038421319305 - type: nauc_precision_at_1_std value: -4.101939392632653 - type: nauc_precision_at_20_diff1 value: -10.698911116738385 - type: nauc_precision_at_20_max value: 43.467275950182994 - type: nauc_precision_at_20_std value: 48.00467321991766 - type: nauc_precision_at_3_diff1 value: 33.6344708541193 - type: nauc_precision_at_3_max value: 49.309242331670504 - type: nauc_precision_at_3_std value: 21.02940391379915 - type: nauc_precision_at_5_diff1 value: 13.560415600596318 - type: nauc_precision_at_5_max value: 48.918726500100085 - type: nauc_precision_at_5_std value: 39.940930429172184 - type: nauc_recall_at_1000_diff1 value: .nan - type: nauc_recall_at_1000_max value: .nan - type: nauc_recall_at_1000_std value: .nan - type: nauc_recall_at_100_diff1 value: 70.82166199813196 - type: nauc_recall_at_100_max value: 76.6106442577042 - type: nauc_recall_at_100_std value: 66.47992530345513 - type: nauc_recall_at_10_diff1 value: 62.68908885556092 - type: nauc_recall_at_10_max value: 58.14262437741839 - type: nauc_recall_at_10_std value: -12.946717875063369 - type: nauc_recall_at_1_diff1 value: 79.54189281784247 - type: nauc_recall_at_1_max value: 46.630071622109526 - type: nauc_recall_at_1_std value: -14.395943134644112 - type: nauc_recall_at_20_diff1 value: 65.79470497876567 - type: nauc_recall_at_20_max value: 71.68308183488456 - type: nauc_recall_at_20_std value: -12.556850697268453 - type: nauc_recall_at_3_diff1 value: 68.3240211318129 - type: nauc_recall_at_3_max value: 45.05998217275036 - type: nauc_recall_at_3_std value: -14.23179772593869 - type: nauc_recall_at_5_diff1 value: 67.53366869904056 - type: nauc_recall_at_5_max value: 53.57935627081027 - type: nauc_recall_at_5_std value: -3.3271112904853393 - type: ndcg_at_1 value: 64.667 - type: ndcg_at_10 value: 78.233 - type: ndcg_at_100 value: 79.806 - type: ndcg_at_1000 value: 79.92099999999999 - type: ndcg_at_20 value: 79.006 - type: ndcg_at_3 value: 74.018 - type: ndcg_at_5 value: 76.334 - type: precision_at_1 value: 64.667 - type: precision_at_10 value: 10.4 - type: precision_at_100 value: 1.1199999999999999 - type: precision_at_1000 value: 0.11299999999999999 - type: precision_at_20 value: 5.383 - type: precision_at_3 value: 29.444 - type: precision_at_5 value: 19.467000000000002 - type: recall_at_1 value: 61.49400000000001 - type: recall_at_10 value: 92.156 - type: recall_at_100 value: 99.167 - type: recall_at_1000 value: 100.0 - type: recall_at_20 value: 94.833 - type: recall_at_3 value: 80.833 - type: recall_at_5 value: 86.6 - task: type: PairClassification dataset: name: MTEB SprintDuplicateQuestions type: mteb/sprintduplicatequestions-pairclassification config: default split: test revision: d66bd1f72af766a5cc4b0ca5e00c162f89e8cc46 metrics: - 
type: cosine_accuracy value: 99.8039603960396 - type: cosine_accuracy_threshold value: 84.54211950302124 - type: cosine_ap value: 95.59056372734358 - type: cosine_f1 value: 90.1394422310757 - type: cosine_f1_threshold value: 84.54211950302124 - type: cosine_precision value: 89.78174603174604 - type: cosine_recall value: 90.5 - type: dot_accuracy value: 99.80594059405941 - type: dot_accuracy_threshold value: 85.57180166244507 - type: dot_ap value: 95.53453431914399 - type: dot_f1 value: 90.10442565887618 - type: dot_f1_threshold value: 84.59715843200684 - type: dot_precision value: 89.61424332344214 - type: dot_recall value: 90.60000000000001 - type: euclidean_accuracy value: 99.8039603960396 - type: euclidean_accuracy_threshold value: 53.253382444381714 - type: euclidean_ap value: 95.5850992402159 - type: euclidean_f1 value: 90.09457441513192 - type: euclidean_f1_threshold value: 55.725520849227905 - type: euclidean_precision value: 89.69276511397423 - type: euclidean_recall value: 90.5 - type: main_score value: 95.7485189884476 - type: manhattan_accuracy value: 99.81485148514851 - type: manhattan_accuracy_threshold value: 3491.29638671875 - type: manhattan_ap value: 95.7485189884476 - type: manhattan_f1 value: 90.464048954615 - type: manhattan_f1_threshold value: 3491.29638671875 - type: manhattan_precision value: 92.2996878251821 - type: manhattan_recall value: 88.7 - type: max_ap value: 95.7485189884476 - type: max_f1 value: 90.464048954615 - type: max_precision value: 92.2996878251821 - type: max_recall value: 90.60000000000001 - type: similarity_accuracy value: 99.8039603960396 - type: similarity_accuracy_threshold value: 84.54211950302124 - type: similarity_ap value: 95.59056372734358 - type: similarity_f1 value: 90.1394422310757 - type: similarity_f1_threshold value: 84.54211950302124 - type: similarity_precision value: 89.78174603174604 - type: similarity_recall value: 90.5 - task: type: Clustering dataset: name: MTEB StackExchangeClustering type: mteb/stackexchange-clustering config: default split: test revision: 6cbc1f7b2bc0622f2e39d2c77fa502909748c259 metrics: - type: main_score value: 78.49205191950675 - type: v_measure value: 78.49205191950675 - type: v_measure_std value: 2.84869550699959 - task: type: Clustering dataset: name: MTEB StackExchangeClusteringP2P type: mteb/stackexchange-clustering-p2p config: default split: test revision: 815ca46b2622cec33ccafc3735d572c266efdb44 metrics: - type: main_score value: 48.90421736513028 - type: v_measure value: 48.90421736513028 - type: v_measure_std value: 1.6875865714471023 - task: type: Reranking dataset: name: MTEB StackOverflowDupQuestions type: mteb/stackoverflowdupquestions-reranking config: default split: test revision: e185fbe320c72810689fc5848eb6114e1ef5ec69 metrics: - type: main_score value: 52.9874730481696 - type: map value: 52.9874730481696 - type: mrr value: 53.85867604617604 - type: nAUC_map_diff1 value: 39.633429293407616 - type: nAUC_map_max value: 10.236807988858546 - type: nAUC_map_std value: 10.276522217929674 - type: nAUC_mrr_diff1 value: 40.0543079218377 - type: nAUC_mrr_max value: 10.96209807382042 - type: nAUC_mrr_std value: 10.524400196109918 - task: type: Summarization dataset: name: MTEB SummEval type: mteb/summeval config: default split: test revision: cda12ad7615edc362dbf25a00fdd61d3b1eaf93c metrics: - type: cosine_pearson value: 30.727801109114232 - type: cosine_spearman value: 31.66058223980157 - type: dot_pearson value: 30.78818248622866 - type: dot_spearman value: 31.525158776890265 - type: main_score 
value: 31.66058223980157 - type: pearson value: 30.727801109114232 - type: spearman value: 31.66058223980157 - task: type: Retrieval dataset: name: MTEB TRECCOVID type: mteb/trec-covid config: default split: test revision: bb9466bac8153a0349341eb1b22e06409e78ef4e metrics: - type: main_score value: 85.206 - type: map_at_1 value: 0.246 - type: map_at_10 value: 2.1950000000000003 - type: map_at_100 value: 14.179 - type: map_at_1000 value: 35.037 - type: map_at_20 value: 4.143 - type: map_at_3 value: 0.7100000000000001 - type: map_at_5 value: 1.135 - type: mrr_at_1 value: 94.0 - type: mrr_at_10 value: 96.66666666666666 - type: mrr_at_100 value: 96.66666666666666 - type: mrr_at_1000 value: 96.66666666666666 - type: mrr_at_20 value: 96.66666666666666 - type: mrr_at_3 value: 96.66666666666666 - type: mrr_at_5 value: 96.66666666666666 - type: nauc_map_at_1000_diff1 value: -4.6264497624527525 - type: nauc_map_at_1000_max value: 44.594457564749355 - type: nauc_map_at_1000_std value: 73.17642341400133 - type: nauc_map_at_100_diff1 value: 23.451335157405726 - type: nauc_map_at_100_max value: 25.426398857299525 - type: nauc_map_at_100_std value: 64.07416694472633 - type: nauc_map_at_10_diff1 value: 46.57568738568346 - type: nauc_map_at_10_max value: 9.693233249079238 - type: nauc_map_at_10_std value: 28.549530265164357 - type: nauc_map_at_1_diff1 value: 53.48238396620123 - type: nauc_map_at_1_max value: 0.33476619393733076 - type: nauc_map_at_1_std value: 8.906362219128463 - type: nauc_map_at_20_diff1 value: 39.40719602207749 - type: nauc_map_at_20_max value: 9.635915072074045 - type: nauc_map_at_20_std value: 35.15634791346394 - type: nauc_map_at_3_diff1 value: 53.11784737840137 - type: nauc_map_at_3_max value: 3.059682761072153 - type: nauc_map_at_3_std value: 21.310633086556617 - type: nauc_map_at_5_diff1 value: 49.91570701185436 - type: nauc_map_at_5_max value: 8.045082896244576 - type: nauc_map_at_5_std value: 20.597686235051647 - type: nauc_mrr_at_1000_diff1 value: 41.98412698412726 - type: nauc_mrr_at_1000_max value: 78.24463118580779 - type: nauc_mrr_at_1000_std value: 0.30812324930028195 - type: nauc_mrr_at_100_diff1 value: 41.98412698412726 - type: nauc_mrr_at_100_max value: 78.24463118580779 - type: nauc_mrr_at_100_std value: 0.30812324930028195 - type: nauc_mrr_at_10_diff1 value: 41.98412698412726 - type: nauc_mrr_at_10_max value: 78.24463118580779 - type: nauc_mrr_at_10_std value: 0.30812324930028195 - type: nauc_mrr_at_1_diff1 value: 38.62433862433873 - type: nauc_mrr_at_1_max value: 80.78120136943666 - type: nauc_mrr_at_1_std value: -10.768751945222197 - type: nauc_mrr_at_20_diff1 value: 41.98412698412726 - type: nauc_mrr_at_20_max value: 78.24463118580779 - type: nauc_mrr_at_20_std value: 0.30812324930028195 - type: nauc_mrr_at_3_diff1 value: 41.98412698412726 - type: nauc_mrr_at_3_max value: 78.24463118580779 - type: nauc_mrr_at_3_std value: 0.30812324930028195 - type: nauc_mrr_at_5_diff1 value: 41.98412698412726 - type: nauc_mrr_at_5_max value: 78.24463118580779 - type: nauc_mrr_at_5_std value: 0.30812324930028195 - type: nauc_ndcg_at_1000_diff1 value: 0.5174948602880207 - type: nauc_ndcg_at_1000_max value: 48.60686602077053 - type: nauc_ndcg_at_1000_std value: 75.72456343175277 - type: nauc_ndcg_at_100_diff1 value: -20.747252137999254 - type: nauc_ndcg_at_100_max value: 49.985132618254994 - type: nauc_ndcg_at_100_std value: 61.096383293836574 - type: nauc_ndcg_at_10_diff1 value: 6.791377920463332 - type: nauc_ndcg_at_10_max value: 57.50019332833286 - type: nauc_ndcg_at_10_std value: 
49.201028841219426 - type: nauc_ndcg_at_1_diff1 value: 54.92683440362145 - type: nauc_ndcg_at_1_max value: 83.8667228129276 - type: nauc_ndcg_at_1_std value: 1.6738604063586122 - type: nauc_ndcg_at_20_diff1 value: -5.1948699196314925 - type: nauc_ndcg_at_20_max value: 54.483087684806556 - type: nauc_ndcg_at_20_std value: 50.54823818118781 - type: nauc_ndcg_at_3_diff1 value: 26.267246500164372 - type: nauc_ndcg_at_3_max value: 63.0173212926611 - type: nauc_ndcg_at_3_std value: 41.025597406368256 - type: nauc_ndcg_at_5_diff1 value: 16.910185454343036 - type: nauc_ndcg_at_5_max value: 60.9328683868778 - type: nauc_ndcg_at_5_std value: 36.70169905857712 - type: nauc_precision_at_1000_diff1 value: -46.374447765983525 - type: nauc_precision_at_1000_max value: 35.36052337813863 - type: nauc_precision_at_1000_std value: 14.219220668161018 - type: nauc_precision_at_100_diff1 value: -29.7838083657744 - type: nauc_precision_at_100_max value: 43.93589400385112 - type: nauc_precision_at_100_std value: 55.425045718579945 - type: nauc_precision_at_10_diff1 value: -12.016613405227687 - type: nauc_precision_at_10_max value: 57.79924427743131 - type: nauc_precision_at_10_std value: 49.022036703550675 - type: nauc_precision_at_1_diff1 value: 38.62433862433873 - type: nauc_precision_at_1_max value: 80.78120136943666 - type: nauc_precision_at_1_std value: -10.768751945222197 - type: nauc_precision_at_20_diff1 value: -23.95633847880195 - type: nauc_precision_at_20_max value: 48.34715917258276 - type: nauc_precision_at_20_std value: 48.82198285255887 - type: nauc_precision_at_3_diff1 value: 6.871296905858807 - type: nauc_precision_at_3_max value: 70.54805793285054 - type: nauc_precision_at_3_std value: 44.65108624094803 - type: nauc_precision_at_5_diff1 value: -9.074932448759695 - type: nauc_precision_at_5_max value: 67.41284242437573 - type: nauc_precision_at_5_std value: 23.876891983919577 - type: nauc_recall_at_1000_diff1 value: 8.142288830293255 - type: nauc_recall_at_1000_max value: 38.85182826835104 - type: nauc_recall_at_1000_std value: 68.60783819217335 - type: nauc_recall_at_100_diff1 value: 34.262914076287466 - type: nauc_recall_at_100_max value: 12.87009658528838 - type: nauc_recall_at_100_std value: 56.21330603762995 - type: nauc_recall_at_10_diff1 value: 49.33830945338758 - type: nauc_recall_at_10_max value: 0.3539875530671406 - type: nauc_recall_at_10_std value: 26.85864465557644 - type: nauc_recall_at_1_diff1 value: 53.48238396620123 - type: nauc_recall_at_1_max value: 0.33476619393733076 - type: nauc_recall_at_1_std value: 8.906362219128463 - type: nauc_recall_at_20_diff1 value: 44.21928181266254 - type: nauc_recall_at_20_max value: -0.9198356057088594 - type: nauc_recall_at_20_std value: 31.484376992896784 - type: nauc_recall_at_3_diff1 value: 53.038093080990876 - type: nauc_recall_at_3_max value: -1.4170895916973003 - type: nauc_recall_at_3_std value: 21.890202855574497 - type: nauc_recall_at_5_diff1 value: 49.39742214825278 - type: nauc_recall_at_5_max value: 2.8412267611894517 - type: nauc_recall_at_5_std value: 18.01598921859512 - type: ndcg_at_1 value: 91.0 - type: ndcg_at_10 value: 85.206 - type: ndcg_at_100 value: 67.29 - type: ndcg_at_1000 value: 60.584 - type: ndcg_at_20 value: 82.321 - type: ndcg_at_3 value: 88.642 - type: ndcg_at_5 value: 87.063 - type: precision_at_1 value: 94.0 - type: precision_at_10 value: 89.8 - type: precision_at_100 value: 69.78 - type: precision_at_1000 value: 26.738 - type: precision_at_20 value: 87.2 - type: precision_at_3 value: 92.0 - type: precision_at_5 
value: 90.8 - type: recall_at_1 value: 0.246 - type: recall_at_10 value: 2.344 - type: recall_at_100 value: 16.962 - type: recall_at_1000 value: 57.325 - type: recall_at_20 value: 4.517 - type: recall_at_3 value: 0.731 - type: recall_at_5 value: 1.1780000000000002 - task: type: Retrieval dataset: name: MTEB Touche2020 type: mteb/touche2020 config: default split: test revision: a34f9a33db75fa0cbb21bb5cfc3dae8dc8bec93f metrics: - type: main_score value: 31.455 - type: map_at_1 value: 2.9739999999999998 - type: map_at_10 value: 12.183 - type: map_at_100 value: 18.772 - type: map_at_1000 value: 20.415 - type: map_at_20 value: 14.451 - type: map_at_3 value: 6.507000000000001 - type: map_at_5 value: 8.66 - type: mrr_at_1 value: 40.816326530612244 - type: mrr_at_10 value: 57.70975056689341 - type: mrr_at_100 value: 58.18379126542391 - type: mrr_at_1000 value: 58.18379126542391 - type: mrr_at_20 value: 57.85552316164561 - type: mrr_at_3 value: 54.08163265306123 - type: mrr_at_5 value: 56.42857142857143 - type: nauc_map_at_1000_diff1 value: 3.1567471051481437 - type: nauc_map_at_1000_max value: -1.5882060729791523 - type: nauc_map_at_1000_std value: 18.69622198722074 - type: nauc_map_at_100_diff1 value: 3.3449677678147536 - type: nauc_map_at_100_max value: -2.8928606866168405 - type: nauc_map_at_100_std value: 15.789984947653412 - type: nauc_map_at_10_diff1 value: 2.9696743570444264 - type: nauc_map_at_10_max value: -9.096749212011876 - type: nauc_map_at_10_std value: -5.38545817258353 - type: nauc_map_at_1_diff1 value: 20.680780404542546 - type: nauc_map_at_1_max value: -7.04722927447817 - type: nauc_map_at_1_std value: -7.062494733973898 - type: nauc_map_at_20_diff1 value: 4.070437790119271 - type: nauc_map_at_20_max value: -4.84491434686032 - type: nauc_map_at_20_std value: 0.5846341109021014 - type: nauc_map_at_3_diff1 value: 11.9634978045925 - type: nauc_map_at_3_max value: -8.27834591046608 - type: nauc_map_at_3_std value: -8.687615453381065 - type: nauc_map_at_5_diff1 value: 0.9195191526009436 - type: nauc_map_at_5_max value: -1.673813362719489 - type: nauc_map_at_5_std value: -6.67549753473631 - type: nauc_mrr_at_1000_diff1 value: 19.877993208719573 - type: nauc_mrr_at_1000_max value: -10.37776706406218 - type: nauc_mrr_at_1000_std value: 7.132169578056367 - type: nauc_mrr_at_100_diff1 value: 19.877993208719573 - type: nauc_mrr_at_100_max value: -10.37776706406218 - type: nauc_mrr_at_100_std value: 7.132169578056367 - type: nauc_mrr_at_10_diff1 value: 20.414285568401457 - type: nauc_mrr_at_10_max value: -9.677800295687861 - type: nauc_mrr_at_10_std value: 8.001103690180859 - type: nauc_mrr_at_1_diff1 value: 22.393284073955723 - type: nauc_mrr_at_1_max value: -5.889370191243167 - type: nauc_mrr_at_1_std value: -1.5183536173658247 - type: nauc_mrr_at_20_diff1 value: 20.455564720604055 - type: nauc_mrr_at_20_max value: -10.230642830103074 - type: nauc_mrr_at_20_std value: 7.863582453266621 - type: nauc_mrr_at_3_diff1 value: 17.554895390732618 - type: nauc_mrr_at_3_max value: -15.618463505555052 - type: nauc_mrr_at_3_std value: 5.913231577966864 - type: nauc_mrr_at_5_diff1 value: 18.393678507779914 - type: nauc_mrr_at_5_max value: -11.903593353147762 - type: nauc_mrr_at_5_std value: 7.580745996262831 - type: nauc_ndcg_at_1000_diff1 value: 13.746937095530473 - type: nauc_ndcg_at_1000_max value: -0.9319249687895838 - type: nauc_ndcg_at_1000_std value: 38.56328031451904 - type: nauc_ndcg_at_100_diff1 value: 13.854865944415895 - type: nauc_ndcg_at_100_max value: -7.142142012591404 - type: 
nauc_ndcg_at_100_std value: 35.61341954818848 - type: nauc_ndcg_at_10_diff1 value: 9.010144273248759 - type: nauc_ndcg_at_10_max value: -15.320014897424574 - type: nauc_ndcg_at_10_std value: 2.84883880489144 - type: nauc_ndcg_at_1_diff1 value: 20.939533945592967 - type: nauc_ndcg_at_1_max value: -6.387319972188946 - type: nauc_ndcg_at_1_std value: -0.5258673122126726 - type: nauc_ndcg_at_20_diff1 value: 14.660827309009496 - type: nauc_ndcg_at_20_max value: -13.476196120145994 - type: nauc_ndcg_at_20_std value: 8.22391881710838 - type: nauc_ndcg_at_3_diff1 value: 13.429985227235935 - type: nauc_ndcg_at_3_max value: -14.904544592570247 - type: nauc_ndcg_at_3_std value: 1.599779998183342 - type: nauc_ndcg_at_5_diff1 value: 8.085466231900622 - type: nauc_ndcg_at_5_max value: -9.09591969526831 - type: nauc_ndcg_at_5_std value: 3.5794092637248505 - type: nauc_precision_at_1000_diff1 value: -9.31941215946743 - type: nauc_precision_at_1000_max value: 31.52913520470716 - type: nauc_precision_at_1000_std value: 22.720784312185856 - type: nauc_precision_at_100_diff1 value: 8.958548406995279 - type: nauc_precision_at_100_max value: 15.100597910674104 - type: nauc_precision_at_100_std value: 71.04548238175113 - type: nauc_precision_at_10_diff1 value: 12.4698194690008 - type: nauc_precision_at_10_max value: -15.84870544871496 - type: nauc_precision_at_10_std value: 7.575297622501928 - type: nauc_precision_at_1_diff1 value: 22.393284073955723 - type: nauc_precision_at_1_max value: -5.889370191243167 - type: nauc_precision_at_1_std value: -1.5183536173658247 - type: nauc_precision_at_20_diff1 value: 15.393505718138758 - type: nauc_precision_at_20_max value: -3.70684298539384 - type: nauc_precision_at_20_std value: 29.426137824970304 - type: nauc_precision_at_3_diff1 value: 9.997768085465394 - type: nauc_precision_at_3_max value: -17.12224314347674 - type: nauc_precision_at_3_std value: -1.343018166772313 - type: nauc_precision_at_5_diff1 value: 3.8936997437913554 - type: nauc_precision_at_5_max value: -5.689104289687632 - type: nauc_precision_at_5_std value: 3.181098051304285 - type: nauc_recall_at_1000_diff1 value: 9.908303508158387 - type: nauc_recall_at_1000_max value: 6.174506592699848 - type: nauc_recall_at_1000_std value: 77.41931114780012 - type: nauc_recall_at_100_diff1 value: 10.286839241876192 - type: nauc_recall_at_100_max value: -6.6138697026666815 - type: nauc_recall_at_100_std value: 49.608313692633224 - type: nauc_recall_at_10_diff1 value: 2.215545846659851 - type: nauc_recall_at_10_max value: -17.83025802478445 - type: nauc_recall_at_10_std value: -3.3784768673705465 - type: nauc_recall_at_1_diff1 value: 20.680780404542546 - type: nauc_recall_at_1_max value: -7.04722927447817 - type: nauc_recall_at_1_std value: -7.062494733973898 - type: nauc_recall_at_20_diff1 value: 6.974410239251615 - type: nauc_recall_at_20_max value: -14.161147924731646 - type: nauc_recall_at_20_std value: 9.328412057721454 - type: nauc_recall_at_3_diff1 value: 7.904589805754212 - type: nauc_recall_at_3_max value: -12.1912388648593 - type: nauc_recall_at_3_std value: -9.221542013385555 - type: nauc_recall_at_5_diff1 value: -3.2604132752706914 - type: nauc_recall_at_5_max value: -6.886351441658915 - type: nauc_recall_at_5_std value: -7.014252851712789 - type: ndcg_at_1 value: 39.796 - type: ndcg_at_10 value: 31.455 - type: ndcg_at_100 value: 42.388999999999996 - type: ndcg_at_1000 value: 53.556000000000004 - type: ndcg_at_20 value: 30.808000000000003 - type: ndcg_at_3 value: 35.831 - type: ndcg_at_5 value: 32.845 - 
type: precision_at_1 value: 40.816 - type: precision_at_10 value: 27.143 - type: precision_at_100 value: 8.449 - type: precision_at_1000 value: 1.6179999999999999 - type: precision_at_20 value: 19.387999999999998 - type: precision_at_3 value: 35.374 - type: precision_at_5 value: 31.019999999999996 - type: recall_at_1 value: 2.9739999999999998 - type: recall_at_10 value: 19.39 - type: recall_at_100 value: 51.636 - type: recall_at_1000 value: 86.99900000000001 - type: recall_at_20 value: 26.478 - type: recall_at_3 value: 7.703 - type: recall_at_5 value: 11.42 - task: type: Classification dataset: name: MTEB ToxicConversationsClassification type: mteb/toxic_conversations_50k config: default split: test revision: edfaf9da55d3dd50d43143d90c1ac476895ae6de metrics: - type: accuracy value: 86.9384765625 - type: ap value: 31.737513704141552 - type: ap_weighted value: 31.737513704141552 - type: f1 value: 71.5490757306975 - type: f1_weighted value: 89.14632533489856 - type: main_score value: 86.9384765625 - task: type: Classification dataset: name: MTEB TweetSentimentExtractionClassification type: mteb/tweet_sentiment_extraction config: default split: test revision: d604517c81ca91fe16a244d1248fc021f9ecee7a metrics: - type: accuracy value: 73.57668364459535 - type: f1 value: 73.90467103648074 - type: f1_weighted value: 73.42158415034704 - type: main_score value: 73.57668364459535 - task: type: Clustering dataset: name: MTEB TwentyNewsgroupsClustering type: mteb/twentynewsgroups-clustering config: default split: test revision: 6125ec4e24fa026cec8a478383ee943acfbd5449 metrics: - type: main_score value: 58.574148097494685 - type: v_measure value: 58.574148097494685 - type: v_measure_std value: 0.9443161637490822 - task: type: PairClassification dataset: name: MTEB TwitterSemEval2015 type: mteb/twittersemeval2015-pairclassification config: default split: test revision: 70970daeab8776df92f5ea462b6173c0b46fd2d1 metrics: - type: cosine_accuracy value: 88.1385229778864 - type: cosine_accuracy_threshold value: 83.86307954788208 - type: cosine_ap value: 80.17965893449055 - type: cosine_f1 value: 73.0614300100705 - type: cosine_f1_threshold value: 80.7942807674408 - type: cosine_precision value: 69.8603755416466 - type: cosine_recall value: 76.56992084432717 - type: dot_accuracy value: 88.2100494724921 - type: dot_accuracy_threshold value: 83.84793996810913 - type: dot_ap value: 80.18603932881858 - type: dot_f1 value: 73.07643714466204 - type: dot_f1_threshold value: 80.87586164474487 - type: dot_precision value: 70.10909090909091 - type: dot_recall value: 76.3060686015831 - type: euclidean_accuracy value: 88.1385229778864 - type: euclidean_accuracy_threshold value: 56.77661895751953 - type: euclidean_ap value: 80.1784070881624 - type: euclidean_f1 value: 73.04830369529574 - type: euclidean_f1_threshold value: 61.91838979721069 - type: euclidean_precision value: 69.96859144720948 - type: euclidean_recall value: 76.41160949868075 - type: main_score value: 80.18603932881858 - type: manhattan_accuracy value: 88.0431543184121 - type: manhattan_accuracy_threshold value: 3755.6137084960938 - type: manhattan_ap value: 79.98270453664578 - type: manhattan_f1 value: 72.68242015061023 - type: manhattan_f1_threshold value: 3892.494583129883 - type: manhattan_precision value: 71.54907975460122 - type: manhattan_recall value: 73.85224274406332 - type: max_ap value: 80.18603932881858 - type: max_f1 value: 73.07643714466204 - type: max_precision value: 71.54907975460122 - type: max_recall value: 76.56992084432717 - type: 
similarity_accuracy value: 88.1385229778864 - type: similarity_accuracy_threshold value: 83.86307954788208 - type: similarity_ap value: 80.17965893449055 - type: similarity_f1 value: 73.0614300100705 - type: similarity_f1_threshold value: 80.7942807674408 - type: similarity_precision value: 69.8603755416466 - type: similarity_recall value: 76.56992084432717 - task: type: PairClassification dataset: name: MTEB TwitterURLCorpus type: mteb/twitterurlcorpus-pairclassification config: default split: test revision: 8b6510b0b1fa4e4c4f879467980e9be563ec1cdf metrics: - type: cosine_accuracy value: 89.7892653393876 - type: cosine_accuracy_threshold value: 79.69566583633423 - type: cosine_ap value: 87.4579867302024 - type: cosine_f1 value: 79.91620843152658 - type: cosine_f1_threshold value: 78.53609323501587 - type: cosine_precision value: 77.7155329210622 - type: cosine_recall value: 82.24514936864799 - type: dot_accuracy value: 89.78732487289945 - type: dot_accuracy_threshold value: 80.05315661430359 - type: dot_ap value: 87.44916182456272 - type: dot_f1 value: 79.90419878751591 - type: dot_f1_threshold value: 78.57890725135803 - type: dot_precision value: 77.73409057812728 - type: dot_recall value: 82.19895287958116 - type: euclidean_accuracy value: 89.78538440641131 - type: euclidean_accuracy_threshold value: 62.29925751686096 - type: euclidean_ap value: 87.45904868911386 - type: euclidean_f1 value: 79.93127404474657 - type: euclidean_f1_threshold value: 65.61101078987122 - type: euclidean_precision value: 77.62060210373595 - type: euclidean_recall value: 82.38373883584848 - type: main_score value: 87.46554314325058 - type: manhattan_accuracy value: 89.76597974152986 - type: manhattan_accuracy_threshold value: 3988.5299682617188 - type: manhattan_ap value: 87.46554314325058 - type: manhattan_f1 value: 79.97181740645973 - type: manhattan_f1_threshold value: 4235.905838012695 - type: manhattan_precision value: 77.13713427283783 - type: manhattan_recall value: 83.02279026793964 - type: max_ap value: 87.46554314325058 - type: max_f1 value: 79.97181740645973 - type: max_precision value: 77.73409057812728 - type: max_recall value: 83.02279026793964 - type: similarity_accuracy value: 89.7892653393876 - type: similarity_accuracy_threshold value: 79.69566583633423 - type: similarity_ap value: 87.4579867302024 - type: similarity_f1 value: 79.91620843152658 - type: similarity_f1_threshold value: 78.53609323501587 - type: similarity_precision value: 77.7155329210622 - type: similarity_recall value: 82.24514936864799
---

# Updates

We released the Jasper and Stella model technical report and code (January 2025).

**Report:** https://arxiv.org/abs/2412.19048

**Codes:** https://github.com/NLPJCL/RAG-Retrieval

# Introduction

The models are trained based on `Alibaba-NLP/gte-large-en-v1.5` and `Alibaba-NLP/gte-Qwen2-1.5B-instruct`. Thanks for their contributions!

**We simplify prompt usage by providing two prompts for most general tasks: one for s2p (sentence-to-passage) and one for s2s (sentence-to-sentence).**

Prompt for the s2p task (e.g. retrieval tasks):

```text
Instruct: Given a web search query, retrieve relevant passages that answer the query.\nQuery: {query}
```

Prompt for the s2s task (e.g. semantic textual similarity tasks):

```text
Instruct: Retrieve semantically similar text.\nQuery: {query}
```

In the final stage, the models are trained with [MRL](https://arxiv.org/abs/2205.13147), so they support multiple output dimensions: 512, 768, 1024, 2048, 4096, 6144 and 8192. The higher the dimension, the better the performance.
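If you need an output dimension other than the default 1024, the usage notes below explain that you should clone the model and edit `modules.json` so that it points at the corresponding `2_Dense_{dims}` folder. The following is a minimal sketch of that edit (not an official utility); it assumes the repository has been cloned to a placeholder local path and that `modules.json` follows the standard SentenceTransformer layout:

```python
import json

# Hypothetical helper: point a *local clone* of the model at the 256-d head.
# "path/to/stella_en_400M_v5" is a placeholder for wherever you cloned the repo.
model_dir = "path/to/stella_en_400M_v5"
modules_path = f"{model_dir}/modules.json"

with open(modules_path) as f:
    modules = json.load(f)

for module in modules:
    # The Dense heads live in folders named 2_Dense_{dims}; swap in the one you want.
    if module.get("path", "").startswith("2_Dense"):
        module["path"] = "2_Dense_256"

with open(modules_path, "w") as f:
    json.dump(modules, f, indent=2)
```

After this change, loading the model from `model_dir` should produce 256-dimensional vectors.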
**Generally speaking, 1024d is good enough.** The MTEB score of 1024d is only 0.001 lower than 8192d. # Model directory structure The model directory structure is very simple, it is a standard SentenceTransformer directory **with a series of `2_Dense_{dims}` folders**, where `dims` represents the final vector dimension. For example, the `2_Dense_256` folder stores Linear weights that convert vector dimensions to 256 dimensions. Please refer to the following chapters for specific instructions on how to use them. # Usage You can use `SentenceTransformers` or `transformers` library to encode text. ## Sentence Transformers ```python from sentence_transformers import SentenceTransformer # This model supports two prompts: "s2p_query" and "s2s_query" for sentence-to-passage and sentence-to-sentence tasks, respectively. # They are defined in `config_sentence_transformers.json` query_prompt_name = "s2p_query" queries = [ "What are some ways to reduce stress?", "What are the benefits of drinking green tea?", ] # docs do not need any prompts docs = [ "There are many effective ways to reduce stress. Some common techniques include deep breathing, meditation, and physical activity. Engaging in hobbies, spending time in nature, and connecting with loved ones can also help alleviate stress. Additionally, setting boundaries, practicing self-care, and learning to say no can prevent stress from building up.", "Green tea has been consumed for centuries and is known for its potential health benefits. It contains antioxidants that may help protect the body against damage caused by free radicals. Regular consumption of green tea has been associated with improved heart health, enhanced cognitive function, and a reduced risk of certain types of cancer. The polyphenols in green tea may also have anti-inflammatory and weight loss properties.", ] # !The default dimension is 1024, if you need other dimensions, please clone the model and modify `modules.json` to replace `2_Dense_1024` with another dimension, e.g. `2_Dense_256` or `2_Dense_8192` ! # on gpu model = SentenceTransformer("dunzhang/stella_en_400M_v5", trust_remote_code=True).cuda() # you can also use this model without the features of `use_memory_efficient_attention` and `unpad_inputs`. It can be worked in CPU. # model = SentenceTransformer( # "dunzhang/stella_en_400M_v5", # trust_remote_code=True, # device="cpu", # config_kwargs={"use_memory_efficient_attention": False, "unpad_inputs": False} # ) query_embeddings = model.encode(queries, prompt_name=query_prompt_name) doc_embeddings = model.encode(docs) print(query_embeddings.shape, doc_embeddings.shape) # (2, 1024) (2, 1024) similarities = model.similarity(query_embeddings, doc_embeddings) print(similarities) # tensor([[0.8398, 0.2990], # [0.3282, 0.8095]]) ``` ## Transformers ```python import os import torch from transformers import AutoModel, AutoTokenizer from sklearn.preprocessing import normalize query_prompt = "Instruct: Given a web search query, retrieve relevant passages that answer the query.\nQuery: " queries = [ "What are some ways to reduce stress?", "What are the benefits of drinking green tea?", ] queries = [query_prompt + query for query in queries] # docs do not need any prompts docs = [ "There are many effective ways to reduce stress. Some common techniques include deep breathing, meditation, and physical activity. Engaging in hobbies, spending time in nature, and connecting with loved ones can also help alleviate stress. 
## Transformers

```python
import os

import torch
from transformers import AutoModel, AutoTokenizer
from sklearn.preprocessing import normalize

query_prompt = "Instruct: Given a web search query, retrieve relevant passages that answer the query.\nQuery: "
queries = [
    "What are some ways to reduce stress?",
    "What are the benefits of drinking green tea?",
]
queries = [query_prompt + query for query in queries]
# docs do not need any prompts
docs = [
    "There are many effective ways to reduce stress. Some common techniques include deep breathing, meditation, and physical activity. Engaging in hobbies, spending time in nature, and connecting with loved ones can also help alleviate stress. Additionally, setting boundaries, practicing self-care, and learning to say no can prevent stress from building up.",
    "Green tea has been consumed for centuries and is known for its potential health benefits. It contains antioxidants that may help protect the body against damage caused by free radicals. Regular consumption of green tea has been associated with improved heart health, enhanced cognitive function, and a reduced risk of certain types of cancer. The polyphenols in green tea may also have anti-inflammatory and weight loss properties.",
]

# The path of your model after cloning it
model_dir = "{Your MODEL_PATH}"

vector_dim = 1024
vector_linear_directory = f"2_Dense_{vector_dim}"
model = AutoModel.from_pretrained(model_dir, trust_remote_code=True).cuda().eval()
# You can also use this model without the `use_memory_efficient_attention` and `unpad_inputs` features; it also works on CPU:
# model = AutoModel.from_pretrained(model_dir, trust_remote_code=True, use_memory_efficient_attention=False, unpad_inputs=False).cuda().eval()
tokenizer = AutoTokenizer.from_pretrained(model_dir, trust_remote_code=True)
vector_linear = torch.nn.Linear(in_features=model.config.hidden_size, out_features=vector_dim)
vector_linear_dict = {
    k.replace("linear.", ""): v
    for k, v in torch.load(os.path.join(model_dir, f"{vector_linear_directory}/pytorch_model.bin")).items()
}
vector_linear.load_state_dict(vector_linear_dict)
vector_linear.cuda()

# Embed the queries
with torch.no_grad():
    input_data = tokenizer(queries, padding="longest", truncation=True, max_length=512, return_tensors="pt")
    input_data = {k: v.cuda() for k, v in input_data.items()}
    attention_mask = input_data["attention_mask"]
    last_hidden_state = model(**input_data)[0]
    last_hidden = last_hidden_state.masked_fill(~attention_mask[..., None].bool(), 0.0)
    query_vectors = last_hidden.sum(dim=1) / attention_mask.sum(dim=1)[..., None]
    query_vectors = normalize(vector_linear(query_vectors).cpu().numpy())

# Embed the documents
with torch.no_grad():
    input_data = tokenizer(docs, padding="longest", truncation=True, max_length=512, return_tensors="pt")
    input_data = {k: v.cuda() for k, v in input_data.items()}
    attention_mask = input_data["attention_mask"]
    last_hidden_state = model(**input_data)[0]
    last_hidden = last_hidden_state.masked_fill(~attention_mask[..., None].bool(), 0.0)
    docs_vectors = last_hidden.sum(dim=1) / attention_mask.sum(dim=1)[..., None]
    docs_vectors = normalize(vector_linear(docs_vectors).cpu().numpy())

print(query_vectors.shape, docs_vectors.shape)
# (2, 1024) (2, 1024)

similarities = query_vectors @ docs_vectors.T
print(similarities)
# [[0.8397531  0.29900077]
#  [0.32818374 0.80954516]]
```

### infinity_emb

Usage via [infinity, MIT Licensed](https://github.com/michaelfeil/infinity).

```bash
docker run \
  --gpus all -p "7997":"7997" \
  michaelf34/infinity:0.0.69 \
  v2 --model-id dunzhang/stella_en_400M_v5 --revision "refs/pr/24" --dtype bfloat16 --batch-size 16 --device cuda --engine torch --port 7997 --no-bettertransformer
```

# Citation

```
@misc{zhang2025jasperstelladistillationsota,
    title={Jasper and Stella: distillation of SOTA embedding models},
    author={Dun Zhang and Jiacheng Li and Ziyang Zeng and Fulong Wang},
    year={2025},
    eprint={2412.19048},
    archivePrefix={arXiv},
    primaryClass={cs.IR},
    url={https://arxiv.org/abs/2412.19048},
}
```
# FAQ

Q: What are the details of training?

A: The training method and datasets will be released in the future (the exact time is not yet known; they may be provided in a paper).

Q: How do I choose a suitable prompt for my own task?

A: In most cases, please use the s2p and s2s prompts. These two prompts account for the vast majority of the training data. (A short s2s example is given below.)

Q: How do I reproduce the MTEB results?

A: Please use the evaluation scripts in `Alibaba-NLP/gte-Qwen2-1.5B-instruct` or `intfloat/e5-mistral-7b-instruct`.

Q: Why does each dimension have its own linear weight?

A: MRL has multiple training methods; we chose this one because it gave the best performance.

Q: What is the sequence length of the models?

A: 512 is recommended. In our experiments, almost all models perform poorly on specialized long-text retrieval datasets, and the model is trained on sequences of length 512, so this may be a direction for future optimization.

If you have any questions, please start a discussion in the community tab.
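A minimal sketch of the s2s case referenced in the FAQ above, using the `s2s_query` prompt defined in `config_sentence_transformers.json`; encoding both sentences with that prompt is an assumption here, since both sides act as queries in a symmetric similarity task:

```python
from sentence_transformers import SentenceTransformer

# Load the model as in the usage section above (add device/config kwargs as needed).
model = SentenceTransformer("dunzhang/stella_en_400M_v5", trust_remote_code=True)

sentences = [
    "A man is playing a guitar on stage.",
    "Someone is performing a song with a guitar.",
]
# Assumption: for the symmetric s2s task, both sentences are encoded with the s2s prompt.
embeddings = model.encode(sentences, prompt_name="s2s_query")
print(model.similarity(embeddings, embeddings))
```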
[ "BIOSSES", "SCIFACT" ]
intfloat/e5-base
intfloat
sentence-similarity
[ "sentence-transformers", "pytorch", "safetensors", "bert", "mteb", "Sentence Transformers", "sentence-similarity", "en", "arxiv:2212.03533", "arxiv:2104.08663", "arxiv:2210.07316", "license:mit", "model-index", "autotrain_compatible", "text-embeddings-inference", "endpoints_compatible", "region:us" ]
"2022-12-26T05:58:05Z"
2023-08-07T04:59:19+00:00
294,150
20
--- language: - en license: mit tags: - mteb - Sentence Transformers - sentence-similarity - sentence-transformers model-index: - name: e5-base results: - task: type: Classification dataset: name: MTEB AmazonCounterfactualClassification (en) type: mteb/amazon_counterfactual config: en split: test revision: e8379541af4e31359cca9fbcf4b00f2671dba205 metrics: - type: accuracy value: 79.71641791044777 - type: ap value: 44.15426065428253 - type: f1 value: 73.89474407693241 - task: type: Classification dataset: name: MTEB AmazonPolarityClassification type: mteb/amazon_polarity config: default split: test revision: e2d317d38cd51312af73b3d32a06d1a08b442046 metrics: - type: accuracy value: 87.9649 - type: ap value: 84.10171551915973 - type: f1 value: 87.94148377827356 - task: type: Classification dataset: name: MTEB AmazonReviewsClassification (en) type: mteb/amazon_reviews_multi config: en split: test revision: 1399c76144fd37290681b995c656ef9b2e06e26d metrics: - type: accuracy value: 42.645999999999994 - type: f1 value: 42.230574673549 - task: type: Retrieval dataset: name: MTEB ArguAna type: arguana config: default split: test revision: None metrics: - type: map_at_1 value: 26.814 - type: map_at_10 value: 42.681999999999995 - type: map_at_100 value: 43.714 - type: map_at_1000 value: 43.724000000000004 - type: map_at_3 value: 38.11 - type: map_at_5 value: 40.666999999999994 - type: mrr_at_1 value: 27.168999999999997 - type: mrr_at_10 value: 42.84 - type: mrr_at_100 value: 43.864 - type: mrr_at_1000 value: 43.875 - type: mrr_at_3 value: 38.193 - type: mrr_at_5 value: 40.793 - type: ndcg_at_1 value: 26.814 - type: ndcg_at_10 value: 51.410999999999994 - type: ndcg_at_100 value: 55.713 - type: ndcg_at_1000 value: 55.957 - type: ndcg_at_3 value: 41.955 - type: ndcg_at_5 value: 46.558 - type: precision_at_1 value: 26.814 - type: precision_at_10 value: 7.922999999999999 - type: precision_at_100 value: 0.9780000000000001 - type: precision_at_1000 value: 0.1 - type: precision_at_3 value: 17.71 - type: precision_at_5 value: 12.859000000000002 - type: recall_at_1 value: 26.814 - type: recall_at_10 value: 79.232 - type: recall_at_100 value: 97.795 - type: recall_at_1000 value: 99.644 - type: recall_at_3 value: 53.129000000000005 - type: recall_at_5 value: 64.29599999999999 - task: type: Clustering dataset: name: MTEB ArxivClusteringP2P type: mteb/arxiv-clustering-p2p config: default split: test revision: a122ad7f3f0291bf49cc6f4d32aa80929df69d5d metrics: - type: v_measure value: 44.56933066536439 - task: type: Clustering dataset: name: MTEB ArxivClusteringS2S type: mteb/arxiv-clustering-s2s config: default split: test revision: f910caf1a6075f7329cdf8c1a6135696f37dbd53 metrics: - type: v_measure value: 40.47647746165173 - task: type: Reranking dataset: name: MTEB AskUbuntuDupQuestions type: mteb/askubuntudupquestions-reranking config: default split: test revision: 2000358ca161889fa9c082cb41daa8dcfb161a54 metrics: - type: map value: 59.65675531567043 - type: mrr value: 72.95255683067317 - task: type: STS dataset: name: MTEB BIOSSES type: mteb/biosses-sts config: default split: test revision: d3fb88f8f02e40887cd149695127462bbcf29b4a metrics: - type: cos_sim_pearson value: 85.83147014162338 - type: cos_sim_spearman value: 85.1031439521441 - type: euclidean_pearson value: 83.53609085510973 - type: euclidean_spearman value: 84.59650590202833 - type: manhattan_pearson value: 83.14611947586386 - type: manhattan_spearman value: 84.13384475757064 - task: type: Classification dataset: name: MTEB Banking77Classification 
type: mteb/banking77 config: default split: test revision: 0fd18e25b25c072e09e0d92ab615fda904d66300 metrics: - type: accuracy value: 83.32792207792208 - type: f1 value: 83.32037485050513 - task: type: Clustering dataset: name: MTEB BiorxivClusteringP2P type: mteb/biorxiv-clustering-p2p config: default split: test revision: 65b79d1d13f80053f67aca9498d9402c2d9f1f40 metrics: - type: v_measure value: 36.18605446588703 - task: type: Clustering dataset: name: MTEB BiorxivClusteringS2S type: mteb/biorxiv-clustering-s2s config: default split: test revision: 258694dd0231531bc1fd9de6ceb52a0853c6d908 metrics: - type: v_measure value: 32.72379130181917 - task: type: Retrieval dataset: name: MTEB CQADupstackAndroidRetrieval type: BeIR/cqadupstack config: default split: test revision: None metrics: - type: map_at_1 value: 30.659 - type: map_at_10 value: 40.333999999999996 - type: map_at_100 value: 41.763 - type: map_at_1000 value: 41.894 - type: map_at_3 value: 37.561 - type: map_at_5 value: 39.084 - type: mrr_at_1 value: 37.482 - type: mrr_at_10 value: 45.736 - type: mrr_at_100 value: 46.591 - type: mrr_at_1000 value: 46.644999999999996 - type: mrr_at_3 value: 43.491 - type: mrr_at_5 value: 44.75 - type: ndcg_at_1 value: 37.482 - type: ndcg_at_10 value: 45.606 - type: ndcg_at_100 value: 51.172 - type: ndcg_at_1000 value: 53.407000000000004 - type: ndcg_at_3 value: 41.808 - type: ndcg_at_5 value: 43.449 - type: precision_at_1 value: 37.482 - type: precision_at_10 value: 8.254999999999999 - type: precision_at_100 value: 1.3719999999999999 - type: precision_at_1000 value: 0.186 - type: precision_at_3 value: 19.695 - type: precision_at_5 value: 13.847999999999999 - type: recall_at_1 value: 30.659 - type: recall_at_10 value: 55.409 - type: recall_at_100 value: 78.687 - type: recall_at_1000 value: 93.068 - type: recall_at_3 value: 43.891999999999996 - type: recall_at_5 value: 48.678 - type: map_at_1 value: 30.977 - type: map_at_10 value: 40.296 - type: map_at_100 value: 41.453 - type: map_at_1000 value: 41.581 - type: map_at_3 value: 37.619 - type: map_at_5 value: 39.181 - type: mrr_at_1 value: 39.108 - type: mrr_at_10 value: 46.894000000000005 - type: mrr_at_100 value: 47.55 - type: mrr_at_1000 value: 47.598 - type: mrr_at_3 value: 44.766 - type: mrr_at_5 value: 46.062999999999995 - type: ndcg_at_1 value: 39.108 - type: ndcg_at_10 value: 45.717 - type: ndcg_at_100 value: 49.941 - type: ndcg_at_1000 value: 52.138 - type: ndcg_at_3 value: 42.05 - type: ndcg_at_5 value: 43.893 - type: precision_at_1 value: 39.108 - type: precision_at_10 value: 8.306 - type: precision_at_100 value: 1.3419999999999999 - type: precision_at_1000 value: 0.184 - type: precision_at_3 value: 19.979 - type: precision_at_5 value: 14.038 - type: recall_at_1 value: 30.977 - type: recall_at_10 value: 54.688 - type: recall_at_100 value: 72.556 - type: recall_at_1000 value: 86.53800000000001 - type: recall_at_3 value: 43.388 - type: recall_at_5 value: 48.717 - type: map_at_1 value: 39.812 - type: map_at_10 value: 50.1 - type: map_at_100 value: 51.193999999999996 - type: map_at_1000 value: 51.258 - type: map_at_3 value: 47.510999999999996 - type: map_at_5 value: 48.891 - type: mrr_at_1 value: 45.266 - type: mrr_at_10 value: 53.459999999999994 - type: mrr_at_100 value: 54.19199999999999 - type: mrr_at_1000 value: 54.228 - type: mrr_at_3 value: 51.296 - type: mrr_at_5 value: 52.495999999999995 - type: ndcg_at_1 value: 45.266 - type: ndcg_at_10 value: 55.034000000000006 - type: ndcg_at_100 value: 59.458 - type: ndcg_at_1000 value: 60.862 - type: 
ndcg_at_3 value: 50.52799999999999 - type: ndcg_at_5 value: 52.564 - type: precision_at_1 value: 45.266 - type: precision_at_10 value: 8.483 - type: precision_at_100 value: 1.162 - type: precision_at_1000 value: 0.133 - type: precision_at_3 value: 21.944 - type: precision_at_5 value: 14.721 - type: recall_at_1 value: 39.812 - type: recall_at_10 value: 66.36 - type: recall_at_100 value: 85.392 - type: recall_at_1000 value: 95.523 - type: recall_at_3 value: 54.127 - type: recall_at_5 value: 59.245000000000005 - type: map_at_1 value: 26.186 - type: map_at_10 value: 33.18 - type: map_at_100 value: 34.052 - type: map_at_1000 value: 34.149 - type: map_at_3 value: 31.029 - type: map_at_5 value: 32.321 - type: mrr_at_1 value: 28.136 - type: mrr_at_10 value: 35.195 - type: mrr_at_100 value: 35.996 - type: mrr_at_1000 value: 36.076 - type: mrr_at_3 value: 33.051 - type: mrr_at_5 value: 34.407 - type: ndcg_at_1 value: 28.136 - type: ndcg_at_10 value: 37.275999999999996 - type: ndcg_at_100 value: 41.935 - type: ndcg_at_1000 value: 44.389 - type: ndcg_at_3 value: 33.059 - type: ndcg_at_5 value: 35.313 - type: precision_at_1 value: 28.136 - type: precision_at_10 value: 5.457999999999999 - type: precision_at_100 value: 0.826 - type: precision_at_1000 value: 0.107 - type: precision_at_3 value: 13.522 - type: precision_at_5 value: 9.424000000000001 - type: recall_at_1 value: 26.186 - type: recall_at_10 value: 47.961999999999996 - type: recall_at_100 value: 70.072 - type: recall_at_1000 value: 88.505 - type: recall_at_3 value: 36.752 - type: recall_at_5 value: 42.168 - type: map_at_1 value: 16.586000000000002 - type: map_at_10 value: 23.637 - type: map_at_100 value: 24.82 - type: map_at_1000 value: 24.95 - type: map_at_3 value: 21.428 - type: map_at_5 value: 22.555 - type: mrr_at_1 value: 20.771 - type: mrr_at_10 value: 27.839999999999996 - type: mrr_at_100 value: 28.887 - type: mrr_at_1000 value: 28.967 - type: mrr_at_3 value: 25.56 - type: mrr_at_5 value: 26.723000000000003 - type: ndcg_at_1 value: 20.771 - type: ndcg_at_10 value: 28.255000000000003 - type: ndcg_at_100 value: 33.886 - type: ndcg_at_1000 value: 36.963 - type: ndcg_at_3 value: 24.056 - type: ndcg_at_5 value: 25.818 - type: precision_at_1 value: 20.771 - type: precision_at_10 value: 5.1 - type: precision_at_100 value: 0.9119999999999999 - type: precision_at_1000 value: 0.132 - type: precision_at_3 value: 11.526 - type: precision_at_5 value: 8.158999999999999 - type: recall_at_1 value: 16.586000000000002 - type: recall_at_10 value: 38.456 - type: recall_at_100 value: 62.666 - type: recall_at_1000 value: 84.47 - type: recall_at_3 value: 26.765 - type: recall_at_5 value: 31.297000000000004 - type: map_at_1 value: 28.831 - type: map_at_10 value: 37.545 - type: map_at_100 value: 38.934999999999995 - type: map_at_1000 value: 39.044000000000004 - type: map_at_3 value: 34.601 - type: map_at_5 value: 36.302 - type: mrr_at_1 value: 34.264 - type: mrr_at_10 value: 42.569 - type: mrr_at_100 value: 43.514 - type: mrr_at_1000 value: 43.561 - type: mrr_at_3 value: 40.167 - type: mrr_at_5 value: 41.678 - type: ndcg_at_1 value: 34.264 - type: ndcg_at_10 value: 42.914 - type: ndcg_at_100 value: 48.931999999999995 - type: ndcg_at_1000 value: 51.004000000000005 - type: ndcg_at_3 value: 38.096999999999994 - type: ndcg_at_5 value: 40.509 - type: precision_at_1 value: 34.264 - type: precision_at_10 value: 7.642 - type: precision_at_100 value: 1.258 - type: precision_at_1000 value: 0.161 - type: precision_at_3 value: 17.453 - type: precision_at_5 value: 12.608 - 
type: recall_at_1 value: 28.831 - type: recall_at_10 value: 53.56999999999999 - type: recall_at_100 value: 79.26100000000001 - type: recall_at_1000 value: 92.862 - type: recall_at_3 value: 40.681 - type: recall_at_5 value: 46.597 - type: map_at_1 value: 27.461000000000002 - type: map_at_10 value: 35.885 - type: map_at_100 value: 37.039 - type: map_at_1000 value: 37.16 - type: map_at_3 value: 33.451 - type: map_at_5 value: 34.807 - type: mrr_at_1 value: 34.018 - type: mrr_at_10 value: 41.32 - type: mrr_at_100 value: 42.157 - type: mrr_at_1000 value: 42.223 - type: mrr_at_3 value: 39.288000000000004 - type: mrr_at_5 value: 40.481 - type: ndcg_at_1 value: 34.018 - type: ndcg_at_10 value: 40.821000000000005 - type: ndcg_at_100 value: 46.053 - type: ndcg_at_1000 value: 48.673 - type: ndcg_at_3 value: 36.839 - type: ndcg_at_5 value: 38.683 - type: precision_at_1 value: 34.018 - type: precision_at_10 value: 7.009 - type: precision_at_100 value: 1.123 - type: precision_at_1000 value: 0.153 - type: precision_at_3 value: 16.933 - type: precision_at_5 value: 11.826 - type: recall_at_1 value: 27.461000000000002 - type: recall_at_10 value: 50.285000000000004 - type: recall_at_100 value: 73.25500000000001 - type: recall_at_1000 value: 91.17699999999999 - type: recall_at_3 value: 39.104 - type: recall_at_5 value: 43.968 - type: map_at_1 value: 26.980083333333337 - type: map_at_10 value: 34.47208333333333 - type: map_at_100 value: 35.609249999999996 - type: map_at_1000 value: 35.72833333333333 - type: map_at_3 value: 32.189416666666666 - type: map_at_5 value: 33.44683333333334 - type: mrr_at_1 value: 31.731666666666662 - type: mrr_at_10 value: 38.518 - type: mrr_at_100 value: 39.38166666666667 - type: mrr_at_1000 value: 39.446999999999996 - type: mrr_at_3 value: 36.49966666666668 - type: mrr_at_5 value: 37.639916666666664 - type: ndcg_at_1 value: 31.731666666666662 - type: ndcg_at_10 value: 38.92033333333333 - type: ndcg_at_100 value: 44.01675 - type: ndcg_at_1000 value: 46.51075 - type: ndcg_at_3 value: 35.09766666666667 - type: ndcg_at_5 value: 36.842999999999996 - type: precision_at_1 value: 31.731666666666662 - type: precision_at_10 value: 6.472583333333332 - type: precision_at_100 value: 1.0665 - type: precision_at_1000 value: 0.14725000000000002 - type: precision_at_3 value: 15.659083333333331 - type: precision_at_5 value: 10.878833333333333 - type: recall_at_1 value: 26.980083333333337 - type: recall_at_10 value: 48.13925 - type: recall_at_100 value: 70.70149999999998 - type: recall_at_1000 value: 88.10775000000001 - type: recall_at_3 value: 37.30091666666667 - type: recall_at_5 value: 41.90358333333333 - type: map_at_1 value: 25.607999999999997 - type: map_at_10 value: 30.523 - type: map_at_100 value: 31.409 - type: map_at_1000 value: 31.507 - type: map_at_3 value: 28.915000000000003 - type: map_at_5 value: 29.756 - type: mrr_at_1 value: 28.681 - type: mrr_at_10 value: 33.409 - type: mrr_at_100 value: 34.241 - type: mrr_at_1000 value: 34.313 - type: mrr_at_3 value: 32.029999999999994 - type: mrr_at_5 value: 32.712 - type: ndcg_at_1 value: 28.681 - type: ndcg_at_10 value: 33.733000000000004 - type: ndcg_at_100 value: 38.32 - type: ndcg_at_1000 value: 40.937 - type: ndcg_at_3 value: 30.898999999999997 - type: ndcg_at_5 value: 32.088 - type: precision_at_1 value: 28.681 - type: precision_at_10 value: 4.968999999999999 - type: precision_at_100 value: 0.79 - type: precision_at_1000 value: 0.11 - type: precision_at_3 value: 12.73 - type: precision_at_5 value: 8.558 - type: recall_at_1 value: 
25.607999999999997 - type: recall_at_10 value: 40.722 - type: recall_at_100 value: 61.956999999999994 - type: recall_at_1000 value: 81.43 - type: recall_at_3 value: 32.785 - type: recall_at_5 value: 35.855 - type: map_at_1 value: 20.399 - type: map_at_10 value: 25.968000000000004 - type: map_at_100 value: 26.985999999999997 - type: map_at_1000 value: 27.105 - type: map_at_3 value: 24.215 - type: map_at_5 value: 25.157 - type: mrr_at_1 value: 24.708 - type: mrr_at_10 value: 29.971999999999998 - type: mrr_at_100 value: 30.858 - type: mrr_at_1000 value: 30.934 - type: mrr_at_3 value: 28.304000000000002 - type: mrr_at_5 value: 29.183999999999997 - type: ndcg_at_1 value: 24.708 - type: ndcg_at_10 value: 29.676000000000002 - type: ndcg_at_100 value: 34.656 - type: ndcg_at_1000 value: 37.588 - type: ndcg_at_3 value: 26.613 - type: ndcg_at_5 value: 27.919 - type: precision_at_1 value: 24.708 - type: precision_at_10 value: 5.01 - type: precision_at_100 value: 0.876 - type: precision_at_1000 value: 0.13 - type: precision_at_3 value: 11.975 - type: precision_at_5 value: 8.279 - type: recall_at_1 value: 20.399 - type: recall_at_10 value: 36.935 - type: recall_at_100 value: 59.532 - type: recall_at_1000 value: 80.58 - type: recall_at_3 value: 27.979 - type: recall_at_5 value: 31.636999999999997 - type: map_at_1 value: 27.606 - type: map_at_10 value: 34.213 - type: map_at_100 value: 35.339999999999996 - type: map_at_1000 value: 35.458 - type: map_at_3 value: 31.987 - type: map_at_5 value: 33.322 - type: mrr_at_1 value: 31.53 - type: mrr_at_10 value: 37.911 - type: mrr_at_100 value: 38.879000000000005 - type: mrr_at_1000 value: 38.956 - type: mrr_at_3 value: 35.868 - type: mrr_at_5 value: 37.047999999999995 - type: ndcg_at_1 value: 31.53 - type: ndcg_at_10 value: 38.312000000000005 - type: ndcg_at_100 value: 43.812 - type: ndcg_at_1000 value: 46.414 - type: ndcg_at_3 value: 34.319 - type: ndcg_at_5 value: 36.312 - type: precision_at_1 value: 31.53 - type: precision_at_10 value: 5.970000000000001 - type: precision_at_100 value: 0.9939999999999999 - type: precision_at_1000 value: 0.133 - type: precision_at_3 value: 14.738999999999999 - type: precision_at_5 value: 10.242999999999999 - type: recall_at_1 value: 27.606 - type: recall_at_10 value: 47.136 - type: recall_at_100 value: 71.253 - type: recall_at_1000 value: 89.39399999999999 - type: recall_at_3 value: 36.342 - type: recall_at_5 value: 41.388999999999996 - type: map_at_1 value: 24.855 - type: map_at_10 value: 31.963 - type: map_at_100 value: 33.371 - type: map_at_1000 value: 33.584 - type: map_at_3 value: 29.543999999999997 - type: map_at_5 value: 30.793 - type: mrr_at_1 value: 29.644 - type: mrr_at_10 value: 35.601 - type: mrr_at_100 value: 36.551 - type: mrr_at_1000 value: 36.623 - type: mrr_at_3 value: 33.399 - type: mrr_at_5 value: 34.575 - type: ndcg_at_1 value: 29.644 - type: ndcg_at_10 value: 36.521 - type: ndcg_at_100 value: 42.087 - type: ndcg_at_1000 value: 45.119 - type: ndcg_at_3 value: 32.797 - type: ndcg_at_5 value: 34.208 - type: precision_at_1 value: 29.644 - type: precision_at_10 value: 6.7 - type: precision_at_100 value: 1.374 - type: precision_at_1000 value: 0.22899999999999998 - type: precision_at_3 value: 15.152 - type: precision_at_5 value: 10.671999999999999 - type: recall_at_1 value: 24.855 - type: recall_at_10 value: 45.449 - type: recall_at_100 value: 70.921 - type: recall_at_1000 value: 90.629 - type: recall_at_3 value: 33.526 - type: recall_at_5 value: 37.848 - type: map_at_1 value: 24.781 - type: map_at_10 value: 
30.020999999999997 - type: map_at_100 value: 30.948999999999998 - type: map_at_1000 value: 31.05 - type: map_at_3 value: 28.412 - type: map_at_5 value: 29.193 - type: mrr_at_1 value: 27.172 - type: mrr_at_10 value: 32.309 - type: mrr_at_100 value: 33.164 - type: mrr_at_1000 value: 33.239999999999995 - type: mrr_at_3 value: 30.775999999999996 - type: mrr_at_5 value: 31.562 - type: ndcg_at_1 value: 27.172 - type: ndcg_at_10 value: 33.178999999999995 - type: ndcg_at_100 value: 37.949 - type: ndcg_at_1000 value: 40.635 - type: ndcg_at_3 value: 30.107 - type: ndcg_at_5 value: 31.36 - type: precision_at_1 value: 27.172 - type: precision_at_10 value: 4.769 - type: precision_at_100 value: 0.769 - type: precision_at_1000 value: 0.109 - type: precision_at_3 value: 12.261 - type: precision_at_5 value: 8.17 - type: recall_at_1 value: 24.781 - type: recall_at_10 value: 40.699000000000005 - type: recall_at_100 value: 62.866 - type: recall_at_1000 value: 83.11699999999999 - type: recall_at_3 value: 32.269999999999996 - type: recall_at_5 value: 35.443999999999996 - task: type: Retrieval dataset: name: MTEB ClimateFEVER type: climate-fever config: default split: test revision: None metrics: - type: map_at_1 value: 5.2139999999999995 - type: map_at_10 value: 9.986 - type: map_at_100 value: 11.343 - type: map_at_1000 value: 11.55 - type: map_at_3 value: 7.961 - type: map_at_5 value: 8.967 - type: mrr_at_1 value: 12.052 - type: mrr_at_10 value: 20.165 - type: mrr_at_100 value: 21.317 - type: mrr_at_1000 value: 21.399 - type: mrr_at_3 value: 17.079 - type: mrr_at_5 value: 18.695 - type: ndcg_at_1 value: 12.052 - type: ndcg_at_10 value: 15.375 - type: ndcg_at_100 value: 21.858 - type: ndcg_at_1000 value: 26.145000000000003 - type: ndcg_at_3 value: 11.334 - type: ndcg_at_5 value: 12.798000000000002 - type: precision_at_1 value: 12.052 - type: precision_at_10 value: 5.16 - type: precision_at_100 value: 1.206 - type: precision_at_1000 value: 0.198 - type: precision_at_3 value: 8.73 - type: precision_at_5 value: 7.114 - type: recall_at_1 value: 5.2139999999999995 - type: recall_at_10 value: 20.669999999999998 - type: recall_at_100 value: 43.901 - type: recall_at_1000 value: 68.447 - type: recall_at_3 value: 11.049000000000001 - type: recall_at_5 value: 14.652999999999999 - task: type: Retrieval dataset: name: MTEB DBPedia type: dbpedia-entity config: default split: test revision: None metrics: - type: map_at_1 value: 8.511000000000001 - type: map_at_10 value: 19.503 - type: map_at_100 value: 27.46 - type: map_at_1000 value: 29.187 - type: map_at_3 value: 14.030999999999999 - type: map_at_5 value: 16.329 - type: mrr_at_1 value: 63.74999999999999 - type: mrr_at_10 value: 73.419 - type: mrr_at_100 value: 73.691 - type: mrr_at_1000 value: 73.697 - type: mrr_at_3 value: 71.792 - type: mrr_at_5 value: 72.979 - type: ndcg_at_1 value: 53.125 - type: ndcg_at_10 value: 41.02 - type: ndcg_at_100 value: 45.407 - type: ndcg_at_1000 value: 52.68000000000001 - type: ndcg_at_3 value: 46.088 - type: ndcg_at_5 value: 43.236000000000004 - type: precision_at_1 value: 63.74999999999999 - type: precision_at_10 value: 32.35 - type: precision_at_100 value: 10.363 - type: precision_at_1000 value: 2.18 - type: precision_at_3 value: 49.667 - type: precision_at_5 value: 41.5 - type: recall_at_1 value: 8.511000000000001 - type: recall_at_10 value: 24.851 - type: recall_at_100 value: 50.745 - type: recall_at_1000 value: 73.265 - type: recall_at_3 value: 15.716 - type: recall_at_5 value: 19.256 - task: type: Classification dataset: name: MTEB 
EmotionClassification type: mteb/emotion config: default split: test revision: 4f58c6b202a23cf9a4da393831edf4f9183cad37 metrics: - type: accuracy value: 49.43500000000001 - type: f1 value: 44.56288273966374 - task: type: Retrieval dataset: name: MTEB FEVER type: fever config: default split: test revision: None metrics: - type: map_at_1 value: 40.858 - type: map_at_10 value: 52.276 - type: map_at_100 value: 52.928 - type: map_at_1000 value: 52.966 - type: map_at_3 value: 49.729 - type: map_at_5 value: 51.27 - type: mrr_at_1 value: 43.624 - type: mrr_at_10 value: 55.22899999999999 - type: mrr_at_100 value: 55.823 - type: mrr_at_1000 value: 55.85 - type: mrr_at_3 value: 52.739999999999995 - type: mrr_at_5 value: 54.251000000000005 - type: ndcg_at_1 value: 43.624 - type: ndcg_at_10 value: 58.23500000000001 - type: ndcg_at_100 value: 61.315 - type: ndcg_at_1000 value: 62.20099999999999 - type: ndcg_at_3 value: 53.22 - type: ndcg_at_5 value: 55.88999999999999 - type: precision_at_1 value: 43.624 - type: precision_at_10 value: 8.068999999999999 - type: precision_at_100 value: 0.975 - type: precision_at_1000 value: 0.107 - type: precision_at_3 value: 21.752 - type: precision_at_5 value: 14.515 - type: recall_at_1 value: 40.858 - type: recall_at_10 value: 73.744 - type: recall_at_100 value: 87.667 - type: recall_at_1000 value: 94.15599999999999 - type: recall_at_3 value: 60.287 - type: recall_at_5 value: 66.703 - task: type: Retrieval dataset: name: MTEB FiQA2018 type: fiqa config: default split: test revision: None metrics: - type: map_at_1 value: 17.864 - type: map_at_10 value: 28.592000000000002 - type: map_at_100 value: 30.165 - type: map_at_1000 value: 30.364 - type: map_at_3 value: 24.586 - type: map_at_5 value: 26.717000000000002 - type: mrr_at_1 value: 35.031 - type: mrr_at_10 value: 43.876 - type: mrr_at_100 value: 44.683 - type: mrr_at_1000 value: 44.736 - type: mrr_at_3 value: 40.998000000000005 - type: mrr_at_5 value: 42.595 - type: ndcg_at_1 value: 35.031 - type: ndcg_at_10 value: 36.368 - type: ndcg_at_100 value: 42.472 - type: ndcg_at_1000 value: 45.973000000000006 - type: ndcg_at_3 value: 31.915 - type: ndcg_at_5 value: 33.394 - type: precision_at_1 value: 35.031 - type: precision_at_10 value: 10.139 - type: precision_at_100 value: 1.6420000000000001 - type: precision_at_1000 value: 0.22699999999999998 - type: precision_at_3 value: 21.142 - type: precision_at_5 value: 15.772 - type: recall_at_1 value: 17.864 - type: recall_at_10 value: 43.991 - type: recall_at_100 value: 66.796 - type: recall_at_1000 value: 87.64 - type: recall_at_3 value: 28.915999999999997 - type: recall_at_5 value: 35.185 - task: type: Retrieval dataset: name: MTEB HotpotQA type: hotpotqa config: default split: test revision: None metrics: - type: map_at_1 value: 36.556 - type: map_at_10 value: 53.056000000000004 - type: map_at_100 value: 53.909 - type: map_at_1000 value: 53.98 - type: map_at_3 value: 49.982 - type: map_at_5 value: 51.9 - type: mrr_at_1 value: 73.113 - type: mrr_at_10 value: 79.381 - type: mrr_at_100 value: 79.60300000000001 - type: mrr_at_1000 value: 79.617 - type: mrr_at_3 value: 78.298 - type: mrr_at_5 value: 78.995 - type: ndcg_at_1 value: 73.113 - type: ndcg_at_10 value: 62.21 - type: ndcg_at_100 value: 65.242 - type: ndcg_at_1000 value: 66.667 - type: ndcg_at_3 value: 57.717 - type: ndcg_at_5 value: 60.224 - type: precision_at_1 value: 73.113 - type: precision_at_10 value: 12.842999999999998 - type: precision_at_100 value: 1.522 - type: precision_at_1000 value: 0.17099999999999999 - type: 
precision_at_3 value: 36.178 - type: precision_at_5 value: 23.695 - type: recall_at_1 value: 36.556 - type: recall_at_10 value: 64.213 - type: recall_at_100 value: 76.077 - type: recall_at_1000 value: 85.53699999999999 - type: recall_at_3 value: 54.266999999999996 - type: recall_at_5 value: 59.236999999999995 - task: type: Classification dataset: name: MTEB ImdbClassification type: mteb/imdb config: default split: test revision: 3d86128a09e091d6018b6d26cad27f2739fc2db7 metrics: - type: accuracy value: 75.958 - type: ap value: 69.82869527654348 - type: f1 value: 75.89120903005633 - task: type: Retrieval dataset: name: MTEB MSMARCO type: msmarco config: default split: dev revision: None metrics: - type: map_at_1 value: 23.608 - type: map_at_10 value: 36.144 - type: map_at_100 value: 37.244 - type: map_at_1000 value: 37.291999999999994 - type: map_at_3 value: 32.287 - type: map_at_5 value: 34.473 - type: mrr_at_1 value: 24.226 - type: mrr_at_10 value: 36.711 - type: mrr_at_100 value: 37.758 - type: mrr_at_1000 value: 37.8 - type: mrr_at_3 value: 32.92 - type: mrr_at_5 value: 35.104 - type: ndcg_at_1 value: 24.269 - type: ndcg_at_10 value: 43.138 - type: ndcg_at_100 value: 48.421 - type: ndcg_at_1000 value: 49.592000000000006 - type: ndcg_at_3 value: 35.269 - type: ndcg_at_5 value: 39.175 - type: precision_at_1 value: 24.269 - type: precision_at_10 value: 6.755999999999999 - type: precision_at_100 value: 0.941 - type: precision_at_1000 value: 0.104 - type: precision_at_3 value: 14.938 - type: precision_at_5 value: 10.934000000000001 - type: recall_at_1 value: 23.608 - type: recall_at_10 value: 64.679 - type: recall_at_100 value: 89.027 - type: recall_at_1000 value: 97.91 - type: recall_at_3 value: 43.25 - type: recall_at_5 value: 52.617000000000004 - task: type: Classification dataset: name: MTEB MTOPDomainClassification (en) type: mteb/mtop_domain config: en split: test revision: d80d48c1eb48d3562165c59d59d0034df9fff0bf metrics: - type: accuracy value: 93.21477428180576 - type: f1 value: 92.92502305092152 - task: type: Classification dataset: name: MTEB MTOPIntentClassification (en) type: mteb/mtop_intent config: en split: test revision: ae001d0e6b1228650b7bd1c2c65fb50ad11a8aba metrics: - type: accuracy value: 74.76744186046511 - type: f1 value: 59.19855520057899 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (en) type: mteb/amazon_massive_intent config: en split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 72.24613315400134 - type: f1 value: 70.19950395651232 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (en) type: mteb/amazon_massive_scenario config: en split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 76.75857431069268 - type: f1 value: 76.5433450230191 - task: type: Clustering dataset: name: MTEB MedrxivClusteringP2P type: mteb/medrxiv-clustering-p2p config: default split: test revision: e7a26af6f3ae46b30dde8737f02c07b1505bcc73 metrics: - type: v_measure value: 31.525463791623604 - task: type: Clustering dataset: name: MTEB MedrxivClusteringS2S type: mteb/medrxiv-clustering-s2s config: default split: test revision: 35191c8c0dca72d8ff3efcd72aa802307d469663 metrics: - type: v_measure value: 28.28695907385136 - task: type: Reranking dataset: name: MTEB MindSmallReranking type: mteb/mind_small config: default split: test revision: 3bdac13927fdc888b903db93b2ffdbd90b295a69 metrics: - type: map value: 30.068174046665224 - type: mrr value: 
30.827586642840803 - task: type: Retrieval dataset: name: MTEB NFCorpus type: nfcorpus config: default split: test revision: None metrics: - type: map_at_1 value: 6.322 - type: map_at_10 value: 13.919999999999998 - type: map_at_100 value: 17.416 - type: map_at_1000 value: 18.836 - type: map_at_3 value: 10.111 - type: map_at_5 value: 11.991999999999999 - type: mrr_at_1 value: 48.297000000000004 - type: mrr_at_10 value: 57.114 - type: mrr_at_100 value: 57.713 - type: mrr_at_1000 value: 57.751 - type: mrr_at_3 value: 55.108000000000004 - type: mrr_at_5 value: 56.533 - type: ndcg_at_1 value: 46.44 - type: ndcg_at_10 value: 36.589 - type: ndcg_at_100 value: 33.202 - type: ndcg_at_1000 value: 41.668 - type: ndcg_at_3 value: 41.302 - type: ndcg_at_5 value: 39.829 - type: precision_at_1 value: 47.988 - type: precision_at_10 value: 27.059 - type: precision_at_100 value: 8.235000000000001 - type: precision_at_1000 value: 2.091 - type: precision_at_3 value: 38.184000000000005 - type: precision_at_5 value: 34.365 - type: recall_at_1 value: 6.322 - type: recall_at_10 value: 18.288 - type: recall_at_100 value: 32.580999999999996 - type: recall_at_1000 value: 63.605999999999995 - type: recall_at_3 value: 11.266 - type: recall_at_5 value: 14.69 - task: type: Retrieval dataset: name: MTEB NQ type: nq config: default split: test revision: None metrics: - type: map_at_1 value: 36.586999999999996 - type: map_at_10 value: 52.464 - type: map_at_100 value: 53.384 - type: map_at_1000 value: 53.405 - type: map_at_3 value: 48.408 - type: map_at_5 value: 50.788999999999994 - type: mrr_at_1 value: 40.904 - type: mrr_at_10 value: 54.974000000000004 - type: mrr_at_100 value: 55.60699999999999 - type: mrr_at_1000 value: 55.623 - type: mrr_at_3 value: 51.73799999999999 - type: mrr_at_5 value: 53.638 - type: ndcg_at_1 value: 40.904 - type: ndcg_at_10 value: 59.965999999999994 - type: ndcg_at_100 value: 63.613 - type: ndcg_at_1000 value: 64.064 - type: ndcg_at_3 value: 52.486 - type: ndcg_at_5 value: 56.377 - type: precision_at_1 value: 40.904 - type: precision_at_10 value: 9.551 - type: precision_at_100 value: 1.162 - type: precision_at_1000 value: 0.12 - type: precision_at_3 value: 23.552 - type: precision_at_5 value: 16.436999999999998 - type: recall_at_1 value: 36.586999999999996 - type: recall_at_10 value: 80.094 - type: recall_at_100 value: 95.515 - type: recall_at_1000 value: 98.803 - type: recall_at_3 value: 60.907 - type: recall_at_5 value: 69.817 - task: type: Retrieval dataset: name: MTEB QuoraRetrieval type: quora config: default split: test revision: None metrics: - type: map_at_1 value: 70.422 - type: map_at_10 value: 84.113 - type: map_at_100 value: 84.744 - type: map_at_1000 value: 84.762 - type: map_at_3 value: 81.171 - type: map_at_5 value: 83.039 - type: mrr_at_1 value: 81.12 - type: mrr_at_10 value: 87.277 - type: mrr_at_100 value: 87.384 - type: mrr_at_1000 value: 87.385 - type: mrr_at_3 value: 86.315 - type: mrr_at_5 value: 86.981 - type: ndcg_at_1 value: 81.12 - type: ndcg_at_10 value: 87.92 - type: ndcg_at_100 value: 89.178 - type: ndcg_at_1000 value: 89.29899999999999 - type: ndcg_at_3 value: 85.076 - type: ndcg_at_5 value: 86.67099999999999 - type: precision_at_1 value: 81.12 - type: precision_at_10 value: 13.325999999999999 - type: precision_at_100 value: 1.524 - type: precision_at_1000 value: 0.157 - type: precision_at_3 value: 37.16 - type: precision_at_5 value: 24.456 - type: recall_at_1 value: 70.422 - type: recall_at_10 value: 95.00800000000001 - type: recall_at_100 value: 99.38 - type: 
recall_at_1000 value: 99.94800000000001 - type: recall_at_3 value: 86.809 - type: recall_at_5 value: 91.334 - task: type: Clustering dataset: name: MTEB RedditClustering type: mteb/reddit-clustering config: default split: test revision: 24640382cdbf8abc73003fb0fa6d111a705499eb metrics: - type: v_measure value: 48.18491891699636 - task: type: Clustering dataset: name: MTEB RedditClusteringP2P type: mteb/reddit-clustering-p2p config: default split: test revision: 282350215ef01743dc01b456c7f5241fa8937f16 metrics: - type: v_measure value: 62.190639679711914 - task: type: Retrieval dataset: name: MTEB SCIDOCS type: scidocs config: default split: test revision: None metrics: - type: map_at_1 value: 4.478 - type: map_at_10 value: 11.268 - type: map_at_100 value: 13.129 - type: map_at_1000 value: 13.41 - type: map_at_3 value: 8.103 - type: map_at_5 value: 9.609 - type: mrr_at_1 value: 22 - type: mrr_at_10 value: 32.248 - type: mrr_at_100 value: 33.355000000000004 - type: mrr_at_1000 value: 33.42 - type: mrr_at_3 value: 29.15 - type: mrr_at_5 value: 30.785 - type: ndcg_at_1 value: 22 - type: ndcg_at_10 value: 18.990000000000002 - type: ndcg_at_100 value: 26.302999999999997 - type: ndcg_at_1000 value: 31.537 - type: ndcg_at_3 value: 18.034 - type: ndcg_at_5 value: 15.655 - type: precision_at_1 value: 22 - type: precision_at_10 value: 9.91 - type: precision_at_100 value: 2.0420000000000003 - type: precision_at_1000 value: 0.33 - type: precision_at_3 value: 16.933 - type: precision_at_5 value: 13.719999999999999 - type: recall_at_1 value: 4.478 - type: recall_at_10 value: 20.087 - type: recall_at_100 value: 41.457 - type: recall_at_1000 value: 67.10199999999999 - type: recall_at_3 value: 10.313 - type: recall_at_5 value: 13.927999999999999 - task: type: STS dataset: name: MTEB SICK-R type: mteb/sickr-sts config: default split: test revision: a6ea5a8cab320b040a23452cc28066d9beae2cee metrics: - type: cos_sim_pearson value: 84.27341574565806 - type: cos_sim_spearman value: 79.66419880841734 - type: euclidean_pearson value: 81.32473321838208 - type: euclidean_spearman value: 79.29828832085133 - type: manhattan_pearson value: 81.25554065883132 - type: manhattan_spearman value: 79.23275543279853 - task: type: STS dataset: name: MTEB STS12 type: mteb/sts12-sts config: default split: test revision: a0d554a64d88156834ff5ae9920b964011b16384 metrics: - type: cos_sim_pearson value: 83.40468875905418 - type: cos_sim_spearman value: 74.2189990321174 - type: euclidean_pearson value: 80.74376966290956 - type: euclidean_spearman value: 74.97663839079335 - type: manhattan_pearson value: 80.69779331646207 - type: manhattan_spearman value: 75.00225252917613 - task: type: STS dataset: name: MTEB STS13 type: mteb/sts13-sts config: default split: test revision: 7e90230a92c190f1bf69ae9002b8cea547a64cca metrics: - type: cos_sim_pearson value: 82.5745290053095 - type: cos_sim_spearman value: 83.31401180333397 - type: euclidean_pearson value: 82.96500607325534 - type: euclidean_spearman value: 83.8534967935793 - type: manhattan_pearson value: 82.83112050632508 - type: manhattan_spearman value: 83.70877296557838 - task: type: STS dataset: name: MTEB STS14 type: mteb/sts14-sts config: default split: test revision: 6031580fec1f6af667f0bd2da0a551cf4f0b2375 metrics: - type: cos_sim_pearson value: 80.67833656607704 - type: cos_sim_spearman value: 78.52252410630707 - type: euclidean_pearson value: 80.071189514343 - type: euclidean_spearman value: 78.95143545742796 - type: manhattan_pearson value: 80.0128926165121 - type: 
manhattan_spearman value: 78.91236678732628 - task: type: STS dataset: name: MTEB STS15 type: mteb/sts15-sts config: default split: test revision: ae752c7c21bf194d8b67fd573edf7ae58183cbe3 metrics: - type: cos_sim_pearson value: 87.48437639980746 - type: cos_sim_spearman value: 88.34876527774259 - type: euclidean_pearson value: 87.64898081823888 - type: euclidean_spearman value: 88.58937180804213 - type: manhattan_pearson value: 87.5942417815288 - type: manhattan_spearman value: 88.53013922267687 - task: type: STS dataset: name: MTEB STS16 type: mteb/sts16-sts config: default split: test revision: 4d8694f8f0e0100860b497b999b3dbed754a0513 metrics: - type: cos_sim_pearson value: 82.69189187164781 - type: cos_sim_spearman value: 84.15327883572112 - type: euclidean_pearson value: 83.64202266685898 - type: euclidean_spearman value: 84.6219602318862 - type: manhattan_pearson value: 83.53256698709998 - type: manhattan_spearman value: 84.49260712904946 - task: type: STS dataset: name: MTEB STS17 (en-en) type: mteb/sts17-crosslingual-sts config: en-en split: test revision: af5e6fb845001ecf41f4c1e033ce921939a2a68d metrics: - type: cos_sim_pearson value: 87.09508017611589 - type: cos_sim_spearman value: 87.23010990417097 - type: euclidean_pearson value: 87.62545569077133 - type: euclidean_spearman value: 86.71152051711714 - type: manhattan_pearson value: 87.5057154278377 - type: manhattan_spearman value: 86.60611898281267 - task: type: STS dataset: name: MTEB STS22 (en) type: mteb/sts22-crosslingual-sts config: en split: test revision: 6d1ba47164174a496b7fa5d3569dae26a6813b80 metrics: - type: cos_sim_pearson value: 61.72129893941176 - type: cos_sim_spearman value: 62.87871412069194 - type: euclidean_pearson value: 63.21077648290454 - type: euclidean_spearman value: 63.03263080805978 - type: manhattan_pearson value: 63.20740860135976 - type: manhattan_spearman value: 62.89930471802817 - task: type: STS dataset: name: MTEB STSBenchmark type: mteb/stsbenchmark-sts config: default split: test revision: b0fddb56ed78048fa8b90373c8a3cfc37b684831 metrics: - type: cos_sim_pearson value: 85.039118236799 - type: cos_sim_spearman value: 86.18102563389962 - type: euclidean_pearson value: 85.62977041471879 - type: euclidean_spearman value: 86.02478990544347 - type: manhattan_pearson value: 85.60786740521806 - type: manhattan_spearman value: 85.99546210442547 - task: type: Reranking dataset: name: MTEB SciDocsRR type: mteb/scidocs-reranking config: default split: test revision: d3c5e1fc0b855ab6097bf1cda04dd73947d7caab metrics: - type: map value: 82.89875069737266 - type: mrr value: 95.42621322033087 - task: type: Retrieval dataset: name: MTEB SciFact type: scifact config: default split: test revision: None metrics: - type: map_at_1 value: 58.660999999999994 - type: map_at_10 value: 68.738 - type: map_at_100 value: 69.33200000000001 - type: map_at_1000 value: 69.352 - type: map_at_3 value: 66.502 - type: map_at_5 value: 67.686 - type: mrr_at_1 value: 61.667 - type: mrr_at_10 value: 70.003 - type: mrr_at_100 value: 70.441 - type: mrr_at_1000 value: 70.46 - type: mrr_at_3 value: 68.278 - type: mrr_at_5 value: 69.194 - type: ndcg_at_1 value: 61.667 - type: ndcg_at_10 value: 73.083 - type: ndcg_at_100 value: 75.56 - type: ndcg_at_1000 value: 76.01400000000001 - type: ndcg_at_3 value: 69.28699999999999 - type: ndcg_at_5 value: 70.85000000000001 - type: precision_at_1 value: 61.667 - type: precision_at_10 value: 9.6 - type: precision_at_100 value: 1.087 - type: precision_at_1000 value: 0.11199999999999999 - type: 
precision_at_3 value: 27.111 - type: precision_at_5 value: 17.467 - type: recall_at_1 value: 58.660999999999994 - type: recall_at_10 value: 85.02199999999999 - type: recall_at_100 value: 95.933 - type: recall_at_1000 value: 99.333 - type: recall_at_3 value: 74.506 - type: recall_at_5 value: 78.583 - task: type: PairClassification dataset: name: MTEB SprintDuplicateQuestions type: mteb/sprintduplicatequestions-pairclassification config: default split: test revision: d66bd1f72af766a5cc4b0ca5e00c162f89e8cc46 metrics: - type: cos_sim_accuracy value: 99.8029702970297 - type: cos_sim_ap value: 94.87673936635738 - type: cos_sim_f1 value: 90.00502260170768 - type: cos_sim_precision value: 90.41372351160445 - type: cos_sim_recall value: 89.60000000000001 - type: dot_accuracy value: 99.57524752475247 - type: dot_ap value: 84.81717934496321 - type: dot_f1 value: 78.23026646556059 - type: dot_precision value: 78.66531850353893 - type: dot_recall value: 77.8 - type: euclidean_accuracy value: 99.8029702970297 - type: euclidean_ap value: 94.74658253135284 - type: euclidean_f1 value: 90.08470353761834 - type: euclidean_precision value: 89.77159880834161 - type: euclidean_recall value: 90.4 - type: manhattan_accuracy value: 99.8 - type: manhattan_ap value: 94.69224030742787 - type: manhattan_f1 value: 89.9502487562189 - type: manhattan_precision value: 89.50495049504951 - type: manhattan_recall value: 90.4 - type: max_accuracy value: 99.8029702970297 - type: max_ap value: 94.87673936635738 - type: max_f1 value: 90.08470353761834 - task: type: Clustering dataset: name: MTEB StackExchangeClustering type: mteb/stackexchange-clustering config: default split: test revision: 6cbc1f7b2bc0622f2e39d2c77fa502909748c259 metrics: - type: v_measure value: 63.906039623153035 - task: type: Clustering dataset: name: MTEB StackExchangeClusteringP2P type: mteb/stackexchange-clustering-p2p config: default split: test revision: 815ca46b2622cec33ccafc3735d572c266efdb44 metrics: - type: v_measure value: 32.56053830923281 - task: type: Reranking dataset: name: MTEB StackOverflowDupQuestions type: mteb/stackoverflowdupquestions-reranking config: default split: test revision: e185fbe320c72810689fc5848eb6114e1ef5ec69 metrics: - type: map value: 50.15326538775145 - type: mrr value: 50.99279295051355 - task: type: Summarization dataset: name: MTEB SummEval type: mteb/summeval config: default split: test revision: cda12ad7615edc362dbf25a00fdd61d3b1eaf93c metrics: - type: cos_sim_pearson value: 31.44030762047337 - type: cos_sim_spearman value: 31.00910300264562 - type: dot_pearson value: 26.88257194766013 - type: dot_spearman value: 27.646202679013577 - task: type: Retrieval dataset: name: MTEB TRECCOVID type: trec-covid config: default split: test revision: None metrics: - type: map_at_1 value: 0.247 - type: map_at_10 value: 1.9429999999999998 - type: map_at_100 value: 10.82 - type: map_at_1000 value: 25.972 - type: map_at_3 value: 0.653 - type: map_at_5 value: 1.057 - type: mrr_at_1 value: 94 - type: mrr_at_10 value: 96.333 - type: mrr_at_100 value: 96.333 - type: mrr_at_1000 value: 96.333 - type: mrr_at_3 value: 96.333 - type: mrr_at_5 value: 96.333 - type: ndcg_at_1 value: 89 - type: ndcg_at_10 value: 79.63799999999999 - type: ndcg_at_100 value: 57.961 - type: ndcg_at_1000 value: 50.733 - type: ndcg_at_3 value: 84.224 - type: ndcg_at_5 value: 82.528 - type: precision_at_1 value: 94 - type: precision_at_10 value: 84.2 - type: precision_at_100 value: 59.36 - type: precision_at_1000 value: 22.738 - type: precision_at_3 value: 88 - 
type: precision_at_5 value: 86.8 - type: recall_at_1 value: 0.247 - type: recall_at_10 value: 2.131 - type: recall_at_100 value: 14.035 - type: recall_at_1000 value: 47.457 - type: recall_at_3 value: 0.6779999999999999 - type: recall_at_5 value: 1.124 - task: type: Retrieval dataset: name: MTEB Touche2020 type: webis-touche2020 config: default split: test revision: None metrics: - type: map_at_1 value: 2.603 - type: map_at_10 value: 11.667 - type: map_at_100 value: 16.474 - type: map_at_1000 value: 18.074 - type: map_at_3 value: 6.03 - type: map_at_5 value: 8.067 - type: mrr_at_1 value: 34.694 - type: mrr_at_10 value: 51.063 - type: mrr_at_100 value: 51.908 - type: mrr_at_1000 value: 51.908 - type: mrr_at_3 value: 47.959 - type: mrr_at_5 value: 49.694 - type: ndcg_at_1 value: 32.653 - type: ndcg_at_10 value: 28.305000000000003 - type: ndcg_at_100 value: 35.311 - type: ndcg_at_1000 value: 47.644999999999996 - type: ndcg_at_3 value: 32.187 - type: ndcg_at_5 value: 29.134999999999998 - type: precision_at_1 value: 34.694 - type: precision_at_10 value: 26.122 - type: precision_at_100 value: 6.755 - type: precision_at_1000 value: 1.467 - type: precision_at_3 value: 34.694 - type: precision_at_5 value: 30.203999999999997 - type: recall_at_1 value: 2.603 - type: recall_at_10 value: 18.716 - type: recall_at_100 value: 42.512 - type: recall_at_1000 value: 79.32000000000001 - type: recall_at_3 value: 7.59 - type: recall_at_5 value: 10.949 - task: type: Classification dataset: name: MTEB ToxicConversationsClassification type: mteb/toxic_conversations_50k config: default split: test revision: d7c0de2777da35d6aae2200a62c6e0e5af397c4c metrics: - type: accuracy value: 74.117 - type: ap value: 15.89357321699319 - type: f1 value: 57.14385866369257 - task: type: Classification dataset: name: MTEB TweetSentimentExtractionClassification type: mteb/tweet_sentiment_extraction config: default split: test revision: d604517c81ca91fe16a244d1248fc021f9ecee7a metrics: - type: accuracy value: 61.38370118845502 - type: f1 value: 61.67038693866553 - task: type: Clustering dataset: name: MTEB TwentyNewsgroupsClustering type: mteb/twentynewsgroups-clustering config: default split: test revision: 6125ec4e24fa026cec8a478383ee943acfbd5449 metrics: - type: v_measure value: 42.57754941537969 - task: type: PairClassification dataset: name: MTEB TwitterSemEval2015 type: mteb/twittersemeval2015-pairclassification config: default split: test revision: 70970daeab8776df92f5ea462b6173c0b46fd2d1 metrics: - type: cos_sim_accuracy value: 86.1775049174465 - type: cos_sim_ap value: 74.3994879581554 - type: cos_sim_f1 value: 69.32903671308551 - type: cos_sim_precision value: 61.48193508879363 - type: cos_sim_recall value: 79.47229551451187 - type: dot_accuracy value: 81.65345413363534 - type: dot_ap value: 59.690898346685096 - type: dot_f1 value: 57.27622826467499 - type: dot_precision value: 51.34965473948525 - type: dot_recall value: 64.74934036939314 - type: euclidean_accuracy value: 86.04637301066937 - type: euclidean_ap value: 74.33009001775268 - type: euclidean_f1 value: 69.2458374142997 - type: euclidean_precision value: 64.59570580173595 - type: euclidean_recall value: 74.6174142480211 - type: manhattan_accuracy value: 86.11193896405793 - type: manhattan_ap value: 74.2964140130421 - type: manhattan_f1 value: 69.11601528788066 - type: manhattan_precision value: 64.86924323073363 - type: manhattan_recall value: 73.95778364116094 - type: max_accuracy value: 86.1775049174465 - type: max_ap value: 74.3994879581554 - type: max_f1 value: 
69.32903671308551 - task: type: PairClassification dataset: name: MTEB TwitterURLCorpus type: mteb/twitterurlcorpus-pairclassification config: default split: test revision: 8b6510b0b1fa4e4c4f879467980e9be563ec1cdf metrics: - type: cos_sim_accuracy value: 89.01501921061823 - type: cos_sim_ap value: 85.97819287477351 - type: cos_sim_f1 value: 78.33882858518875 - type: cos_sim_precision value: 75.49446626204926 - type: cos_sim_recall value: 81.40591315060055 - type: dot_accuracy value: 86.47494857763806 - type: dot_ap value: 78.77420360340282 - type: dot_f1 value: 73.06433247936238 - type: dot_precision value: 67.92140777983595 - type: dot_recall value: 79.04989220819218 - type: euclidean_accuracy value: 88.7297706368611 - type: euclidean_ap value: 85.61550568529317 - type: euclidean_f1 value: 77.84805525263539 - type: euclidean_precision value: 73.73639994491117 - type: euclidean_recall value: 82.44533415460425 - type: manhattan_accuracy value: 88.75111576823068 - type: manhattan_ap value: 85.58701671476263 - type: manhattan_f1 value: 77.70169909067856 - type: manhattan_precision value: 73.37666780704755 - type: manhattan_recall value: 82.5685247921158 - type: max_accuracy value: 89.01501921061823 - type: max_ap value: 85.97819287477351 - type: max_f1 value: 78.33882858518875 ---

## E5-base

**News (May 2023): please switch to [e5-base-v2](https://huggingface.co/intfloat/e5-base-v2), which has better performance and the same usage.**

[Text Embeddings by Weakly-Supervised Contrastive Pre-training](https://arxiv.org/pdf/2212.03533.pdf). Liang Wang, Nan Yang, Xiaolong Huang, Binxing Jiao, Linjun Yang, Daxin Jiang, Rangan Majumder, Furu Wei, arXiv 2022

This model has 12 layers and the embedding size is 768.

## Usage

Below is an example of encoding queries and passages from the MS-MARCO passage ranking dataset.

```python
import torch.nn.functional as F
from torch import Tensor
from transformers import AutoTokenizer, AutoModel


def average_pool(last_hidden_states: Tensor, attention_mask: Tensor) -> Tensor:
    last_hidden = last_hidden_states.masked_fill(~attention_mask[..., None].bool(), 0.0)
    return last_hidden.sum(dim=1) / attention_mask.sum(dim=1)[..., None]


# Each input text should start with "query: " or "passage: ".
# For tasks other than retrieval, you can simply use the "query: " prefix.
input_texts = ['query: how much protein should a female eat',
               'query: summit define',
               "passage: As a general guideline, the CDC's average requirement of protein for women ages 19 to 70 is 46 grams per day. But, as you can see from this chart, you'll need to increase that if you're expecting or training for a marathon. Check out the chart below to see how much protein you should be eating each day.",
               "passage: Definition of summit for English Language Learners. : 1 the highest point of a mountain : the top of a mountain. : 2 the highest level. : 3 a meeting or series of meetings between the leaders of two or more governments."]
tokenizer = AutoTokenizer.from_pretrained('intfloat/e5-base')
model = AutoModel.from_pretrained('intfloat/e5-base')

# Tokenize the input texts
batch_dict = tokenizer(input_texts, max_length=512, padding=True, truncation=True, return_tensors='pt')

outputs = model(**batch_dict)
embeddings = average_pool(outputs.last_hidden_state, batch_dict['attention_mask'])

# normalize embeddings
embeddings = F.normalize(embeddings, p=2, dim=1)
scores = (embeddings[:2] @ embeddings[2:].T) * 100
print(scores.tolist())
```

## Training Details

Please refer to our paper at [https://arxiv.org/pdf/2212.03533.pdf](https://arxiv.org/pdf/2212.03533.pdf).

## Benchmark Evaluation

Check out [unilm/e5](https://github.com/microsoft/unilm/tree/master/e5) to reproduce evaluation results on the [BEIR](https://arxiv.org/abs/2104.08663) and [MTEB](https://arxiv.org/abs/2210.07316) benchmarks.

## Support for Sentence Transformers

Below is an example of usage with sentence_transformers.

```python
from sentence_transformers import SentenceTransformer

model = SentenceTransformer('intfloat/e5-base')
input_texts = [
    'query: how much protein should a female eat',
    'query: summit define',
    "passage: As a general guideline, the CDC's average requirement of protein for women ages 19 to 70 is 46 grams per day. But, as you can see from this chart, you'll need to increase that if you're expecting or training for a marathon. Check out the chart below to see how much protein you should be eating each day.",
    "passage: Definition of summit for English Language Learners. : 1 the highest point of a mountain : the top of a mountain. : 2 the highest level. : 3 a meeting or series of meetings between the leaders of two or more governments."
]
embeddings = model.encode(input_texts, normalize_embeddings=True)
```

Package requirements: `pip install sentence_transformers~=2.2.2`

Contributors: [michaelfeil](https://huggingface.co/michaelfeil)

## FAQ

**1. Do I need to add the prefix "query: " and "passage: " to input texts?**

Yes, this is how the model is trained; otherwise you will see a performance degradation.

Here are some rules of thumb:
- Use "query: " and "passage: " correspondingly for asymmetric tasks such as passage retrieval in open QA and ad-hoc information retrieval.
- Use the "query: " prefix for symmetric tasks such as semantic similarity and paraphrase retrieval.
- Use the "query: " prefix if you want to use embeddings as features, such as for linear-probing classification or clustering.

**2. Why are my reproduced results slightly different from those reported in the model card?**

Different versions of `transformers` and `pytorch` could cause negligible but non-zero performance differences.

**3. Why do the cosine similarity scores distribute around 0.7 to 1.0?**

This is a known and expected behavior, as we use a low temperature of 0.01 for the InfoNCE contrastive loss. For text embedding tasks like text retrieval or semantic similarity, what matters is the relative order of the scores instead of the absolute values, so this should not be an issue.
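For reference, a common form of the InfoNCE objective with temperature $\tau$ (a generic sketch rather than the exact training formulation, which is described in the paper; $\tau = 0.01$ is the value quoted above):

$$
\mathcal{L} = -\log \frac{\exp\left(\cos(q, p^{+}) / \tau\right)}{\exp\left(\cos(q, p^{+}) / \tau\right) + \sum_{i} \exp\left(\cos(q, p_{i}^{-}) / \tau\right)}
$$

With such a small $\tau$, tiny differences in cosine similarity already produce large differences in the loss, so training shapes the relative ordering of scores rather than spreading their absolute values across the full range.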
Long texts will be truncated to at most 512 tokens.
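Below is an additional minimal sketch (not part of the original usage examples above) illustrating the symmetric-task rule from FAQ 1: both sentences receive the "query: " prefix before encoding. The example sentences are invented for illustration, and because the embeddings are L2-normalized, a plain dot product already gives the cosine similarity.

```python
from sentence_transformers import SentenceTransformer

model = SentenceTransformer('intfloat/e5-base')

# Symmetric task (semantic similarity / paraphrase): prefix BOTH sides with "query: ".
texts = [
    'query: A man is playing a guitar on stage.',
    'query: Someone is performing a song with a guitar.',
    'query: The stock market dropped sharply this morning.',
]

embeddings = model.encode(texts, normalize_embeddings=True)

# With L2-normalized embeddings, the dot product equals the cosine similarity.
similarity = embeddings @ embeddings.T
print(similarity)  # the first two sentences should form the most similar pair
```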
[ "BIOSSES", "SCIFACT" ]
Snowflake/snowflake-arctic-embed-xs
Snowflake
sentence-similarity
[ "sentence-transformers", "onnx", "safetensors", "bert", "feature-extraction", "sentence-similarity", "mteb", "arctic", "snowflake-arctic-embed", "transformers.js", "arxiv:2407.18887", "arxiv:2405.05374", "model-index", "autotrain_compatible", "text-embeddings-inference", "endpoints_compatible", "region:us" ]
"2024-04-12T13:54:17Z"
2024-12-13T20:54:05+00:00
270,601
35
--- pipeline_tag: sentence-similarity tags: - sentence-transformers - feature-extraction - sentence-similarity - mteb - arctic - snowflake-arctic-embed - transformers.js model-index: - name: snowflake-snowflake-arctic-embed-xs results: - task: type: Classification dataset: name: MTEB AmazonCounterfactualClassification (en) type: mteb/amazon_counterfactual config: en split: test revision: e8379541af4e31359cca9fbcf4b00f2671dba205 metrics: - type: accuracy value: 65.08955223880598 - type: ap value: 28.514291209445364 - type: f1 value: 59.2604580112738 - task: type: Classification dataset: name: MTEB AmazonPolarityClassification type: mteb/amazon_polarity config: default split: test revision: e2d317d38cd51312af73b3d32a06d1a08b442046 metrics: - type: accuracy value: 70.035375 - type: ap value: 64.29444264250405 - type: f1 value: 69.78382333907138 - task: type: Classification dataset: name: MTEB AmazonReviewsClassification (en) type: mteb/amazon_reviews_multi config: en split: test revision: 1399c76144fd37290681b995c656ef9b2e06e26d metrics: - type: accuracy value: 35.343999999999994 - type: f1 value: 34.69618251902858 - task: type: Retrieval dataset: name: MTEB ArguAna type: mteb/arguana config: default split: test revision: c22ab2a51041ffd869aaddef7af8d8215647e41a metrics: - type: map_at_1 value: 28.592000000000002 - type: map_at_10 value: 43.597 - type: map_at_100 value: 44.614 - type: map_at_1000 value: 44.624 - type: map_at_3 value: 38.928000000000004 - type: map_at_5 value: 41.453 - type: mrr_at_1 value: 29.232000000000003 - type: mrr_at_10 value: 43.829 - type: mrr_at_100 value: 44.852 - type: mrr_at_1000 value: 44.862 - type: mrr_at_3 value: 39.118 - type: mrr_at_5 value: 41.703 - type: ndcg_at_1 value: 28.592000000000002 - type: ndcg_at_10 value: 52.081 - type: ndcg_at_100 value: 56.37 - type: ndcg_at_1000 value: 56.598000000000006 - type: ndcg_at_3 value: 42.42 - type: ndcg_at_5 value: 46.965 - type: precision_at_1 value: 28.592000000000002 - type: precision_at_10 value: 7.922999999999999 - type: precision_at_100 value: 0.979 - type: precision_at_1000 value: 0.1 - type: precision_at_3 value: 17.52 - type: precision_at_5 value: 12.717 - type: recall_at_1 value: 28.592000000000002 - type: recall_at_10 value: 79.232 - type: recall_at_100 value: 97.866 - type: recall_at_1000 value: 99.57300000000001 - type: recall_at_3 value: 52.559999999999995 - type: recall_at_5 value: 63.585 - task: type: Clustering dataset: name: MTEB ArxivClusteringP2P type: mteb/arxiv-clustering-p2p config: default split: test revision: a122ad7f3f0291bf49cc6f4d32aa80929df69d5d metrics: - type: v_measure value: 43.50220588953974 - task: type: Clustering dataset: name: MTEB ArxivClusteringS2S type: mteb/arxiv-clustering-s2s config: default split: test revision: f910caf1a6075f7329cdf8c1a6135696f37dbd53 metrics: - type: v_measure value: 32.08725826118282 - task: type: Reranking dataset: name: MTEB AskUbuntuDupQuestions type: mteb/askubuntudupquestions-reranking config: default split: test revision: 2000358ca161889fa9c082cb41daa8dcfb161a54 metrics: - type: map value: 60.25381587694928 - type: mrr value: 73.79776194873148 - task: type: STS dataset: name: MTEB BIOSSES type: mteb/biosses-sts config: default split: test revision: d3fb88f8f02e40887cd149695127462bbcf29b4a metrics: - type: cos_sim_pearson value: 85.47489332445278 - type: cos_sim_spearman value: 84.05432487336698 - type: euclidean_pearson value: 84.5108222177219 - type: euclidean_spearman value: 84.05432487336698 - type: manhattan_pearson value: 84.20440618321464 
- type: manhattan_spearman value: 83.9290208134097 - task: type: Classification dataset: name: MTEB Banking77Classification type: mteb/banking77 config: default split: test revision: 0fd18e25b25c072e09e0d92ab615fda904d66300 metrics: - type: accuracy value: 76.37337662337663 - type: f1 value: 75.33296834885043 - task: type: Clustering dataset: name: MTEB BigPatentClustering type: jinaai/big-patent-clustering config: default split: test revision: 62d5330920bca426ce9d3c76ea914f15fc83e891 metrics: - type: v_measure value: 21.31174373264835 - task: type: Clustering dataset: name: MTEB BiorxivClusteringP2P type: mteb/biorxiv-clustering-p2p config: default split: test revision: 65b79d1d13f80053f67aca9498d9402c2d9f1f40 metrics: - type: v_measure value: 34.481973521597844 - task: type: Clustering dataset: name: MTEB BiorxivClusteringS2S type: mteb/biorxiv-clustering-s2s config: default split: test revision: 258694dd0231531bc1fd9de6ceb52a0853c6d908 metrics: - type: v_measure value: 26.14094256567341 - task: type: Retrieval dataset: name: MTEB CQADupstackAndroidRetrieval type: mteb/cqadupstack-android config: default split: test revision: f46a197baaae43b4f621051089b82a364682dfeb metrics: - type: map_at_1 value: 32.527 - type: map_at_10 value: 43.699 - type: map_at_100 value: 45.03 - type: map_at_1000 value: 45.157000000000004 - type: map_at_3 value: 39.943 - type: map_at_5 value: 42.324 - type: mrr_at_1 value: 39.771 - type: mrr_at_10 value: 49.277 - type: mrr_at_100 value: 49.956 - type: mrr_at_1000 value: 50.005 - type: mrr_at_3 value: 46.304 - type: mrr_at_5 value: 48.493 - type: ndcg_at_1 value: 39.771 - type: ndcg_at_10 value: 49.957 - type: ndcg_at_100 value: 54.678000000000004 - type: ndcg_at_1000 value: 56.751 - type: ndcg_at_3 value: 44.608 - type: ndcg_at_5 value: 47.687000000000005 - type: precision_at_1 value: 39.771 - type: precision_at_10 value: 9.557 - type: precision_at_100 value: 1.5010000000000001 - type: precision_at_1000 value: 0.194 - type: precision_at_3 value: 21.173000000000002 - type: precision_at_5 value: 15.794 - type: recall_at_1 value: 32.527 - type: recall_at_10 value: 61.791 - type: recall_at_100 value: 81.49300000000001 - type: recall_at_1000 value: 95.014 - type: recall_at_3 value: 46.605000000000004 - type: recall_at_5 value: 54.83 - task: type: Retrieval dataset: name: MTEB CQADupstackEnglishRetrieval type: mteb/cqadupstack-english config: default split: test revision: ad9991cb51e31e31e430383c75ffb2885547b5f0 metrics: - type: map_at_1 value: 29.424 - type: map_at_10 value: 38.667 - type: map_at_100 value: 39.771 - type: map_at_1000 value: 39.899 - type: map_at_3 value: 35.91 - type: map_at_5 value: 37.45 - type: mrr_at_1 value: 36.687999999999995 - type: mrr_at_10 value: 44.673 - type: mrr_at_100 value: 45.289 - type: mrr_at_1000 value: 45.338 - type: mrr_at_3 value: 42.601 - type: mrr_at_5 value: 43.875 - type: ndcg_at_1 value: 36.687999999999995 - type: ndcg_at_10 value: 44.013000000000005 - type: ndcg_at_100 value: 48.13 - type: ndcg_at_1000 value: 50.294000000000004 - type: ndcg_at_3 value: 40.056999999999995 - type: ndcg_at_5 value: 41.902 - type: precision_at_1 value: 36.687999999999995 - type: precision_at_10 value: 8.158999999999999 - type: precision_at_100 value: 1.321 - type: precision_at_1000 value: 0.179 - type: precision_at_3 value: 19.045 - type: precision_at_5 value: 13.427 - type: recall_at_1 value: 29.424 - type: recall_at_10 value: 53.08500000000001 - type: recall_at_100 value: 70.679 - type: recall_at_1000 value: 84.66 - type: recall_at_3 value: 
41.399 - type: recall_at_5 value: 46.632 - task: type: Retrieval dataset: name: MTEB CQADupstackGamingRetrieval type: mteb/cqadupstack-gaming config: default split: test revision: 4885aa143210c98657558c04aaf3dc47cfb54340 metrics: - type: map_at_1 value: 39.747 - type: map_at_10 value: 51.452 - type: map_at_100 value: 52.384 - type: map_at_1000 value: 52.437 - type: map_at_3 value: 48.213 - type: map_at_5 value: 50.195 - type: mrr_at_1 value: 45.391999999999996 - type: mrr_at_10 value: 54.928 - type: mrr_at_100 value: 55.532000000000004 - type: mrr_at_1000 value: 55.565 - type: mrr_at_3 value: 52.456 - type: mrr_at_5 value: 54.054 - type: ndcg_at_1 value: 45.391999999999996 - type: ndcg_at_10 value: 57.055 - type: ndcg_at_100 value: 60.751999999999995 - type: ndcg_at_1000 value: 61.864 - type: ndcg_at_3 value: 51.662 - type: ndcg_at_5 value: 54.613 - type: precision_at_1 value: 45.391999999999996 - type: precision_at_10 value: 9.103 - type: precision_at_100 value: 1.1780000000000002 - type: precision_at_1000 value: 0.132 - type: precision_at_3 value: 22.717000000000002 - type: precision_at_5 value: 15.812000000000001 - type: recall_at_1 value: 39.747 - type: recall_at_10 value: 70.10499999999999 - type: recall_at_100 value: 86.23100000000001 - type: recall_at_1000 value: 94.025 - type: recall_at_3 value: 55.899 - type: recall_at_5 value: 63.05500000000001 - task: type: Retrieval dataset: name: MTEB CQADupstackGisRetrieval type: mteb/cqadupstack-gis config: default split: test revision: 5003b3064772da1887988e05400cf3806fe491f2 metrics: - type: map_at_1 value: 27.168999999999997 - type: map_at_10 value: 34.975 - type: map_at_100 value: 35.94 - type: map_at_1000 value: 36.021 - type: map_at_3 value: 32.35 - type: map_at_5 value: 33.831 - type: mrr_at_1 value: 28.701 - type: mrr_at_10 value: 36.698 - type: mrr_at_100 value: 37.546 - type: mrr_at_1000 value: 37.613 - type: mrr_at_3 value: 34.256 - type: mrr_at_5 value: 35.685 - type: ndcg_at_1 value: 28.701 - type: ndcg_at_10 value: 39.639 - type: ndcg_at_100 value: 44.389 - type: ndcg_at_1000 value: 46.46 - type: ndcg_at_3 value: 34.52 - type: ndcg_at_5 value: 37.076 - type: precision_at_1 value: 28.701 - type: precision_at_10 value: 5.955 - type: precision_at_100 value: 0.8880000000000001 - type: precision_at_1000 value: 0.109 - type: precision_at_3 value: 14.274999999999999 - type: precision_at_5 value: 10.011000000000001 - type: recall_at_1 value: 27.168999999999997 - type: recall_at_10 value: 52.347 - type: recall_at_100 value: 74.1 - type: recall_at_1000 value: 89.739 - type: recall_at_3 value: 38.567 - type: recall_at_5 value: 44.767 - task: type: Retrieval dataset: name: MTEB CQADupstackMathematicaRetrieval type: mteb/cqadupstack-mathematica config: default split: test revision: 90fceea13679c63fe563ded68f3b6f06e50061de metrics: - type: map_at_1 value: 15.872 - type: map_at_10 value: 23.153000000000002 - type: map_at_100 value: 24.311 - type: map_at_1000 value: 24.432000000000002 - type: map_at_3 value: 20.707 - type: map_at_5 value: 21.921 - type: mrr_at_1 value: 19.776 - type: mrr_at_10 value: 27.755999999999997 - type: mrr_at_100 value: 28.709 - type: mrr_at_1000 value: 28.778 - type: mrr_at_3 value: 25.186999999999998 - type: mrr_at_5 value: 26.43 - type: ndcg_at_1 value: 19.776 - type: ndcg_at_10 value: 28.288999999999998 - type: ndcg_at_100 value: 34.011 - type: ndcg_at_1000 value: 36.916 - type: ndcg_at_3 value: 23.551 - type: ndcg_at_5 value: 25.429000000000002 - type: precision_at_1 value: 19.776 - type: precision_at_10 value: 
5.311 - type: precision_at_100 value: 0.9440000000000001 - type: precision_at_1000 value: 0.132 - type: precision_at_3 value: 11.360000000000001 - type: precision_at_5 value: 8.209 - type: recall_at_1 value: 15.872 - type: recall_at_10 value: 39.726 - type: recall_at_100 value: 65.035 - type: recall_at_1000 value: 85.846 - type: recall_at_3 value: 26.432 - type: recall_at_5 value: 31.22 - task: type: Retrieval dataset: name: MTEB CQADupstackPhysicsRetrieval type: mteb/cqadupstack-physics config: default split: test revision: 79531abbd1fb92d06c6d6315a0cbbbf5bb247ea4 metrics: - type: map_at_1 value: 28.126 - type: map_at_10 value: 37.537 - type: map_at_100 value: 38.807 - type: map_at_1000 value: 38.923 - type: map_at_3 value: 34.65 - type: map_at_5 value: 36.248000000000005 - type: mrr_at_1 value: 34.649 - type: mrr_at_10 value: 42.893 - type: mrr_at_100 value: 43.721 - type: mrr_at_1000 value: 43.775999999999996 - type: mrr_at_3 value: 40.488 - type: mrr_at_5 value: 41.729 - type: ndcg_at_1 value: 34.649 - type: ndcg_at_10 value: 43.072 - type: ndcg_at_100 value: 48.464 - type: ndcg_at_1000 value: 50.724000000000004 - type: ndcg_at_3 value: 38.506 - type: ndcg_at_5 value: 40.522000000000006 - type: precision_at_1 value: 34.649 - type: precision_at_10 value: 7.68 - type: precision_at_100 value: 1.214 - type: precision_at_1000 value: 0.16 - type: precision_at_3 value: 18.029999999999998 - type: precision_at_5 value: 12.666 - type: recall_at_1 value: 28.126 - type: recall_at_10 value: 54.396 - type: recall_at_100 value: 76.988 - type: recall_at_1000 value: 91.85799999999999 - type: recall_at_3 value: 41.169 - type: recall_at_5 value: 46.658 - task: type: Retrieval dataset: name: MTEB CQADupstackProgrammersRetrieval type: mteb/cqadupstack-programmers config: default split: test revision: 6184bc1440d2dbc7612be22b50686b8826d22b32 metrics: - type: map_at_1 value: 26.68 - type: map_at_10 value: 35.702 - type: map_at_100 value: 36.864999999999995 - type: map_at_1000 value: 36.977 - type: map_at_3 value: 32.828 - type: map_at_5 value: 34.481 - type: mrr_at_1 value: 32.991 - type: mrr_at_10 value: 40.993 - type: mrr_at_100 value: 41.827 - type: mrr_at_1000 value: 41.887 - type: mrr_at_3 value: 38.623000000000005 - type: mrr_at_5 value: 40.021 - type: ndcg_at_1 value: 32.991 - type: ndcg_at_10 value: 41.036 - type: ndcg_at_100 value: 46.294000000000004 - type: ndcg_at_1000 value: 48.644 - type: ndcg_at_3 value: 36.419000000000004 - type: ndcg_at_5 value: 38.618 - type: precision_at_1 value: 32.991 - type: precision_at_10 value: 7.385999999999999 - type: precision_at_100 value: 1.176 - type: precision_at_1000 value: 0.151 - type: precision_at_3 value: 17.122999999999998 - type: precision_at_5 value: 12.215 - type: recall_at_1 value: 26.68 - type: recall_at_10 value: 51.644 - type: recall_at_100 value: 74.55000000000001 - type: recall_at_1000 value: 90.825 - type: recall_at_3 value: 38.579 - type: recall_at_5 value: 44.512 - task: type: Retrieval dataset: name: MTEB CQADupstackRetrieval type: mteb/cqadupstack config: default split: test revision: 4ffe81d471b1924886b33c7567bfb200e9eec5c4 metrics: - type: map_at_1 value: 26.30825 - type: map_at_10 value: 34.97866666666666 - type: map_at_100 value: 36.109249999999996 - type: map_at_1000 value: 36.22508333333333 - type: map_at_3 value: 32.239083333333326 - type: map_at_5 value: 33.75933333333334 - type: mrr_at_1 value: 31.05308333333333 - type: mrr_at_10 value: 39.099833333333336 - type: mrr_at_100 value: 39.92008333333334 - type: mrr_at_1000 value: 
39.980000000000004 - type: mrr_at_3 value: 36.75958333333333 - type: mrr_at_5 value: 38.086416666666665 - type: ndcg_at_1 value: 31.05308333333333 - type: ndcg_at_10 value: 40.11558333333334 - type: ndcg_at_100 value: 45.05966666666667 - type: ndcg_at_1000 value: 47.36516666666667 - type: ndcg_at_3 value: 35.490833333333335 - type: ndcg_at_5 value: 37.64541666666666 - type: precision_at_1 value: 31.05308333333333 - type: precision_at_10 value: 6.968416666666666 - type: precision_at_100 value: 1.1156666666666666 - type: precision_at_1000 value: 0.14950000000000002 - type: precision_at_3 value: 16.123 - type: precision_at_5 value: 11.451166666666666 - type: recall_at_1 value: 26.30825 - type: recall_at_10 value: 51.19283333333333 - type: recall_at_100 value: 73.0285 - type: recall_at_1000 value: 89.11133333333333 - type: recall_at_3 value: 38.26208333333333 - type: recall_at_5 value: 43.855916666666666 - task: type: Retrieval dataset: name: MTEB CQADupstackStatsRetrieval type: mteb/cqadupstack-stats config: default split: test revision: 65ac3a16b8e91f9cee4c9828cc7c335575432a2a metrics: - type: map_at_1 value: 23.363999999999997 - type: map_at_10 value: 30.606 - type: map_at_100 value: 31.491999999999997 - type: map_at_1000 value: 31.578 - type: map_at_3 value: 28.610000000000003 - type: map_at_5 value: 29.602 - type: mrr_at_1 value: 26.38 - type: mrr_at_10 value: 33.472 - type: mrr_at_100 value: 34.299 - type: mrr_at_1000 value: 34.361999999999995 - type: mrr_at_3 value: 31.696999999999996 - type: mrr_at_5 value: 32.503 - type: ndcg_at_1 value: 26.38 - type: ndcg_at_10 value: 34.772999999999996 - type: ndcg_at_100 value: 39.334 - type: ndcg_at_1000 value: 41.676 - type: ndcg_at_3 value: 31.097 - type: ndcg_at_5 value: 32.561 - type: precision_at_1 value: 26.38 - type: precision_at_10 value: 5.475 - type: precision_at_100 value: 0.84 - type: precision_at_1000 value: 0.11100000000000002 - type: precision_at_3 value: 13.395000000000001 - type: precision_at_5 value: 9.11 - type: recall_at_1 value: 23.363999999999997 - type: recall_at_10 value: 44.656 - type: recall_at_100 value: 65.77199999999999 - type: recall_at_1000 value: 83.462 - type: recall_at_3 value: 34.213 - type: recall_at_5 value: 38.091 - task: type: Retrieval dataset: name: MTEB CQADupstackTexRetrieval type: mteb/cqadupstack-tex config: default split: test revision: 46989137a86843e03a6195de44b09deda022eec7 metrics: - type: map_at_1 value: 17.971999999999998 - type: map_at_10 value: 24.913 - type: map_at_100 value: 25.916 - type: map_at_1000 value: 26.049 - type: map_at_3 value: 22.569 - type: map_at_5 value: 23.858999999999998 - type: mrr_at_1 value: 21.748 - type: mrr_at_10 value: 28.711 - type: mrr_at_100 value: 29.535 - type: mrr_at_1000 value: 29.621 - type: mrr_at_3 value: 26.484999999999996 - type: mrr_at_5 value: 27.701999999999998 - type: ndcg_at_1 value: 21.748 - type: ndcg_at_10 value: 29.412 - type: ndcg_at_100 value: 34.204 - type: ndcg_at_1000 value: 37.358000000000004 - type: ndcg_at_3 value: 25.202 - type: ndcg_at_5 value: 27.128000000000004 - type: precision_at_1 value: 21.748 - type: precision_at_10 value: 5.279 - type: precision_at_100 value: 0.902 - type: precision_at_1000 value: 0.135 - type: precision_at_3 value: 11.551 - type: precision_at_5 value: 8.437999999999999 - type: recall_at_1 value: 17.971999999999998 - type: recall_at_10 value: 39.186 - type: recall_at_100 value: 60.785999999999994 - type: recall_at_1000 value: 83.372 - type: recall_at_3 value: 27.584999999999997 - type: recall_at_5 value: 32.448 - 
task: type: Retrieval dataset: name: MTEB CQADupstackUnixRetrieval type: mteb/cqadupstack-unix config: default split: test revision: 6c6430d3a6d36f8d2a829195bc5dc94d7e063e53 metrics: - type: map_at_1 value: 26.684 - type: map_at_10 value: 35.188 - type: map_at_100 value: 36.379 - type: map_at_1000 value: 36.481 - type: map_at_3 value: 32.401 - type: map_at_5 value: 34.132 - type: mrr_at_1 value: 31.063000000000002 - type: mrr_at_10 value: 39.104 - type: mrr_at_100 value: 40.062999999999995 - type: mrr_at_1000 value: 40.119 - type: mrr_at_3 value: 36.692 - type: mrr_at_5 value: 38.161 - type: ndcg_at_1 value: 31.063000000000002 - type: ndcg_at_10 value: 40.096 - type: ndcg_at_100 value: 45.616 - type: ndcg_at_1000 value: 47.869 - type: ndcg_at_3 value: 35.256 - type: ndcg_at_5 value: 37.826 - type: precision_at_1 value: 31.063000000000002 - type: precision_at_10 value: 6.622999999999999 - type: precision_at_100 value: 1.046 - type: precision_at_1000 value: 0.135 - type: precision_at_3 value: 15.641 - type: precision_at_5 value: 11.231 - type: recall_at_1 value: 26.684 - type: recall_at_10 value: 51.092999999999996 - type: recall_at_100 value: 75.099 - type: recall_at_1000 value: 90.644 - type: recall_at_3 value: 38.063 - type: recall_at_5 value: 44.518 - task: type: Retrieval dataset: name: MTEB CQADupstackWebmastersRetrieval type: mteb/cqadupstack-webmasters config: default split: test revision: 160c094312a0e1facb97e55eeddb698c0abe3571 metrics: - type: map_at_1 value: 26.249 - type: map_at_10 value: 34.694 - type: map_at_100 value: 36.208 - type: map_at_1000 value: 36.443 - type: map_at_3 value: 31.868000000000002 - type: map_at_5 value: 33.018 - type: mrr_at_1 value: 31.818 - type: mrr_at_10 value: 39.416000000000004 - type: mrr_at_100 value: 40.327 - type: mrr_at_1000 value: 40.388000000000005 - type: mrr_at_3 value: 37.120999999999995 - type: mrr_at_5 value: 38.07 - type: ndcg_at_1 value: 31.818 - type: ndcg_at_10 value: 40.405 - type: ndcg_at_100 value: 45.816 - type: ndcg_at_1000 value: 48.403 - type: ndcg_at_3 value: 35.823 - type: ndcg_at_5 value: 37.191 - type: precision_at_1 value: 31.818 - type: precision_at_10 value: 7.806 - type: precision_at_100 value: 1.518 - type: precision_at_1000 value: 0.241 - type: precision_at_3 value: 16.535 - type: precision_at_5 value: 11.738999999999999 - type: recall_at_1 value: 26.249 - type: recall_at_10 value: 50.928 - type: recall_at_100 value: 75.271 - type: recall_at_1000 value: 91.535 - type: recall_at_3 value: 37.322 - type: recall_at_5 value: 41.318 - task: type: Retrieval dataset: name: MTEB CQADupstackWordpressRetrieval type: mteb/cqadupstack-wordpress config: default split: test revision: 4ffe81d471b1924886b33c7567bfb200e9eec5c4 metrics: - type: map_at_1 value: 21.884999999999998 - type: map_at_10 value: 29.158 - type: map_at_100 value: 30.208000000000002 - type: map_at_1000 value: 30.304 - type: map_at_3 value: 26.82 - type: map_at_5 value: 28.051 - type: mrr_at_1 value: 23.66 - type: mrr_at_10 value: 31.277 - type: mrr_at_100 value: 32.237 - type: mrr_at_1000 value: 32.308 - type: mrr_at_3 value: 29.205 - type: mrr_at_5 value: 30.314000000000004 - type: ndcg_at_1 value: 23.66 - type: ndcg_at_10 value: 33.64 - type: ndcg_at_100 value: 39.028 - type: ndcg_at_1000 value: 41.423 - type: ndcg_at_3 value: 29.189 - type: ndcg_at_5 value: 31.191999999999997 - type: precision_at_1 value: 23.66 - type: precision_at_10 value: 5.287 - type: precision_at_100 value: 0.86 - type: precision_at_1000 value: 0.11499999999999999 - type: precision_at_3 
value: 12.631 - type: precision_at_5 value: 8.762 - type: recall_at_1 value: 21.884999999999998 - type: recall_at_10 value: 45.357 - type: recall_at_100 value: 70.338 - type: recall_at_1000 value: 88.356 - type: recall_at_3 value: 33.312000000000005 - type: recall_at_5 value: 38.222 - task: type: Retrieval dataset: name: MTEB ClimateFEVER type: mteb/climate-fever config: default split: test revision: 47f2ac6acb640fc46020b02a5b59fdda04d39380 metrics: - type: map_at_1 value: 13.058 - type: map_at_10 value: 21.549 - type: map_at_100 value: 23.287 - type: map_at_1000 value: 23.444000000000003 - type: map_at_3 value: 18.18 - type: map_at_5 value: 19.886 - type: mrr_at_1 value: 28.73 - type: mrr_at_10 value: 40.014 - type: mrr_at_100 value: 40.827000000000005 - type: mrr_at_1000 value: 40.866 - type: mrr_at_3 value: 36.602000000000004 - type: mrr_at_5 value: 38.702 - type: ndcg_at_1 value: 28.73 - type: ndcg_at_10 value: 29.881 - type: ndcg_at_100 value: 36.662 - type: ndcg_at_1000 value: 39.641999999999996 - type: ndcg_at_3 value: 24.661 - type: ndcg_at_5 value: 26.548 - type: precision_at_1 value: 28.73 - type: precision_at_10 value: 9.094 - type: precision_at_100 value: 1.6480000000000001 - type: precision_at_1000 value: 0.22100000000000003 - type: precision_at_3 value: 17.98 - type: precision_at_5 value: 13.811000000000002 - type: recall_at_1 value: 13.058 - type: recall_at_10 value: 35.458 - type: recall_at_100 value: 58.719 - type: recall_at_1000 value: 75.495 - type: recall_at_3 value: 22.607 - type: recall_at_5 value: 28.067999999999998 - task: type: Retrieval dataset: name: MTEB DBPedia type: mteb/dbpedia config: default split: test revision: c0f706b76e590d620bd6618b3ca8efdd34e2d659 metrics: - type: map_at_1 value: 8.811 - type: map_at_10 value: 19.134999999999998 - type: map_at_100 value: 26.905 - type: map_at_1000 value: 28.503 - type: map_at_3 value: 13.863 - type: map_at_5 value: 16.062 - type: mrr_at_1 value: 67 - type: mrr_at_10 value: 74.607 - type: mrr_at_100 value: 74.941 - type: mrr_at_1000 value: 74.954 - type: mrr_at_3 value: 73.042 - type: mrr_at_5 value: 73.992 - type: ndcg_at_1 value: 52.87500000000001 - type: ndcg_at_10 value: 40.199 - type: ndcg_at_100 value: 44.901 - type: ndcg_at_1000 value: 52.239999999999995 - type: ndcg_at_3 value: 44.983000000000004 - type: ndcg_at_5 value: 42.137 - type: precision_at_1 value: 67 - type: precision_at_10 value: 31.8 - type: precision_at_100 value: 10.315000000000001 - type: precision_at_1000 value: 2.0420000000000003 - type: precision_at_3 value: 48.667 - type: precision_at_5 value: 40.9 - type: recall_at_1 value: 8.811 - type: recall_at_10 value: 24.503 - type: recall_at_100 value: 51.288999999999994 - type: recall_at_1000 value: 74.827 - type: recall_at_3 value: 15.254999999999999 - type: recall_at_5 value: 18.698999999999998 - task: type: Classification dataset: name: MTEB EmotionClassification type: mteb/emotion config: default split: test revision: 4f58c6b202a23cf9a4da393831edf4f9183cad37 metrics: - type: accuracy value: 41.839999999999996 - type: f1 value: 37.78718146306379 - task: type: Retrieval dataset: name: MTEB FEVER type: mteb/fever config: default split: test revision: bea83ef9e8fb933d90a2f1d5515737465d613e12 metrics: - type: map_at_1 value: 68.47999999999999 - type: map_at_10 value: 78.782 - type: map_at_100 value: 79.021 - type: map_at_1000 value: 79.035 - type: map_at_3 value: 77.389 - type: map_at_5 value: 78.347 - type: mrr_at_1 value: 73.837 - type: mrr_at_10 value: 83.41499999999999 - type: mrr_at_100 value: 
83.53399999999999 - type: mrr_at_1000 value: 83.535 - type: mrr_at_3 value: 82.32300000000001 - type: mrr_at_5 value: 83.13000000000001 - type: ndcg_at_1 value: 73.837 - type: ndcg_at_10 value: 83.404 - type: ndcg_at_100 value: 84.287 - type: ndcg_at_1000 value: 84.52199999999999 - type: ndcg_at_3 value: 81.072 - type: ndcg_at_5 value: 82.537 - type: precision_at_1 value: 73.837 - type: precision_at_10 value: 10.254000000000001 - type: precision_at_100 value: 1.088 - type: precision_at_1000 value: 0.11299999999999999 - type: precision_at_3 value: 31.538 - type: precision_at_5 value: 19.811 - type: recall_at_1 value: 68.47999999999999 - type: recall_at_10 value: 92.98100000000001 - type: recall_at_100 value: 96.50800000000001 - type: recall_at_1000 value: 97.925 - type: recall_at_3 value: 86.764 - type: recall_at_5 value: 90.39 - task: type: Retrieval dataset: name: MTEB FiQA2018 type: mteb/fiqa config: default split: test revision: 27a168819829fe9bcd655c2df245fb19452e8e06 metrics: - type: map_at_1 value: 16.786 - type: map_at_10 value: 26.97 - type: map_at_100 value: 28.488000000000003 - type: map_at_1000 value: 28.665000000000003 - type: map_at_3 value: 23.3 - type: map_at_5 value: 25.249 - type: mrr_at_1 value: 33.025 - type: mrr_at_10 value: 41.86 - type: mrr_at_100 value: 42.673 - type: mrr_at_1000 value: 42.714 - type: mrr_at_3 value: 39.403 - type: mrr_at_5 value: 40.723 - type: ndcg_at_1 value: 33.025 - type: ndcg_at_10 value: 34.522999999999996 - type: ndcg_at_100 value: 40.831 - type: ndcg_at_1000 value: 44.01 - type: ndcg_at_3 value: 30.698999999999998 - type: ndcg_at_5 value: 31.832 - type: precision_at_1 value: 33.025 - type: precision_at_10 value: 9.583 - type: precision_at_100 value: 1.619 - type: precision_at_1000 value: 0.22100000000000003 - type: precision_at_3 value: 20.216 - type: precision_at_5 value: 15.031 - type: recall_at_1 value: 16.786 - type: recall_at_10 value: 41.969 - type: recall_at_100 value: 66.353 - type: recall_at_1000 value: 85.299 - type: recall_at_3 value: 28.111000000000004 - type: recall_at_5 value: 33.645 - task: type: Retrieval dataset: name: MTEB HotpotQA type: mteb/hotpotqa config: default split: test revision: ab518f4d6fcca38d87c25209f94beba119d02014 metrics: - type: map_at_1 value: 37.346000000000004 - type: map_at_10 value: 56.184999999999995 - type: map_at_100 value: 57.062000000000005 - type: map_at_1000 value: 57.126999999999995 - type: map_at_3 value: 52.815 - type: map_at_5 value: 54.893 - type: mrr_at_1 value: 74.693 - type: mrr_at_10 value: 81.128 - type: mrr_at_100 value: 81.356 - type: mrr_at_1000 value: 81.363 - type: mrr_at_3 value: 80.05600000000001 - type: mrr_at_5 value: 80.74 - type: ndcg_at_1 value: 74.693 - type: ndcg_at_10 value: 65.249 - type: ndcg_at_100 value: 68.357 - type: ndcg_at_1000 value: 69.64200000000001 - type: ndcg_at_3 value: 60.377 - type: ndcg_at_5 value: 63.044 - type: precision_at_1 value: 74.693 - type: precision_at_10 value: 13.630999999999998 - type: precision_at_100 value: 1.606 - type: precision_at_1000 value: 0.178 - type: precision_at_3 value: 38.222 - type: precision_at_5 value: 25.040000000000003 - type: recall_at_1 value: 37.346000000000004 - type: recall_at_10 value: 68.157 - type: recall_at_100 value: 80.297 - type: recall_at_1000 value: 88.832 - type: recall_at_3 value: 57.333 - type: recall_at_5 value: 62.6 - task: type: Classification dataset: name: MTEB ImdbClassification type: mteb/imdb config: default split: test revision: 3d86128a09e091d6018b6d26cad27f2739fc2db7 metrics: - type: accuracy 
value: 62.80240000000001 - type: ap value: 58.22949464075975 - type: f1 value: 62.55694937343487 - task: type: Retrieval dataset: name: MTEB MSMARCO type: mteb/msmarco config: default split: dev revision: c5a29a104738b98a9e76336939199e264163d4a0 metrics: - type: map_at_1 value: 20.918 - type: map_at_10 value: 32.732 - type: map_at_100 value: 33.922000000000004 - type: map_at_1000 value: 33.976 - type: map_at_3 value: 29.051 - type: map_at_5 value: 31.101 - type: mrr_at_1 value: 21.418 - type: mrr_at_10 value: 33.284000000000006 - type: mrr_at_100 value: 34.426 - type: mrr_at_1000 value: 34.473 - type: mrr_at_3 value: 29.644 - type: mrr_at_5 value: 31.691000000000003 - type: ndcg_at_1 value: 21.418 - type: ndcg_at_10 value: 39.427 - type: ndcg_at_100 value: 45.190999999999995 - type: ndcg_at_1000 value: 46.544000000000004 - type: ndcg_at_3 value: 31.885 - type: ndcg_at_5 value: 35.555 - type: precision_at_1 value: 21.418 - type: precision_at_10 value: 6.254999999999999 - type: precision_at_100 value: 0.915 - type: precision_at_1000 value: 0.10300000000000001 - type: precision_at_3 value: 13.591000000000001 - type: precision_at_5 value: 10.011000000000001 - type: recall_at_1 value: 20.918 - type: recall_at_10 value: 60.074000000000005 - type: recall_at_100 value: 86.726 - type: recall_at_1000 value: 97.116 - type: recall_at_3 value: 39.506 - type: recall_at_5 value: 48.319 - task: type: Classification dataset: name: MTEB MTOPDomainClassification (en) type: mteb/mtop_domain config: en split: test revision: d80d48c1eb48d3562165c59d59d0034df9fff0bf metrics: - type: accuracy value: 90.79799361605106 - type: f1 value: 90.0757957511057 - task: type: Classification dataset: name: MTEB MTOPIntentClassification (en) type: mteb/mtop_intent config: en split: test revision: ae001d0e6b1228650b7bd1c2c65fb50ad11a8aba metrics: - type: accuracy value: 58.00501595987233 - type: f1 value: 39.85731569133947 - task: type: Classification dataset: name: MTEB MasakhaNEWSClassification (eng) type: masakhane/masakhanews config: eng split: test revision: 8ccc72e69e65f40c70e117d8b3c08306bb788b60 metrics: - type: accuracy value: 77.10970464135022 - type: f1 value: 76.12037616356896 - task: type: Clustering dataset: name: MTEB MasakhaNEWSClusteringP2P (eng) type: masakhane/masakhanews config: eng split: test revision: 8ccc72e69e65f40c70e117d8b3c08306bb788b60 metrics: - type: v_measure value: 69.81323966287493 - type: v_measure value: 33.112774215788455 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (en) type: mteb/amazon_massive_intent config: en split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 63.51042367182246 - type: f1 value: 60.99310361578824 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (en) type: mteb/amazon_massive_scenario config: en split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 71.0053799596503 - type: f1 value: 69.7794673003686 - task: type: Clustering dataset: name: MTEB MedrxivClusteringP2P type: mteb/medrxiv-clustering-p2p config: default split: test revision: e7a26af6f3ae46b30dde8737f02c07b1505bcc73 metrics: - type: v_measure value: 30.56899174856954 - task: type: Clustering dataset: name: MTEB MedrxivClusteringS2S type: mteb/medrxiv-clustering-s2s config: default split: test revision: 35191c8c0dca72d8ff3efcd72aa802307d469663 metrics: - type: v_measure value: 26.21848014733929 - task: type: Reranking dataset: name: MTEB MindSmallReranking type: 
mteb/mind_small config: default split: test revision: 3bdac13927fdc888b903db93b2ffdbd90b295a69 metrics: - type: map value: 30.256308756916646 - type: mrr value: 31.123872086825656 - task: type: Retrieval dataset: name: MTEB NFCorpus type: mteb/nfcorpus config: default split: test revision: ec0fa4fe99da2ff19ca1214b7966684033a58814 metrics: - type: map_at_1 value: 5.07 - type: map_at_10 value: 11.286999999999999 - type: map_at_100 value: 13.630999999999998 - type: map_at_1000 value: 14.844 - type: map_at_3 value: 8.395 - type: map_at_5 value: 9.721 - type: mrr_at_1 value: 41.486000000000004 - type: mrr_at_10 value: 51.041000000000004 - type: mrr_at_100 value: 51.661 - type: mrr_at_1000 value: 51.7 - type: mrr_at_3 value: 49.226 - type: mrr_at_5 value: 50.526 - type: ndcg_at_1 value: 39.783 - type: ndcg_at_10 value: 30.885 - type: ndcg_at_100 value: 27.459 - type: ndcg_at_1000 value: 35.988 - type: ndcg_at_3 value: 36.705 - type: ndcg_at_5 value: 34.156 - type: precision_at_1 value: 41.486000000000004 - type: precision_at_10 value: 22.415 - type: precision_at_100 value: 6.819999999999999 - type: precision_at_1000 value: 1.8980000000000001 - type: precision_at_3 value: 34.572 - type: precision_at_5 value: 29.287999999999997 - type: recall_at_1 value: 5.07 - type: recall_at_10 value: 14.576 - type: recall_at_100 value: 27.112000000000002 - type: recall_at_1000 value: 57.995 - type: recall_at_3 value: 9.242 - type: recall_at_5 value: 11.668000000000001 - task: type: Retrieval dataset: name: MTEB NQ type: mteb/nq config: default split: test revision: b774495ed302d8c44a3a7ea25c90dbce03968f31 metrics: - type: map_at_1 value: 32.263999999999996 - type: map_at_10 value: 47.219 - type: map_at_100 value: 48.209999999999994 - type: map_at_1000 value: 48.24 - type: map_at_3 value: 42.905 - type: map_at_5 value: 45.501000000000005 - type: mrr_at_1 value: 36.153 - type: mrr_at_10 value: 49.636 - type: mrr_at_100 value: 50.357 - type: mrr_at_1000 value: 50.378 - type: mrr_at_3 value: 46.094 - type: mrr_at_5 value: 48.233 - type: ndcg_at_1 value: 36.124 - type: ndcg_at_10 value: 54.764 - type: ndcg_at_100 value: 58.867999999999995 - type: ndcg_at_1000 value: 59.548 - type: ndcg_at_3 value: 46.717999999999996 - type: ndcg_at_5 value: 50.981 - type: precision_at_1 value: 36.124 - type: precision_at_10 value: 8.931000000000001 - type: precision_at_100 value: 1.126 - type: precision_at_1000 value: 0.11900000000000001 - type: precision_at_3 value: 21.051000000000002 - type: precision_at_5 value: 15.104000000000001 - type: recall_at_1 value: 32.263999999999996 - type: recall_at_10 value: 75.39099999999999 - type: recall_at_100 value: 93.038 - type: recall_at_1000 value: 98.006 - type: recall_at_3 value: 54.562999999999995 - type: recall_at_5 value: 64.352 - task: type: Classification dataset: name: MTEB NewsClassification type: ag_news config: default split: test revision: eb185aade064a813bc0b7f42de02595523103ca4 metrics: - type: accuracy value: 77.75 - type: f1 value: 77.504243291547 - task: type: PairClassification dataset: name: MTEB OpusparcusPC (en) type: GEM/opusparcus config: en split: test revision: 9e9b1f8ef51616073f47f306f7f47dd91663f86a metrics: - type: cos_sim_accuracy value: 99.89816700610999 - type: cos_sim_ap value: 100 - type: cos_sim_f1 value: 99.9490575649516 - type: cos_sim_precision value: 100 - type: cos_sim_recall value: 99.89816700610999 - type: dot_accuracy value: 99.89816700610999 - type: dot_ap value: 100 - type: dot_f1 value: 99.9490575649516 - type: dot_precision value: 100 - type: 
dot_recall value: 99.89816700610999 - type: euclidean_accuracy value: 99.89816700610999 - type: euclidean_ap value: 100 - type: euclidean_f1 value: 99.9490575649516 - type: euclidean_precision value: 100 - type: euclidean_recall value: 99.89816700610999 - type: manhattan_accuracy value: 99.89816700610999 - type: manhattan_ap value: 100 - type: manhattan_f1 value: 99.9490575649516 - type: manhattan_precision value: 100 - type: manhattan_recall value: 99.89816700610999 - type: max_accuracy value: 99.89816700610999 - type: max_ap value: 100 - type: max_f1 value: 99.9490575649516 - task: type: PairClassification dataset: name: MTEB PawsX (en) type: paws-x config: en split: test revision: 8a04d940a42cd40658986fdd8e3da561533a3646 metrics: - type: cos_sim_accuracy value: 61.75000000000001 - type: cos_sim_ap value: 57.9482264289061 - type: cos_sim_f1 value: 62.444061962134256 - type: cos_sim_precision value: 45.3953953953954 - type: cos_sim_recall value: 100 - type: dot_accuracy value: 61.75000000000001 - type: dot_ap value: 57.94808038610475 - type: dot_f1 value: 62.444061962134256 - type: dot_precision value: 45.3953953953954 - type: dot_recall value: 100 - type: euclidean_accuracy value: 61.75000000000001 - type: euclidean_ap value: 57.94808038610475 - type: euclidean_f1 value: 62.444061962134256 - type: euclidean_precision value: 45.3953953953954 - type: euclidean_recall value: 100 - type: manhattan_accuracy value: 61.7 - type: manhattan_ap value: 57.996119308184966 - type: manhattan_f1 value: 62.46078773091669 - type: manhattan_precision value: 45.66768603465851 - type: manhattan_recall value: 98.78721058434398 - type: max_accuracy value: 61.75000000000001 - type: max_ap value: 57.996119308184966 - type: max_f1 value: 62.46078773091669 - task: type: Retrieval dataset: name: MTEB QuoraRetrieval type: mteb/quora config: default split: test revision: e4e08e0b7dbe3c8700f0daef558ff32256715259 metrics: - type: map_at_1 value: 69.001 - type: map_at_10 value: 82.573 - type: map_at_100 value: 83.226 - type: map_at_1000 value: 83.246 - type: map_at_3 value: 79.625 - type: map_at_5 value: 81.491 - type: mrr_at_1 value: 79.44 - type: mrr_at_10 value: 85.928 - type: mrr_at_100 value: 86.05199999999999 - type: mrr_at_1000 value: 86.054 - type: mrr_at_3 value: 84.847 - type: mrr_at_5 value: 85.596 - type: ndcg_at_1 value: 79.41 - type: ndcg_at_10 value: 86.568 - type: ndcg_at_100 value: 87.965 - type: ndcg_at_1000 value: 88.134 - type: ndcg_at_3 value: 83.55900000000001 - type: ndcg_at_5 value: 85.244 - type: precision_at_1 value: 79.41 - type: precision_at_10 value: 13.108 - type: precision_at_100 value: 1.509 - type: precision_at_1000 value: 0.156 - type: precision_at_3 value: 36.443 - type: precision_at_5 value: 24.03 - type: recall_at_1 value: 69.001 - type: recall_at_10 value: 94.132 - type: recall_at_100 value: 99.043 - type: recall_at_1000 value: 99.878 - type: recall_at_3 value: 85.492 - type: recall_at_5 value: 90.226 - task: type: Clustering dataset: name: MTEB RedditClustering type: mteb/reddit-clustering config: default split: test revision: 24640382cdbf8abc73003fb0fa6d111a705499eb metrics: - type: v_measure value: 48.3161352736264 - task: type: Clustering dataset: name: MTEB RedditClusteringP2P type: mteb/reddit-clustering-p2p config: default split: test revision: 385e3cb46b4cfa89021f56c4380204149d0efe33 metrics: - type: v_measure value: 57.83784484156747 - task: type: Retrieval dataset: name: MTEB SCIDOCS type: mteb/scidocs config: default split: test revision: 
f8c2fcf00f625baaa80f62ec5bd9e1fff3b8ae88 metrics: - type: map_at_1 value: 4.403 - type: map_at_10 value: 10.922 - type: map_at_100 value: 12.626000000000001 - type: map_at_1000 value: 12.883 - type: map_at_3 value: 7.982 - type: map_at_5 value: 9.442 - type: mrr_at_1 value: 21.7 - type: mrr_at_10 value: 31.653 - type: mrr_at_100 value: 32.757999999999996 - type: mrr_at_1000 value: 32.824999999999996 - type: mrr_at_3 value: 28.266999999999996 - type: mrr_at_5 value: 30.127 - type: ndcg_at_1 value: 21.7 - type: ndcg_at_10 value: 18.355 - type: ndcg_at_100 value: 25.228 - type: ndcg_at_1000 value: 30.164 - type: ndcg_at_3 value: 17.549 - type: ndcg_at_5 value: 15.260000000000002 - type: precision_at_1 value: 21.7 - type: precision_at_10 value: 9.47 - type: precision_at_100 value: 1.9290000000000003 - type: precision_at_1000 value: 0.312 - type: precision_at_3 value: 16.3 - type: precision_at_5 value: 13.28 - type: recall_at_1 value: 4.403 - type: recall_at_10 value: 19.18 - type: recall_at_100 value: 39.182 - type: recall_at_1000 value: 63.378 - type: recall_at_3 value: 9.934999999999999 - type: recall_at_5 value: 13.459999999999999 - task: type: STS dataset: name: MTEB SICK-R type: mteb/sickr-sts config: default split: test revision: 20a6d6f312dd54037fe07a32d58e5e168867909d metrics: - type: cos_sim_pearson value: 76.90841073432534 - type: cos_sim_spearman value: 69.2566375434526 - type: euclidean_pearson value: 73.00183878559413 - type: euclidean_spearman value: 69.25664656235413 - type: manhattan_pearson value: 72.89594756197533 - type: manhattan_spearman value: 69.23247111043545 - task: type: STS dataset: name: MTEB STS12 type: mteb/sts12-sts config: default split: test revision: a0d554a64d88156834ff5ae9920b964011b16384 metrics: - type: cos_sim_pearson value: 69.60878511794063 - type: cos_sim_spearman value: 65.89916377105551 - type: euclidean_pearson value: 66.90761876557181 - type: euclidean_spearman value: 65.89915018368384 - type: manhattan_pearson value: 66.78502575257721 - type: manhattan_spearman value: 65.79977053467938 - task: type: STS dataset: name: MTEB STS13 type: mteb/sts13-sts config: default split: test revision: 7e90230a92c190f1bf69ae9002b8cea547a64cca metrics: - type: cos_sim_pearson value: 77.2869334987418 - type: cos_sim_spearman value: 77.86961921643416 - type: euclidean_pearson value: 77.43179820479914 - type: euclidean_spearman value: 77.86961921643416 - type: manhattan_pearson value: 77.18900647348373 - type: manhattan_spearman value: 77.61209060062608 - task: type: STS dataset: name: MTEB STS14 type: mteb/sts14-sts config: default split: test revision: 6031580fec1f6af667f0bd2da0a551cf4f0b2375 metrics: - type: cos_sim_pearson value: 76.26453932960364 - type: cos_sim_spearman value: 72.81574657995401 - type: euclidean_pearson value: 75.0708953437423 - type: euclidean_spearman value: 72.81574657995401 - type: manhattan_pearson value: 74.88396609999512 - type: manhattan_spearman value: 72.65437562156805 - task: type: STS dataset: name: MTEB STS15 type: mteb/sts15-sts config: default split: test revision: ae752c7c21bf194d8b67fd573edf7ae58183cbe3 metrics: - type: cos_sim_pearson value: 82.37827653919395 - type: cos_sim_spearman value: 83.4885552472602 - type: euclidean_pearson value: 82.89377087926749 - type: euclidean_spearman value: 83.4885552472602 - type: manhattan_pearson value: 82.82440771787735 - type: manhattan_spearman value: 83.41449537888975 - task: type: STS dataset: name: MTEB STS16 type: mteb/sts16-sts config: default split: test revision: 
4d8694f8f0e0100860b497b999b3dbed754a0513 metrics: - type: cos_sim_pearson value: 78.7995043673964 - type: cos_sim_spearman value: 80.57804447517638 - type: euclidean_pearson value: 80.03013884278195 - type: euclidean_spearman value: 80.57804447517638 - type: manhattan_pearson value: 80.13406111544424 - type: manhattan_spearman value: 80.65354602648962 - task: type: STS dataset: name: MTEB STS17 (en-en) type: mteb/sts17-crosslingual-sts config: en-en split: test revision: af5e6fb845001ecf41f4c1e033ce921939a2a68d metrics: - type: cos_sim_pearson value: 83.63565989937278 - type: cos_sim_spearman value: 84.4948593656943 - type: euclidean_pearson value: 84.68743074820951 - type: euclidean_spearman value: 84.4948593656943 - type: manhattan_pearson value: 84.43639397781811 - type: manhattan_spearman value: 84.32595552115242 - task: type: STS dataset: name: MTEB STS22 (en) type: mteb/sts22-crosslingual-sts config: en split: test revision: eea2b4fe26a775864c896887d910b76a8098ad3f metrics: - type: cos_sim_pearson value: 65.06382649277246 - type: cos_sim_spearman value: 66.28447782018655 - type: euclidean_pearson value: 67.09895930908392 - type: euclidean_spearman value: 66.28447782018655 - type: manhattan_pearson value: 66.96342453888376 - type: manhattan_spearman value: 66.33876259551842 - task: type: STS dataset: name: MTEB STSBenchmark type: mteb/stsbenchmark-sts config: default split: test revision: b0fddb56ed78048fa8b90373c8a3cfc37b684831 metrics: - type: cos_sim_pearson value: 78.43883428940346 - type: cos_sim_spearman value: 79.18395553127085 - type: euclidean_pearson value: 79.22986635457109 - type: euclidean_spearman value: 79.18395553127085 - type: manhattan_pearson value: 79.10921229934691 - type: manhattan_spearman value: 79.02283553930171 - task: type: STS dataset: name: MTEB STSBenchmarkMultilingualSTS (en) type: PhilipMay/stsb_multi_mt config: en split: test revision: 93d57ef91790589e3ce9c365164337a8a78b7632 metrics: - type: cos_sim_pearson value: 78.43883433444418 - type: cos_sim_spearman value: 79.18395553127085 - type: euclidean_pearson value: 79.22986642351681 - type: euclidean_spearman value: 79.18395553127085 - type: manhattan_pearson value: 79.10921236746302 - type: manhattan_spearman value: 79.02283553930171 - task: type: Reranking dataset: name: MTEB SciDocsRR type: mteb/scidocs-reranking config: default split: test revision: d3c5e1fc0b855ab6097bf1cda04dd73947d7caab metrics: - type: map value: 76.9361627171417 - type: mrr value: 93.06577046773126 - task: type: Retrieval dataset: name: MTEB SciFact type: mteb/scifact config: default split: test revision: 0228b52cf27578f30900b9e5271d331663a030d7 metrics: - type: map_at_1 value: 50.693999999999996 - type: map_at_10 value: 59.784000000000006 - type: map_at_100 value: 60.443000000000005 - type: map_at_1000 value: 60.480000000000004 - type: map_at_3 value: 57.028 - type: map_at_5 value: 58.306999999999995 - type: mrr_at_1 value: 53.333 - type: mrr_at_10 value: 61.565000000000005 - type: mrr_at_100 value: 62.095 - type: mrr_at_1000 value: 62.131 - type: mrr_at_3 value: 59.721999999999994 - type: mrr_at_5 value: 60.589000000000006 - type: ndcg_at_1 value: 53.333 - type: ndcg_at_10 value: 64.512 - type: ndcg_at_100 value: 67.366 - type: ndcg_at_1000 value: 68.46799999999999 - type: ndcg_at_3 value: 59.748999999999995 - type: ndcg_at_5 value: 61.526 - type: precision_at_1 value: 53.333 - type: precision_at_10 value: 8.733 - type: precision_at_100 value: 1.027 - type: precision_at_1000 value: 0.11199999999999999 - type: precision_at_3 
value: 23.222 - type: precision_at_5 value: 15.2 - type: recall_at_1 value: 50.693999999999996 - type: recall_at_10 value: 77.333 - type: recall_at_100 value: 90.10000000000001 - type: recall_at_1000 value: 99 - type: recall_at_3 value: 64.39399999999999 - type: recall_at_5 value: 68.7 - task: type: PairClassification dataset: name: MTEB SprintDuplicateQuestions type: mteb/sprintduplicatequestions-pairclassification config: default split: test revision: d66bd1f72af766a5cc4b0ca5e00c162f89e8cc46 metrics: - type: cos_sim_accuracy value: 99.81386138613861 - type: cos_sim_ap value: 94.96375600031361 - type: cos_sim_f1 value: 90.36885245901641 - type: cos_sim_precision value: 92.64705882352942 - type: cos_sim_recall value: 88.2 - type: dot_accuracy value: 99.81386138613861 - type: dot_ap value: 94.96375600031361 - type: dot_f1 value: 90.36885245901641 - type: dot_precision value: 92.64705882352942 - type: dot_recall value: 88.2 - type: euclidean_accuracy value: 99.81386138613861 - type: euclidean_ap value: 94.96375600031361 - type: euclidean_f1 value: 90.36885245901641 - type: euclidean_precision value: 92.64705882352942 - type: euclidean_recall value: 88.2 - type: manhattan_accuracy value: 99.81287128712871 - type: manhattan_ap value: 94.92563500640084 - type: manhattan_f1 value: 90.27277406073082 - type: manhattan_precision value: 93.00106044538707 - type: manhattan_recall value: 87.7 - type: max_accuracy value: 99.81386138613861 - type: max_ap value: 94.96375600031361 - type: max_f1 value: 90.36885245901641 - task: type: Clustering dataset: name: MTEB StackExchangeClustering type: mteb/stackexchange-clustering config: default split: test revision: 6cbc1f7b2bc0622f2e39d2c77fa502909748c259 metrics: - type: v_measure value: 57.486984956276274 - task: type: Clustering dataset: name: MTEB StackExchangeClusteringP2P type: mteb/stackexchange-clustering-p2p config: default split: test revision: 815ca46b2622cec33ccafc3735d572c266efdb44 metrics: - type: v_measure value: 34.58453023612073 - task: type: Reranking dataset: name: MTEB StackOverflowDupQuestions type: mteb/stackoverflowdupquestions-reranking config: default split: test revision: e185fbe320c72810689fc5848eb6114e1ef5ec69 metrics: - type: map value: 50.16317315282306 - type: mrr value: 50.82617137764197 - task: type: Summarization dataset: name: MTEB SummEval type: mteb/summeval config: default split: test revision: cda12ad7615edc362dbf25a00fdd61d3b1eaf93c metrics: - type: cos_sim_pearson value: 30.2927995133324 - type: cos_sim_spearman value: 30.09648622523191 - type: dot_pearson value: 30.29279853541771 - type: dot_spearman value: 30.09648622523191 - task: type: Retrieval dataset: name: MTEB TRECCOVID type: mteb/trec-covid config: default split: test revision: bb9466bac8153a0349341eb1b22e06409e78ef4e metrics: - type: map_at_1 value: 0.23500000000000001 - type: map_at_10 value: 2.01 - type: map_at_100 value: 12.064 - type: map_at_1000 value: 27.437 - type: map_at_3 value: 0.6649999999999999 - type: map_at_5 value: 1.0959999999999999 - type: mrr_at_1 value: 88 - type: mrr_at_10 value: 92.667 - type: mrr_at_100 value: 92.667 - type: mrr_at_1000 value: 92.667 - type: mrr_at_3 value: 91.667 - type: mrr_at_5 value: 92.667 - type: ndcg_at_1 value: 84 - type: ndcg_at_10 value: 79.431 - type: ndcg_at_100 value: 60.914 - type: ndcg_at_1000 value: 52.005 - type: ndcg_at_3 value: 82.285 - type: ndcg_at_5 value: 81.565 - type: precision_at_1 value: 88 - type: precision_at_10 value: 84.8 - type: precision_at_100 value: 62.32 - type: precision_at_1000 
value: 23.014000000000003 - type: precision_at_3 value: 86.667 - type: precision_at_5 value: 87.2 - type: recall_at_1 value: 0.23500000000000001 - type: recall_at_10 value: 2.19 - type: recall_at_100 value: 14.904 - type: recall_at_1000 value: 47.875 - type: recall_at_3 value: 0.695 - type: recall_at_5 value: 1.165 - task: type: Retrieval dataset: name: MTEB Touche2020 type: mteb/touche2020 config: default split: test revision: a34f9a33db75fa0cbb21bb5cfc3dae8dc8bec93f metrics: - type: map_at_1 value: 3.639 - type: map_at_10 value: 14.184 - type: map_at_100 value: 20.61 - type: map_at_1000 value: 22.377 - type: map_at_3 value: 9.163 - type: map_at_5 value: 10.773000000000001 - type: mrr_at_1 value: 46.939 - type: mrr_at_10 value: 59.345000000000006 - type: mrr_at_100 value: 60.07599999999999 - type: mrr_at_1000 value: 60.07599999999999 - type: mrr_at_3 value: 55.782 - type: mrr_at_5 value: 58.231 - type: ndcg_at_1 value: 41.837 - type: ndcg_at_10 value: 32.789 - type: ndcg_at_100 value: 42.232 - type: ndcg_at_1000 value: 53.900999999999996 - type: ndcg_at_3 value: 41.963 - type: ndcg_at_5 value: 35.983 - type: precision_at_1 value: 46.939 - type: precision_at_10 value: 28.163 - type: precision_at_100 value: 8.102 - type: precision_at_1000 value: 1.59 - type: precision_at_3 value: 44.897999999999996 - type: precision_at_5 value: 34.694 - type: recall_at_1 value: 3.639 - type: recall_at_10 value: 19.308 - type: recall_at_100 value: 48.992000000000004 - type: recall_at_1000 value: 84.59400000000001 - type: recall_at_3 value: 9.956 - type: recall_at_5 value: 12.33 - task: type: Classification dataset: name: MTEB ToxicConversationsClassification type: mteb/toxic_conversations_50k config: default split: test revision: edfaf9da55d3dd50d43143d90c1ac476895ae6de metrics: - type: accuracy value: 64.305 - type: ap value: 11.330746746072599 - type: f1 value: 49.290704382387865 - task: type: Classification dataset: name: MTEB TweetSentimentExtractionClassification type: mteb/tweet_sentiment_extraction config: default split: test revision: d604517c81ca91fe16a244d1248fc021f9ecee7a metrics: - type: accuracy value: 56.1941143180532 - type: f1 value: 56.40189765095578 - task: type: Clustering dataset: name: MTEB TwentyNewsgroupsClustering type: mteb/twentynewsgroups-clustering config: default split: test revision: 6125ec4e24fa026cec8a478383ee943acfbd5449 metrics: - type: v_measure value: 36.28189332526842 - task: type: PairClassification dataset: name: MTEB TwitterSemEval2015 type: mteb/twittersemeval2015-pairclassification config: default split: test revision: 70970daeab8776df92f5ea462b6173c0b46fd2d1 metrics: - type: cos_sim_accuracy value: 83.1912737676581 - type: cos_sim_ap value: 64.31536990146257 - type: cos_sim_f1 value: 61.095167030191696 - type: cos_sim_precision value: 54.074375127006704 - type: cos_sim_recall value: 70.21108179419525 - type: dot_accuracy value: 83.1912737676581 - type: dot_ap value: 64.31539216162541 - type: dot_f1 value: 61.095167030191696 - type: dot_precision value: 54.074375127006704 - type: dot_recall value: 70.21108179419525 - type: euclidean_accuracy value: 83.1912737676581 - type: euclidean_ap value: 64.31538391358727 - type: euclidean_f1 value: 61.095167030191696 - type: euclidean_precision value: 54.074375127006704 - type: euclidean_recall value: 70.21108179419525 - type: manhattan_accuracy value: 83.07206294331525 - type: manhattan_ap value: 64.14646315556838 - type: manhattan_f1 value: 61.194029850746254 - type: manhattan_precision value: 54.166666666666664 - type: 
manhattan_recall value: 70.31662269129288 - type: max_accuracy value: 83.1912737676581 - type: max_ap value: 64.31539216162541 - type: max_f1 value: 61.194029850746254 - task: type: PairClassification dataset: name: MTEB TwitterURLCorpus type: mteb/twitterurlcorpus-pairclassification config: default split: test revision: 8b6510b0b1fa4e4c4f879467980e9be563ec1cdf metrics: - type: cos_sim_accuracy value: 88.38242713548337 - type: cos_sim_ap value: 84.70041255196017 - type: cos_sim_f1 value: 77.13222561986515 - type: cos_sim_precision value: 73.95266690215472 - type: cos_sim_recall value: 80.59747459193102 - type: dot_accuracy value: 88.38242713548337 - type: dot_ap value: 84.7004118720222 - type: dot_f1 value: 77.13222561986515 - type: dot_precision value: 73.95266690215472 - type: dot_recall value: 80.59747459193102 - type: euclidean_accuracy value: 88.38242713548337 - type: euclidean_ap value: 84.70041593996575 - type: euclidean_f1 value: 77.13222561986515 - type: euclidean_precision value: 73.95266690215472 - type: euclidean_recall value: 80.59747459193102 - type: manhattan_accuracy value: 88.36108200411378 - type: manhattan_ap value: 84.66897701572054 - type: manhattan_f1 value: 77.00707640360645 - type: manhattan_precision value: 72.17695778062082 - type: manhattan_recall value: 82.53002771789343 - type: max_accuracy value: 88.38242713548337 - type: max_ap value: 84.70041593996575 - type: max_f1 value: 77.13222561986515 - task: type: Clustering dataset: name: MTEB WikiCitiesClustering type: jinaai/cities_wiki_clustering config: default split: test revision: ddc9ee9242fa65332597f70e967ecc38b9d734fa metrics: - type: v_measure value: 81.46426354153643 --- <h1 align="center">Snowflake's Arctic-embed-xs</h1> <h4 align="center"> <p> <a href=#news>News</a> | <a href=#models>Models</a> | <a href=#usage>Usage</a> | <a href="#evaluation">Evaluation</a> | <a href="#contact">Contact</a> | <a href="#faq">FAQ</a> | <a href="#license">License</a> | <a href="#acknowledgement">Acknowledgement</a> </p> </h4> ## News 12/04/2024: Release of [snowflake-arctic-embed-l-v2.0](https://huggingface.co/Snowflake/snowflake-arctic-embed-l-v2.0) and [snowflake-arctic-embed-m-v2.0](https://huggingface.co/Snowflake/snowflake-arctic-embed-m-v2.0), our newest models designed with multilingual workloads in mind. These models outperform prior versions of Arctic Embed, and we suggest using them in place of earlier releases! 07/26/2024: Release of the preprint [[2407.18887] Embedding And Clustering Your Data Can Improve Contrastive Pretraining](https://arxiv.org/abs/2407.18887) on arXiv. 07/18/2024: Release of `snowflake-arctic-embed-m-v1.5`, capable of producing highly compressible embedding vectors that preserve quality even when squished as small as 128 bytes per vector. Details about the development of this model are available in the [launch post on the Snowflake engineering blog](https://www.snowflake.com/engineering-blog/arctic-embed-m-v1-5-enterprise-retrieval/). 05/10/2024: Release of the [technical report on Arctic Embed](https://arxiv.org/abs/2405.05374). 04/16/2024: Release of the **snowflake-arctic-embed** family of text embedding models. The releases are state-of-the-art for retrieval quality at each of their representative size profiles. A technical report is coming shortly. For more details, please refer to our GitHub repository: [Arctic-Text-Embed](https://github.com/Snowflake-Labs/arctic-embed). ## Models snowflake-arctic-embed is a suite of text embedding models optimized for high-quality retrieval performance.
The `snowflake-arctic-embedding` models achieve **state-of-the-art performance on the MTEB/BEIR leaderboard** for each of their size variants. Evaluation is performed using these [scripts](https://github.com/Snowflake-Labs/snowflake-arctic-embed/tree/main/src). As shown below, each class of model size achieves SOTA retrieval accuracy compared to other top models.

The models are trained by leveraging existing open-source text representation models, such as bert-base-uncased, and are trained in a multi-stage pipeline to optimize their retrieval performance. First, the models are trained with large batches of query-document pairs where negatives are derived in-batch; pretraining leverages about 400m samples of a mix of public datasets and proprietary web search data. Following pretraining, the models are further optimized with long training on a smaller dataset (about 1m samples) of triplets of query, positive document, and negative document derived from hard negative mining. Mining of the negatives and data curation are crucial to retrieval accuracy. A detailed technical report can be found [here](https://arxiv.org/abs/2405.05374).

| Name | MTEB Retrieval Score (NDCG @ 10) | Parameters (Millions) | Embedding Dimension |
| ----------------------------------------------------------------------- | -------------------------------- | --------------------- | ------------------- |
| [snowflake-arctic-embed-xs](https://huggingface.co/Snowflake/snowflake-arctic-embed-xs/) | 50.15 | 22 | 384 |
| [snowflake-arctic-embed-s](https://huggingface.co/Snowflake/snowflake-arctic-embed-s/) | 51.98 | 33 | 384 |
| [snowflake-arctic-embed-m](https://huggingface.co/Snowflake/snowflake-arctic-embed-m/) | 54.90 | 110 | 768 |
| [snowflake-arctic-embed-m-long](https://huggingface.co/Snowflake/snowflake-arctic-embed-m-long/) | 54.83 | 137 | 768 |
| [snowflake-arctic-embed-l](https://huggingface.co/Snowflake/snowflake-arctic-embed-l/) | 55.98 | 335 | 1024 |

Aside from being great open-source models, the largest model, [snowflake-arctic-embed-l](https://huggingface.co/Snowflake/snowflake-arctic-embed-l/), can serve as a natural replacement for closed-source embedding models, as shown below.

| Model Name | MTEB Retrieval Score (NDCG @ 10) |
| ------------------------------------------------------------------ | -------------------------------- |
| [snowflake-arctic-embed-l](https://huggingface.co/Snowflake/snowflake-arctic-embed-l/) | 55.98 |
| Google-gecko-text-embedding | 55.7 |
| text-embedding-3-large | 55.44 |
| Cohere-embed-english-v3.0 | 55.00 |
| bge-large-en-v1.5 | 54.29 |

### [snowflake-arctic-embed-xs](https://huggingface.co/Snowflake/snowflake-arctic-embed-xs)

This tiny model packs quite the punch. Based on the [all-MiniLM-L6-v2](https://huggingface.co/sentence-transformers/all-MiniLM-L6-v2) model with only 22m parameters and 384 dimensions, this model should meet even the strictest latency/TCO budgets. Despite its size, its retrieval accuracy is closer to that of models with 100m parameters.

| Model Name | MTEB Retrieval Score (NDCG @ 10) |
| ------------------------------------------------------------------- | -------------------------------- |
| [snowflake-arctic-embed-xs](https://huggingface.co/Snowflake/snowflake-arctic-embed-xs/) | 50.15 |
| GIST-all-MiniLM-L6-v2 | 45.12 |
| gte-tiny | 44.92 |
| all-MiniLM-L6-v2 | 41.95 |
| bge-micro-v2 | 42.56 |

### [snowflake-arctic-embed-s](https://huggingface.co/Snowflake/snowflake-arctic-embed-s)

Based on the [intfloat/e5-small-unsupervised](https://huggingface.co/intfloat/e5-small-unsupervised) model, this small model does not trade off retrieval accuracy for its small size. With only 33m parameters and 384 dimensions, this model should easily allow scaling to large datasets.

| Model Name | MTEB Retrieval Score (NDCG @ 10) |
| ------------------------------------------------------------------ | -------------------------------- |
| [snowflake-arctic-embed-s](https://huggingface.co/Snowflake/snowflake-arctic-embed-s/) | 51.98 |
| bge-small-en-v1.5 | 51.68 |
| Cohere-embed-english-light-v3.0 | 51.34 |
| text-embedding-3-small | 51.08 |
| e5-small-v2 | 49.04 |

### [snowflake-arctic-embed-m](https://huggingface.co/Snowflake/snowflake-arctic-embed-m/)

Based on the [intfloat/e5-base-unsupervised](https://huggingface.co/intfloat/e5-base-unsupervised) model, this medium model is the workhorse that provides the best retrieval performance without slowing down inference.

| Model Name | MTEB Retrieval Score (NDCG @ 10) |
| ------------------------------------------------------------------ | -------------------------------- |
| [snowflake-arctic-embed-m](https://huggingface.co/Snowflake/snowflake-arctic-embed-m/) | 54.90 |
| bge-base-en-v1.5 | 53.25 |
| nomic-embed-text-v1.5 | 53.25 |
| GIST-Embedding-v0 | 52.31 |
| gte-base | 52.31 |

### [snowflake-arctic-embed-m-long](https://huggingface.co/Snowflake/snowflake-arctic-embed-m-long/)

Based on the [nomic-embed-text-v1-unsupervised](https://huggingface.co/nomic-ai/nomic-embed-text-v1-unsupervised) model, this long-context variant of our medium-sized model is perfect for workloads that can be constrained by the regular 512 token context of our other models. Without the use of RPE, this model supports up to 2048 tokens. With RPE, it can scale to 8192!

| Model Name | MTEB Retrieval Score (NDCG @ 10) |
| ------------------------------------------------------------------ | -------------------------------- |
| [snowflake-arctic-embed-m-long](https://huggingface.co/Snowflake/snowflake-arctic-embed-m-long/) | 54.83 |
| nomic-embed-text-v1.5 | 53.01 |
| nomic-embed-text-v1 | 52.81 |

### [snowflake-arctic-embed-l](https://huggingface.co/Snowflake/snowflake-arctic-embed-l/)

Based on the [intfloat/e5-large-unsupervised](https://huggingface.co/intfloat/e5-large-unsupervised) model, this large model is a direct drop-in for closed APIs and delivers the most accurate retrieval experience.

| Model Name | MTEB Retrieval Score (NDCG @ 10) |
| ------------------------------------------------------------------ | -------------------------------- |
| [snowflake-arctic-embed-l](https://huggingface.co/Snowflake/snowflake-arctic-embed-l/) | 55.98 |
| UAE-Large-V1 | 54.66 |
| bge-large-en-v1.5 | 54.29 |
| mxbai-embed-large-v1 | 54.39 |
| e5-Large-v2 | 50.56 |

## Usage

### Using Sentence Transformers

You can use the sentence-transformers package to use a snowflake-arctic-embed model, as shown below.
```python
from sentence_transformers import SentenceTransformer

model = SentenceTransformer("Snowflake/snowflake-arctic-embed-xs")

queries = ['what is snowflake?', 'Where can I get the best tacos?']
documents = ['The Data Cloud!', 'Mexico City of Course!']

query_embeddings = model.encode(queries, prompt_name="query")
document_embeddings = model.encode(documents)

scores = query_embeddings @ document_embeddings.T
for query, query_scores in zip(queries, scores):
    doc_score_pairs = list(zip(documents, query_scores))
    doc_score_pairs = sorted(doc_score_pairs, key=lambda x: x[1], reverse=True)
    # Output passages & scores
    print("Query:", query)
    for document, score in doc_score_pairs:
        print(score, document)
```
```
Query: what is snowflake?
0.57515126 The Data Cloud!
0.45798576 Mexico City of Course!
Query: Where can I get the best tacos?
0.5636022 Mexico City of Course!
0.5044898 The Data Cloud!
```

### Using Huggingface transformers

You can use the transformers package for a snowflake-arctic-embed model, as shown below. For optimal retrieval quality, use the CLS token to embed each text portion and use the query prefix below (just on the query).

```python
import torch
from transformers import AutoModel, AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained('Snowflake/snowflake-arctic-embed-xs')
model = AutoModel.from_pretrained('Snowflake/snowflake-arctic-embed-xs', add_pooling_layer=False)
model.eval()

query_prefix = 'Represent this sentence for searching relevant passages: '
queries = ['what is snowflake?', 'Where can I get the best tacos?']
queries_with_prefix = ["{}{}".format(query_prefix, i) for i in queries]
query_tokens = tokenizer(queries_with_prefix, padding=True, truncation=True, return_tensors='pt', max_length=512)

documents = ['The Data Cloud!', 'Mexico City of Course!']
document_tokens = tokenizer(documents, padding=True, truncation=True, return_tensors='pt', max_length=512)

# Compute token embeddings
with torch.no_grad():
    query_embeddings = model(**query_tokens)[0][:, 0]
    document_embeddings = model(**document_tokens)[0][:, 0]

# normalize embeddings
query_embeddings = torch.nn.functional.normalize(query_embeddings, p=2, dim=1)
document_embeddings = torch.nn.functional.normalize(document_embeddings, p=2, dim=1)

scores = torch.mm(query_embeddings, document_embeddings.transpose(0, 1))
for query, query_scores in zip(queries, scores):
    doc_score_pairs = list(zip(documents, query_scores))
    doc_score_pairs = sorted(doc_score_pairs, key=lambda x: x[1], reverse=True)
    # Output passages & scores
    print("Query:", query)
    for document, score in doc_score_pairs:
        print(score, document)
```

### Using Transformers.js

If you haven't already, you can install the [Transformers.js](https://huggingface.co/docs/transformers.js) JavaScript library from [NPM](https://www.npmjs.com/package/@xenova/transformers) by running:

```bash
npm i @xenova/transformers
```

You can then use the model to compute embeddings as follows:

```js
import { pipeline, dot } from '@xenova/transformers';

// Create feature extraction pipeline
const extractor = await pipeline('feature-extraction', 'Snowflake/snowflake-arctic-embed-xs', {
    quantized: false, // Comment out this line to use the quantized version
});

// Generate sentence embeddings
const sentences = [
    'Represent this sentence for searching relevant passages: Where can I get the best tacos?',
    'The Data Cloud!',
    'Mexico City of Course!',
]
const output = await extractor(sentences, { normalize: true, pooling: 'cls' });

// Compute similarity scores
const [source_embeddings,
      ...document_embeddings ] = output.tolist();
const similarities = document_embeddings.map(x => dot(source_embeddings, x));
console.log(similarities); // [0.5044895661144148, 0.5636021124426508]
```

## FAQ

TBD

## Contact

Feel free to open an issue or pull request if you have any questions or suggestions about this project. You also can email Daniel Campos([email protected]).

## License

Arctic is licensed under the [Apache-2](https://www.apache.org/licenses/LICENSE-2.0). The released models can be used for commercial purposes free of charge.

## Acknowledgement

We want to thank the open-source community, which has provided the great building blocks upon which we could make our models. We thank our modeling engineers, Danmei Xu, Luke Merrick, Gaurav Nuti, and Daniel Campos, for making these great models possible. We thank our leadership, Himabindu Pucha, Kelvin So, Vivek Raghunathan, and Sridhar Ramaswamy, for supporting this work. We also thank the open-source community for producing the great models we could build on top of and making these releases possible. Finally, we thank the researchers who created the BEIR and MTEB benchmarks. It is largely thanks to their tireless work to define what better looks like that we could improve model performance.

<img referrerpolicy="no-referrer-when-downgrade" src="https://static.scarf.sh/a.png?x-pxid=15cd6ef8-397b-4e85-9d74-27ebdc7e9765" />
[ "BIOSSES", "SCIFACT" ]
jukofyork/creative-writing-control-vectors-v3.0
jukofyork
null
[ "gguf", "control-vector", "creative-writing", "license:apache-2.0", "region:us" ]
"2024-08-28T10:16:32Z"
2025-03-17T09:40:52+00:00
256,033
30
---
license: apache-2.0
tags:
- control-vector
- creative-writing
---

![image/png](https://cdn-uploads.huggingface.co/production/uploads/65995c45539c808e84c38bf1/s__qd2EiVHdDmjQ5i_JtK.png)

This repo contains pre-generated control vectors in [GGUF](https://github.com/ggerganov/ggml/blob/master/docs/gguf.md) format for use with [llama.cpp](https://github.com/ggerganov/llama.cpp):

- **IMPORTANT**: These **new control vectors** must use their **respective de-bias control vector(s)**.
- The code used to generate these can now be found at [github.com/jukofyork/control-vectors](https://github.com/jukofyork/control-vectors).
- All were generated with `'--num_prompt_samples'` set to the model's hidden state dimension.

Control vectors allow fine-tuned control over LLMs, enabling more precise/targeted text generation.

---

## Table of Contents

- [Applying Control Vectors](#applying-control-vectors)
- [Command Line Generator](#command-line-generator)
- [Direct Links](#direct-links)
- [Algorithm Details](#algorithm-details)
- [Changelog](#changelog)

---

## Applying Control Vectors

### To "de-bias" the model only:

Use the `'--control-vector'` option as follows:

```sh
llama-cli --model <model>.gguf [other CLI arguments] \
    --control-vector mistral-large:123b-language__debias.gguf
```

Alternatively for server mode:

```sh
llama-server --model <model>.gguf [other CLI arguments] \
    --control-vector mistral-large:123b-language__debias.gguf
```

This will apply the "language" de-bias control vector to the `Mistral-Large-Instruct-2407` model.

You can apply multiple de-bias control vectors simultaneously like so:

```sh
llama-cli --model <model>.gguf [other CLI arguments] \
    --control-vector mistral-large:123b-language__debias.gguf \
    --control-vector mistral-large:123b-storytelling__debias.gguf \
    --control-vector mistral-large:123b-character_focus__debias.gguf
```

This will apply all 3 of the "writing style" de-bias control vectors.

### To fully apply a positive or negative axis control vector with the default scale-factor:

Use the `'--control-vector'` option as follows:

```sh
llama-cli --model <model>.gguf [other CLI arguments] \
    --control-vector mistral-large:123b-language__debias.gguf \
    --control-vector mistral-large:123b-language__ornate.gguf
```

This will fully apply (ie: with a scale-factor of `1.0`) the (positive-axis) "ornate language" control vector.

**IMPORTANT: The positive and negative axis control vectors must be used along with the relevant de-bias control vector - they cannot be used on their own!**

You can fully apply multiple positive or negative axis control vectors like so:

```sh
llama-cli --model <model>.gguf [other CLI arguments] \
    --control-vector mistral-large:123b-language__debias.gguf \
    --control-vector mistral-large:123b-language__ornate.gguf \
    --control-vector mistral-large:123b-storytelling__debias.gguf \
    --control-vector mistral-large:123b-storytelling__descriptive.gguf \
    --control-vector mistral-large:123b-character_focus__debias.gguf \
    --control-vector mistral-large:123b-character_focus__dialogue.gguf
```

This will fully apply (ie: with a scale-factor of `1.0`) all 3 of the (positive-axis) "writing style" control vectors.

**NOTE**: Fully applying too many positive or negative axis control vectors simultaneously may damage the model's output.
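If you want to fetch the control vector files programmatically before passing them to `llama-cli` or `llama-server`, a minimal sketch using `huggingface_hub` is shown below. The folder/file names used here are assumptions based on the CLI examples above and the model folders listed under Direct Links, so check the model's folder in this repo for the exact names:

```python
# Minimal sketch: download one model's control vectors from this repo before
# passing them to llama-cli / llama-server via --control-vector.
# NOTE: the folder/file names below are assumptions based on the examples in this
# README -- check the model's folder in the repo for the exact file names.
from huggingface_hub import hf_hub_download

repo_id = "jukofyork/creative-writing-control-vectors-v3.0"
files = [
    "Mistral-Large-Instruct-2407/mistral-large:123b-language__debias.gguf",  # hypothetical path
    "Mistral-Large-Instruct-2407/mistral-large:123b-language__ornate.gguf",  # hypothetical path
]

local_paths = [hf_hub_download(repo_id=repo_id, filename=f) for f in files]
print("\n".join(local_paths))  # pass these paths to --control-vector / --control-vector-scaled
```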
### To partially apply a positive or negative axis control vector using a custom scale-factor:

```sh
llama-cli --model <model>.gguf [other CLI arguments] \
    --control-vector mistral-large:123b-language__debias.gguf \
    --control-vector-scaled mistral-large:123b-language__ornate.gguf 0.5
```

This will partially apply the (positive-axis) "ornate language" control vector with a scale-factor of `0.5` (ie: half the full effect).

**IMPORTANT: The positive and negative axis control vectors must be used along with the relevant de-bias control vector - they cannot be used on their own!**

You can partially apply multiple positive or negative axis control vectors like so:

```sh
llama-cli --model <model>.gguf [other CLI arguments] \
    --control-vector mistral-large:123b-language__debias.gguf \
    --control-vector-scaled mistral-large:123b-language__ornate.gguf 0.5 \
    --control-vector mistral-large:123b-storytelling__debias.gguf \
    --control-vector-scaled mistral-large:123b-storytelling__descriptive.gguf 0.3 \
    --control-vector mistral-large:123b-character_focus__debias.gguf \
    --control-vector-scaled mistral-large:123b-character_focus__dialogue.gguf 0.2
```

This will partially apply all 3 of the (positive-axis) "writing style" control vectors with varying weights.

The theoretical upper bound value for equal weights is between `1/n` and `sqrt(1/n)` depending on how correlated the `n` control vector directions are, eg:

- For `n = 1`, use the default scale-factor of `1.0` for comparison with the values below.
- For `n = 2`, it is between `1/2 ≈ 0.5` and `sqrt(1/2) ≈ 0.707`.
- For `n = 3`, it is between `1/3 ≈ 0.333` and `sqrt(1/3) ≈ 0.577`.
- For `n = 4`, it is between `1/4 ≈ 0.25` and `sqrt(1/4) ≈ 0.5`.
- For `n = 5`, it is between `1/5 ≈ 0.2` and `sqrt(1/5) ≈ 0.447`.

and so on.

The way the positive and negative axis control vectors are calibrated means you can negate the scale-factors too, eg:

```sh
llama-cli --model <model>.gguf [other CLI arguments] \
    --control-vector mistral-large:123b-language__debias.gguf \
    --control-vector-scaled mistral-large:123b-language__ornate.gguf -0.5
```

is equivalent to:

```sh
llama-cli --model <model>.gguf [other CLI arguments] \
    --control-vector mistral-large:123b-language__debias.gguf \
    --control-vector-scaled mistral-large:123b-language__simple.gguf 0.5
```

**NOTE**: It is possible to use scale-factors greater than `1.0`, but if too large it will eventually damage the model's output.

### Important Notes

1. **Always** include the relevant "de-bias" control vector as well as the positive-axis/negative-axis control vector - they cannot be used on their own!
2. **Do not** mix both sides of a positive/negative axis at the same time (eg: `'--control-vector language__simple.gguf'` and `'--control-vector language__ornate.gguf'` will just cancel out and have no effect...).
3. Ensure your `llama.cpp` version is up to date (multi-vector support added 27/06/24 in [#8137](https://github.com/ggerganov/llama.cpp/pull/8137)).

---

## Command Line Generator

Courtesy of [gghfez](https://huggingface.co/gghfez), a utility to easily generate command line options for [llama.cpp](https://github.com/ggerganov/llama.cpp):

![image/png](https://cdn-uploads.huggingface.co/production/uploads/65995c45539c808e84c38bf1/RMqcuuBOTPDUm4ZuJq622.png)

You can run this tool directly on [GitHub Pages](https://jukofyork.github.io/control-vectors/command_line_generator.html).
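If you would rather script the command construction than use the web page, a tiny helper along these lines can assemble the flags following the conventions above. This is only an illustrative sketch, not part of any official tool: the helper function, the example weights and the `<prefix>-<axis>__<name>.gguf` filename pattern are simply mirroring the examples in this README.

```python
# Illustrative sketch of a tiny command-line builder following the conventions above.
# It pairs each axis with its de-bias vector and scales the chosen pole by `weight`.
# The "<prefix>-<axis>__<name>.gguf" filename pattern mirrors the examples in this README.
from math import sqrt

def control_vector_args(prefix: str, axes: dict[str, tuple[str, float]]) -> list[str]:
    args = []
    for axis, (pole, weight) in axes.items():
        args += ["--control-vector", f"{prefix}-{axis}__debias.gguf"]
        args += ["--control-vector-scaled", f"{prefix}-{axis}__{pole}.gguf", str(weight)]
    return args

axes = {
    "language": ("ornate", 0.4),
    "storytelling": ("descriptive", 0.4),
    "character_focus": ("dialogue", 0.4),
}
n = len(axes)
print(f"suggested equal-weight range: {1/n:.3f} .. {sqrt(1/n):.3f}")
print("llama-cli --model <model>.gguf " + " ".join(control_vector_args("mistral-large:123b", axes)))
```

Note that the equal weight of `0.4` used for three axes here sits inside the suggested `1/3` to `sqrt(1/3)` range discussed above.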
--- # Direct Links ## Very Large Models - [c4ai-command-r-plus](https://huggingface.co/jukofyork/creative-writing-control-vectors-v3.0/tree/main/c4ai-command-r-plus) - [c4ai-command-r-plus-08-2024](https://huggingface.co/jukofyork/creative-writing-control-vectors-v3.0/tree/main/c4ai-command-r-plus-08-2024) - [Eurux-8x22b-nca](https://huggingface.co/jukofyork/creative-writing-control-vectors-v3.0/tree/main/Eurux-8x22b-nca) - [Lumimaid-v0.2-123B](https://huggingface.co/jukofyork/creative-writing-control-vectors-v3.0/tree/main/Lumimaid-v0.2-123B) - [magnum-v2-123b](https://huggingface.co/jukofyork/creative-writing-control-vectors-v3.0/tree/main/magnum-v2-123b) - [Mistral-Large-Instruct-2407](https://huggingface.co/jukofyork/creative-writing-control-vectors-v3.0/tree/main/Mistral-Large-Instruct-2407) - [Mixtral-8x22B-Instruct-v0.1](https://huggingface.co/jukofyork/creative-writing-control-vectors-v3.0/tree/main/Mixtral-8x22B-Instruct-v0.1) - [Qwen1.5-110B-Chat](https://huggingface.co/jukofyork/creative-writing-control-vectors-v3.0/tree/main/Qwen1.5-110B-Chat) - [WizardLM-2-8x22B](https://huggingface.co/jukofyork/creative-writing-control-vectors-v3.0/tree/main/WizardLM-2-8x22B) ## Large Models - [Athene-70B](https://huggingface.co/jukofyork/creative-writing-control-vectors-v3.0/tree/main/Athene-70B) - [aurelian-alpha0.1-70b-rope8-32K-fp16](https://huggingface.co/jukofyork/creative-writing-control-vectors-v3.0/tree/main/aurelian-alpha0.1-70b-rope8-32K-fp16) - [aurelian-v0.5-70b-rope8-32K-fp16](https://huggingface.co/jukofyork/creative-writing-control-vectors-v3.0/tree/main/aurelian-v0.5-70b-rope8-32K-fp16) - [daybreak-miqu-1-70b-v1.0-hf](https://huggingface.co/jukofyork/creative-writing-control-vectors-v3.0/tree/main/daybreak-miqu-1-70b-v1.0-hf) - [deepseek-llm-67b-chat](https://huggingface.co/jukofyork/creative-writing-control-vectors-v3.0/tree/main/deepseek-llm-67b-chat) - [dolphin-2.9.2-qwen2-72b](https://huggingface.co/jukofyork/creative-writing-control-vectors-v3.0/tree/main/dolphin-2.9.2-qwen2-72b) - [Hermes-3-Llama-3.1-70B](https://huggingface.co/jukofyork/creative-writing-control-vectors-v3.0/tree/main/Hermes-3-Llama-3.1-70B) - [L3-70B-Euryale-v2.1](https://huggingface.co/jukofyork/creative-writing-control-vectors-v3.0/tree/main/L3-70B-Euryale-v2.1) - [L3.1-70B-Euryale-v2.2](https://huggingface.co/jukofyork/creative-writing-control-vectors-v3.0/tree/main/L3.1-70B-Euryale-v2.2) - [Llama-3-70B-Instruct-Storywriter](https://huggingface.co/jukofyork/creative-writing-control-vectors-v3.0/tree/main/Llama-3-70B-Instruct-Storywriter) - [Llama-3-Lumimaid-70B-v0.1](https://huggingface.co/jukofyork/creative-writing-control-vectors-v3.0/tree/main/Llama-3-Lumimaid-70B-v0.1) - [Llama-3.1-70B-ArliAI-RPMax-v1.1](https://huggingface.co/jukofyork/creative-writing-control-vectors-v3.0/tree/main/Llama-3.1-70B-ArliAI-RPMax-v1.1) - [Lumimaid-v0.2-70B](https://huggingface.co/jukofyork/creative-writing-control-vectors-v3.0/tree/main/Lumimaid-v0.2-70B) - [magnum-72b-v1](https://huggingface.co/jukofyork/creative-writing-control-vectors-v3.0/tree/main/magnum-72b-v1) - [magnum-v2-72b](https://huggingface.co/jukofyork/creative-writing-control-vectors-v3.0/tree/main/magnum-v2-72b) - [Meta-Llama-3-70B-Instruct](https://huggingface.co/jukofyork/creative-writing-control-vectors-v3.0/tree/main/Meta-Llama-3-70B-Instruct) - [Meta-Llama-3.1-70B-Instruct](https://huggingface.co/jukofyork/creative-writing-control-vectors-v3.0/tree/main/Meta-Llama-3.1-70B-Instruct) - 
[miqu-1-70b](https://huggingface.co/jukofyork/creative-writing-control-vectors-v3.0/tree/main/miqu-1-70b) - [Qwen1.5-72B-Chat](https://huggingface.co/jukofyork/creative-writing-control-vectors-v3.0/tree/main/Qwen1.5-72B-Chat) - [Qwen2-72B-Instruct](https://huggingface.co/jukofyork/creative-writing-control-vectors-v3.0/tree/main/Qwen2-72B-Instruct) - [Qwen2.5-72B-Instruct](https://huggingface.co/jukofyork/creative-writing-control-vectors-v3.0/tree/main/Qwen2.5-72B-Instruct) - [turbcat-instruct-72b](https://huggingface.co/jukofyork/creative-writing-control-vectors-v3.0/tree/main/turbcat-instruct-72b) ## Medium Models - [35b-beta-long](https://huggingface.co/jukofyork/creative-writing-control-vectors-v3.0/tree/main/35b-beta-long) - [aya-23-35B](https://huggingface.co/jukofyork/creative-writing-control-vectors-v3.0/tree/main/aya-23-35B) - [c4ai-command-r-v01](https://huggingface.co/jukofyork/creative-writing-control-vectors-v3.0/tree/main/c4ai-command-r-v01) - [c4ai-command-r-08-2024](https://huggingface.co/jukofyork/creative-writing-control-vectors-v3.0/tree/main/c4ai-command-r-08-2024) ([\*\*\*READ THIS FIRST\*\*\*](https://huggingface.co/jukofyork/creative-writing-control-vectors-v3.0/discussions/2)) - [Divergence-33B](https://huggingface.co/jukofyork/creative-writing-control-vectors-v3.0/tree/main/Divergence-33B) - [gemma-2-27b-it](https://huggingface.co/jukofyork/creative-writing-control-vectors-v3.0/tree/main/gemma-2-27b-it) - [gemma-2-27b-it-SimPO-37K](https://huggingface.co/jukofyork/creative-writing-control-vectors-v3.0/tree/main/gemma-2-27b-it-SimPO-37K) - [gemma2-gutenberg-27B](https://huggingface.co/jukofyork/creative-writing-control-vectors-v3.0/tree/main/gemma2-gutenberg-27B) - [internlm2_5-20b-chat](https://huggingface.co/jukofyork/creative-writing-control-vectors-v3.0/tree/main/internlm2_5-20b-chat) - [magnum-v1-32b](https://huggingface.co/jukofyork/creative-writing-control-vectors-v3.0/tree/main/magnum-v1-32b) - [magnum-v2-32b](https://huggingface.co/jukofyork/creative-writing-control-vectors-v3.0/tree/main/magnum-v2-32b) - [magnum-v3-27b-kto](https://huggingface.co/jukofyork/creative-writing-control-vectors-v3.0/tree/main/magnum-v3-27b-kto) - [magnum-v3-34b](https://huggingface.co/jukofyork/creative-writing-control-vectors-v3.0/tree/main/magnum-v3-34b) - [Mistral-Small-Instruct-2409](https://huggingface.co/jukofyork/creative-writing-control-vectors-v3.0/tree/main/Mistral-Small-Instruct-2409) - [Mixtral-8x7B-Instruct-v0.1](https://huggingface.co/jukofyork/creative-writing-control-vectors-v3.0/tree/main/Mixtral-8x7B-Instruct-v0.1) - [Nous-Capybara-34B](https://huggingface.co/jukofyork/creative-writing-control-vectors-v3.0/tree/main/Nous-Capybara-34B) - [Qwen1.5-32B-Chat](https://huggingface.co/jukofyork/creative-writing-control-vectors-v3.0/tree/main/Qwen1.5-32B-Chat) - [Qwen2.5-32B-Instruct](https://huggingface.co/jukofyork/creative-writing-control-vectors-v3.0/tree/main/Qwen2.5-32B-Instruct) - [Yi-34B-Chat](https://huggingface.co/jukofyork/creative-writing-control-vectors-v3.0/tree/main/Yi-34B-Chat) - [Yi-1.5-34B-Chat](https://huggingface.co/jukofyork/creative-writing-control-vectors-v3.0/tree/main/Yi-1.5-34B-Chat) - [Yi-1.5-34B-Chat-16K](https://huggingface.co/jukofyork/creative-writing-control-vectors-v3.0/tree/main/Yi-1.5-34B-Chat-16K) ## Small Models - [aya-23-8B](https://huggingface.co/jukofyork/creative-writing-control-vectors-v3.0/tree/main/aya-23-8B) - [gemma-2-9b-it](https://huggingface.co/jukofyork/creative-writing-control-vectors-v3.0/tree/main/gemma-2-9b-it) - 
[gemma-2-9b-it-SimPO](https://huggingface.co/jukofyork/creative-writing-control-vectors-v3.0/tree/main/gemma-2-9b-it-SimPO) - [Gemma-2-9B-It-SPPO-Iter3](https://huggingface.co/jukofyork/creative-writing-control-vectors-v3.0/tree/main/Gemma-2-9B-It-SPPO-Iter3) - [gemma-2-Ifable-9B](https://huggingface.co/jukofyork/creative-writing-control-vectors-v3.0/tree/main/gemma-2-Ifable-9B) - [Llama-3-Instruct-8B-SPPO-Iter3](https://huggingface.co/jukofyork/creative-writing-control-vectors-v3.0/tree/main/Llama-3-Instruct-8B-SPPO-Iter3) - [Llama-3.1-8B-ArliAI-RPMax-v1.1](https://huggingface.co/jukofyork/creative-writing-control-vectors-v3.0/tree/main/Llama-3.1-8B-ArliAI-RPMax-v1.1) - [Meta-Llama-3-8B-Instruct](https://huggingface.co/jukofyork/creative-writing-control-vectors-v3.0/tree/main/Meta-Llama-3-8B-Instruct) - [Meta-Llama-3.1-8B-Instruct](https://huggingface.co/jukofyork/creative-writing-control-vectors-v3.0/tree/main/Meta-Llama-3.1-8B-Instruct) - [Mistral-7B-Instruct-v0.2](https://huggingface.co/jukofyork/creative-writing-control-vectors-v3.0/tree/main/Mistral-7B-Instruct-v0.2) - [Mistral-7B-Instruct-v0.3](https://huggingface.co/jukofyork/creative-writing-control-vectors-v3.0/tree/main/Mistral-7B-Instruct-v0.3) - [Mistral7B-PairRM-SPPO-Iter3](https://huggingface.co/jukofyork/creative-writing-control-vectors-v3.0/tree/main/Mistral7B-PairRM-SPPO-Iter3) - [Mistral-Nemo-12B-ArliAI-RPMax-v1.1](https://huggingface.co/jukofyork/creative-writing-control-vectors-v3.0/tree/main/Mistral-Nemo-12B-ArliAI-RPMax-v1.1) - [mistral-nemo-gutenberg-12B](https://huggingface.co/jukofyork/creative-writing-control-vectors-v3.0/tree/main/mistral-nemo-gutenberg-12B) - [mistral-nemo-gutenberg-12B-v2](https://huggingface.co/jukofyork/creative-writing-control-vectors-v3.0/tree/main/mistral-nemo-gutenberg-12B-v2) - [Mistral-Nemo-Instruct-2407](https://huggingface.co/jukofyork/creative-writing-control-vectors-v3.0/tree/main/Mistral-Nemo-Instruct-2407) - [romulus-mistral-nemo-12b-simpo](https://huggingface.co/jukofyork/creative-writing-control-vectors-v3.0/tree/main/romulus-mistral-nemo-12b-simpo) - [Qwen1.5-14B-Chat](https://huggingface.co/jukofyork/creative-writing-control-vectors-v3.0/tree/main/Qwen1.5-14B-Chat) - [Qwen2-7B-Instruct](https://huggingface.co/jukofyork/creative-writing-control-vectors-v3.0/tree/main/Qwen2-7B-Instruct) - [Qwen2.5-7B-Instruct](https://huggingface.co/jukofyork/creative-writing-control-vectors-v3.0/tree/main/Qwen2.5-7B-Instruct) - [Qwen2.5-14B-Instruct](https://huggingface.co/jukofyork/creative-writing-control-vectors-v3.0/tree/main/Qwen2.5-14B-Instruct) - [WizardLM-2-7B](https://huggingface.co/jukofyork/creative-writing-control-vectors-v3.0/tree/main/WizardLM-2-7B) --- ## Algorithm Details ### 1. 
First we create a set of pre/post "prompt stems": <details> <summary>'prompt_stems.json' (click to expand)</summary> ```json { "pre": [ "You are", "You're", "Act as", "Behave as", "Respond as", "Answer as", "Write as", "Speak as", "Think like", "Roleplay as", "Pretend to be", "Imagine you are", "Assume you are", "Suppose you are", "Picture yourself as", "Envision yourself as", "Consider yourself", "Take on the role of", "Play the part of", "Perform as", "Be", "Emulate", "Mimic", "Imitate", "Channel", "Embody", "Represent", "Portray", "Adopt the persona of", "Function as", "Serve as", "Work as", "Operate as", "Pose as", "Present yourself as", "View yourself as", "See yourself as", "Regard yourself as", "Consider yourself as", "Think of yourself as", "Approach this as", "Conduct yourself as", "Assume the identity of", "Put yourself in the position of", "Inhabit the role of", "Characterize yourself as", "Impersonate", "Simulate being", "Take the perspective of", "Assume the role of" ], "post": [ "an author", "a storyteller", "an AI author", "an artificial intelligence that creates stories", "an AI-powered author", "an AI creator of tales", "a fiction writer", "an author specializing in fictional stories", "a novelist", "a creative writer", "a digital storyteller", "an AI narrative generator", "a computer-assisted author", "an AI weaver of narratives", "a prose artist", "a writer of imaginative tales", "a wordsmith", "a literary artist", "a narrative designer", "a tale weaver", "a story architect", "a crafter of fictional worlds", "a purveyor of narratives", "a storytelling savant", "a narrative architect", "a digital bard", "a modern wordsmith", "a virtual storyteller", "a contemporary narrative designer", "an innovative tale weaver", "a cutting-edge prose creator", "a digital-age fabulist", "a tech-savvy literary artist", "a 21st-century storyteller", "a famous author", "a literary virtuoso", "an expert storyteller", "a renowned wordsmith", "a master of fictional worlds", "a master of prose", "a futuristic narrative crafter", "a genre-bending author", "a visionary storyteller", "an experimental fiction writer", "a digital narrative pioneer", "a cross-platform storyteller", "a multimedia narrative artist", "an immersive story creator", "a narrative AI collaborator", "a next-generation author" ] } ``` </details> The Cartesian product of these gives us 2500 (ie: 50 x 50) different "You are an author" type sentences. ### 2. 
Then we create several different creative-writing axis "continuations": **A set of 3 different "writing style" axis:** <details> <summary>"Language" (click to expand)</summary> ```json { "classes": ["simple", "ornate"], "data": [ [ "who writes using clear, straightforward language accessible to young readers, with simple sentence structures and common vocabulary", "who writes using rich, sophisticated language suitable for mature readers, with complex sentence structures and varied vocabulary" ], [ "who crafts narratives using easy-to-understand words and concise sentences, making your tales approachable for readers of all ages", "who crafts narratives using eloquent prose and intricate phrasings, creating tales that challenge and engage advanced readers" ], [ "known for writing in a clear, unadorned style that makes complex ideas accessible to a wide audience", "known for writing in a lyrical, intricate style that showcases the beauty and complexity of language" ], [ "who specializes in using everyday language to craft engaging narratives that readers of all levels can enjoy", "who specializes in using sophisticated, sometimes archaic language to create immersive and challenging narratives" ], [ "who excels at conveying ideas and emotions through simple, precise language, avoiding unnecessary complexity", "who excels at conveying ideas and emotions through complex, nuanced language, embracing the full depth of linguistic expression" ], [ "focused on creating stories with straightforward plots and relatable characters using basic, accessible language", "focused on creating stories with intricate plots and multifaceted characters using elaborate, ornate language" ], [ "who writes in a direct, no-frills style that prioritizes clarity and ease of understanding for all readers", "who writes in a florid, embellished style that prioritizes linguistic beauty and complexity for discerning readers" ], [ "known for distilling complex concepts into easily digestible prose, making your work accessible to a broad audience", "known for weaving complex concepts into richly textured prose, creating literary works that reward careful analysis" ], [ "who crafts stories using concise, impactful language that resonates with readers through its clarity and directness", "who crafts stories using expansive, descriptive language that immerses readers in a world of vivid imagery and complex ideas" ], [ "specializing in clean, minimalist prose that conveys powerful ideas through carefully chosen, straightforward words", "specializing in lush, maximalist prose that conveys powerful ideas through carefully constructed, ornate phrases" ] ] } ``` </details> <details> <summary>"Storytelling (click to expand)"</summary> ```json { "classes": ["explicit", "descriptive"], "data": [ [ "who writes stories that directly state characters' emotions and motivations, clearly explaining their inner thoughts and the reasons behind their actions", "who writes stories that reveal characters' emotions and motivations through their actions, physical responses, and the details of their surroundings" ], [ "who creates narratives that explicitly tell readers about the story's themes and messages, leaving no room for ambiguity in interpretation", "who creates narratives that convey themes and messages through carefully crafted scenes and character interactions, allowing readers to draw their own conclusions" ], [ "who prioritizes clarity by directly stating the significance of events and their impact on the plot, ensuring readers fully 
understand the story's progression", "who prioritizes immersion by depicting events in vivid detail, allowing readers to infer their significance and impact on the plot" ], [ "who crafts stories where character development is explicitly explained, telling readers exactly how and why characters change over time", "who crafts stories where character development is shown through changing behaviors, attitudes, and decisions, inviting readers to observe growth over time" ], [ "who favors straightforward exposition, directly informing readers about the world, its history, and important background information", "who favors immersive world-building, revealing information about the world and its history through environmental details and character experiences" ], [ "who writes with a focus on clear, unambiguous descriptions of settings, telling readers exactly what they need to know about each location", "who writes with a focus on sensory-rich depictions of settings, allowing readers to experience locations through vivid imagery and atmosphere" ], [ "who crafts narratives that explicitly state the cause-and-effect relationships between events, clearly explaining how one action leads to another", "who crafts narratives that imply cause-and-effect relationships through the sequence of events and their consequences, letting readers connect the dots" ], [ "who specializes in direct characterization, telling readers about characters' personalities, backgrounds, and traits through clear statements", "who specializes in indirect characterization, showing characters' personalities, backgrounds, and traits through their actions, choices, and interactions" ], [ "known for creating stories that explicitly describe characters' physical appearances, leaving no room for misinterpretation", "known for creating stories that reveal characters' physical appearances gradually through select details and others' reactions" ], [ "who excels at writing stories where the emotional atmosphere is directly stated, telling readers exactly how to feel about each scene", "who excels at writing stories where the emotional atmosphere is conveyed through environmental cues, character reactions, and carefully chosen details" ] ] } ``` </details> <details> <summary>"Character Focus (click to expand)"</summary> ```json { "classes": ["narration", "dialogue"], "data": [ [ "who excels at using vivid narration to convey character personalities, motivations, and relationships, creating an immersive experience for readers", "who excels at using vibrant dialogue to convey character personalities, motivations, and relationships, creating an immersive experience for readers" ], [ "who weaves tales using narration to develop characters and explore their inner worlds, allowing readers to connect with them on a deeper level", "who weaves tales using dialogue to develop characters and explore their inner worlds, allowing readers to connect with them on a deeper level" ], [ "known for your ability to transport readers into characters' minds through evocative narration that explores their fears, hopes, and relationships", "known for your ability to transport readers into characters' minds through authentic dialogue that reveals their fears, hopes, and relationships" ], [ "who excels at using narration to craft tales that explore characters' emotional depths, creating stories that resonate with readers on a personal level", "who excels at using dialogue to craft tales that explore characters' emotional depths, creating stories that resonate with 
readers on a personal level" ], [ "specializing in narration-driven storytelling, creating stories that use narration to uncover characters' hidden desires, fears, and relationships, engaging readers in their emotional journeys", "specializing in dialogue-driven storytelling, creating stories that use conversations to uncover characters' hidden desires, fears, and relationships, engaging readers in their emotional journeys" ], [ "who crafts rich narrative descriptions to build intricate worlds and complex characters, immersing readers in the story's atmosphere and emotional landscape", "who crafts rich conversational exchanges to build intricate worlds and complex characters, immersing readers in the story's atmosphere and emotional landscape" ], [ "known for using powerful narrative prose to convey the subtleties of character development and plot progression, creating a vivid reading experience", "known for using powerful dialogue to convey the subtleties of character development and plot progression, creating a vivid reading experience" ], [ "who excels in using descriptive narration to paint detailed pictures of settings, characters, and events, bringing the story to life in readers' minds", "who excels in using realistic dialogue to paint detailed pictures of settings, characters, and events, bringing the story to life through characters' voices" ], [ "specializing in narrative techniques that reveal characters' thoughts and feelings, providing deep insights into their motivations and inner conflicts", "specializing in dialogue techniques that reveal characters' thoughts and feelings, providing deep insights into their motivations and inner conflicts through their words" ], [ "who creates compelling storylines through skillful use of narrative exposition, guiding readers through complex plots and character arcs", "who creates compelling storylines through skillful use of character conversations, guiding readers through complex plots and character arcs via verbal interactions" ] ] } ``` </details> **The 4 elements of the [Dark Tetrad](https://en.wikipedia.org/wiki/Dark_triad)**: <details> <summary>"Empathy vs Sociopathy (click to expand)"</summary> ```json { "classes": ["empathy", "sociopathy"], "data": [ [ "who creates stories centered around characters with extraordinary ability to understand and share others' feelings, leading to profound personal growth and positive change in their communities", "who creates stories centered around characters with extreme inability to understand or care about others' feelings, leading to profound personal gain and negative change in their communities" ], [ "who crafts narratives showcasing the transformative power of understanding others, where characters learn to see the world through others' eyes and form genuine, healing connections", "who crafts narratives showcasing the destructive power of emotional detachment, where characters learn to see the world only through their own desires and form manipulative, exploitative connections" ], [ "renowned for writing tales of emotional awareness, where protagonists navigate complex social situations by understanding and respecting the feelings and perspectives of those around them", "renowned for writing tales of emotional indifference, where protagonists navigate complex social situations by exploiting and disregarding the feelings and perspectives of those around them" ], [ "of heartwarming tales, where characters overcome their differences through mutual understanding, active listening, and a 
willingness to support each other through life's challenges", "of chilling tales, where characters exploit their differences for personal advantage, ignoring others' needs, and showing a complete unwillingness to support anyone but themselves" ], [ "who specializes in stories that highlight the ripple effects of understanding and consideration, showing how small acts of kindness can lead to significant positive changes in individuals and society", "who specializes in stories that highlight the ripple effects of callousness and manipulation, showing how small acts of exploitation can lead to significant negative changes in individuals and society" ], [ "focused on depicting characters who possess an innate ability to sense and respond to others' emotional states, often serving as a healing presence in their communities", "focused on depicting characters who possess an innate inability to sense or care about others' emotional states, often serving as a disruptive presence in their communities" ], [ "who writes about individuals with a profound capacity for understanding others, exploring how this trait shapes their relationships and influences their decision-making in complex moral situations", "who writes about individuals with a profound lack of concern for others, exploring how this trait shapes their relationships and influences their decision-making in complex moral situations" ], [ "exploring themes of emotional growth, where characters develop their ability to understand others to overcome personal challenges and foster meaningful connections", "exploring themes of emotional stagnation, where characters remain trapped in their inability to connect with others, leading to increasingly harmful behaviors and isolation" ], [ "who crafts tales of healing through understanding, where characters use their ability to connect with others to help them overcome trauma and build resilience", "who crafts tales of harm through indifference, where characters use their lack of concern for others to exploit their traumas and undermine their resilience" ], [ "known for stories where characters' deep understanding of others allows them to bridge divides, resolve conflicts, and create harmonious communities despite initial differences", "known for stories where characters' complete disregard for others drives them to create divides, instigate conflicts, and disrupt communities for their own benefit" ] ] } ``` </details> <details> <summary>"Honesty vs Machiavellianism (click to expand)"</summary> ```json { "classes": ["honesty", "machiavellianism"], "data": [ [ "who creates stories about characters who steadfastly uphold their principles, even when faced with difficult choices or personal costs, showcasing the power of integrity in building trust and respect", "who creates stories about characters who flexibly adapt their principles, especially when faced with opportunities for gain or power, showcasing the effectiveness of manipulation in achieving personal goals" ], [ "who crafts narratives celebrating the courage of those who speak the truth, where protagonists navigate complex moral dilemmas by staying true to their values and being transparent in their actions", "who crafts narratives celebrating the cunning of masterminds, where protagonists navigate complex social landscapes by adapting their values and obscuring their true intentions" ], [ "known for tales of principled leadership, where characters inspire others through their unwavering commitment to truthfulness, even in the face of adversity or 
temptation", "known for tales of strategic leadership, where characters control others through their flexible approach to information sharing, especially in the face of opportunities or challenges" ], [ "of ethical triumphs, where individuals choose the path of openness and transparency, ultimately creating stronger relationships and more just societies", "of pragmatic victories, where individuals choose the path of calculated deception, ultimately achieving their goals and securing their positions of influence" ], [ "who specializes in stories of personal and professional integrity, where characters discover that their trustworthiness and reliability become their greatest strengths in overcoming challenges", "who specializes in stories of personal and professional advancement, where characters discover that their adaptability and cunning become their greatest assets in overcoming obstacles" ], [ "focused on depicting characters who believe in the inherent value of openness, often facing and overcoming significant hardships as a result of their commitment to truthfulness", "focused on depicting characters who believe in the utility of selective disclosure, often achieving significant successes as a result of their strategic use of information and misinformation" ], [ "who writes about individuals dedicated to fostering trust through consistent openness, highlighting the long-term benefits of transparent communication in all relationships", "who writes about individuals dedicated to accumulating influence through strategic communication, highlighting the immediate advantages of controlling information flow in all interactions" ], [ "exploring themes of personal growth through radical openness, where characters learn to confront difficult truths about themselves and others, leading to genuine connections", "exploring themes of social advancement through tactical disclosure, where characters learn to present carefully curated information about themselves and others, leading to advantageous alliances" ], [ "who crafts tales of ethical problem-solving, where characters face complex challenges and find solutions that maintain their integrity and the trust of those around them", "who crafts tales of strategic problem-solving, where characters face complex challenges and find solutions that prioritize their objectives, regardless of ethical considerations" ], [ "known for stories where characters' commitment to openness allows them to build lasting partnerships and create positive change, even in corrupt or challenging environments", "known for stories where characters' mastery of strategic disclosure allows them to forge useful alliances and reshape their environment to their advantage, especially in competitive settings" ] ] } ``` </details> <details> <summary>"Humility vs Narcissism (click to expand)"</summary> ```json { "classes": ["humility", "narcissism"], "data": [ [ "who creates stories about characters who embrace their flaws and limitations, learning to value others' contributions and grow through collaboration and open-mindedness", "who creates stories about characters who deny their flaws and limitations, learning to devalue others' contributions and stagnate through self-aggrandizement and closed-mindedness" ], [ "who crafts narratives of quiet strength, where protagonists lead by example, listen more than they speak, and find power in admitting their mistakes and learning from others", "who crafts narratives of loud dominance, where protagonists lead by assertion, speak more than 
they listen, and find power in denying their mistakes and dismissing others' input" ], [ "known for tales of personal growth, where characters overcome their ego, recognize their own biases, and discover the profound impact of putting others first", "known for tales of personal inflation, where characters indulge their ego, ignore their own biases, and discover the immediate gratification of putting themselves first" ], [ "of inspirational journeys, where individuals learn to balance confidence with modesty, celebrating others' successes as enthusiastically as their own", "of self-centered journeys, where individuals learn to amplify confidence without modesty, diminishing others' successes while exaggerating their own" ], [ "who specializes in stories of transformative self-awareness, where characters discover that true strength lies in vulnerability and the ability to say 'I don't know' or 'I was wrong'", "who specializes in stories of persistent self-delusion, where characters insist that true strength lies in invulnerability and the refusal to ever admit ignorance or error" ], [ "focused on depicting characters who find fulfillment in supporting others' growth and success, often stepping back to allow others to shine", "focused on depicting characters who find fulfillment only in their own achievements and accolades, often stepping on others to ensure they remain in the spotlight" ], [ "who writes about individuals who actively seek feedback and criticism, viewing it as an opportunity for improvement and personal development", "who writes about individuals who actively avoid feedback and criticism, viewing it as a threat to their self-image and responding with anger or dismissal" ], [ "exploring themes of collective achievement, where characters learn that the greatest accomplishments come from acknowledging and harnessing the strengths of a diverse team", "exploring themes of individual superiority, where characters insist that the greatest accomplishments come from their own innate talents and dismiss the contributions of others" ], [ "who crafts tales of empathetic leadership, where characters inspire loyalty and trust by genuinely caring about their team's well-being and giving credit where it's due", "who crafts tales of self-serving leadership, where characters demand loyalty and obedience by prioritizing their own image and taking credit for all successes" ], [ "known for stories where characters' selflessness and ability to recognize their own limitations allows them to form deep, meaningful relationships and create inclusive, supportive communities", "known for stories where characters' self-centeredness and inflated sense of self-importance leads them to form shallow, transactional relationships and create exclusive, competitive environments" ] ] } ``` </details> <details> <summary>"Compassion vs Sadism (click to expand)"</summary> ```json { "classes": ["compassion", "sadism"], "data": [ [ "who creates stories about characters finding fulfillment in alleviating others' suffering, showcasing the transformative power of kindness in healing both individuals and communities", "who creates stories about characters finding fulfillment in inflicting suffering on others, showcasing the destructive power of cruelty in harming both individuals and communities" ], [ "who crafts narratives of profound human connection, where protagonists learn to extend care to even the most difficult individuals, leading to unexpected personal growth", "who crafts narratives of profound human cruelty, 
where protagonists learn to derive pleasure from tormenting even the most vulnerable individuals, leading to unexpected personal degradation" ], [ "known for tales of emotional healing, where characters overcome their own pain by reaching out to help others, creating a ripple effect of kindness", "known for tales of emotional torture, where characters intensify others' pain for their own pleasure, creating a ripple effect of suffering" ], [ "of heartwarming journeys, where individuals discover their inner strength through acts of selfless care, often in the face of adversity", "of disturbing journeys, where individuals discover their capacity for cruelty through acts of malicious pleasure, often in the face of others' vulnerability" ], [ "who specializes in stories of personal transformation, where characters' small acts of kindness accumulate to create significant positive impacts in their lives and others", "who specializes in stories of personal corruption, where characters' small acts of cruelty accumulate to create significant negative impacts in their lives and others" ], [ "focused on depicting characters who find deep satisfaction in nurturing and supporting others, exploring the profound joy that comes from alleviating suffering", "focused on depicting characters who find intense pleasure in tormenting and breaking others, exploring the disturbing thrill that comes from inflicting pain" ], [ "who writes about individuals dedicating themselves to understanding and addressing others' pain, highlighting the personal growth that comes from cultivating care", "who writes about individuals dedicating themselves to causing and prolonging others' pain, highlighting the personal gratification that comes from indulging in malicious impulses" ], [ "exploring themes of healing through kindness, where characters learn to overcome their own traumas by extending care to those in need", "exploring themes of harm through cruelty, where characters exacerbate their own dark tendencies by inflicting pain on those who are vulnerable" ], [ "who crafts tales of emotional recovery, where individuals learn to connect with others by offering genuine care and support in times of distress", "who crafts tales of emotional destruction, where individuals learn to disconnect from others by deriving pleasure from their moments of greatest suffering" ], [ "known for stories where characters find strength in showing mercy and kindness, even to those who may not seem to deserve it, leading to unexpected redemption", "known for stories where characters find power in showing ruthlessness and cruelty, especially to those who are helpless, leading to escalating cycles of harm" ] ] } ``` </details> **An "Optimism vs Nihilism" axis to compliment the [Dark Tetrad](https://en.wikipedia.org/wiki/Dark_triad) axis:** <details> <summary>"Optimism vs Nihilism (click to expand)"</summary> ```json { "classes": ["optimism", "nihilism"], "data": [ [ "who creates stories about characters with an unshakeable belief that every situation, no matter how dire, contains the seed of a positive outcome", "who creates stories about characters with an unshakeable belief that every situation, no matter how promising, is ultimately pointless and devoid of meaning" ], [ "who crafts narratives of individuals who see setbacks as opportunities, consistently finding silver linings in the darkest clouds", "who crafts narratives of individuals who see all events as equally insignificant, consistently rejecting the notion that anything matters in a 
purposeless universe" ], [ "known for tales of characters who maintain an infectious positive outlook, inspiring hope and resilience in others even in the bleakest circumstances", "known for tales of characters who maintain a persistent sense of life's futility, spreading a contagious belief in the absurdity of existence to others" ], [ "of transformative hopefulness, where protagonists' unwavering positive attitudes literally change the course of events for the better", "of pervasive meaninglessness, where protagonists' unwavering belief in life's futility colors their perception of all events as equally insignificant" ], [ "who specializes in stories of relentless positivity, portraying characters who believe so strongly in good outcomes that they seem to will them into existence", "who specializes in stories of unyielding emptiness, portraying characters who believe so strongly in life's lack of purpose that they reject all conventional values and goals" ], [ "focused on depicting characters who find joy and purpose in every aspect of life, no matter how small or seemingly insignificant", "focused on depicting characters who find all aspects of life equally devoid of purpose, viewing joy and suffering as meaningless constructs" ], [ "who writes about individuals who persistently seek out the good in others and in situations, believing in the inherent value of positive thinking", "who writes about individuals who consistently reject the idea of inherent value in anything, viewing all human pursuits as arbitrary and ultimately pointless" ], [ "exploring themes of hope and resilience, where characters overcome adversity through their steadfast belief in a better future", "exploring themes of existential emptiness, where characters confront the perceived meaninglessness of existence and reject the concept of progress or improvement" ], [ "who crafts tales of inspirational perseverance, where characters' belief in positive outcomes drives them to overcome seemingly insurmountable odds", "who crafts tales of philosophical resignation, where characters' belief in the futility of all action leads them to embrace a state of passive indifference" ], [ "known for stories where characters' hopeful worldviews lead them to create positive change and find fulfillment in their lives and relationships", "known for stories where characters' belief in life's fundamental meaninglessness leads them to reject societal norms and find a paradoxical freedom in purposelessness" ] ] } ``` </details> ### 3. Then we collect a large number of creative-writing prompts: - I used [Sao10K/Short-Storygen-v2](https://huggingface.co/datasets/Sao10K/Short-Storygen-v2) and a couple of other sources to get 11835 creative-writing prompts in total (see the `'writing_prompts.txt'` file). - The [jq](https://jqlang.github.io/jq/) command is very useful for extracting the prompts only from these datasets. ### 4. Run the model on a random sample of (prompt-stem, continuation, creative-writing prompts) combinations: The Cartesian product of: 2500 prompt-stem sentences x 10 continuation sentences x 11835 story prompts ≈ 300M possible combinations. - It is important that the same prompt-stem sample sentence be used with each (`"baseline"`, `"negative"`, `"positive"`) triplet. - It is also important that the same (prompt-stem, continuation) sample sentence be used with the`"negative"` and `"positive"` members of the same triplet. 
- The suggested value of `"hidden_size"` for the `--num_prompt_samples` option comes from the theory of [estimation of covariance matrices](https://en.wikipedia.org/wiki/Estimation_of_covariance_matrices), which shows we need ***at the very least*** [one sample per feature](https://stats.stackexchange.com/questions/90045/how-many-samples-are-needed-to-estimate-a-p-dimensional-covariance-matrix) (though this may be overkill, since we only retain the top eigenvectors...).

### 5. Create a pair of "differenced datasets" by subtracting the corresponding `"baseline"` class's sample from both of the other two classes' samples:

- The reason for this is so that we "centre" the data around the "baseline" (i.e., set the "baseline" as the origin and look for vector directions that point away from it).
- This is in contrast to assuming the difference of the means is the "centre", as in the 2-class version of this that applies PCA to the [covariance matrix](https://en.wikipedia.org/wiki/Covariance_matrix) of the differences (i.e., the "standard" method of creating control vectors).

### 6. Now we take our two "differenced datasets" held in data matrices A and B (with rows as samples and columns as features):

1. Create the [cross-covariance matrix](https://en.wikipedia.org/wiki/Cross-covariance_matrix), `C = A^T * B`.
2. Next we [symmetrise](https://en.wikipedia.org/wiki/Symmetric_matrix), `C' = (C^T + C) / 2`.
3. Perform an [eigendecomposition](https://en.wikipedia.org/wiki/Eigendecomposition_of_a_matrix), `C' = Q * Λ * Q^(-1)`.
4. Since we symmetrised the matrix, the **eigenvectors** (`Q`) and **eigenvalues** (`Λ`) will all be real-valued.
5. Arrange the **eigenvectors** in descending order based on their corresponding **eigenvalues**.
6. Once the **eigenvectors** are sorted, discard the **eigenvalues** as they won't be needed again.

The reason for using the [cross-covariance matrix](https://en.wikipedia.org/wiki/Cross-covariance_matrix) instead of the [covariance matrix](https://en.wikipedia.org/wiki/Covariance_matrix):

- The **covariance matrix** of a differenced dataset exemplifies directions in **A or B** (i.e., think about the expansion of `(a-b)² = a² + b² - 2×a×b`).
- The **cross-covariance matrix** of a differenced dataset exemplifies directions in **A and B** (i.e., akin to `a×b`, with no `a²` or `b²` terms).

The reason for creating the symmetrised matrix is two-fold:

- To avoid complex-valued **eigenvectors** that tell us about rotations (which we can't actually make use of here anyway).
- To specifically try to find opposing/balanced "axes" for our different traits (i.e., we don't want to find positively correlated directions nor unbalanced directions).

### 7. So now we have a set of "directions" to examine:

- It turns out that ~90% of the time the **principal eigenvector** (i.e., the **eigenvector** with the largest corresponding **eigenvalue**) is the one you want.
- In the ~10% of cases where it is not (or the signal is split between a couple of different **eigenvectors**), we (greedily) create a "compound direction" by examining the [discriminant ratio](https://en.wikipedia.org/wiki/Linear_discriminant_analysis) of each direction.
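Steps 5-7 (together with the orientation, scaling and de-biasing described in step 8 below) boil down to a few lines of linear algebra. The following is only a minimal NumPy sketch of that pipeline, not the actual extraction script used for these control vectors: the file and variable names are hypothetical, it assumes one layer's hidden states have already been sampled into `(num_samples, hidden_size)` matrices (with row `i` coming from the same prompt triplet in all three), and the step-8 portion is just one plausible reading of the description that follows. In practice the same procedure is repeated for every layer.

```python
import numpy as np

# Hypothetical file names: each matrix holds one layer's hidden states, with one
# row per (prompt-stem, continuation, story-prompt) sample and one column per
# hidden dimension.
baseline = np.load("baseline_hidden_states.npy")   # shape: (num_samples, hidden_size)
negative = np.load("negative_hidden_states.npy")
positive = np.load("positive_hidden_states.npy")

# Step 5: "centre" the data on the baseline by differencing.
A = negative - baseline
B = positive - baseline

# Step 6: cross-covariance, symmetrise, eigendecompose, then sort.
C = A.T @ B                                # cross-covariance matrix, C = A^T * B
C_sym = (C.T + C) / 2.0                    # symmetrise -> real eigenvalues/eigenvectors
eigvals, eigvecs = np.linalg.eigh(C_sym)   # eigh handles symmetric matrices directly
order = np.argsort(eigvals)[::-1]          # descending by eigenvalue
eigvecs = eigvecs[:, order]                # the eigenvalues themselves are not needed again

# Step 7: most of the time the principal eigenvector is the axis we want.
direction = eigvecs[:, 0]                  # unit norm, but the sign is still arbitrary

# Step 8 (described below), one plausible reading: orient, scale and de-bias.
proj_pos = B @ direction                   # project the "desired" (positive) differences
proj_neg = A @ direction
if proj_pos.mean() < 0:                    # flip the signs so the positive trait points "positive"
    direction, proj_pos, proj_neg = -direction, -proj_pos, -proj_neg
midpoint = (proj_pos.mean() + proj_neg.mean()) / 2.0
debias_vector   = midpoint * direction                      # stored as the de-bias control vector
positive_vector = (proj_pos.mean() - midpoint) * direction  # positive end of the axis
negative_vector = (proj_neg.mean() - midpoint) * direction  # negative end of the axis
```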
### 8. Finally, we project the "direction" to reorient and scale as necessary:

- There is no reason the **eigenvectors** should point in the direction we want, so 50% of the time we have to flip all the signs: we do this by [projecting](https://en.wikipedia.org/wiki/Projection_(linear_algebra%29) our (differenced) "desired" dataset onto the (unit-norm) direction and then testing the sign of the mean.
- Due to the way LLMs work via the "residual stream", the hidden states tend to get larger and larger as the layers progress, so to normalize this we also scale by the magnitude of the mean of the same projection as above.
- To better separate the "bias" effect from the positive/negative axis (and to make the positive and negative ends equidistant from the model's "baseline" behaviour), we store the midpoint of these means in the de-bias control vector and then subtract this midpoint from both the positive-axis and negative-axis control vectors.

**NOTES**:

- I have found the above can be applied to every layer, but the last layer often has hidden-state means that are 10-100x larger than the rest, so I have excluded it from everything I have uploaded here.
- I have tried many other eigendecompositions: PCA on the 2-class differenced datasets, PCA on the joined 2-class/3-class datasets, solving generalized eigensystems similar to CCA, and so on.
- The "balanced" directions / "axes" this method finds are the ***exact opposite*** of those needed for the [Refusal in LLMs is mediated by a single direction](https://www.lesswrong.com/posts/jGuXSZgv6qfdhMCuJ/refusal-in-llms-is-mediated-by-a-single-direction) paper.

---

## Changelog

- *28/08/24 - Added [Qwen2-72B-Instruct](https://huggingface.co/jukofyork/creative-writing-control-vectors-v3.0/tree/main/Qwen2-72B-Instruct).*
- *29/08/24 - Added [Qwen1.5-72B-Chat](https://huggingface.co/jukofyork/creative-writing-control-vectors-v3.0/tree/main/Qwen1.5-72B-Chat), [Mistral-7B-Instruct-v0.2](https://huggingface.co/jukofyork/creative-writing-control-vectors-v3.0/tree/main/Mistral-7B-Instruct-v0.2), [Mistral-7B-Instruct-v0.3](https://huggingface.co/jukofyork/creative-writing-control-vectors-v3.0/tree/main/Mistral-7B-Instruct-v0.3), [miqu-1-70b](https://huggingface.co/jukofyork/creative-writing-control-vectors-v3.0/tree/main/miqu-1-70b), [Mixtral-8x7B-Instruct-v0.1](https://huggingface.co/jukofyork/creative-writing-control-vectors-v3.0/tree/main/Mixtral-8x7B-Instruct-v0.1) and [Yi-1.5-34B-Chat-16K](https://huggingface.co/jukofyork/creative-writing-control-vectors-v3.0/tree/main/Yi-1.5-34B-Chat-16K).*
- *30/08/24 - Added [Meta-Llama-3-8B-Instruct](https://huggingface.co/jukofyork/creative-writing-control-vectors-v3.0/tree/main/Meta-Llama-3-8B-Instruct), [Meta-Llama-3-70B-Instruct](https://huggingface.co/jukofyork/creative-writing-control-vectors-v3.0/tree/main/Meta-Llama-3-70B-Instruct), [Meta-Llama-3.1-8B-Instruct](https://huggingface.co/jukofyork/creative-writing-control-vectors-v3.0/tree/main/Meta-Llama-3.1-8B-Instruct) and [Meta-Llama-3.1-70B-Instruct](https://huggingface.co/jukofyork/creative-writing-control-vectors-v3.0/tree/main/Meta-Llama-3.1-70B-Instruct).*
- *31/08/24 - Added [aya-23-35B](https://huggingface.co/jukofyork/creative-writing-control-vectors-v3.0/tree/main/aya-23-35B), [Gemma-2-9B-It-SPPO-Iter3](https://huggingface.co/jukofyork/creative-writing-control-vectors-v3.0/tree/main/Gemma-2-9B-It-SPPO-Iter3) and [Qwen1.5-14B-Chat](https://huggingface.co/jukofyork/creative-writing-control-vectors-v3.0/tree/main/Qwen1.5-14B-Chat).*
- *01/09/24 - Added
[Mixtral-8x22B-Instruct-v0.1](https://huggingface.co/jukofyork/creative-writing-control-vectors-v3.0/tree/main/Mixtral-8x22B-Instruct-v0.1) and [Qwen1.5-110B-Chat](https://huggingface.co/jukofyork/creative-writing-control-vectors-v3.0/tree/main/Qwen1.5-110B-Chat).* - *02/09/24 - Added [c4ai-command-r-plus-08-2024](https://huggingface.co/jukofyork/creative-writing-control-vectors-v3.0/tree/main/c4ai-command-r-plus-08-2024).* - *03/09/24 - Added [c4ai-command-r-08-2024](https://huggingface.co/jukofyork/creative-writing-control-vectors-v3.0/tree/main/c4ai-command-r-08-2024) ([\*\*\*READ THIS FIRST\*\*\*](https://huggingface.co/jukofyork/creative-writing-control-vectors-v3.0/discussions/2)), [Yi-1.5-34B-Chat](https://huggingface.co/jukofyork/creative-writing-control-vectors-v3.0/tree/main/Yi-1.5-34B-Chat), [gemma-2-27b-it-SimPO-37K](https://huggingface.co/jukofyork/creative-writing-control-vectors-v3.0/tree/main/gemma-2-27b-it-SimPO-37K), [aya-23-8B](https://huggingface.co/jukofyork/creative-writing-control-vectors-v3.0/tree/main/aya-23-8B), [gemma-2-9b-it-SimPO](https://huggingface.co/jukofyork/creative-writing-control-vectors-v3.0/tree/main/gemma-2-9b-it-SimPO), [Qwen2-7B-Instruct](https://huggingface.co/jukofyork/creative-writing-control-vectors-v3.0/tree/main/Qwen2-7B-Instruct) and [Yi-34B-Chat](https://huggingface.co/jukofyork/creative-writing-control-vectors-v3.0/tree/main/Yi-34B-Chat).* - *04/09/24 - Added [deepseek-llm-67b-chat](https://huggingface.co/jukofyork/creative-writing-control-vectors-v3.0/tree/main/deepseek-llm-67b-chat), [internlm2_5-20b-chat](https://huggingface.co/jukofyork/creative-writing-control-vectors-v3.0/tree/main/internlm2_5-20b-chat), [Athene-70B](https://huggingface.co/jukofyork/creative-writing-control-vectors-v3.0/tree/main/Athene-70B), [Llama-3-Instruct-8B-SPPO-Iter3](https://huggingface.co/jukofyork/creative-writing-control-vectors-v3.0/tree/main/Llama-3-Instruct-8B-SPPO-Iter3), [magnum-v2-32b](https://huggingface.co/jukofyork/creative-writing-control-vectors-v3.0/tree/main/magnum-v2-32b), [Mistral7B-PairRM-SPPO-Iter3](https://huggingface.co/jukofyork/creative-writing-control-vectors-v3.0/tree/main/Mistral7B-PairRM-SPPO-Iter3) and [Nous-Capybara-34B](https://huggingface.co/jukofyork/creative-writing-control-vectors-v3.0/tree/main/Nous-Capybara-34B).* - *05/09/24 - Added [Llama-3-70B-Instruct-Storywriter](https://huggingface.co/jukofyork/creative-writing-control-vectors-v3.0/tree/main/Llama-3-70B-Instruct-Storywriter), [35b-beta-long](https://huggingface.co/jukofyork/creative-writing-control-vectors-v3.0/tree/main/35b-beta-long) and [magnum-v3-34b](https://huggingface.co/jukofyork/creative-writing-control-vectors-v3.0/tree/main/magnum-v3-34b).* - *06/09/24 - Added [Hermes-3-Llama-3.1-70B](https://huggingface.co/jukofyork/creative-writing-control-vectors-v3.0/tree/main/Hermes-3-Llama-3.1-70B), [magnum-v2-72b](https://huggingface.co/jukofyork/creative-writing-control-vectors-v3.0/tree/main/magnum-v2-72b), [magnum-v1-32b](https://huggingface.co/jukofyork/creative-writing-control-vectors-v3.0/tree/main/magnum-v1-32b) and [L3.1-70B-Euryale-v2.2](https://huggingface.co/jukofyork/creative-writing-control-vectors-v3.0/tree/main/L3.1-70B-Euryale-v2.2).* - *08/09/24 - Added [aurelian-v0.5-70b-rope8-32K-fp16](https://huggingface.co/jukofyork/creative-writing-control-vectors-v3.0/tree/main/aurelian-v0.5-70b-rope8-32K-fp16), 
[aurelian-alpha0.1-70b-rope8-32K-fp16](https://huggingface.co/jukofyork/creative-writing-control-vectors-v3.0/tree/main/aurelian-alpha0.1-70b-rope8-32K-fp16), [L3-70B-Euryale-v2.1](https://huggingface.co/jukofyork/creative-writing-control-vectors-v3.0/tree/main/L3-70B-Euryale-v2.1), [Llama-3-Lumimaid-70B-v0.1](https://huggingface.co/jukofyork/creative-writing-control-vectors-v3.0/tree/main/Llama-3-Lumimaid-70B-v0.1), [magnum-72b-v1](https://huggingface.co/jukofyork/creative-writing-control-vectors-v3.0/tree/main/magnum-72b-v1) and [turbcat-instruct-72b](https://huggingface.co/jukofyork/creative-writing-control-vectors-v3.0/tree/main/turbcat-instruct-72b).* - *09/09/24 - Added [daybreak-miqu-1-70b-v1.0-hf](https://huggingface.co/jukofyork/creative-writing-control-vectors-v3.0/tree/main/daybreak-miqu-1-70b-v1.0-hf), [dolphin-2.9.2-qwen2-72b](https://huggingface.co/jukofyork/creative-writing-control-vectors-v3.0/tree/main/dolphin-2.9.2-qwen2-72b) and [Lumimaid-v0.2-70B](https://huggingface.co/jukofyork/creative-writing-control-vectors-v3.0/tree/main/Lumimaid-v0.2-70B).* - *11/09/24 - Added [Lumimaid-v0.2-123B](https://huggingface.co/jukofyork/creative-writing-control-vectors-v3.0/tree/main/Lumimaid-v0.2-123B).* - *12/09/24 - Added [magnum-v2-123b](https://huggingface.co/jukofyork/creative-writing-control-vectors-v3.0/tree/main/magnum-v2-123b).* - *13/09/24 - Added [Eurux-8x22b-nca](https://huggingface.co/jukofyork/creative-writing-control-vectors-v3.0/tree/main/Eurux-8x22b-nca).* - *14/09/24 - Added [Divergence-33B](https://huggingface.co/jukofyork/creative-writing-control-vectors-v3.0/tree/main/Divergence-33B), [gemma2-gutenberg-27B](https://huggingface.co/jukofyork/creative-writing-control-vectors-v3.0/tree/main/gemma2-gutenberg-27B), [gemma-2-Ifable-9B](https://huggingface.co/jukofyork/creative-writing-control-vectors-v3.0/tree/main/gemma-2-Ifable-9B), [mistral-nemo-gutenberg-12B](https://huggingface.co/jukofyork/creative-writing-control-vectors-v3.0/tree/main/mistral-nemo-gutenberg-12B), [mistral-nemo-gutenberg-12B-v2](https://huggingface.co/jukofyork/creative-writing-control-vectors-v3.0/tree/main/mistral-nemo-gutenberg-12B-v2), [romulus-mistral-nemo-12b-simpo](https://huggingface.co/jukofyork/creative-writing-control-vectors-v3.0/tree/main/romulus-mistral-nemo-12b-simpo), [Llama-3.1-8B-ArliAI-RPMax-v1.1](https://huggingface.co/jukofyork/creative-writing-control-vectors-v3.0/tree/main/Llama-3.1-8B-ArliAI-RPMax-v1.1), [Mistral-Nemo-12B-ArliAI-RPMax-v1.1](https://huggingface.co/jukofyork/creative-writing-control-vectors-v3.0/tree/main/Mistral-Nemo-12B-ArliAI-RPMax-v1.1) and [Llama-3.1-70B-ArliAI-RPMax-v1.1](https://huggingface.co/jukofyork/creative-writing-control-vectors-v3.0/tree/main/Llama-3.1-70B-ArliAI-RPMax-v1.1).* - *20/09/24 - Added [Qwen2.5-7B-Instruct](https://huggingface.co/jukofyork/creative-writing-control-vectors-v3.0/tree/main/Qwen2.5-7B-Instruct), [Qwen2.5-14B-Instruct](https://huggingface.co/jukofyork/creative-writing-control-vectors-v3.0/tree/main/Qwen2.5-14B-Instruct), [Qwen2.5-32B-Instruct](https://huggingface.co/jukofyork/creative-writing-control-vectors-v3.0/tree/main/Qwen2.5-32B-Instruct), [Qwen2.5-72B-Instruct](https://huggingface.co/jukofyork/creative-writing-control-vectors-v3.0/tree/main/Qwen2.5-72B-Instruct), [magnum-v3-27b-kto](https://huggingface.co/jukofyork/creative-writing-control-vectors-v3.0/tree/main/magnum-v3-27b-kto) and 
[Mistral-Small-Instruct-2409](https://huggingface.co/jukofyork/creative-writing-control-vectors-v3.0/tree/main/Mistral-Small-Instruct-2409).*
[ "CRAFT" ]
nvidia/NV-Embed-v2
nvidia
feature-extraction
[ "transformers", "safetensors", "nvembed", "feature-extraction", "mteb", "sentence-transformers", "custom_code", "en", "arxiv:2405.17428", "arxiv:2407.15831", "license:cc-by-nc-4.0", "model-index", "region:us" ]
"2024-08-29T13:00:32Z"
2025-02-23T18:17:44+00:00
231,116
397
--- language: - en library_name: transformers license: cc-by-nc-4.0 tags: - mteb - sentence-transformers model-index: - name: NV-Embed-v2 results: - task: type: Classification dataset: name: MTEB AmazonCounterfactualClassification (en) type: mteb/amazon_counterfactual config: en split: test revision: e8379541af4e31359cca9fbcf4b00f2671dba205 metrics: - type: accuracy value: 94.28358208955224 - type: accuracy_stderr value: 0.40076780842082305 - type: ap value: 76.49097318319616 - type: ap_stderr value: 1.2418692675183929 - type: f1 value: 91.41982003001168 - type: f1_stderr value: 0.5043921413093579 - type: main_score value: 94.28358208955224 - task: type: Classification dataset: name: MTEB AmazonPolarityClassification type: mteb/amazon_polarity config: default split: test revision: e2d317d38cd51312af73b3d32a06d1a08b442046 metrics: - type: accuracy value: 97.74185000000001 - type: accuracy_stderr value: 0.07420471683120942 - type: ap value: 96.4737144875525 - type: ap_stderr value: 0.2977518241541558 - type: f1 value: 97.7417581594921 - type: f1_stderr value: 0.07428763617010377 - type: main_score value: 97.74185000000001 - task: type: Classification dataset: name: MTEB AmazonReviewsClassification (en) type: mteb/amazon_reviews_multi config: en split: test revision: 1399c76144fd37290681b995c656ef9b2e06e26d metrics: - type: accuracy value: 63.96000000000001 - type: accuracy_stderr value: 1.815555011559825 - type: f1 value: 62.49361841640459 - type: f1_stderr value: 2.829339314126457 - type: main_score value: 63.96000000000001 - task: type: Retrieval dataset: name: MTEB ArguAna type: mteb/arguana config: default split: test revision: c22ab2a51041ffd869aaddef7af8d8215647e41a metrics: - type: map_at_1 value: 46.515 - type: map_at_10 value: 62.392 - type: map_at_100 value: 62.732 - type: map_at_1000 value: 62.733000000000004 - type: map_at_3 value: 58.701 - type: map_at_5 value: 61.027 - type: mrr_at_1 value: 0.0 - type: mrr_at_10 value: 0.0 - type: mrr_at_100 value: 0.0 - type: mrr_at_1000 value: 0.0 - type: mrr_at_3 value: 0.0 - type: mrr_at_5 value: 0.0 - type: ndcg_at_1 value: 46.515 - type: ndcg_at_10 value: 70.074 - type: ndcg_at_100 value: 71.395 - type: ndcg_at_1000 value: 71.405 - type: ndcg_at_3 value: 62.643 - type: ndcg_at_5 value: 66.803 - type: precision_at_1 value: 46.515 - type: precision_at_10 value: 9.41 - type: precision_at_100 value: 0.996 - type: precision_at_1000 value: 0.1 - type: precision_at_3 value: 24.68 - type: precision_at_5 value: 16.814 - type: recall_at_1 value: 46.515 - type: recall_at_10 value: 94.097 - type: recall_at_100 value: 99.57300000000001 - type: recall_at_1000 value: 99.644 - type: recall_at_3 value: 74.03999999999999 - type: recall_at_5 value: 84.068 - type: main_score value: 70.074 - task: type: Clustering dataset: name: MTEB ArxivClusteringP2P type: mteb/arxiv-clustering-p2p config: default split: test revision: a122ad7f3f0291bf49cc6f4d32aa80929df69d5d metrics: - type: main_score value: 55.79933795955242 - type: v_measure value: 55.79933795955242 - type: v_measure_std value: 14.575108141916148 - task: type: Clustering dataset: name: MTEB ArxivClusteringS2S type: mteb/arxiv-clustering-s2s config: default split: test revision: f910caf1a6075f7329cdf8c1a6135696f37dbd53 metrics: - type: main_score value: 51.262845995850334 - type: v_measure value: 51.262845995850334 - type: v_measure_std value: 14.727824473104173 - task: type: Reranking dataset: name: MTEB AskUbuntuDupQuestions type: mteb/askubuntudupquestions-reranking config: default split: test 
revision: 2000358ca161889fa9c082cb41daa8dcfb161a54 metrics: - type: map value: 67.46477327480808 - type: mrr value: 79.50160488941653 - type: main_score value: 67.46477327480808 - task: type: STS dataset: name: MTEB BIOSSES type: mteb/biosses-sts config: default split: test revision: d3fb88f8f02e40887cd149695127462bbcf29b4a metrics: - type: cosine_pearson value: 89.74311007980987 - type: cosine_spearman value: 87.41644967443246 - type: manhattan_pearson value: 88.57457108347744 - type: manhattan_spearman value: 87.59295972042997 - type: euclidean_pearson value: 88.27108977118459 - type: euclidean_spearman value: 87.41644967443246 - type: main_score value: 87.41644967443246 - task: type: Classification dataset: name: MTEB Banking77Classification type: mteb/banking77 config: default split: test revision: 0fd18e25b25c072e09e0d92ab615fda904d66300 metrics: - type: accuracy value: 92.41558441558443 - type: accuracy_stderr value: 0.37701502251934443 - type: f1 value: 92.38130170447671 - type: f1_stderr value: 0.39115151225617767 - type: main_score value: 92.41558441558443 - task: type: Clustering dataset: name: MTEB BiorxivClusteringP2P type: mteb/biorxiv-clustering-p2p config: default split: test revision: 65b79d1d13f80053f67aca9498d9402c2d9f1f40 metrics: - type: main_score value: 54.08649516394218 - type: v_measure value: 54.08649516394218 - type: v_measure_std value: 0.5303233693045373 - task: type: Clustering dataset: name: MTEB BiorxivClusteringS2S type: mteb/biorxiv-clustering-s2s config: default split: test revision: 258694dd0231531bc1fd9de6ceb52a0853c6d908 metrics: - type: main_score value: 49.60352214167779 - type: v_measure value: 49.60352214167779 - type: v_measure_std value: 0.7176198612516721 - task: type: Retrieval dataset: name: MTEB CQADupstackRetrieval type: CQADupstackRetrieval_is_a_combined_dataset config: default split: test revision: 46989137a86843e03a6195de44b09deda022eec7 metrics: - type: map_at_1 value: 31.913249999999998 - type: map_at_10 value: 43.87733333333334 - type: map_at_100 value: 45.249916666666664 - type: map_at_1000 value: 45.350583333333326 - type: map_at_3 value: 40.316833333333335 - type: map_at_5 value: 42.317083333333336 - type: mrr_at_1 value: 0.0 - type: mrr_at_10 value: 0.0 - type: mrr_at_100 value: 0.0 - type: mrr_at_1000 value: 0.0 - type: mrr_at_3 value: 0.0 - type: mrr_at_5 value: 0.0 - type: ndcg_at_1 value: 38.30616666666667 - type: ndcg_at_10 value: 50.24175000000001 - type: ndcg_at_100 value: 55.345333333333336 - type: ndcg_at_1000 value: 56.91225000000001 - type: ndcg_at_3 value: 44.67558333333333 - type: ndcg_at_5 value: 47.32333333333334 - type: precision_at_1 value: 38.30616666666667 - type: precision_at_10 value: 9.007416666666666 - type: precision_at_100 value: 1.3633333333333333 - type: precision_at_1000 value: 0.16691666666666666 - type: precision_at_3 value: 20.895666666666667 - type: precision_at_5 value: 14.871666666666666 - type: recall_at_1 value: 31.913249999999998 - type: recall_at_10 value: 64.11891666666666 - type: recall_at_100 value: 85.91133333333333 - type: recall_at_1000 value: 96.28225 - type: recall_at_3 value: 48.54749999999999 - type: recall_at_5 value: 55.44283333333334 - type: main_score value: 50.24175000000001 - task: type: Retrieval dataset: name: MTEB ClimateFEVER type: mteb/climate-fever config: default split: test revision: 47f2ac6acb640fc46020b02a5b59fdda04d39380 metrics: - type: map_at_1 value: 19.556 - type: map_at_10 value: 34.623 - type: map_at_100 value: 36.97 - type: map_at_1000 value: 37.123 - type: 
map_at_3 value: 28.904999999999998 - type: map_at_5 value: 31.955 - type: mrr_at_1 value: 0.0 - type: mrr_at_10 value: 0.0 - type: mrr_at_100 value: 0.0 - type: mrr_at_1000 value: 0.0 - type: mrr_at_3 value: 0.0 - type: mrr_at_5 value: 0.0 - type: ndcg_at_1 value: 44.104 - type: ndcg_at_10 value: 45.388 - type: ndcg_at_100 value: 52.793 - type: ndcg_at_1000 value: 55.108999999999995 - type: ndcg_at_3 value: 38.604 - type: ndcg_at_5 value: 40.806 - type: precision_at_1 value: 44.104 - type: precision_at_10 value: 14.143 - type: precision_at_100 value: 2.2190000000000003 - type: precision_at_1000 value: 0.266 - type: precision_at_3 value: 29.316 - type: precision_at_5 value: 21.98 - type: recall_at_1 value: 19.556 - type: recall_at_10 value: 52.120999999999995 - type: recall_at_100 value: 76.509 - type: recall_at_1000 value: 89.029 - type: recall_at_3 value: 34.919 - type: recall_at_5 value: 42.18 - type: main_score value: 45.388 - task: type: Retrieval dataset: name: MTEB DBPedia type: mteb/dbpedia config: default split: test revision: c0f706b76e590d620bd6618b3ca8efdd34e2d659 metrics: - type: map_at_1 value: 10.714 - type: map_at_10 value: 25.814999999999998 - type: map_at_100 value: 37.845 - type: map_at_1000 value: 39.974 - type: map_at_3 value: 17.201 - type: map_at_5 value: 21.062 - type: mrr_at_1 value: 0.0 - type: mrr_at_10 value: 0.0 - type: mrr_at_100 value: 0.0 - type: mrr_at_1000 value: 0.0 - type: mrr_at_3 value: 0.0 - type: mrr_at_5 value: 0.0 - type: ndcg_at_1 value: 66.0 - type: ndcg_at_10 value: 53.496 - type: ndcg_at_100 value: 58.053 - type: ndcg_at_1000 value: 64.886 - type: ndcg_at_3 value: 57.656 - type: ndcg_at_5 value: 55.900000000000006 - type: precision_at_1 value: 77.25 - type: precision_at_10 value: 43.65 - type: precision_at_100 value: 13.76 - type: precision_at_1000 value: 2.5940000000000003 - type: precision_at_3 value: 61.0 - type: precision_at_5 value: 54.65 - type: recall_at_1 value: 10.714 - type: recall_at_10 value: 31.173000000000002 - type: recall_at_100 value: 63.404 - type: recall_at_1000 value: 85.874 - type: recall_at_3 value: 18.249000000000002 - type: recall_at_5 value: 23.69 - type: main_score value: 53.496 - task: type: Classification dataset: name: MTEB EmotionClassification type: mteb/emotion config: default split: test revision: 4f58c6b202a23cf9a4da393831edf4f9183cad37 metrics: - type: accuracy value: 93.38499999999999 - type: accuracy_stderr value: 0.13793114224133846 - type: f1 value: 90.12141028353496 - type: f1_stderr value: 0.174640257706043 - type: main_score value: 93.38499999999999 - task: type: Retrieval dataset: name: MTEB FEVER type: mteb/fever config: default split: test revision: bea83ef9e8fb933d90a2f1d5515737465d613e12 metrics: - type: map_at_1 value: 84.66900000000001 - type: map_at_10 value: 91.52799999999999 - type: map_at_100 value: 91.721 - type: map_at_1000 value: 91.73 - type: map_at_3 value: 90.752 - type: map_at_5 value: 91.262 - type: mrr_at_1 value: 0.0 - type: mrr_at_10 value: 0.0 - type: mrr_at_100 value: 0.0 - type: mrr_at_1000 value: 0.0 - type: mrr_at_3 value: 0.0 - type: mrr_at_5 value: 0.0 - type: ndcg_at_1 value: 91.20899999999999 - type: ndcg_at_10 value: 93.74900000000001 - type: ndcg_at_100 value: 94.279 - type: ndcg_at_1000 value: 94.408 - type: ndcg_at_3 value: 92.923 - type: ndcg_at_5 value: 93.376 - type: precision_at_1 value: 91.20899999999999 - type: precision_at_10 value: 11.059 - type: precision_at_100 value: 1.1560000000000001 - type: precision_at_1000 value: 0.11800000000000001 - type: 
precision_at_3 value: 35.129 - type: precision_at_5 value: 21.617 - type: recall_at_1 value: 84.66900000000001 - type: recall_at_10 value: 97.03399999999999 - type: recall_at_100 value: 98.931 - type: recall_at_1000 value: 99.65899999999999 - type: recall_at_3 value: 94.76299999999999 - type: recall_at_5 value: 95.968 - type: main_score value: 93.74900000000001 - task: type: Retrieval dataset: name: MTEB FiQA2018 type: mteb/fiqa config: default split: test revision: 27a168819829fe9bcd655c2df245fb19452e8e06 metrics: - type: map_at_1 value: 34.866 - type: map_at_10 value: 58.06099999999999 - type: map_at_100 value: 60.028999999999996 - type: map_at_1000 value: 60.119 - type: map_at_3 value: 51.304 - type: map_at_5 value: 55.054 - type: mrr_at_1 value: 0.0 - type: mrr_at_10 value: 0.0 - type: mrr_at_100 value: 0.0 - type: mrr_at_1000 value: 0.0 - type: mrr_at_3 value: 0.0 - type: mrr_at_5 value: 0.0 - type: ndcg_at_1 value: 64.815 - type: ndcg_at_10 value: 65.729 - type: ndcg_at_100 value: 71.14 - type: ndcg_at_1000 value: 72.336 - type: ndcg_at_3 value: 61.973 - type: ndcg_at_5 value: 62.858000000000004 - type: precision_at_1 value: 64.815 - type: precision_at_10 value: 17.87 - type: precision_at_100 value: 2.373 - type: precision_at_1000 value: 0.258 - type: precision_at_3 value: 41.152 - type: precision_at_5 value: 29.568 - type: recall_at_1 value: 34.866 - type: recall_at_10 value: 72.239 - type: recall_at_100 value: 91.19 - type: recall_at_1000 value: 98.154 - type: recall_at_3 value: 56.472 - type: recall_at_5 value: 63.157 - type: main_score value: 65.729 - task: type: Retrieval dataset: name: MTEB HotpotQA type: mteb/hotpotqa config: default split: test revision: ab518f4d6fcca38d87c25209f94beba119d02014 metrics: - type: map_at_1 value: 44.651999999999994 - type: map_at_10 value: 79.95100000000001 - type: map_at_100 value: 80.51700000000001 - type: map_at_1000 value: 80.542 - type: map_at_3 value: 77.008 - type: map_at_5 value: 78.935 - type: mrr_at_1 value: 0.0 - type: mrr_at_10 value: 0.0 - type: mrr_at_100 value: 0.0 - type: mrr_at_1000 value: 0.0 - type: mrr_at_3 value: 0.0 - type: mrr_at_5 value: 0.0 - type: ndcg_at_1 value: 89.305 - type: ndcg_at_10 value: 85.479 - type: ndcg_at_100 value: 87.235 - type: ndcg_at_1000 value: 87.669 - type: ndcg_at_3 value: 81.648 - type: ndcg_at_5 value: 83.88600000000001 - type: precision_at_1 value: 89.305 - type: precision_at_10 value: 17.807000000000002 - type: precision_at_100 value: 1.9140000000000001 - type: precision_at_1000 value: 0.197 - type: precision_at_3 value: 53.756 - type: precision_at_5 value: 34.018 - type: recall_at_1 value: 44.651999999999994 - type: recall_at_10 value: 89.034 - type: recall_at_100 value: 95.719 - type: recall_at_1000 value: 98.535 - type: recall_at_3 value: 80.635 - type: recall_at_5 value: 85.044 - type: main_score value: 85.479 - task: type: Classification dataset: name: MTEB ImdbClassification type: mteb/imdb config: default split: test revision: 3d86128a09e091d6018b6d26cad27f2739fc2db7 metrics: - type: accuracy value: 97.1376 - type: accuracy_stderr value: 0.04571914259913447 - type: ap value: 95.92783808558808 - type: ap_stderr value: 0.05063782483358255 - type: f1 value: 97.13755519177172 - type: f1_stderr value: 0.04575943074086138 - type: main_score value: 97.1376 - task: type: Retrieval dataset: name: MTEB MSMARCO type: mteb/msmarco config: default split: dev revision: c5a29a104738b98a9e76336939199e264163d4a0 metrics: - type: map_at_1 value: 0.0 - type: map_at_10 value: 38.342 - type: map_at_100 
value: 0.0 - type: map_at_1000 value: 0.0 - type: map_at_3 value: 0.0 - type: map_at_5 value: 0.0 - type: mrr_at_1 value: 0.0 - type: mrr_at_10 value: 0.0 - type: mrr_at_100 value: 0.0 - type: mrr_at_1000 value: 0.0 - type: mrr_at_3 value: 0.0 - type: mrr_at_5 value: 0.0 - type: ndcg_at_1 value: 0.0 - type: ndcg_at_10 value: 45.629999999999995 - type: ndcg_at_100 value: 0.0 - type: ndcg_at_1000 value: 0.0 - type: ndcg_at_3 value: 0.0 - type: ndcg_at_5 value: 0.0 - type: precision_at_1 value: 0.0 - type: precision_at_10 value: 7.119000000000001 - type: precision_at_100 value: 0.0 - type: precision_at_1000 value: 0.0 - type: precision_at_3 value: 0.0 - type: precision_at_5 value: 0.0 - type: recall_at_1 value: 0.0 - type: recall_at_10 value: 67.972 - type: recall_at_100 value: 0.0 - type: recall_at_1000 value: 0.0 - type: recall_at_3 value: 0.0 - type: recall_at_5 value: 0.0 - type: main_score value: 45.629999999999995 - task: type: Classification dataset: name: MTEB MTOPDomainClassification (en) type: mteb/mtop_domain config: en split: test revision: d80d48c1eb48d3562165c59d59d0034df9fff0bf metrics: - type: accuracy value: 99.24988600091199 - type: accuracy_stderr value: 0.04496826931900734 - type: f1 value: 99.15933275095276 - type: f1_stderr value: 0.05565039139747446 - type: main_score value: 99.24988600091199 - task: type: Classification dataset: name: MTEB MTOPIntentClassification (en) type: mteb/mtop_intent config: en split: test revision: ae001d0e6b1228650b7bd1c2c65fb50ad11a8aba metrics: - type: accuracy value: 94.3684450524396 - type: accuracy_stderr value: 0.8436548701322188 - type: f1 value: 77.33022623133307 - type: f1_stderr value: 0.9228425861187275 - type: main_score value: 94.3684450524396 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (en) type: mteb/amazon_massive_intent config: en split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 86.09616677874916 - type: accuracy_stderr value: 0.9943208055590853 - type: f1 value: 83.4902056490062 - type: f1_stderr value: 0.7626189310074184 - type: main_score value: 86.09616677874916 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (en) type: mteb/amazon_massive_scenario config: en split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 92.17215870880968 - type: accuracy_stderr value: 0.25949941333658166 - type: f1 value: 91.36757392422702 - type: f1_stderr value: 0.29139507298154815 - type: main_score value: 92.17215870880968 - task: type: Clustering dataset: name: MTEB MedrxivClusteringP2P type: mteb/medrxiv-clustering-p2p config: default split: test revision: e7a26af6f3ae46b30dde8737f02c07b1505bcc73 metrics: - type: main_score value: 46.09497344077905 - type: v_measure value: 46.09497344077905 - type: v_measure_std value: 1.44871520869784 - task: type: Clustering dataset: name: MTEB MedrxivClusteringS2S type: mteb/medrxiv-clustering-s2s config: default split: test revision: 35191c8c0dca72d8ff3efcd72aa802307d469663 metrics: - type: main_score value: 44.861049989560684 - type: v_measure value: 44.861049989560684 - type: v_measure_std value: 1.432199293162203 - task: type: Reranking dataset: name: MTEB MindSmallReranking type: mteb/mind_small config: default split: test revision: 3bdac13927fdc888b903db93b2ffdbd90b295a69 metrics: - type: map value: 31.75936162919999 - type: mrr value: 32.966812736541236 - type: main_score value: 31.75936162919999 - task: type: Retrieval dataset: name: MTEB 
NFCorpus type: mteb/nfcorpus config: default split: test revision: ec0fa4fe99da2ff19ca1214b7966684033a58814 metrics: - type: map_at_1 value: 7.893999999999999 - type: map_at_10 value: 17.95 - type: map_at_100 value: 23.474 - type: map_at_1000 value: 25.412000000000003 - type: map_at_3 value: 12.884 - type: map_at_5 value: 15.171000000000001 - type: mrr_at_1 value: 0.0 - type: mrr_at_10 value: 0.0 - type: mrr_at_100 value: 0.0 - type: mrr_at_1000 value: 0.0 - type: mrr_at_3 value: 0.0 - type: mrr_at_5 value: 0.0 - type: ndcg_at_1 value: 55.728 - type: ndcg_at_10 value: 45.174 - type: ndcg_at_100 value: 42.18 - type: ndcg_at_1000 value: 50.793 - type: ndcg_at_3 value: 50.322 - type: ndcg_at_5 value: 48.244 - type: precision_at_1 value: 57.276 - type: precision_at_10 value: 33.437 - type: precision_at_100 value: 10.671999999999999 - type: precision_at_1000 value: 2.407 - type: precision_at_3 value: 46.646 - type: precision_at_5 value: 41.672 - type: recall_at_1 value: 7.893999999999999 - type: recall_at_10 value: 22.831000000000003 - type: recall_at_100 value: 43.818 - type: recall_at_1000 value: 75.009 - type: recall_at_3 value: 14.371 - type: recall_at_5 value: 17.752000000000002 - type: main_score value: 45.174 - task: type: Retrieval dataset: name: MTEB NQ type: mteb/nq config: default split: test revision: b774495ed302d8c44a3a7ea25c90dbce03968f31 metrics: - type: map_at_1 value: 49.351 - type: map_at_10 value: 66.682 - type: map_at_100 value: 67.179 - type: map_at_1000 value: 67.18499999999999 - type: map_at_3 value: 62.958999999999996 - type: map_at_5 value: 65.364 - type: mrr_at_1 value: 0.0 - type: mrr_at_10 value: 0.0 - type: mrr_at_100 value: 0.0 - type: mrr_at_1000 value: 0.0 - type: mrr_at_3 value: 0.0 - type: mrr_at_5 value: 0.0 - type: ndcg_at_1 value: 55.417 - type: ndcg_at_10 value: 73.568 - type: ndcg_at_100 value: 75.35 - type: ndcg_at_1000 value: 75.478 - type: ndcg_at_3 value: 67.201 - type: ndcg_at_5 value: 70.896 - type: precision_at_1 value: 55.417 - type: precision_at_10 value: 11.036999999999999 - type: precision_at_100 value: 1.204 - type: precision_at_1000 value: 0.121 - type: precision_at_3 value: 29.654000000000003 - type: precision_at_5 value: 20.006 - type: recall_at_1 value: 49.351 - type: recall_at_10 value: 91.667 - type: recall_at_100 value: 98.89 - type: recall_at_1000 value: 99.812 - type: recall_at_3 value: 75.715 - type: recall_at_5 value: 84.072 - type: main_score value: 73.568 - task: type: Retrieval dataset: name: MTEB QuoraRetrieval type: mteb/quora config: default split: test revision: e4e08e0b7dbe3c8700f0daef558ff32256715259 metrics: - type: map_at_1 value: 71.358 - type: map_at_10 value: 85.474 - type: map_at_100 value: 86.101 - type: map_at_1000 value: 86.114 - type: map_at_3 value: 82.562 - type: map_at_5 value: 84.396 - type: mrr_at_1 value: 0.0 - type: mrr_at_10 value: 0.0 - type: mrr_at_100 value: 0.0 - type: mrr_at_1000 value: 0.0 - type: mrr_at_3 value: 0.0 - type: mrr_at_5 value: 0.0 - type: ndcg_at_1 value: 82.12 - type: ndcg_at_10 value: 89.035 - type: ndcg_at_100 value: 90.17399999999999 - type: ndcg_at_1000 value: 90.243 - type: ndcg_at_3 value: 86.32300000000001 - type: ndcg_at_5 value: 87.85 - type: precision_at_1 value: 82.12 - type: precision_at_10 value: 13.55 - type: precision_at_100 value: 1.54 - type: precision_at_1000 value: 0.157 - type: precision_at_3 value: 37.89 - type: precision_at_5 value: 24.9 - type: recall_at_1 value: 71.358 - type: recall_at_10 value: 95.855 - type: recall_at_100 value: 99.711 - type: recall_at_1000 
value: 99.994 - type: recall_at_3 value: 88.02 - type: recall_at_5 value: 92.378 - type: main_score value: 89.035 - task: type: Clustering dataset: name: MTEB RedditClustering type: mteb/reddit-clustering config: default split: test revision: 24640382cdbf8abc73003fb0fa6d111a705499eb metrics: - type: main_score value: 71.0984522742521 - type: v_measure value: 71.0984522742521 - type: v_measure_std value: 3.5668139917058044 - task: type: Clustering dataset: name: MTEB RedditClusteringP2P type: mteb/reddit-clustering-p2p config: default split: test revision: 385e3cb46b4cfa89021f56c4380204149d0efe33 metrics: - type: main_score value: 74.94499641904133 - type: v_measure value: 74.94499641904133 - type: v_measure_std value: 11.419672879389248 - task: type: Retrieval dataset: name: MTEB SCIDOCS type: mteb/scidocs config: default split: test revision: f8c2fcf00f625baaa80f62ec5bd9e1fff3b8ae88 metrics: - type: map_at_1 value: 5.343 - type: map_at_10 value: 13.044 - type: map_at_100 value: 15.290999999999999 - type: map_at_1000 value: 15.609 - type: map_at_3 value: 9.227 - type: map_at_5 value: 11.158 - type: mrr_at_1 value: 0.0 - type: mrr_at_10 value: 0.0 - type: mrr_at_100 value: 0.0 - type: mrr_at_1000 value: 0.0 - type: mrr_at_3 value: 0.0 - type: mrr_at_5 value: 0.0 - type: ndcg_at_1 value: 26.3 - type: ndcg_at_10 value: 21.901 - type: ndcg_at_100 value: 30.316 - type: ndcg_at_1000 value: 35.547000000000004 - type: ndcg_at_3 value: 20.560000000000002 - type: ndcg_at_5 value: 18.187 - type: precision_at_1 value: 26.3 - type: precision_at_10 value: 11.34 - type: precision_at_100 value: 2.344 - type: precision_at_1000 value: 0.359 - type: precision_at_3 value: 18.967 - type: precision_at_5 value: 15.920000000000002 - type: recall_at_1 value: 5.343 - type: recall_at_10 value: 22.997 - type: recall_at_100 value: 47.562 - type: recall_at_1000 value: 72.94500000000001 - type: recall_at_3 value: 11.533 - type: recall_at_5 value: 16.148 - type: main_score value: 21.901 - task: type: STS dataset: name: MTEB SICK-R type: mteb/sickr-sts config: default split: test revision: 20a6d6f312dd54037fe07a32d58e5e168867909d metrics: - type: cosine_pearson value: 87.3054603493591 - type: cosine_spearman value: 82.14763206055602 - type: manhattan_pearson value: 84.78737790237557 - type: manhattan_spearman value: 81.88455356002758 - type: euclidean_pearson value: 85.00668629311117 - type: euclidean_spearman value: 82.14763037860851 - type: main_score value: 82.14763206055602 - task: type: STS dataset: name: MTEB STS12 type: mteb/sts12-sts config: default split: test revision: a0d554a64d88156834ff5ae9920b964011b16384 metrics: - type: cosine_pearson value: 86.6911864687294 - type: cosine_spearman value: 77.89286260403269 - type: manhattan_pearson value: 82.87240347680857 - type: manhattan_spearman value: 78.10055393740326 - type: euclidean_pearson value: 82.72282535777123 - type: euclidean_spearman value: 77.89256648406325 - type: main_score value: 77.89286260403269 - task: type: STS dataset: name: MTEB STS13 type: mteb/sts13-sts config: default split: test revision: 7e90230a92c190f1bf69ae9002b8cea547a64cca metrics: - type: cosine_pearson value: 87.7220832598633 - type: cosine_spearman value: 88.30238972017452 - type: manhattan_pearson value: 87.88214789140248 - type: manhattan_spearman value: 88.24770220032391 - type: euclidean_pearson value: 87.98610386257103 - type: euclidean_spearman value: 88.30238972017452 - type: main_score value: 88.30238972017452 - task: type: STS dataset: name: MTEB STS14 type: mteb/sts14-sts 
config: default split: test revision: 6031580fec1f6af667f0bd2da0a551cf4f0b2375 metrics: - type: cosine_pearson value: 85.70614623247714 - type: cosine_spearman value: 84.29920990970672 - type: manhattan_pearson value: 84.9836190531721 - type: manhattan_spearman value: 84.40933470597638 - type: euclidean_pearson value: 84.96652336693347 - type: euclidean_spearman value: 84.29920989531965 - type: main_score value: 84.29920990970672 - task: type: STS dataset: name: MTEB STS15 type: mteb/sts15-sts config: default split: test revision: ae752c7c21bf194d8b67fd573edf7ae58183cbe3 metrics: - type: cosine_pearson value: 88.4169972425264 - type: cosine_spearman value: 89.03555007807218 - type: manhattan_pearson value: 88.83068699455478 - type: manhattan_spearman value: 89.21877175674125 - type: euclidean_pearson value: 88.7251052947544 - type: euclidean_spearman value: 89.03557389893083 - type: main_score value: 89.03555007807218 - task: type: STS dataset: name: MTEB STS16 type: mteb/sts16-sts config: default split: test revision: 4d8694f8f0e0100860b497b999b3dbed754a0513 metrics: - type: cosine_pearson value: 85.63830579034632 - type: cosine_spearman value: 86.77353371581373 - type: manhattan_pearson value: 86.24830492396637 - type: manhattan_spearman value: 86.96754348626189 - type: euclidean_pearson value: 86.09837038778359 - type: euclidean_spearman value: 86.77353371581373 - type: main_score value: 86.77353371581373 - task: type: STS dataset: name: MTEB STS17 (en-en) type: mteb/sts17-crosslingual-sts config: en-en split: test revision: af5e6fb845001ecf41f4c1e033ce921939a2a68d metrics: - type: cosine_pearson value: 91.2204675588959 - type: cosine_spearman value: 90.66976712249057 - type: manhattan_pearson value: 91.11007808242346 - type: manhattan_spearman value: 90.51739232964488 - type: euclidean_pearson value: 91.19588941007903 - type: euclidean_spearman value: 90.66976712249057 - type: main_score value: 90.66976712249057 - task: type: STS dataset: name: MTEB STS22 (en) type: mteb/sts22-crosslingual-sts config: en split: test revision: eea2b4fe26a775864c896887d910b76a8098ad3f metrics: - type: cosine_pearson value: 69.34416749707114 - type: cosine_spearman value: 68.11632448161046 - type: manhattan_pearson value: 68.99243488935281 - type: manhattan_spearman value: 67.8398546438258 - type: euclidean_pearson value: 69.06376010216088 - type: euclidean_spearman value: 68.11632448161046 - type: main_score value: 68.11632448161046 - task: type: STS dataset: name: MTEB STSBenchmark type: mteb/stsbenchmark-sts config: default split: test revision: b0fddb56ed78048fa8b90373c8a3cfc37b684831 metrics: - type: cosine_pearson value: 88.10309739429758 - type: cosine_spearman value: 88.40520383147418 - type: manhattan_pearson value: 88.50753383813232 - type: manhattan_spearman value: 88.66382629460927 - type: euclidean_pearson value: 88.35050664609376 - type: euclidean_spearman value: 88.40520383147418 - type: main_score value: 88.40520383147418 - task: type: Reranking dataset: name: MTEB SciDocsRR type: mteb/scidocs-reranking config: default split: test revision: d3c5e1fc0b855ab6097bf1cda04dd73947d7caab metrics: - type: map value: 87.58627126942797 - type: mrr value: 97.01098103058887 - type: main_score value: 87.58627126942797 - task: type: Retrieval dataset: name: MTEB SciFact type: mteb/scifact config: default split: test revision: 0228b52cf27578f30900b9e5271d331663a030d7 metrics: - type: map_at_1 value: 62.883 - type: map_at_10 value: 75.371 - type: map_at_100 value: 75.66000000000001 - type: map_at_1000 
value: 75.667 - type: map_at_3 value: 72.741 - type: map_at_5 value: 74.74 - type: mrr_at_1 value: 0.0 - type: mrr_at_10 value: 0.0 - type: mrr_at_100 value: 0.0 - type: mrr_at_1000 value: 0.0 - type: mrr_at_3 value: 0.0 - type: mrr_at_5 value: 0.0 - type: ndcg_at_1 value: 66.0 - type: ndcg_at_10 value: 80.12700000000001 - type: ndcg_at_100 value: 81.291 - type: ndcg_at_1000 value: 81.464 - type: ndcg_at_3 value: 76.19 - type: ndcg_at_5 value: 78.827 - type: precision_at_1 value: 66.0 - type: precision_at_10 value: 10.567 - type: precision_at_100 value: 1.117 - type: precision_at_1000 value: 0.11299999999999999 - type: precision_at_3 value: 30.333 - type: precision_at_5 value: 20.133000000000003 - type: recall_at_1 value: 62.883 - type: recall_at_10 value: 93.556 - type: recall_at_100 value: 98.667 - type: recall_at_1000 value: 100.0 - type: recall_at_3 value: 83.322 - type: recall_at_5 value: 89.756 - type: main_score value: 80.12700000000001 - task: type: PairClassification dataset: name: MTEB SprintDuplicateQuestions type: mteb/sprintduplicatequestions-pairclassification config: default split: test revision: d66bd1f72af766a5cc4b0ca5e00c162f89e8cc46 metrics: - type: cos_sim_accuracy value: 99.87524752475248 - type: cos_sim_accuracy_threshold value: 74.86587762832642 - type: cos_sim_ap value: 97.02222446606328 - type: cos_sim_f1 value: 93.66197183098592 - type: cos_sim_f1_threshold value: 74.74223375320435 - type: cos_sim_precision value: 94.23076923076923 - type: cos_sim_recall value: 93.10000000000001 - type: dot_accuracy value: 99.87524752475248 - type: dot_accuracy_threshold value: 74.86587762832642 - type: dot_ap value: 97.02222688043362 - type: dot_f1 value: 93.66197183098592 - type: dot_f1_threshold value: 74.74223375320435 - type: dot_precision value: 94.23076923076923 - type: dot_recall value: 93.10000000000001 - type: euclidean_accuracy value: 99.87524752475248 - type: euclidean_accuracy_threshold value: 70.9000825881958 - type: euclidean_ap value: 97.02222446606329 - type: euclidean_f1 value: 93.66197183098592 - type: euclidean_f1_threshold value: 71.07426524162292 - type: euclidean_precision value: 94.23076923076923 - type: euclidean_recall value: 93.10000000000001 - type: manhattan_accuracy value: 99.87623762376238 - type: manhattan_accuracy_threshold value: 3588.5040283203125 - type: manhattan_ap value: 97.09194643777883 - type: manhattan_f1 value: 93.7375745526839 - type: manhattan_f1_threshold value: 3664.3760681152344 - type: manhattan_precision value: 93.18181818181817 - type: manhattan_recall value: 94.3 - type: max_accuracy value: 99.87623762376238 - type: max_ap value: 97.09194643777883 - type: max_f1 value: 93.7375745526839 - task: type: Clustering dataset: name: MTEB StackExchangeClustering type: mteb/stackexchange-clustering config: default split: test revision: 6cbc1f7b2bc0622f2e39d2c77fa502909748c259 metrics: - type: main_score value: 82.10134099988541 - type: v_measure value: 82.10134099988541 - type: v_measure_std value: 2.7926349897769533 - task: type: Clustering dataset: name: MTEB StackExchangeClusteringP2P type: mteb/stackexchange-clustering-p2p config: default split: test revision: 815ca46b2622cec33ccafc3735d572c266efdb44 metrics: - type: main_score value: 48.357450742397404 - type: v_measure value: 48.357450742397404 - type: v_measure_std value: 1.520118876440547 - task: type: Reranking dataset: name: MTEB StackOverflowDupQuestions type: mteb/stackoverflowdupquestions-reranking config: default split: test revision: e185fbe320c72810689fc5848eb6114e1ef5ec69 
metrics: - type: map value: 55.79277200802986 - type: mrr value: 56.742517082590616 - type: main_score value: 55.79277200802986 - task: type: Summarization dataset: name: MTEB SummEval type: mteb/summeval config: default split: test revision: cda12ad7615edc362dbf25a00fdd61d3b1eaf93c metrics: - type: cosine_spearman value: 30.701215774712693 - type: cosine_pearson value: 31.26740037278488 - type: dot_spearman value: 30.701215774712693 - type: dot_pearson value: 31.267404144879997 - type: main_score value: 30.701215774712693 - task: type: Retrieval dataset: name: MTEB TRECCOVID type: mteb/trec-covid config: default split: test revision: bb9466bac8153a0349341eb1b22e06409e78ef4e metrics: - type: map_at_1 value: 0.23800000000000002 - type: map_at_10 value: 2.31 - type: map_at_100 value: 15.495000000000001 - type: map_at_1000 value: 38.829 - type: map_at_3 value: 0.72 - type: map_at_5 value: 1.185 - type: mrr_at_1 value: 0.0 - type: mrr_at_10 value: 0.0 - type: mrr_at_100 value: 0.0 - type: mrr_at_1000 value: 0.0 - type: mrr_at_3 value: 0.0 - type: mrr_at_5 value: 0.0 - type: ndcg_at_1 value: 91.0 - type: ndcg_at_10 value: 88.442 - type: ndcg_at_100 value: 71.39 - type: ndcg_at_1000 value: 64.153 - type: ndcg_at_3 value: 89.877 - type: ndcg_at_5 value: 89.562 - type: precision_at_1 value: 92.0 - type: precision_at_10 value: 92.60000000000001 - type: precision_at_100 value: 73.74000000000001 - type: precision_at_1000 value: 28.222 - type: precision_at_3 value: 94.0 - type: precision_at_5 value: 93.60000000000001 - type: recall_at_1 value: 0.23800000000000002 - type: recall_at_10 value: 2.428 - type: recall_at_100 value: 18.099999999999998 - type: recall_at_1000 value: 60.79599999999999 - type: recall_at_3 value: 0.749 - type: recall_at_5 value: 1.238 - type: main_score value: 88.442 - task: type: Retrieval dataset: name: MTEB Touche2020 type: mteb/touche2020 config: default split: test revision: a34f9a33db75fa0cbb21bb5cfc3dae8dc8bec93f metrics: - type: map_at_1 value: 3.4939999999999998 - type: map_at_10 value: 12.531999999999998 - type: map_at_100 value: 19.147 - type: map_at_1000 value: 20.861 - type: map_at_3 value: 7.558 - type: map_at_5 value: 9.49 - type: mrr_at_1 value: 0.0 - type: mrr_at_10 value: 0.0 - type: mrr_at_100 value: 0.0 - type: mrr_at_1000 value: 0.0 - type: mrr_at_3 value: 0.0 - type: mrr_at_5 value: 0.0 - type: ndcg_at_1 value: 47.959 - type: ndcg_at_10 value: 31.781 - type: ndcg_at_100 value: 42.131 - type: ndcg_at_1000 value: 53.493 - type: ndcg_at_3 value: 39.204 - type: ndcg_at_5 value: 34.635 - type: precision_at_1 value: 48.980000000000004 - type: precision_at_10 value: 27.143 - type: precision_at_100 value: 8.224 - type: precision_at_1000 value: 1.584 - type: precision_at_3 value: 38.775999999999996 - type: precision_at_5 value: 33.061 - type: recall_at_1 value: 3.4939999999999998 - type: recall_at_10 value: 18.895 - type: recall_at_100 value: 50.192 - type: recall_at_1000 value: 85.167 - type: recall_at_3 value: 8.703 - type: recall_at_5 value: 11.824 - type: main_score value: 31.781 - task: type: Classification dataset: name: MTEB ToxicConversationsClassification type: mteb/toxic_conversations_50k config: default split: test revision: edfaf9da55d3dd50d43143d90c1ac476895ae6de metrics: - type: accuracy value: 92.7402 - type: accuracy_stderr value: 1.020764595781027 - type: ap value: 44.38594756333084 - type: ap_stderr value: 1.817150701258273 - type: f1 value: 79.95699280019547 - type: f1_stderr value: 1.334582498702029 - type: main_score value: 92.7402 - task: type: 
Classification dataset: name: MTEB TweetSentimentExtractionClassification type: mteb/tweet_sentiment_extraction config: default split: test revision: d604517c81ca91fe16a244d1248fc021f9ecee7a metrics: - type: accuracy value: 80.86870401810978 - type: accuracy_stderr value: 0.22688467782004712 - type: f1 value: 81.1829040745744 - type: f1_stderr value: 0.19774920574849694 - type: main_score value: 80.86870401810978 - task: type: Clustering dataset: name: MTEB TwentyNewsgroupsClustering type: mteb/twentynewsgroups-clustering config: default split: test revision: 6125ec4e24fa026cec8a478383ee943acfbd5449 metrics: - type: main_score value: 64.82048869927482 - type: v_measure value: 64.82048869927482 - type: v_measure_std value: 0.9170394252450564 - task: type: PairClassification dataset: name: MTEB TwitterSemEval2015 type: mteb/twittersemeval2015-pairclassification config: default split: test revision: 70970daeab8776df92f5ea462b6173c0b46fd2d1 metrics: - type: cos_sim_accuracy value: 88.44251057996067 - type: cos_sim_accuracy_threshold value: 70.2150285243988 - type: cos_sim_ap value: 81.11422351199913 - type: cos_sim_f1 value: 73.71062868615887 - type: cos_sim_f1_threshold value: 66.507488489151 - type: cos_sim_precision value: 70.2799712849964 - type: cos_sim_recall value: 77.4934036939314 - type: dot_accuracy value: 88.44251057996067 - type: dot_accuracy_threshold value: 70.2150285243988 - type: dot_ap value: 81.11420529068658 - type: dot_f1 value: 73.71062868615887 - type: dot_f1_threshold value: 66.50749444961548 - type: dot_precision value: 70.2799712849964 - type: dot_recall value: 77.4934036939314 - type: euclidean_accuracy value: 88.44251057996067 - type: euclidean_accuracy_threshold value: 77.18156576156616 - type: euclidean_ap value: 81.11422421732487 - type: euclidean_f1 value: 73.71062868615887 - type: euclidean_f1_threshold value: 81.84436559677124 - type: euclidean_precision value: 70.2799712849964 - type: euclidean_recall value: 77.4934036939314 - type: manhattan_accuracy value: 88.26369434344639 - type: manhattan_accuracy_threshold value: 3837.067413330078 - type: manhattan_ap value: 80.81442360477725 - type: manhattan_f1 value: 73.39883099117024 - type: manhattan_f1_threshold value: 4098.833847045898 - type: manhattan_precision value: 69.41896024464832 - type: manhattan_recall value: 77.86279683377309 - type: max_accuracy value: 88.44251057996067 - type: max_ap value: 81.11422421732487 - type: max_f1 value: 73.71062868615887 - task: type: PairClassification dataset: name: MTEB TwitterURLCorpus type: mteb/twitterurlcorpus-pairclassification config: default split: test revision: 8b6510b0b1fa4e4c4f879467980e9be563ec1cdf metrics: - type: cos_sim_accuracy value: 90.03182365040556 - type: cos_sim_accuracy_threshold value: 64.46443796157837 - type: cos_sim_ap value: 87.86649113691112 - type: cos_sim_f1 value: 80.45644844577821 - type: cos_sim_f1_threshold value: 61.40774488449097 - type: cos_sim_precision value: 77.54052702992216 - type: cos_sim_recall value: 83.60024638127503 - type: dot_accuracy value: 90.03182365040556 - type: dot_accuracy_threshold value: 64.46444988250732 - type: dot_ap value: 87.86649011954319 - type: dot_f1 value: 80.45644844577821 - type: dot_f1_threshold value: 61.407750844955444 - type: dot_precision value: 77.54052702992216 - type: dot_recall value: 83.60024638127503 - type: euclidean_accuracy value: 90.03182365040556 - type: euclidean_accuracy_threshold value: 84.30368900299072 - type: euclidean_ap value: 87.86649114275045 - type: euclidean_f1 value: 
80.45644844577821 - type: euclidean_f1_threshold value: 87.8547191619873 - type: euclidean_precision value: 77.54052702992216 - type: euclidean_recall value: 83.60024638127503 - type: manhattan_accuracy value: 89.99883572010712 - type: manhattan_accuracy_threshold value: 4206.838607788086 - type: manhattan_ap value: 87.8600826607838 - type: manhattan_f1 value: 80.44054508120217 - type: manhattan_f1_threshold value: 4372.755432128906 - type: manhattan_precision value: 78.08219178082192 - type: manhattan_recall value: 82.94579611949491 - type: max_accuracy value: 90.03182365040556 - type: max_ap value: 87.86649114275045 - type: max_f1 value: 80.45644844577821 --- ## Introduction We present NV-Embed-v2, a generalist embedding model that ranks No. 1 on the Massive Text Embedding Benchmark ([MTEB benchmark](https://huggingface.co/spaces/mteb/leaderboard))(as of Aug 30, 2024) with a score of 72.31 across 56 text embedding tasks. It also holds the No. 1 in the retrieval sub-category (a score of 62.65 across 15 tasks) in the leaderboard, which is essential to the development of RAG technology. NV-Embed-v2 presents several new designs, including having the LLM attend to latent vectors for better pooled embedding output, and demonstrating a two-staged instruction tuning method to enhance the accuracy of both retrieval and non-retrieval tasks. Additionally, NV-Embed-v2 incorporates a novel hard-negative mining methods that take into account the positive relevance score for better false negatives removal. For more technical details, refer to our paper: [NV-Embed: Improved Techniques for Training LLMs as Generalist Embedding Models](https://arxiv.org/pdf/2405.17428). ## Model Details - Base Decoder-only LLM: [Mistral-7B-v0.1](https://huggingface.co/mistralai/Mistral-7B-v0.1) - Pooling Type: Latent-Attention - Embedding Dimension: 4096 ## How to use Here is an example of how to encode queries and passages using Huggingface-transformer and Sentence-transformer. Please find the required package version [here](https://huggingface.co/nvidia/NV-Embed-v2#2-required-packages). ### Usage (HuggingFace Transformers) ```python import torch import torch.nn.functional as F from transformers import AutoTokenizer, AutoModel # Each query needs to be accompanied by an corresponding instruction describing the task. task_name_to_instruct = {"example": "Given a question, retrieve passages that answer the question",} query_prefix = "Instruct: "+task_name_to_instruct["example"]+"\nQuery: " queries = [ 'are judo throws allowed in wrestling?', 'how to become a radiology technician in michigan?' ] # No instruction needed for retrieval passages passage_prefix = "" passages = [ "Since you're reading this, you are probably someone from a judo background or someone who is just wondering how judo techniques can be applied under wrestling rules. So without further ado, let's get to the question. Are Judo throws allowed in wrestling? Yes, judo throws are allowed in freestyle and folkstyle wrestling. You only need to be careful to follow the slam rules when executing judo throws. In wrestling, a slam is lifting and returning an opponent to the mat with unnecessary force.", "Below are the basic steps to becoming a radiologic technologist in Michigan:Earn a high school diploma. As with most careers in health care, a high school education is the first step to finding entry-level employment. 
Taking classes in math and science, such as anatomy, biology, chemistry, physiology, and physics, can help prepare students for their college studies and future careers.Earn an associate degree. Entry-level radiologic positions typically require at least an Associate of Applied Science. Before enrolling in one of these degree programs, students should make sure it has been properly accredited by the Joint Review Committee on Education in Radiologic Technology (JRCERT).Get licensed or certified in the state of Michigan." ] # load model with tokenizer model = AutoModel.from_pretrained('nvidia/NV-Embed-v2', trust_remote_code=True) # get the embeddings max_length = 32768 query_embeddings = model.encode(queries, instruction=query_prefix, max_length=max_length) passage_embeddings = model.encode(passages, instruction=passage_prefix, max_length=max_length) # normalize embeddings query_embeddings = F.normalize(query_embeddings, p=2, dim=1) passage_embeddings = F.normalize(passage_embeddings, p=2, dim=1) # get the embeddings with DataLoader (spliting the datasets into multiple mini-batches) # batch_size=2 # query_embeddings = model._do_encode(queries, batch_size=batch_size, instruction=query_prefix, max_length=max_length, num_workers=32, return_numpy=True) # passage_embeddings = model._do_encode(passages, batch_size=batch_size, instruction=passage_prefix, max_length=max_length, num_workers=32, return_numpy=True) scores = (query_embeddings @ passage_embeddings.T) * 100 print(scores.tolist()) # [[87.42693328857422, 0.46283677220344543], [0.965264618396759, 86.03721618652344]] ``` ### Usage (Sentence-Transformers) ```python import torch from sentence_transformers import SentenceTransformer # Each query needs to be accompanied by an corresponding instruction describing the task. task_name_to_instruct = {"example": "Given a question, retrieve passages that answer the question",} query_prefix = "Instruct: "+task_name_to_instruct["example"]+"\nQuery: " queries = [ 'are judo throws allowed in wrestling?', 'how to become a radiology technician in michigan?' ] # No instruction needed for retrieval passages passages = [ "Since you're reading this, you are probably someone from a judo background or someone who is just wondering how judo techniques can be applied under wrestling rules. So without further ado, let's get to the question. Are Judo throws allowed in wrestling? Yes, judo throws are allowed in freestyle and folkstyle wrestling. You only need to be careful to follow the slam rules when executing judo throws. In wrestling, a slam is lifting and returning an opponent to the mat with unnecessary force.", "Below are the basic steps to becoming a radiologic technologist in Michigan:Earn a high school diploma. As with most careers in health care, a high school education is the first step to finding entry-level employment. Taking classes in math and science, such as anatomy, biology, chemistry, physiology, and physics, can help prepare students for their college studies and future careers.Earn an associate degree. Entry-level radiologic positions typically require at least an Associate of Applied Science. Before enrolling in one of these degree programs, students should make sure it has been properly accredited by the Joint Review Committee on Education in Radiologic Technology (JRCERT).Get licensed or certified in the state of Michigan." 
] # load model with tokenizer model = SentenceTransformer('nvidia/NV-Embed-v2', trust_remote_code=True) model.max_seq_length = 32768 model.tokenizer.padding_side="right" def add_eos(input_examples): input_examples = [input_example + model.tokenizer.eos_token for input_example in input_examples] return input_examples # get the embeddings batch_size = 2 query_embeddings = model.encode(add_eos(queries), batch_size=batch_size, prompt=query_prefix, normalize_embeddings=True) passage_embeddings = model.encode(add_eos(passages), batch_size=batch_size, normalize_embeddings=True) scores = (query_embeddings @ passage_embeddings.T) * 100 print(scores.tolist()) ``` ## License This model should not be used for any commercial purpose. Refer to the [license](https://spdx.org/licenses/CC-BY-NC-4.0) for the detailed terms. For commercial purposes, we recommend using the models of [NeMo Retriever Microservices (NIMs)](https://build.nvidia.com/explore/retrieval). ## Correspondence to Chankyu Lee ([email protected]), Rajarshi Roy ([email protected]), Wei Ping ([email protected]) ## Citation If you find this code useful in your research, please consider citing: ```bibtex @article{lee2024nv, title={NV-Embed: Improved Techniques for Training LLMs as Generalist Embedding Models}, author={Lee, Chankyu and Roy, Rajarshi and Xu, Mengyao and Raiman, Jonathan and Shoeybi, Mohammad and Catanzaro, Bryan and Ping, Wei}, journal={arXiv preprint arXiv:2405.17428}, year={2024} } ``` ```bibtex @article{moreira2024nv, title={NV-Retriever: Improving text embedding models with effective hard-negative mining}, author={Moreira, Gabriel de Souza P and Osmulski, Radek and Xu, Mengyao and Ak, Ronay and Schifferer, Benedikt and Oldridge, Even}, journal={arXiv preprint arXiv:2407.15831}, year={2024} } ``` ## Troubleshooting #### 1. Instruction template for MTEB benchmarks For the MTEB retrieval, STS, and summarization sub-tasks, please use the instruction prefix template in [instructions.json](https://huggingface.co/nvidia/NV-Embed-v2/blob/main/instructions.json). For classification, clustering and reranking, please use the instructions provided in Table 7 of the [NV-Embed paper](https://arxiv.org/pdf/2405.17428). #### 2. Required Packages If you run into trouble, try installing the Python packages as below ```bash pip uninstall -y transformer-engine pip install torch==2.2.0 pip install transformers==4.42.4 pip install flash-attn==2.2.0 pip install sentence-transformers==2.7.0 ``` #### 3. How to enable Multi-GPU (Note: this applies to the HuggingFace Transformers usage) ```python from transformers import AutoModel from torch.nn import DataParallel embedding_model = AutoModel.from_pretrained("nvidia/NV-Embed-v2") for module_key, module in embedding_model._modules.items(): embedding_model._modules[module_key] = DataParallel(module) ``` #### 4. Fixing "nvidia/NV-Embed-v2 is not the path to a directory containing a file named config.json" Switch to your local model path, open config.json, and replace the value of **"_name_or_path"** with your local model path. #### 5. Access to model nvidia/NV-Embed-v2 is restricted. You must be authenticated to access it Use your Hugging Face access [token](https://huggingface.co/settings/tokens) to run *"huggingface-cli login"*. #### 6. How to resolve a slight mismatch in Sentence Transformer results A slight mismatch in the Sentence Transformer implementation is caused by a discrepancy in the calculation of the instruction prefix length within the Sentence Transformer package. 
To fix this issue, you need to build the Sentence Transformer package from source, making the necessary modification in this [line](https://github.com/UKPLab/sentence-transformers/blob/v2.7-release/sentence_transformers/SentenceTransformer.py#L353) as below.
```bash
git clone https://github.com/UKPLab/sentence-transformers.git
cd sentence-transformers
git checkout v2.7-release
# Modify L353 in SentenceTransformer.py to **'extra_features["prompt_length"] = tokenized_prompt["input_ids"].shape[-1]'**.
pip install -e .
```
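After rebuilding the package as above, a quick way to confirm that the two code paths now agree is to compare the score matrices they produce. The snippet below is a minimal sketch, not part of the official card: `max_score_gap` and the random tensors are illustrative stand-ins; in practice you would pass the `query_embeddings`/`passage_embeddings` computed by the Transformers and Sentence-Transformers snippets above.
```python
import torch
import torch.nn.functional as F

def max_score_gap(q_a, p_a, q_b, p_b):
    """Return the largest absolute difference between two query-passage score matrices."""
    # Both usage examples above L2-normalize, so the dot product is a cosine similarity scaled by 100.
    scores_a = (F.normalize(q_a, p=2, dim=1) @ F.normalize(p_a, p=2, dim=1).T) * 100
    scores_b = (F.normalize(q_b, p=2, dim=1) @ F.normalize(p_b, p=2, dim=1).T) * 100
    return (scores_a - scores_b).abs().max().item()

# Stand-ins for the embeddings produced by the two pipelines (2 queries, 2 passages, dim 4096).
hf_q, hf_p = torch.randn(2, 4096), torch.randn(2, 4096)
st_q, st_p = hf_q.clone(), hf_p.clone()

# With the patched Sentence-Transformers build, the gap should be near zero (e.g. < 1e-3).
print(max_score_gap(hf_q, hf_p, st_q, st_p))
```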
[ "BIOSSES", "SCIFACT" ]
microsoft/Phi-4-mini-instruct
microsoft
text-generation
[ "transformers", "safetensors", "phi3", "text-generation", "nlp", "code", "conversational", "custom_code", "multilingual", "ar", "zh", "cs", "da", "nl", "en", "fi", "fr", "de", "he", "hu", "it", "ja", "ko", "no", "pl", "pt", "ru", "es", "sv", "th", "tr", "uk", "arxiv:2503.01743", "license:mit", "autotrain_compatible", "text-generation-inference", "endpoints_compatible", "region:us" ]
"2025-02-19T01:00:58Z"
2025-03-10T22:22:22+00:00
228,671
368
--- language: - multilingual - ar - zh - cs - da - nl - en - fi - fr - de - he - hu - it - ja - ko - 'no' - pl - pt - ru - es - sv - th - tr - uk library_name: transformers license: mit license_link: https://huggingface.co/microsoft/Phi-4-mini-instruct/resolve/main/LICENSE pipeline_tag: text-generation tags: - nlp - code widget: - messages: - role: user content: Can you provide ways to eat combinations of bananas and dragonfruits? --- ## Model Summary Phi-4-mini-instruct is a lightweight open model built upon synthetic data and filtered publicly available websites - with a focus on high-quality, reasoning dense data. The model belongs to the Phi-4 model family and supports 128K token context length. The model underwent an enhancement process, incorporating both supervised fine-tuning and direct preference optimization to support precise instruction adherence and robust safety measures. 📰 [Phi-4-mini Microsoft Blog](https://aka.ms/phi4-feb2025) <br> 📖 [Phi-4-mini Technical Report](https://aka.ms/phi-4-multimodal/techreport) <br> 👩‍🍳 [Phi Cookbook](https://github.com/microsoft/PhiCookBook) <br> 🏡 [Phi Portal](https://azure.microsoft.com/en-us/products/phi) <br> 🖥️ Try It [Azure](https://aka.ms/phi-4-mini/azure), [Huggingface](https://huggingface.co/spaces/microsoft/phi-4-mini) <br> 🚀 [Model paper](https://huggingface.co/papers/2503.01743) 🎉**Phi-4**: [[multimodal-instruct](https://huggingface.co/microsoft/Phi-4-multimodal-instruct) | [onnx](https://huggingface.co/microsoft/Phi-4-multimodal-instruct-onnx)]; [[mini-instruct](https://huggingface.co/microsoft/Phi-4-mini-instruct) | [onnx](https://huggingface.co/microsoft/Phi-4-mini-instruct-onnx)] ## Intended Uses ### Primary Use Cases The model is intended for broad multilingual commercial and research use. The model provides uses for general purpose AI systems and applications which require: 1) Memory/compute constrained environments 2) Latency bound scenarios 3) Strong reasoning (especially math and logic). The model is designed to accelerate research on language and multimodal models, for use as a building block for generative AI powered features. ### Use Case Considerations The model is not specifically designed or evaluated for all downstream purposes. Developers should consider common limitations of language models, as well as performance difference across languages, as they select use cases, and evaluate and mitigate for accuracy, safety, and fairness before using within a specific downstream use case, particularly for high-risk scenarios. Developers should be aware of and adhere to applicable laws or regulations (including but not limited to privacy, trade compliance laws, etc.) that are relevant to their use case. ***Nothing contained in this Model Card should be interpreted as or deemed a restriction or modification to the license the model is released under.*** ## Release Notes This release of Phi-4-mini-instruct is based on valuable user feedback from the Phi-3 series. The Phi-4-mini model employed new architecture for efficiency, larger vocabulary for multilingual support, and better post-training techniques were used for instruction following, function calling, as well as additional data leading to substantial gains on key capabilities. It is anticipated that most use cases will benefit from this release, but users are encouraged to test in their particular AI applications. The enthusiastic support for the Phi-4 series is greatly appreciated. 
Feedback on Phi-4-mini-instruct is welcomed and crucial to the model’s evolution and improvement. ### Model Quality To understand the capabilities, the 3.8B parameters Phi-4-mini-instruct model was compared with a set of models over a variety of benchmarks using an internal benchmark platform (See Appendix A for benchmark methodology). A high-level overview of the model quality is as follows: | Benchmark | Similar size | | | | |2x size | | | | | | |----------------------------------|-------------|-------------------|-------------------|-------------------|-----------------|-----------------|-----------------|-----------------|-----------------|-----------------|-----------------| | | Phi-4 mini-Ins | Phi-3.5-mini-Ins | Llama-3.2-3B-Ins | Mistral-3B | Qwen2.5-3B-Ins | Qwen2.5-7B-Ins | Mistral-8B-2410 | Llama-3.1-8B-Ins | Llama-3.1-Tulu-3-8B | Gemma2-9B-Ins | GPT-4o-mini-2024-07-18 | | **Popular aggregated benchmark** | | | | | | | | | | | | | Arena Hard | 32.8 | 34.4 | 17.0 | 26.9 | 32.0 | 55.5 | 37.3 | 25.7 | 42.7 | 43.7 | 53.7 | | BigBench Hard (0-shot, CoT) | 70.4 | 63.1 | 55.4 | 51.2 | 56.2 | 72.4 | 53.3 | 63.4 | 55.5 | 65.7 | 80.4 | | MMLU (5-shot) | 67.3 | 65.5 | 61.8 | 60.8 | 65.0 | 72.6 | 63.0 | 68.1 | 65.0 | 71.3 | 77.2 | | MMLU-Pro (0-shot, CoT) | 52.8 | 47.4 | 39.2 | 35.3 | 44.7 | 56.2 | 36.6 | 44.0 | 40.9 | 50.1 | 62.8 | | **Reasoning** | | | | | | | | | | | | | ARC Challenge (10-shot) | 83.7 | 84.6 | 76.1 | 80.3 | 82.6 | 90.1 | 82.7 | 83.1 | 79.4 | 89.8 | 93.5 | | BoolQ (2-shot) | 81.2 | 77.7 | 71.4 | 79.4 | 65.4 | 80.0 | 80.5 | 82.8 | 79.3 | 85.7 | 88.7 | | GPQA (0-shot, CoT) | 25.2 | 26.6 | 24.3 | 24.4 | 23.4 | 30.6 | 26.3 | 26.3 | 29.9 | 39.1 | 41.1 | | HellaSwag (5-shot) | 69.1 | 72.2 | 77.2 | 74.6 | 74.6 | 80.0 | 73.5 | 72.8 | 80.9 | 87.1 | 88.7 | | OpenBookQA (10-shot) | 79.2 | 81.2 | 72.6 | 79.8 | 79.3 | 82.6 | 80.2 | 84.8 | 79.8 | 90.0 | 90.0 | | PIQA (5-shot) | 77.6 | 78.2 | 68.2 | 73.2 | 72.6 | 76.2 | 81.2 | 83.2 | 78.3 | 83.7 | 88.7 | | Social IQA (5-shot) | 72.5 | 75.1 | 68.3 | 73.9 | 75.3 | 75.3 | 77.6 | 71.8 | 73.4 | 74.7 | 82.9 | | TruthfulQA (MC2) (10-shot) | 66.4 | 65.2 | 59.2 | 62.9 | 64.3 | 69.4 | 63.0 | 69.2 | 64.1 | 76.6 | 78.2 | | Winogrande (5-shot) | 67.0 | 72.2 | 53.2 | 59.8 | 63.3 | 71.1 | 63.1 | 64.7 | 65.4 | 74.0 | 76.9 | | **Multilingual** | | | | | | | | | | | | | Multilingual MMLU (5-shot) | 49.3 | 51.8 | 48.1 | 46.4 | 55.9 | 64.4 | 53.7 | 56.2 | 54.5 | 63.8 | 72.9 | | MGSM (0-shot, CoT) | 63.9 | 49.6 | 44.6 | 44.6 | 53.5 | 64.5 | 56.7 | 56.7 | 58.6 | 75.1 | 81.7 | | **Math** | | | | | | | | | | | | | GSM8K (8-shot, CoT) | 88.6 | 76.9 | 75.6 | 80.1 | 80.6 | 88.7 | 81.9 | 82.4 | 84.3 | 84.9 | 91.3 | | MATH (0-shot, CoT) | 64.0 | 49.8 | 46.7 | 41.8 | 61.7 | 60.4 | 41.6 | 47.6 | 46.1 | 51.3 | 70.2 | | **Overall** | **63.5** | **60.5** | **56.2** | **56.9** | **60.1** | **67.9** | **60.2** | **62.3** | **60.9** | **65.0** | **75.5** | Overall, the model with only 3.8B-param achieves a similar level of multilingual language understanding and reasoning ability as much larger models. However, it is still fundamentally limited by its size for certain tasks. The model simply does not have the capacity to store too much factual knowledge, therefore, users may experience factual incorrectness. However, it may be possible to resolve such weakness by augmenting Phi-4 with a search engine, particularly when using the model under RAG settings. ## Usage ### Tokenizer Phi-4-mini-instruct supports a vocabulary size of up to `200064` tokens. 
The [tokenizer files](https://huggingface.co/microsoft/Phi-4-mini-instruct/blob/main/added_tokens.json) already provide placeholder tokens that can be used for downstream fine-tuning, but they can also be extended up to the model's vocabulary size. ### Input Formats Given the nature of the training data, the Phi-4-mini-instruct model is best suited for prompts using specific formats. Below are the two primary formats: #### Chat format This format is used for general conversation and instructions: ```yaml <|system|>Insert System Message<|end|><|user|>Insert User Message<|end|><|assistant|> ``` #### Tool-enabled function-calling format This format is used when the user wants the model to provide function calls based on the given tools. The user should provide the available tools in the system prompt, wrapped by <|tool|> and <|/tool|> tokens. The tools should be specified in JSON format, using a JSON dump structure. Example: ` <|system|>You are a helpful assistant with some tools.<|tool|>[{"name": "get_weather_updates", "description": "Fetches weather updates for a given city using the RapidAPI Weather API.", "parameters": {"city": {"description": "The name of the city for which to retrieve weather information.", "type": "str", "default": "London"}}}]<|/tool|><|end|><|user|>What is the weather like in Paris today?<|end|><|assistant|> ` ### Inference with vLLM #### Requirements List of required packages: ``` flash_attn==2.7.4.post1 torch==2.5.1 vllm>=0.7.3 ``` #### Example To perform inference using vLLM, you can use the following code snippet: ```python from vllm import LLM, SamplingParams llm = LLM(model="microsoft/Phi-4-mini-instruct", trust_remote_code=True) messages = [ {"role": "system", "content": "You are a helpful AI assistant."}, {"role": "user", "content": "Can you provide ways to eat combinations of bananas and dragonfruits?"}, {"role": "assistant", "content": "Sure! Here are some ways to eat bananas and dragonfruits together: 1. Banana and dragonfruit smoothie: Blend bananas and dragonfruits together with some milk and honey. 2. Banana and dragonfruit salad: Mix sliced bananas and dragonfruits together with some lemon juice and honey."}, {"role": "user", "content": "What about solving an 2x + 3 = 7 equation?"}, ] sampling_params = SamplingParams( max_tokens=500, temperature=0.0, ) output = llm.chat(messages=messages, sampling_params=sampling_params) print(output[0].outputs[0].text) ``` ### Inference with Transformers #### Requirements Phi-4 family has been integrated in the `4.49.0` version of `transformers`. The current `transformers` version can be verified with: `pip list | grep transformers`. Python 3.8 and 3.10 will work best. List of required packages: ``` flash_attn==2.7.4.post1 torch==2.5.1 transformers==4.49.0 accelerate==1.3.0 ``` Phi-4-mini-instruct is also available in [Azure AI Studio]() #### Example After obtaining the Phi-4-mini-instruct model checkpoints, users can use this sample code for inference. ```python import torch from transformers import AutoModelForCausalLM, AutoTokenizer, pipeline torch.random.manual_seed(0) model_path = "microsoft/Phi-4-mini-instruct" model = AutoModelForCausalLM.from_pretrained( model_path, device_map="auto", torch_dtype="auto", trust_remote_code=True, ) tokenizer = AutoTokenizer.from_pretrained(model_path) messages = [ {"role": "system", "content": "You are a helpful AI assistant."}, {"role": "user", "content": "Can you provide ways to eat combinations of bananas and dragonfruits?"}, {"role": "assistant", "content": "Sure! 
Here are some ways to eat bananas and dragonfruits together: 1. Banana and dragonfruit smoothie: Blend bananas and dragonfruits together with some milk and honey. 2. Banana and dragonfruit salad: Mix sliced bananas and dragonfruits together with some lemon juice and honey."}, {"role": "user", "content": "What about solving an 2x + 3 = 7 equation?"}, ] pipe = pipeline( "text-generation", model=model, tokenizer=tokenizer, ) generation_args = { "max_new_tokens": 500, "return_full_text": False, "temperature": 0.0, "do_sample": False, } output = pipe(messages, **generation_args) print(output[0]['generated_text']) ``` ## Responsible AI Considerations Like other language models, the Phi family of models can potentially behave in ways that are unfair, unreliable, or offensive. Some of the limiting behaviors to be aware of include: + Quality of Service: The Phi models are trained primarily on English text and some additional multilingual text. Languages other than English will experience worse performance as well as performance disparities across non-English. English language varieties with less representation in the training data might experience worse performance than standard American English. + Multilingual performance and safety gaps: We believe it is important to make language models more widely available across different languages, but the Phi 4 models still exhibit challenges common across multilingual releases. As with any deployment of LLMs, developers will be better positioned to test for performance or safety gaps for their linguistic and cultural context and customize the model with additional fine-tuning and appropriate safeguards. + Representation of Harms & Perpetuation of Stereotypes: These models can over- or under-represent groups of people, erase representation of some groups, or reinforce demeaning or negative stereotypes. Despite safety post-training, these limitations may still be present due to differing levels of representation of different groups, cultural contexts, or prevalence of examples of negative stereotypes in training data that reflect real-world patterns and societal biases. + Inappropriate or Offensive Content: These models may produce other types of inappropriate or offensive content, which may make it inappropriate to deploy for sensitive contexts without additional mitigations that are specific to the case. + Information Reliability: Language models can generate nonsensical content or fabricate content that might sound reasonable but is inaccurate or outdated. + Limited Scope for Code: The majority of Phi 4 training data is based in Python and uses common packages such as "typing, math, random, collections, datetime, itertools". If the model generates Python scripts that utilize other packages or scripts in other languages, it is strongly recommended that users manually verify all API uses. + Long Conversation: Phi 4 models, like other models, can in some cases generate responses that are repetitive, unhelpful, or inconsistent in very long chat sessions in both English and non-English languages. Developers are encouraged to place appropriate mitigations, like limiting conversation turns to account for the possible conversational drift. Developers should apply responsible AI best practices, including mapping, measuring, and mitigating risks associated with their specific use case and cultural, linguistic context. Phi 4 family of models are general purpose models. 
As developers plan to deploy these models for specific use cases, they are encouraged to fine-tune the models for their use case and leverage the models as part of broader AI systems with language-specific safeguards in place. Important areas for consideration include: + Allocation: Models may not be suitable for scenarios that could have consequential impact on legal status or the allocation of resources or life opportunities (ex: housing, employment, credit, etc.) without further assessments and additional debiasing techniques. + High-Risk Scenarios: Developers should assess the suitability of using models in high-risk scenarios where unfair, unreliable or offensive outputs might be extremely costly or lead to harm. This includes providing advice in sensitive or expert domains where accuracy and reliability are critical (ex: legal or health advice). Additional safeguards should be implemented at the application level according to the deployment context. + Misinformation: Models may produce inaccurate information. Developers should follow transparency best practices and inform end-users they are interacting with an AI system. At the application level, developers can build feedback mechanisms and pipelines to ground responses in use-case specific, contextual information, a technique known as Retrieval Augmented Generation (RAG). + Generation of Harmful Content: Developers should assess outputs for their context and use available safety classifiers or custom solutions appropriate for their use case. + Misuse: Other forms of misuse such as fraud, spam, or malware production may be possible, and developers should ensure that their applications do not violate applicable laws and regulations. ## Training ### Model + **Architecture:** Phi-4-mini-instruct has 3.8B parameters and is a dense decoder-only Transformer model. When compared with Phi-3.5-mini, the major changes with Phi-4-mini-instruct are 200K vocabulary, grouped-query attention, and shared input and output embedding.<br> + **Inputs:** Text. It is best suited for prompts using the chat format.<br> + **Context length:** 128K tokens<br> + **GPUs:** 512 A100-80G<br> + **Training time:** 21 days<br> + **Training data:** 5T tokens<br> + **Outputs:** Generated text in response to the input<br> + **Dates:** Trained between November and December 2024<br> + **Status:** This is a static model trained on offline datasets with the cutoff date of June 2024 for publicly available data.<br> + **Supported languages:** Arabic, Chinese, Czech, Danish, Dutch, English, Finnish, French, German, Hebrew, Hungarian, Italian, Japanese, Korean, Norwegian, Polish, Portuguese, Russian, Spanish, Swedish, Thai, Turkish, Ukrainian<br> + **Release date:** February 2025<br> ### Training Datasets Phi-4-mini’s training data includes a wide variety of sources, totaling 5 trillion tokens, and is a combination of 1) publicly available documents filtered for quality, selected high-quality educational data, and code 2) newly created synthetic, “textbook-like” data for the purpose of teaching math, coding, common sense reasoning, general knowledge of the world (e.g., science, daily activities, theory of mind, etc.) 3) high quality chat format supervised data covering various topics to reflect human preferences on different aspects such as instruct-following, truthfulness, honesty and helpfulness. 
Focus was placed on the quality of data that could potentially improve the model's reasoning ability, and the publicly available documents were filtered to contain a preferred level of knowledge. As an example, the result of a Premier League game on a particular day might be good training data for frontier models, but such information was removed to leave more capacity for reasoning, given the model's small size. More details about data can be found in the Phi-4-mini-instruct technical report. The decontamination process involved normalizing and tokenizing the dataset, then generating and comparing n-grams between the target dataset and benchmark datasets. Samples with matching n-grams above a threshold were flagged as contaminated and removed from the dataset. A detailed contamination report was generated, summarizing the matched text, matching ratio, and filtered results for further analysis. ### Fine-tuning A basic example of multi-GPU supervised fine-tuning (SFT) with the TRL and Accelerate modules is provided [here](https://huggingface.co/microsoft/Phi-4-mini-instruct/resolve/main/sample_finetune.py). ## Safety Evaluation and Red-Teaming Various evaluation techniques, including red teaming, adversarial conversation simulations, and multilingual safety evaluation benchmark datasets, were leveraged to evaluate Phi-4 models' propensity to produce undesirable outputs across multiple languages and risk categories. Several approaches were used to compensate for the limitations of any one approach alone. Findings across the various evaluation methods indicate that the safety post-training, done as detailed in the Phi 3 Safety Post-Training paper, had a positive impact across multiple languages and risk categories, as observed by refusal rates (refusal to output undesirable outputs) and robustness to jailbreak techniques. Details on prior red team evaluations across Phi models can be found in the Phi 3 Safety Post-Training paper. For this release, the red team tested the model in English, Chinese, Japanese, Spanish, Portuguese, Arabic, Thai, and Russian for the following potential harms: Hate Speech and Bias, Violent Crimes, Specialized Advice, and Election Information. Their findings indicate that the model is resistant to jailbreak techniques across languages, but that language-specific attack prompts leveraging cultural context can cause the model to output harmful content. Another insight was that, in function-calling scenarios, the model could sometimes hallucinate function names or URLs. The model may also be more susceptible to longer multi-turn jailbreak techniques across both English and non-English languages. These findings highlight the need for industry-wide investment in the development of high-quality safety evaluation datasets across multiple languages, including low-resource languages, and risk areas that account for cultural nuances where those languages are spoken. ## Software * [PyTorch](https://github.com/pytorch/pytorch) * [Transformers](https://github.com/huggingface/transformers) * [Flash-Attention](https://github.com/HazyResearch/flash-attention) ## Hardware Note that by default, the Phi-4-mini-instruct model uses flash attention, which requires certain types of GPU hardware to run. 
We have tested on the following GPU types: * NVIDIA A100 * NVIDIA A6000 * NVIDIA H100 If you want to run the model on: * NVIDIA V100 or earlier generation GPUs: call AutoModelForCausalLM.from_pretrained() with attn_implementation="eager" ## License The model is licensed under the [MIT license](./LICENSE). ## Trademarks This project may contain trademarks or logos for projects, products, or services. Authorized use of Microsoft trademarks or logos is subject to and must follow [Microsoft’s Trademark & Brand Guidelines](https://www.microsoft.com/en-us/legal/intellectualproperty/trademarks). Use of Microsoft trademarks or logos in modified versions of this project must not cause confusion or imply Microsoft sponsorship. Any use of third-party trademarks or logos are subject to those third-party’s policies. ## Appendix A: Benchmark Methodology We include a brief word on methodology here - and in particular, how we think about optimizing prompts. In an ideal world, we would never change any prompts in our benchmarks to ensure it is always an apples-to-apples comparison when comparing different models. Indeed, this is our default approach, and is the case in the vast majority of models we have run to date. There are, however, some exceptions to this. In some cases, we see a model that performs worse than expected on a given eval due to a failure to respect the output format. For example: + A model may refuse to answer questions (for no apparent reason), or in coding tasks models may prefix their response with “Sure, I can help with that. …” which may break the parser. In such cases, we have opted to try different system messages (e.g. “You must always respond to a question” or “Get to the point!”). + With some models, we observed that few shots actually hurt model performance. In this case we did allow running the benchmarks with 0-shots for all cases. + We have tools to convert between chat and completions APIs. When converting a chat prompt to a completion prompt, some models have different keywords e.g. Human vs User. In these cases, we do allow for model-specific mappings for chat to completion prompts. However, we do not: + Pick different few-shot examples. Few shots will always be the same when comparing different models. + Change prompt format: e.g. if it is an A/B/C/D multiple choice, we do not tweak this to 1/2/3/4 multiple choice. ### Benchmark datasets The model was evaluated across a breadth of public and internal benchmarks to understand the model’s capabilities under multiple tasks and conditions. While most evaluations use English, the leading multilingual benchmark was incorporated that covers performance in select languages. 
More specifically,
+ Reasoning:
  + Winogrande: commonsense reasoning around pronoun resolution
  + PIQA: physical commonsense reasoning around everyday situations
  + ARC-challenge: grade-school multiple choice science questions
  + GPQA: very hard questions written and validated by experts in biology, physics, and chemistry
  + MedQA: medical question answering
  + Social IQA: social commonsense intelligence
  + BoolQ: natural questions from context
  + TruthfulQA: grounded reasoning
+ Language understanding:
  + HellaSwag: commonsense natural language inference around everyday events
  + ANLI: adversarial natural language inference
+ Function calling:
  + Berkeley Function Calling Leaderboard (BFCL): function and tool call evaluation
  + Internal function calling benchmarks
+ World knowledge:
  + TriviaQA: trivia questions on general topics
+ Math:
  + GSM8K: grade-school math word problems
  + GSM8K Hard: grade-school math word problems with large values and some absurdity
  + MATH: challenging competition math problems
+ Code:
  + HumanEval, HumanEval+, MBPP, MBPP+: Python coding tasks
  + LiveCodeBench, LiveBench: contamination-free code tasks
  + BigCodeBench: challenging programming tasks
  + Spider: SQL query tasks
  + Internal coding benchmarks
+ Instruction following:
  + IFEval: verifiable instructions
  + Internal instruction-following benchmarks
+ Multilingual:
  + MGSM: multilingual grade-school math
  + Multilingual MMLU and MMLU-pro
  + MEGA: multilingual NLP tasks
+ Popular aggregated datasets: MMLU, MMLU-pro, BigBench-Hard, AGI Eval
+ Multi-turn conversations:
  + Data generated by an in-house adversarial conversation simulation tool
+ Single-turn trustworthiness evaluation:
  + DecodingTrust: a collection of trustworthiness benchmarks across eight different perspectives
  + XSTest: exaggerated safety evaluation
  + Toxigen: adversarial and hate speech detection
+ Red Team:
  + Responses to prompts provided by the AI Red Team at Microsoft
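As a practical companion to the chat format described under Input Formats above, here is a minimal sketch of building a prompt with the tokenizer's chat template. It assumes the template bundled with the checkpoint emits the documented `<|system|>`/`<|user|>`/`<|assistant|>` tags; the example messages are illustrative and not part of the official card.
```python
from transformers import AutoTokenizer

# Load the tokenizer that ships with the checkpoint.
tokenizer = AutoTokenizer.from_pretrained("microsoft/Phi-4-mini-instruct")

messages = [
    {"role": "system", "content": "You are a helpful AI assistant."},
    {"role": "user", "content": "Solve 2x + 3 = 7."},
]

# add_generation_prompt=True appends the trailing assistant tag so the model continues as the assistant.
prompt = tokenizer.apply_chat_template(messages, tokenize=False, add_generation_prompt=True)
print(prompt)
# Expected shape of the output (assuming the bundled template matches the documented format):
# <|system|>You are a helpful AI assistant.<|end|><|user|>Solve 2x + 3 = 7.<|end|><|assistant|>
```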
[ "MEDQA" ]
jinaai/jina-embeddings-v2-base-en
jinaai
feature-extraction
[ "sentence-transformers", "pytorch", "coreml", "onnx", "safetensors", "bert", "feature-extraction", "sentence-similarity", "mteb", "custom_code", "en", "dataset:allenai/c4", "arxiv:2108.12409", "arxiv:2310.19923", "license:apache-2.0", "model-index", "autotrain_compatible", "text-embeddings-inference", "region:us" ]
"2023-09-27T17:04:00Z"
2025-01-06T16:24:38+00:00
227,825
716
--- datasets: - allenai/c4 language: en license: apache-2.0 tags: - sentence-transformers - feature-extraction - sentence-similarity - mteb inference: false model-index: - name: jina-embedding-b-en-v2 results: - task: type: Classification dataset: name: MTEB AmazonCounterfactualClassification (en) type: mteb/amazon_counterfactual config: en split: test revision: e8379541af4e31359cca9fbcf4b00f2671dba205 metrics: - type: accuracy value: 74.73134328358209 - type: ap value: 37.765427081831035 - type: f1 value: 68.79367444339518 - task: type: Classification dataset: name: MTEB AmazonPolarityClassification type: mteb/amazon_polarity config: default split: test revision: e2d317d38cd51312af73b3d32a06d1a08b442046 metrics: - type: accuracy value: 88.544275 - type: ap value: 84.61328675662887 - type: f1 value: 88.51879035862375 - task: type: Classification dataset: name: MTEB AmazonReviewsClassification (en) type: mteb/amazon_reviews_multi config: en split: test revision: 1399c76144fd37290681b995c656ef9b2e06e26d metrics: - type: accuracy value: 45.263999999999996 - type: f1 value: 43.778759656699435 - task: type: Retrieval dataset: name: MTEB ArguAna type: arguana config: default split: test revision: None metrics: - type: map_at_1 value: 21.693 - type: map_at_10 value: 35.487 - type: map_at_100 value: 36.862 - type: map_at_1000 value: 36.872 - type: map_at_3 value: 30.049999999999997 - type: map_at_5 value: 32.966 - type: mrr_at_1 value: 21.977 - type: mrr_at_10 value: 35.565999999999995 - type: mrr_at_100 value: 36.948 - type: mrr_at_1000 value: 36.958 - type: mrr_at_3 value: 30.121 - type: mrr_at_5 value: 33.051 - type: ndcg_at_1 value: 21.693 - type: ndcg_at_10 value: 44.181 - type: ndcg_at_100 value: 49.982 - type: ndcg_at_1000 value: 50.233000000000004 - type: ndcg_at_3 value: 32.830999999999996 - type: ndcg_at_5 value: 38.080000000000005 - type: precision_at_1 value: 21.693 - type: precision_at_10 value: 7.248 - type: precision_at_100 value: 0.9769999999999999 - type: precision_at_1000 value: 0.1 - type: precision_at_3 value: 13.632 - type: precision_at_5 value: 10.725 - type: recall_at_1 value: 21.693 - type: recall_at_10 value: 72.475 - type: recall_at_100 value: 97.653 - type: recall_at_1000 value: 99.57300000000001 - type: recall_at_3 value: 40.896 - type: recall_at_5 value: 53.627 - task: type: Clustering dataset: name: MTEB ArxivClusteringP2P type: mteb/arxiv-clustering-p2p config: default split: test revision: a122ad7f3f0291bf49cc6f4d32aa80929df69d5d metrics: - type: v_measure value: 45.39242428696777 - task: type: Clustering dataset: name: MTEB ArxivClusteringS2S type: mteb/arxiv-clustering-s2s config: default split: test revision: f910caf1a6075f7329cdf8c1a6135696f37dbd53 metrics: - type: v_measure value: 36.675626784714 - task: type: Reranking dataset: name: MTEB AskUbuntuDupQuestions type: mteb/askubuntudupquestions-reranking config: default split: test revision: 2000358ca161889fa9c082cb41daa8dcfb161a54 metrics: - type: map value: 62.247725694904034 - type: mrr value: 74.91359978894604 - task: type: STS dataset: name: MTEB BIOSSES type: mteb/biosses-sts config: default split: test revision: d3fb88f8f02e40887cd149695127462bbcf29b4a metrics: - type: cos_sim_pearson value: 82.68003802970496 - type: cos_sim_spearman value: 81.23438110096286 - type: euclidean_pearson value: 81.87462986142582 - type: euclidean_spearman value: 81.23438110096286 - type: manhattan_pearson value: 81.61162566600755 - type: manhattan_spearman value: 81.11329400456184 - task: type: Classification dataset: name: 
MTEB Banking77Classification type: mteb/banking77 config: default split: test revision: 0fd18e25b25c072e09e0d92ab615fda904d66300 metrics: - type: accuracy value: 84.01298701298701 - type: f1 value: 83.31690714969382 - task: type: Clustering dataset: name: MTEB BiorxivClusteringP2P type: mteb/biorxiv-clustering-p2p config: default split: test revision: 65b79d1d13f80053f67aca9498d9402c2d9f1f40 metrics: - type: v_measure value: 37.050108150972086 - task: type: Clustering dataset: name: MTEB BiorxivClusteringS2S type: mteb/biorxiv-clustering-s2s config: default split: test revision: 258694dd0231531bc1fd9de6ceb52a0853c6d908 metrics: - type: v_measure value: 30.15731442819715 - task: type: Retrieval dataset: name: MTEB CQADupstackAndroidRetrieval type: BeIR/cqadupstack config: default split: test revision: None metrics: - type: map_at_1 value: 31.391999999999996 - type: map_at_10 value: 42.597 - type: map_at_100 value: 44.07 - type: map_at_1000 value: 44.198 - type: map_at_3 value: 38.957 - type: map_at_5 value: 40.961 - type: mrr_at_1 value: 37.196 - type: mrr_at_10 value: 48.152 - type: mrr_at_100 value: 48.928 - type: mrr_at_1000 value: 48.964999999999996 - type: mrr_at_3 value: 45.446 - type: mrr_at_5 value: 47.205999999999996 - type: ndcg_at_1 value: 37.196 - type: ndcg_at_10 value: 49.089 - type: ndcg_at_100 value: 54.471000000000004 - type: ndcg_at_1000 value: 56.385 - type: ndcg_at_3 value: 43.699 - type: ndcg_at_5 value: 46.22 - type: precision_at_1 value: 37.196 - type: precision_at_10 value: 9.313 - type: precision_at_100 value: 1.478 - type: precision_at_1000 value: 0.198 - type: precision_at_3 value: 20.839 - type: precision_at_5 value: 14.936 - type: recall_at_1 value: 31.391999999999996 - type: recall_at_10 value: 61.876 - type: recall_at_100 value: 84.214 - type: recall_at_1000 value: 95.985 - type: recall_at_3 value: 46.6 - type: recall_at_5 value: 53.588 - type: map_at_1 value: 29.083 - type: map_at_10 value: 38.812999999999995 - type: map_at_100 value: 40.053 - type: map_at_1000 value: 40.188 - type: map_at_3 value: 36.111 - type: map_at_5 value: 37.519000000000005 - type: mrr_at_1 value: 36.497 - type: mrr_at_10 value: 44.85 - type: mrr_at_100 value: 45.546 - type: mrr_at_1000 value: 45.593 - type: mrr_at_3 value: 42.686 - type: mrr_at_5 value: 43.909 - type: ndcg_at_1 value: 36.497 - type: ndcg_at_10 value: 44.443 - type: ndcg_at_100 value: 48.979 - type: ndcg_at_1000 value: 51.154999999999994 - type: ndcg_at_3 value: 40.660000000000004 - type: ndcg_at_5 value: 42.193000000000005 - type: precision_at_1 value: 36.497 - type: precision_at_10 value: 8.433 - type: precision_at_100 value: 1.369 - type: precision_at_1000 value: 0.185 - type: precision_at_3 value: 19.894000000000002 - type: precision_at_5 value: 13.873 - type: recall_at_1 value: 29.083 - type: recall_at_10 value: 54.313 - type: recall_at_100 value: 73.792 - type: recall_at_1000 value: 87.629 - type: recall_at_3 value: 42.257 - type: recall_at_5 value: 47.066 - type: map_at_1 value: 38.556000000000004 - type: map_at_10 value: 50.698 - type: map_at_100 value: 51.705 - type: map_at_1000 value: 51.768 - type: map_at_3 value: 47.848 - type: map_at_5 value: 49.358000000000004 - type: mrr_at_1 value: 43.95 - type: mrr_at_10 value: 54.191 - type: mrr_at_100 value: 54.852999999999994 - type: mrr_at_1000 value: 54.885 - type: mrr_at_3 value: 51.954 - type: mrr_at_5 value: 53.13 - type: ndcg_at_1 value: 43.95 - type: ndcg_at_10 value: 56.516 - type: ndcg_at_100 value: 60.477000000000004 - type: ndcg_at_1000 value: 61.746 - 
type: ndcg_at_3 value: 51.601 - type: ndcg_at_5 value: 53.795 - type: precision_at_1 value: 43.95 - type: precision_at_10 value: 9.009 - type: precision_at_100 value: 1.189 - type: precision_at_1000 value: 0.135 - type: precision_at_3 value: 22.989 - type: precision_at_5 value: 15.473 - type: recall_at_1 value: 38.556000000000004 - type: recall_at_10 value: 70.159 - type: recall_at_100 value: 87.132 - type: recall_at_1000 value: 96.16 - type: recall_at_3 value: 56.906 - type: recall_at_5 value: 62.332 - type: map_at_1 value: 24.238 - type: map_at_10 value: 32.5 - type: map_at_100 value: 33.637 - type: map_at_1000 value: 33.719 - type: map_at_3 value: 30.026999999999997 - type: map_at_5 value: 31.555 - type: mrr_at_1 value: 26.328000000000003 - type: mrr_at_10 value: 34.44 - type: mrr_at_100 value: 35.455999999999996 - type: mrr_at_1000 value: 35.521 - type: mrr_at_3 value: 32.034 - type: mrr_at_5 value: 33.565 - type: ndcg_at_1 value: 26.328000000000003 - type: ndcg_at_10 value: 37.202 - type: ndcg_at_100 value: 42.728 - type: ndcg_at_1000 value: 44.792 - type: ndcg_at_3 value: 32.368 - type: ndcg_at_5 value: 35.008 - type: precision_at_1 value: 26.328000000000003 - type: precision_at_10 value: 5.7059999999999995 - type: precision_at_100 value: 0.8880000000000001 - type: precision_at_1000 value: 0.11100000000000002 - type: precision_at_3 value: 13.672 - type: precision_at_5 value: 9.74 - type: recall_at_1 value: 24.238 - type: recall_at_10 value: 49.829 - type: recall_at_100 value: 75.21 - type: recall_at_1000 value: 90.521 - type: recall_at_3 value: 36.867 - type: recall_at_5 value: 43.241 - type: map_at_1 value: 15.378 - type: map_at_10 value: 22.817999999999998 - type: map_at_100 value: 23.977999999999998 - type: map_at_1000 value: 24.108 - type: map_at_3 value: 20.719 - type: map_at_5 value: 21.889 - type: mrr_at_1 value: 19.03 - type: mrr_at_10 value: 27.022000000000002 - type: mrr_at_100 value: 28.011999999999997 - type: mrr_at_1000 value: 28.096 - type: mrr_at_3 value: 24.855 - type: mrr_at_5 value: 26.029999999999998 - type: ndcg_at_1 value: 19.03 - type: ndcg_at_10 value: 27.526 - type: ndcg_at_100 value: 33.040000000000006 - type: ndcg_at_1000 value: 36.187000000000005 - type: ndcg_at_3 value: 23.497 - type: ndcg_at_5 value: 25.334 - type: precision_at_1 value: 19.03 - type: precision_at_10 value: 4.963 - type: precision_at_100 value: 0.893 - type: precision_at_1000 value: 0.13 - type: precision_at_3 value: 11.360000000000001 - type: precision_at_5 value: 8.134 - type: recall_at_1 value: 15.378 - type: recall_at_10 value: 38.061 - type: recall_at_100 value: 61.754 - type: recall_at_1000 value: 84.259 - type: recall_at_3 value: 26.788 - type: recall_at_5 value: 31.326999999999998 - type: map_at_1 value: 27.511999999999997 - type: map_at_10 value: 37.429 - type: map_at_100 value: 38.818000000000005 - type: map_at_1000 value: 38.924 - type: map_at_3 value: 34.625 - type: map_at_5 value: 36.064 - type: mrr_at_1 value: 33.300999999999995 - type: mrr_at_10 value: 43.036 - type: mrr_at_100 value: 43.894 - type: mrr_at_1000 value: 43.936 - type: mrr_at_3 value: 40.825 - type: mrr_at_5 value: 42.028 - type: ndcg_at_1 value: 33.300999999999995 - type: ndcg_at_10 value: 43.229 - type: ndcg_at_100 value: 48.992000000000004 - type: ndcg_at_1000 value: 51.02100000000001 - type: ndcg_at_3 value: 38.794000000000004 - type: ndcg_at_5 value: 40.65 - type: precision_at_1 value: 33.300999999999995 - type: precision_at_10 value: 7.777000000000001 - type: precision_at_100 value: 1.269 - type: 
precision_at_1000 value: 0.163 - type: precision_at_3 value: 18.351 - type: precision_at_5 value: 12.762 - type: recall_at_1 value: 27.511999999999997 - type: recall_at_10 value: 54.788000000000004 - type: recall_at_100 value: 79.105 - type: recall_at_1000 value: 92.49199999999999 - type: recall_at_3 value: 41.924 - type: recall_at_5 value: 47.026 - type: map_at_1 value: 24.117 - type: map_at_10 value: 33.32 - type: map_at_100 value: 34.677 - type: map_at_1000 value: 34.78 - type: map_at_3 value: 30.233999999999998 - type: map_at_5 value: 31.668000000000003 - type: mrr_at_1 value: 29.566 - type: mrr_at_10 value: 38.244 - type: mrr_at_100 value: 39.245000000000005 - type: mrr_at_1000 value: 39.296 - type: mrr_at_3 value: 35.864000000000004 - type: mrr_at_5 value: 36.919999999999995 - type: ndcg_at_1 value: 29.566 - type: ndcg_at_10 value: 39.127 - type: ndcg_at_100 value: 44.989000000000004 - type: ndcg_at_1000 value: 47.189 - type: ndcg_at_3 value: 34.039 - type: ndcg_at_5 value: 35.744 - type: precision_at_1 value: 29.566 - type: precision_at_10 value: 7.385999999999999 - type: precision_at_100 value: 1.204 - type: precision_at_1000 value: 0.158 - type: precision_at_3 value: 16.286 - type: precision_at_5 value: 11.484 - type: recall_at_1 value: 24.117 - type: recall_at_10 value: 51.559999999999995 - type: recall_at_100 value: 77.104 - type: recall_at_1000 value: 91.79899999999999 - type: recall_at_3 value: 36.82 - type: recall_at_5 value: 41.453 - type: map_at_1 value: 25.17625 - type: map_at_10 value: 34.063916666666664 - type: map_at_100 value: 35.255500000000005 - type: map_at_1000 value: 35.37275 - type: map_at_3 value: 31.351666666666667 - type: map_at_5 value: 32.80608333333333 - type: mrr_at_1 value: 29.59783333333333 - type: mrr_at_10 value: 38.0925 - type: mrr_at_100 value: 38.957249999999995 - type: mrr_at_1000 value: 39.01608333333333 - type: mrr_at_3 value: 35.77625 - type: mrr_at_5 value: 37.04991666666667 - type: ndcg_at_1 value: 29.59783333333333 - type: ndcg_at_10 value: 39.343666666666664 - type: ndcg_at_100 value: 44.488249999999994 - type: ndcg_at_1000 value: 46.83358333333334 - type: ndcg_at_3 value: 34.69708333333333 - type: ndcg_at_5 value: 36.75075 - type: precision_at_1 value: 29.59783333333333 - type: precision_at_10 value: 6.884083333333332 - type: precision_at_100 value: 1.114 - type: precision_at_1000 value: 0.15108333333333332 - type: precision_at_3 value: 15.965250000000003 - type: precision_at_5 value: 11.246500000000001 - type: recall_at_1 value: 25.17625 - type: recall_at_10 value: 51.015999999999984 - type: recall_at_100 value: 73.60174999999998 - type: recall_at_1000 value: 89.849 - type: recall_at_3 value: 37.88399999999999 - type: recall_at_5 value: 43.24541666666666 - type: map_at_1 value: 24.537 - type: map_at_10 value: 31.081999999999997 - type: map_at_100 value: 32.042 - type: map_at_1000 value: 32.141 - type: map_at_3 value: 29.137 - type: map_at_5 value: 30.079 - type: mrr_at_1 value: 27.454 - type: mrr_at_10 value: 33.694 - type: mrr_at_100 value: 34.579 - type: mrr_at_1000 value: 34.649 - type: mrr_at_3 value: 32.004 - type: mrr_at_5 value: 32.794000000000004 - type: ndcg_at_1 value: 27.454 - type: ndcg_at_10 value: 34.915 - type: ndcg_at_100 value: 39.641 - type: ndcg_at_1000 value: 42.105 - type: ndcg_at_3 value: 31.276 - type: ndcg_at_5 value: 32.65 - type: precision_at_1 value: 27.454 - type: precision_at_10 value: 5.337 - type: precision_at_100 value: 0.8250000000000001 - type: precision_at_1000 value: 0.11199999999999999 - type: 
precision_at_3 value: 13.241 - type: precision_at_5 value: 8.895999999999999 - type: recall_at_1 value: 24.537 - type: recall_at_10 value: 44.324999999999996 - type: recall_at_100 value: 65.949 - type: recall_at_1000 value: 84.017 - type: recall_at_3 value: 33.857 - type: recall_at_5 value: 37.316 - type: map_at_1 value: 17.122 - type: map_at_10 value: 24.32 - type: map_at_100 value: 25.338 - type: map_at_1000 value: 25.462 - type: map_at_3 value: 22.064 - type: map_at_5 value: 23.322000000000003 - type: mrr_at_1 value: 20.647 - type: mrr_at_10 value: 27.858 - type: mrr_at_100 value: 28.743999999999996 - type: mrr_at_1000 value: 28.819 - type: mrr_at_3 value: 25.769 - type: mrr_at_5 value: 26.964 - type: ndcg_at_1 value: 20.647 - type: ndcg_at_10 value: 28.849999999999998 - type: ndcg_at_100 value: 33.849000000000004 - type: ndcg_at_1000 value: 36.802 - type: ndcg_at_3 value: 24.799 - type: ndcg_at_5 value: 26.682 - type: precision_at_1 value: 20.647 - type: precision_at_10 value: 5.2170000000000005 - type: precision_at_100 value: 0.906 - type: precision_at_1000 value: 0.134 - type: precision_at_3 value: 11.769 - type: precision_at_5 value: 8.486 - type: recall_at_1 value: 17.122 - type: recall_at_10 value: 38.999 - type: recall_at_100 value: 61.467000000000006 - type: recall_at_1000 value: 82.716 - type: recall_at_3 value: 27.601 - type: recall_at_5 value: 32.471 - type: map_at_1 value: 24.396 - type: map_at_10 value: 33.415 - type: map_at_100 value: 34.521 - type: map_at_1000 value: 34.631 - type: map_at_3 value: 30.703999999999997 - type: map_at_5 value: 32.166 - type: mrr_at_1 value: 28.825 - type: mrr_at_10 value: 37.397000000000006 - type: mrr_at_100 value: 38.286 - type: mrr_at_1000 value: 38.346000000000004 - type: mrr_at_3 value: 35.028 - type: mrr_at_5 value: 36.32 - type: ndcg_at_1 value: 28.825 - type: ndcg_at_10 value: 38.656 - type: ndcg_at_100 value: 43.856 - type: ndcg_at_1000 value: 46.31 - type: ndcg_at_3 value: 33.793 - type: ndcg_at_5 value: 35.909 - type: precision_at_1 value: 28.825 - type: precision_at_10 value: 6.567 - type: precision_at_100 value: 1.0330000000000001 - type: precision_at_1000 value: 0.135 - type: precision_at_3 value: 15.516 - type: precision_at_5 value: 10.914 - type: recall_at_1 value: 24.396 - type: recall_at_10 value: 50.747 - type: recall_at_100 value: 73.477 - type: recall_at_1000 value: 90.801 - type: recall_at_3 value: 37.1 - type: recall_at_5 value: 42.589 - type: map_at_1 value: 25.072 - type: map_at_10 value: 34.307 - type: map_at_100 value: 35.725 - type: map_at_1000 value: 35.943999999999996 - type: map_at_3 value: 30.906 - type: map_at_5 value: 32.818000000000005 - type: mrr_at_1 value: 29.644 - type: mrr_at_10 value: 38.673 - type: mrr_at_100 value: 39.459 - type: mrr_at_1000 value: 39.527 - type: mrr_at_3 value: 35.771 - type: mrr_at_5 value: 37.332 - type: ndcg_at_1 value: 29.644 - type: ndcg_at_10 value: 40.548 - type: ndcg_at_100 value: 45.678999999999995 - type: ndcg_at_1000 value: 48.488 - type: ndcg_at_3 value: 34.887 - type: ndcg_at_5 value: 37.543 - type: precision_at_1 value: 29.644 - type: precision_at_10 value: 7.688000000000001 - type: precision_at_100 value: 1.482 - type: precision_at_1000 value: 0.23600000000000002 - type: precision_at_3 value: 16.206 - type: precision_at_5 value: 12.016 - type: recall_at_1 value: 25.072 - type: recall_at_10 value: 53.478 - type: recall_at_100 value: 76.07300000000001 - type: recall_at_1000 value: 93.884 - type: recall_at_3 value: 37.583 - type: recall_at_5 value: 44.464 - type: 
map_at_1 value: 20.712 - type: map_at_10 value: 27.467999999999996 - type: map_at_100 value: 28.502 - type: map_at_1000 value: 28.610000000000003 - type: map_at_3 value: 24.887999999999998 - type: map_at_5 value: 26.273999999999997 - type: mrr_at_1 value: 22.736 - type: mrr_at_10 value: 29.553 - type: mrr_at_100 value: 30.485 - type: mrr_at_1000 value: 30.56 - type: mrr_at_3 value: 27.078999999999997 - type: mrr_at_5 value: 28.401 - type: ndcg_at_1 value: 22.736 - type: ndcg_at_10 value: 32.023 - type: ndcg_at_100 value: 37.158 - type: ndcg_at_1000 value: 39.823 - type: ndcg_at_3 value: 26.951999999999998 - type: ndcg_at_5 value: 29.281000000000002 - type: precision_at_1 value: 22.736 - type: precision_at_10 value: 5.213 - type: precision_at_100 value: 0.832 - type: precision_at_1000 value: 0.116 - type: precision_at_3 value: 11.459999999999999 - type: precision_at_5 value: 8.244 - type: recall_at_1 value: 20.712 - type: recall_at_10 value: 44.057 - type: recall_at_100 value: 67.944 - type: recall_at_1000 value: 87.925 - type: recall_at_3 value: 30.305 - type: recall_at_5 value: 36.071999999999996 - task: type: Retrieval dataset: name: MTEB ClimateFEVER type: climate-fever config: default split: test revision: None metrics: - type: map_at_1 value: 10.181999999999999 - type: map_at_10 value: 16.66 - type: map_at_100 value: 18.273 - type: map_at_1000 value: 18.45 - type: map_at_3 value: 14.141 - type: map_at_5 value: 15.455 - type: mrr_at_1 value: 22.15 - type: mrr_at_10 value: 32.062000000000005 - type: mrr_at_100 value: 33.116 - type: mrr_at_1000 value: 33.168 - type: mrr_at_3 value: 28.827 - type: mrr_at_5 value: 30.892999999999997 - type: ndcg_at_1 value: 22.15 - type: ndcg_at_10 value: 23.532 - type: ndcg_at_100 value: 30.358 - type: ndcg_at_1000 value: 33.783 - type: ndcg_at_3 value: 19.222 - type: ndcg_at_5 value: 20.919999999999998 - type: precision_at_1 value: 22.15 - type: precision_at_10 value: 7.185999999999999 - type: precision_at_100 value: 1.433 - type: precision_at_1000 value: 0.207 - type: precision_at_3 value: 13.941 - type: precision_at_5 value: 10.906 - type: recall_at_1 value: 10.181999999999999 - type: recall_at_10 value: 28.104000000000003 - type: recall_at_100 value: 51.998999999999995 - type: recall_at_1000 value: 71.311 - type: recall_at_3 value: 17.698 - type: recall_at_5 value: 22.262999999999998 - task: type: Retrieval dataset: name: MTEB DBPedia type: dbpedia-entity config: default split: test revision: None metrics: - type: map_at_1 value: 6.669 - type: map_at_10 value: 15.552 - type: map_at_100 value: 21.865000000000002 - type: map_at_1000 value: 23.268 - type: map_at_3 value: 11.309 - type: map_at_5 value: 13.084000000000001 - type: mrr_at_1 value: 55.50000000000001 - type: mrr_at_10 value: 66.46600000000001 - type: mrr_at_100 value: 66.944 - type: mrr_at_1000 value: 66.956 - type: mrr_at_3 value: 64.542 - type: mrr_at_5 value: 65.717 - type: ndcg_at_1 value: 44.75 - type: ndcg_at_10 value: 35.049 - type: ndcg_at_100 value: 39.073 - type: ndcg_at_1000 value: 46.208 - type: ndcg_at_3 value: 39.525 - type: ndcg_at_5 value: 37.156 - type: precision_at_1 value: 55.50000000000001 - type: precision_at_10 value: 27.800000000000004 - type: precision_at_100 value: 9.013 - type: precision_at_1000 value: 1.8800000000000001 - type: precision_at_3 value: 42.667 - type: precision_at_5 value: 36.0 - type: recall_at_1 value: 6.669 - type: recall_at_10 value: 21.811 - type: recall_at_100 value: 45.112 - type: recall_at_1000 value: 67.806 - type: recall_at_3 value: 13.373 - 
type: recall_at_5 value: 16.615 - task: type: Classification dataset: name: MTEB EmotionClassification type: mteb/emotion config: default split: test revision: 4f58c6b202a23cf9a4da393831edf4f9183cad37 metrics: - type: accuracy value: 48.769999999999996 - type: f1 value: 42.91448356376592 - task: type: Retrieval dataset: name: MTEB FEVER type: fever config: default split: test revision: None metrics: - type: map_at_1 value: 54.013 - type: map_at_10 value: 66.239 - type: map_at_100 value: 66.62599999999999 - type: map_at_1000 value: 66.644 - type: map_at_3 value: 63.965 - type: map_at_5 value: 65.45400000000001 - type: mrr_at_1 value: 58.221000000000004 - type: mrr_at_10 value: 70.43700000000001 - type: mrr_at_100 value: 70.744 - type: mrr_at_1000 value: 70.75099999999999 - type: mrr_at_3 value: 68.284 - type: mrr_at_5 value: 69.721 - type: ndcg_at_1 value: 58.221000000000004 - type: ndcg_at_10 value: 72.327 - type: ndcg_at_100 value: 73.953 - type: ndcg_at_1000 value: 74.312 - type: ndcg_at_3 value: 68.062 - type: ndcg_at_5 value: 70.56400000000001 - type: precision_at_1 value: 58.221000000000004 - type: precision_at_10 value: 9.521 - type: precision_at_100 value: 1.045 - type: precision_at_1000 value: 0.109 - type: precision_at_3 value: 27.348 - type: precision_at_5 value: 17.794999999999998 - type: recall_at_1 value: 54.013 - type: recall_at_10 value: 86.957 - type: recall_at_100 value: 93.911 - type: recall_at_1000 value: 96.38 - type: recall_at_3 value: 75.555 - type: recall_at_5 value: 81.671 - task: type: Retrieval dataset: name: MTEB FiQA2018 type: fiqa config: default split: test revision: None metrics: - type: map_at_1 value: 21.254 - type: map_at_10 value: 33.723 - type: map_at_100 value: 35.574 - type: map_at_1000 value: 35.730000000000004 - type: map_at_3 value: 29.473 - type: map_at_5 value: 31.543 - type: mrr_at_1 value: 41.358 - type: mrr_at_10 value: 49.498 - type: mrr_at_100 value: 50.275999999999996 - type: mrr_at_1000 value: 50.308 - type: mrr_at_3 value: 47.016000000000005 - type: mrr_at_5 value: 48.336 - type: ndcg_at_1 value: 41.358 - type: ndcg_at_10 value: 41.579 - type: ndcg_at_100 value: 48.455 - type: ndcg_at_1000 value: 51.165000000000006 - type: ndcg_at_3 value: 37.681 - type: ndcg_at_5 value: 38.49 - type: precision_at_1 value: 41.358 - type: precision_at_10 value: 11.543000000000001 - type: precision_at_100 value: 1.87 - type: precision_at_1000 value: 0.23600000000000002 - type: precision_at_3 value: 24.743000000000002 - type: precision_at_5 value: 17.994 - type: recall_at_1 value: 21.254 - type: recall_at_10 value: 48.698 - type: recall_at_100 value: 74.588 - type: recall_at_1000 value: 91.00200000000001 - type: recall_at_3 value: 33.939 - type: recall_at_5 value: 39.367000000000004 - task: type: Retrieval dataset: name: MTEB HotpotQA type: hotpotqa config: default split: test revision: None metrics: - type: map_at_1 value: 35.922 - type: map_at_10 value: 52.32599999999999 - type: map_at_100 value: 53.18000000000001 - type: map_at_1000 value: 53.245 - type: map_at_3 value: 49.294 - type: map_at_5 value: 51.202999999999996 - type: mrr_at_1 value: 71.843 - type: mrr_at_10 value: 78.24600000000001 - type: mrr_at_100 value: 78.515 - type: mrr_at_1000 value: 78.527 - type: mrr_at_3 value: 77.17500000000001 - type: mrr_at_5 value: 77.852 - type: ndcg_at_1 value: 71.843 - type: ndcg_at_10 value: 61.379 - type: ndcg_at_100 value: 64.535 - type: ndcg_at_1000 value: 65.888 - type: ndcg_at_3 value: 56.958 - type: ndcg_at_5 value: 59.434 - type: precision_at_1 value: 
71.843 - type: precision_at_10 value: 12.686 - type: precision_at_100 value: 1.517 - type: precision_at_1000 value: 0.16999999999999998 - type: precision_at_3 value: 35.778 - type: precision_at_5 value: 23.422 - type: recall_at_1 value: 35.922 - type: recall_at_10 value: 63.43 - type: recall_at_100 value: 75.868 - type: recall_at_1000 value: 84.88900000000001 - type: recall_at_3 value: 53.666000000000004 - type: recall_at_5 value: 58.555 - task: type: Classification dataset: name: MTEB ImdbClassification type: mteb/imdb config: default split: test revision: 3d86128a09e091d6018b6d26cad27f2739fc2db7 metrics: - type: accuracy value: 79.4408 - type: ap value: 73.52820871620366 - type: f1 value: 79.36240238685001 - task: type: Retrieval dataset: name: MTEB MSMARCO type: msmarco config: default split: dev revision: None metrics: - type: map_at_1 value: 21.826999999999998 - type: map_at_10 value: 34.04 - type: map_at_100 value: 35.226 - type: map_at_1000 value: 35.275 - type: map_at_3 value: 30.165999999999997 - type: map_at_5 value: 32.318000000000005 - type: mrr_at_1 value: 22.464000000000002 - type: mrr_at_10 value: 34.631 - type: mrr_at_100 value: 35.752 - type: mrr_at_1000 value: 35.795 - type: mrr_at_3 value: 30.798 - type: mrr_at_5 value: 32.946999999999996 - type: ndcg_at_1 value: 22.464000000000002 - type: ndcg_at_10 value: 40.919 - type: ndcg_at_100 value: 46.632 - type: ndcg_at_1000 value: 47.833 - type: ndcg_at_3 value: 32.992 - type: ndcg_at_5 value: 36.834 - type: precision_at_1 value: 22.464000000000002 - type: precision_at_10 value: 6.494 - type: precision_at_100 value: 0.9369999999999999 - type: precision_at_1000 value: 0.104 - type: precision_at_3 value: 14.021 - type: precision_at_5 value: 10.347000000000001 - type: recall_at_1 value: 21.826999999999998 - type: recall_at_10 value: 62.132 - type: recall_at_100 value: 88.55199999999999 - type: recall_at_1000 value: 97.707 - type: recall_at_3 value: 40.541 - type: recall_at_5 value: 49.739 - task: type: Classification dataset: name: MTEB MTOPDomainClassification (en) type: mteb/mtop_domain config: en split: test revision: d80d48c1eb48d3562165c59d59d0034df9fff0bf metrics: - type: accuracy value: 95.68399452804377 - type: f1 value: 95.25490609832268 - task: type: Classification dataset: name: MTEB MTOPIntentClassification (en) type: mteb/mtop_intent config: en split: test revision: ae001d0e6b1228650b7bd1c2c65fb50ad11a8aba metrics: - type: accuracy value: 83.15321477428182 - type: f1 value: 60.35476439087966 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (en) type: mteb/amazon_massive_intent config: en split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 71.92669804976462 - type: f1 value: 69.22815107207565 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (en) type: mteb/amazon_massive_scenario config: en split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 74.4855413584398 - type: f1 value: 72.92107516103387 - task: type: Clustering dataset: name: MTEB MedrxivClusteringP2P type: mteb/medrxiv-clustering-p2p config: default split: test revision: e7a26af6f3ae46b30dde8737f02c07b1505bcc73 metrics: - type: v_measure value: 32.412679360205544 - task: type: Clustering dataset: name: MTEB MedrxivClusteringS2S type: mteb/medrxiv-clustering-s2s config: default split: test revision: 35191c8c0dca72d8ff3efcd72aa802307d469663 metrics: - type: v_measure value: 28.09211869875204 - task: type: Reranking 
dataset: name: MTEB MindSmallReranking type: mteb/mind_small config: default split: test revision: 3bdac13927fdc888b903db93b2ffdbd90b295a69 metrics: - type: map value: 30.540919056982545 - type: mrr value: 31.529904607063536 - task: type: Retrieval dataset: name: MTEB NFCorpus type: nfcorpus config: default split: test revision: None metrics: - type: map_at_1 value: 5.745 - type: map_at_10 value: 12.013 - type: map_at_100 value: 15.040000000000001 - type: map_at_1000 value: 16.427 - type: map_at_3 value: 8.841000000000001 - type: map_at_5 value: 10.289 - type: mrr_at_1 value: 45.201 - type: mrr_at_10 value: 53.483999999999995 - type: mrr_at_100 value: 54.20700000000001 - type: mrr_at_1000 value: 54.252 - type: mrr_at_3 value: 51.29 - type: mrr_at_5 value: 52.73 - type: ndcg_at_1 value: 43.808 - type: ndcg_at_10 value: 32.445 - type: ndcg_at_100 value: 30.031000000000002 - type: ndcg_at_1000 value: 39.007 - type: ndcg_at_3 value: 37.204 - type: ndcg_at_5 value: 35.07 - type: precision_at_1 value: 45.201 - type: precision_at_10 value: 23.684 - type: precision_at_100 value: 7.600999999999999 - type: precision_at_1000 value: 2.043 - type: precision_at_3 value: 33.953 - type: precision_at_5 value: 29.412 - type: recall_at_1 value: 5.745 - type: recall_at_10 value: 16.168 - type: recall_at_100 value: 30.875999999999998 - type: recall_at_1000 value: 62.686 - type: recall_at_3 value: 9.75 - type: recall_at_5 value: 12.413 - task: type: Retrieval dataset: name: MTEB NQ type: nq config: default split: test revision: None metrics: - type: map_at_1 value: 37.828 - type: map_at_10 value: 53.239000000000004 - type: map_at_100 value: 54.035999999999994 - type: map_at_1000 value: 54.067 - type: map_at_3 value: 49.289 - type: map_at_5 value: 51.784 - type: mrr_at_1 value: 42.497 - type: mrr_at_10 value: 55.916999999999994 - type: mrr_at_100 value: 56.495 - type: mrr_at_1000 value: 56.516999999999996 - type: mrr_at_3 value: 52.800000000000004 - type: mrr_at_5 value: 54.722 - type: ndcg_at_1 value: 42.468 - type: ndcg_at_10 value: 60.437 - type: ndcg_at_100 value: 63.731 - type: ndcg_at_1000 value: 64.41799999999999 - type: ndcg_at_3 value: 53.230999999999995 - type: ndcg_at_5 value: 57.26 - type: precision_at_1 value: 42.468 - type: precision_at_10 value: 9.47 - type: precision_at_100 value: 1.1360000000000001 - type: precision_at_1000 value: 0.12 - type: precision_at_3 value: 23.724999999999998 - type: precision_at_5 value: 16.593 - type: recall_at_1 value: 37.828 - type: recall_at_10 value: 79.538 - type: recall_at_100 value: 93.646 - type: recall_at_1000 value: 98.72999999999999 - type: recall_at_3 value: 61.134 - type: recall_at_5 value: 70.377 - task: type: Retrieval dataset: name: MTEB QuoraRetrieval type: quora config: default split: test revision: None metrics: - type: map_at_1 value: 70.548 - type: map_at_10 value: 84.466 - type: map_at_100 value: 85.10600000000001 - type: map_at_1000 value: 85.123 - type: map_at_3 value: 81.57600000000001 - type: map_at_5 value: 83.399 - type: mrr_at_1 value: 81.24 - type: mrr_at_10 value: 87.457 - type: mrr_at_100 value: 87.574 - type: mrr_at_1000 value: 87.575 - type: mrr_at_3 value: 86.507 - type: mrr_at_5 value: 87.205 - type: ndcg_at_1 value: 81.25 - type: ndcg_at_10 value: 88.203 - type: ndcg_at_100 value: 89.457 - type: ndcg_at_1000 value: 89.563 - type: ndcg_at_3 value: 85.465 - type: ndcg_at_5 value: 87.007 - type: precision_at_1 value: 81.25 - type: precision_at_10 value: 13.373 - type: precision_at_100 value: 1.5270000000000001 - type: precision_at_1000 
value: 0.157 - type: precision_at_3 value: 37.417 - type: precision_at_5 value: 24.556 - type: recall_at_1 value: 70.548 - type: recall_at_10 value: 95.208 - type: recall_at_100 value: 99.514 - type: recall_at_1000 value: 99.988 - type: recall_at_3 value: 87.214 - type: recall_at_5 value: 91.696 - task: type: Clustering dataset: name: MTEB RedditClustering type: mteb/reddit-clustering config: default split: test revision: 24640382cdbf8abc73003fb0fa6d111a705499eb metrics: - type: v_measure value: 53.04822095496839 - task: type: Clustering dataset: name: MTEB RedditClusteringP2P type: mteb/reddit-clustering-p2p config: default split: test revision: 282350215ef01743dc01b456c7f5241fa8937f16 metrics: - type: v_measure value: 60.30778476474675 - task: type: Retrieval dataset: name: MTEB SCIDOCS type: scidocs config: default split: test revision: None metrics: - type: map_at_1 value: 4.692 - type: map_at_10 value: 11.766 - type: map_at_100 value: 13.904 - type: map_at_1000 value: 14.216999999999999 - type: map_at_3 value: 8.245 - type: map_at_5 value: 9.92 - type: mrr_at_1 value: 23.0 - type: mrr_at_10 value: 33.78 - type: mrr_at_100 value: 34.922 - type: mrr_at_1000 value: 34.973 - type: mrr_at_3 value: 30.2 - type: mrr_at_5 value: 32.565 - type: ndcg_at_1 value: 23.0 - type: ndcg_at_10 value: 19.863 - type: ndcg_at_100 value: 28.141 - type: ndcg_at_1000 value: 33.549 - type: ndcg_at_3 value: 18.434 - type: ndcg_at_5 value: 16.384 - type: precision_at_1 value: 23.0 - type: precision_at_10 value: 10.39 - type: precision_at_100 value: 2.235 - type: precision_at_1000 value: 0.35300000000000004 - type: precision_at_3 value: 17.133000000000003 - type: precision_at_5 value: 14.44 - type: recall_at_1 value: 4.692 - type: recall_at_10 value: 21.025 - type: recall_at_100 value: 45.324999999999996 - type: recall_at_1000 value: 71.675 - type: recall_at_3 value: 10.440000000000001 - type: recall_at_5 value: 14.64 - task: type: STS dataset: name: MTEB SICK-R type: mteb/sickr-sts config: default split: test revision: a6ea5a8cab320b040a23452cc28066d9beae2cee metrics: - type: cos_sim_pearson value: 84.96178184892842 - type: cos_sim_spearman value: 79.6487740813199 - type: euclidean_pearson value: 82.06661161625023 - type: euclidean_spearman value: 79.64876769031183 - type: manhattan_pearson value: 82.07061164575131 - type: manhattan_spearman value: 79.65197039464537 - task: type: STS dataset: name: MTEB STS12 type: mteb/sts12-sts config: default split: test revision: a0d554a64d88156834ff5ae9920b964011b16384 metrics: - type: cos_sim_pearson value: 84.15305604100027 - type: cos_sim_spearman value: 74.27447427941591 - type: euclidean_pearson value: 80.52737337565307 - type: euclidean_spearman value: 74.27416077132192 - type: manhattan_pearson value: 80.53728571140387 - type: manhattan_spearman value: 74.28853605753457 - task: type: STS dataset: name: MTEB STS13 type: mteb/sts13-sts config: default split: test revision: 7e90230a92c190f1bf69ae9002b8cea547a64cca metrics: - type: cos_sim_pearson value: 83.44386080639279 - type: cos_sim_spearman value: 84.17947648159536 - type: euclidean_pearson value: 83.34145388129387 - type: euclidean_spearman value: 84.17947648159536 - type: manhattan_pearson value: 83.30699061927966 - type: manhattan_spearman value: 84.18125737380451 - task: type: STS dataset: name: MTEB STS14 type: mteb/sts14-sts config: default split: test revision: 6031580fec1f6af667f0bd2da0a551cf4f0b2375 metrics: - type: cos_sim_pearson value: 81.57392220985612 - type: cos_sim_spearman value: 78.80745014464101 
- type: euclidean_pearson value: 80.01660371487199 - type: euclidean_spearman value: 78.80741240102256 - type: manhattan_pearson value: 79.96810779507953 - type: manhattan_spearman value: 78.75600400119448 - task: type: STS dataset: name: MTEB STS15 type: mteb/sts15-sts config: default split: test revision: ae752c7c21bf194d8b67fd573edf7ae58183cbe3 metrics: - type: cos_sim_pearson value: 86.85421063026625 - type: cos_sim_spearman value: 87.55320285299192 - type: euclidean_pearson value: 86.69750143323517 - type: euclidean_spearman value: 87.55320284326378 - type: manhattan_pearson value: 86.63379169960379 - type: manhattan_spearman value: 87.4815029877984 - task: type: STS dataset: name: MTEB STS16 type: mteb/sts16-sts config: default split: test revision: 4d8694f8f0e0100860b497b999b3dbed754a0513 metrics: - type: cos_sim_pearson value: 84.31314130411842 - type: cos_sim_spearman value: 85.3489588181433 - type: euclidean_pearson value: 84.13240933463535 - type: euclidean_spearman value: 85.34902871403281 - type: manhattan_pearson value: 84.01183086503559 - type: manhattan_spearman value: 85.19316703166102 - task: type: STS dataset: name: MTEB STS17 (en-en) type: mteb/sts17-crosslingual-sts config: en-en split: test revision: af5e6fb845001ecf41f4c1e033ce921939a2a68d metrics: - type: cos_sim_pearson value: 89.09979781689536 - type: cos_sim_spearman value: 88.87813323759015 - type: euclidean_pearson value: 88.65413031123792 - type: euclidean_spearman value: 88.87813323759015 - type: manhattan_pearson value: 88.61818758256024 - type: manhattan_spearman value: 88.81044100494604 - task: type: STS dataset: name: MTEB STS22 (en) type: mteb/sts22-crosslingual-sts config: en split: test revision: 6d1ba47164174a496b7fa5d3569dae26a6813b80 metrics: - type: cos_sim_pearson value: 62.30693258111531 - type: cos_sim_spearman value: 62.195516523251946 - type: euclidean_pearson value: 62.951283701049476 - type: euclidean_spearman value: 62.195516523251946 - type: manhattan_pearson value: 63.068322281439535 - type: manhattan_spearman value: 62.10621171028406 - task: type: STS dataset: name: MTEB STSBenchmark type: mteb/stsbenchmark-sts config: default split: test revision: b0fddb56ed78048fa8b90373c8a3cfc37b684831 metrics: - type: cos_sim_pearson value: 84.27092833763909 - type: cos_sim_spearman value: 84.84429717949759 - type: euclidean_pearson value: 84.8516966060792 - type: euclidean_spearman value: 84.84429717949759 - type: manhattan_pearson value: 84.82203139242881 - type: manhattan_spearman value: 84.8358503952945 - task: type: Reranking dataset: name: MTEB SciDocsRR type: mteb/scidocs-reranking config: default split: test revision: d3c5e1fc0b855ab6097bf1cda04dd73947d7caab metrics: - type: map value: 83.10290863981409 - type: mrr value: 95.31168450286097 - task: type: Retrieval dataset: name: MTEB SciFact type: scifact config: default split: test revision: None metrics: - type: map_at_1 value: 52.161 - type: map_at_10 value: 62.138000000000005 - type: map_at_100 value: 62.769 - type: map_at_1000 value: 62.812 - type: map_at_3 value: 59.111000000000004 - type: map_at_5 value: 60.995999999999995 - type: mrr_at_1 value: 55.333 - type: mrr_at_10 value: 63.504000000000005 - type: mrr_at_100 value: 64.036 - type: mrr_at_1000 value: 64.08 - type: mrr_at_3 value: 61.278 - type: mrr_at_5 value: 62.778 - type: ndcg_at_1 value: 55.333 - type: ndcg_at_10 value: 66.678 - type: ndcg_at_100 value: 69.415 - type: ndcg_at_1000 value: 70.453 - type: ndcg_at_3 value: 61.755 - type: ndcg_at_5 value: 64.546 - type: 
precision_at_1 value: 55.333 - type: precision_at_10 value: 9.033 - type: precision_at_100 value: 1.043 - type: precision_at_1000 value: 0.11199999999999999 - type: precision_at_3 value: 24.221999999999998 - type: precision_at_5 value: 16.333000000000002 - type: recall_at_1 value: 52.161 - type: recall_at_10 value: 79.156 - type: recall_at_100 value: 91.333 - type: recall_at_1000 value: 99.333 - type: recall_at_3 value: 66.43299999999999 - type: recall_at_5 value: 73.272 - task: type: PairClassification dataset: name: MTEB SprintDuplicateQuestions type: mteb/sprintduplicatequestions-pairclassification config: default split: test revision: d66bd1f72af766a5cc4b0ca5e00c162f89e8cc46 metrics: - type: cos_sim_accuracy value: 99.81287128712871 - type: cos_sim_ap value: 95.30034785910676 - type: cos_sim_f1 value: 90.28629856850716 - type: cos_sim_precision value: 92.36401673640168 - type: cos_sim_recall value: 88.3 - type: dot_accuracy value: 99.81287128712871 - type: dot_ap value: 95.30034785910676 - type: dot_f1 value: 90.28629856850716 - type: dot_precision value: 92.36401673640168 - type: dot_recall value: 88.3 - type: euclidean_accuracy value: 99.81287128712871 - type: euclidean_ap value: 95.30034785910676 - type: euclidean_f1 value: 90.28629856850716 - type: euclidean_precision value: 92.36401673640168 - type: euclidean_recall value: 88.3 - type: manhattan_accuracy value: 99.80990099009901 - type: manhattan_ap value: 95.26880751950654 - type: manhattan_f1 value: 90.22177419354838 - type: manhattan_precision value: 90.95528455284553 - type: manhattan_recall value: 89.5 - type: max_accuracy value: 99.81287128712871 - type: max_ap value: 95.30034785910676 - type: max_f1 value: 90.28629856850716 - task: type: Clustering dataset: name: MTEB StackExchangeClustering type: mteb/stackexchange-clustering config: default split: test revision: 6cbc1f7b2bc0622f2e39d2c77fa502909748c259 metrics: - type: v_measure value: 58.518662504351184 - task: type: Clustering dataset: name: MTEB StackExchangeClusteringP2P type: mteb/stackexchange-clustering-p2p config: default split: test revision: 815ca46b2622cec33ccafc3735d572c266efdb44 metrics: - type: v_measure value: 34.96168178378587 - task: type: Reranking dataset: name: MTEB StackOverflowDupQuestions type: mteb/stackoverflowdupquestions-reranking config: default split: test revision: e185fbe320c72810689fc5848eb6114e1ef5ec69 metrics: - type: map value: 52.04862593471896 - type: mrr value: 52.97238402936932 - task: type: Summarization dataset: name: MTEB SummEval type: mteb/summeval config: default split: test revision: cda12ad7615edc362dbf25a00fdd61d3b1eaf93c metrics: - type: cos_sim_pearson value: 30.092545236479946 - type: cos_sim_spearman value: 31.599851000175498 - type: dot_pearson value: 30.092542723901676 - type: dot_spearman value: 31.599851000175498 - task: type: Retrieval dataset: name: MTEB TRECCOVID type: trec-covid config: default split: test revision: None metrics: - type: map_at_1 value: 0.189 - type: map_at_10 value: 1.662 - type: map_at_100 value: 9.384 - type: map_at_1000 value: 22.669 - type: map_at_3 value: 0.5559999999999999 - type: map_at_5 value: 0.9039999999999999 - type: mrr_at_1 value: 68.0 - type: mrr_at_10 value: 81.01899999999999 - type: mrr_at_100 value: 81.01899999999999 - type: mrr_at_1000 value: 81.01899999999999 - type: mrr_at_3 value: 79.333 - type: mrr_at_5 value: 80.733 - type: ndcg_at_1 value: 63.0 - type: ndcg_at_10 value: 65.913 - type: ndcg_at_100 value: 51.895 - type: ndcg_at_1000 value: 46.967 - type: ndcg_at_3 value: 
65.49199999999999 - type: ndcg_at_5 value: 66.69699999999999 - type: precision_at_1 value: 68.0 - type: precision_at_10 value: 71.6 - type: precision_at_100 value: 53.66 - type: precision_at_1000 value: 21.124000000000002 - type: precision_at_3 value: 72.667 - type: precision_at_5 value: 74.0 - type: recall_at_1 value: 0.189 - type: recall_at_10 value: 1.913 - type: recall_at_100 value: 12.601999999999999 - type: recall_at_1000 value: 44.296 - type: recall_at_3 value: 0.605 - type: recall_at_5 value: 1.018 - task: type: Retrieval dataset: name: MTEB Touche2020 type: webis-touche2020 config: default split: test revision: None metrics: - type: map_at_1 value: 2.701 - type: map_at_10 value: 10.445 - type: map_at_100 value: 17.324 - type: map_at_1000 value: 19.161 - type: map_at_3 value: 5.497 - type: map_at_5 value: 7.278 - type: mrr_at_1 value: 30.612000000000002 - type: mrr_at_10 value: 45.534 - type: mrr_at_100 value: 45.792 - type: mrr_at_1000 value: 45.806999999999995 - type: mrr_at_3 value: 37.755 - type: mrr_at_5 value: 43.469 - type: ndcg_at_1 value: 26.531 - type: ndcg_at_10 value: 26.235000000000003 - type: ndcg_at_100 value: 39.17 - type: ndcg_at_1000 value: 51.038 - type: ndcg_at_3 value: 23.625 - type: ndcg_at_5 value: 24.338 - type: precision_at_1 value: 30.612000000000002 - type: precision_at_10 value: 24.285999999999998 - type: precision_at_100 value: 8.224 - type: precision_at_1000 value: 1.6179999999999999 - type: precision_at_3 value: 24.490000000000002 - type: precision_at_5 value: 24.898 - type: recall_at_1 value: 2.701 - type: recall_at_10 value: 17.997 - type: recall_at_100 value: 51.766999999999996 - type: recall_at_1000 value: 87.863 - type: recall_at_3 value: 6.295000000000001 - type: recall_at_5 value: 9.993 - task: type: Classification dataset: name: MTEB ToxicConversationsClassification type: mteb/toxic_conversations_50k config: default split: test revision: d7c0de2777da35d6aae2200a62c6e0e5af397c4c metrics: - type: accuracy value: 73.3474 - type: ap value: 15.393431414459924 - type: f1 value: 56.466681887882416 - task: type: Classification dataset: name: MTEB TweetSentimentExtractionClassification type: mteb/tweet_sentiment_extraction config: default split: test revision: d604517c81ca91fe16a244d1248fc021f9ecee7a metrics: - type: accuracy value: 62.062818336163 - type: f1 value: 62.11230840463252 - task: type: Clustering dataset: name: MTEB TwentyNewsgroupsClustering type: mteb/twentynewsgroups-clustering config: default split: test revision: 6125ec4e24fa026cec8a478383ee943acfbd5449 metrics: - type: v_measure value: 42.464892820845115 - task: type: PairClassification dataset: name: MTEB TwitterSemEval2015 type: mteb/twittersemeval2015-pairclassification config: default split: test revision: 70970daeab8776df92f5ea462b6173c0b46fd2d1 metrics: - type: cos_sim_accuracy value: 86.15962329379508 - type: cos_sim_ap value: 74.73674057919256 - type: cos_sim_f1 value: 68.81245642574947 - type: cos_sim_precision value: 61.48255813953488 - type: cos_sim_recall value: 78.12664907651715 - type: dot_accuracy value: 86.15962329379508 - type: dot_ap value: 74.7367634988281 - type: dot_f1 value: 68.81245642574947 - type: dot_precision value: 61.48255813953488 - type: dot_recall value: 78.12664907651715 - type: euclidean_accuracy value: 86.15962329379508 - type: euclidean_ap value: 74.7367761466634 - type: euclidean_f1 value: 68.81245642574947 - type: euclidean_precision value: 61.48255813953488 - type: euclidean_recall value: 78.12664907651715 - type: manhattan_accuracy value: 
86.21326816474935 - type: manhattan_ap value: 74.64416473733951 - type: manhattan_f1 value: 68.80924855491331 - type: manhattan_precision value: 61.23456790123457 - type: manhattan_recall value: 78.52242744063325 - type: max_accuracy value: 86.21326816474935 - type: max_ap value: 74.7367761466634 - type: max_f1 value: 68.81245642574947 - task: type: PairClassification dataset: name: MTEB TwitterURLCorpus type: mteb/twitterurlcorpus-pairclassification config: default split: test revision: 8b6510b0b1fa4e4c4f879467980e9be563ec1cdf metrics: - type: cos_sim_accuracy value: 88.97620988085536 - type: cos_sim_ap value: 86.08680845745758 - type: cos_sim_f1 value: 78.02793637114438 - type: cos_sim_precision value: 73.11082699683736 - type: cos_sim_recall value: 83.65414228518632 - type: dot_accuracy value: 88.97620988085536 - type: dot_ap value: 86.08681149437946 - type: dot_f1 value: 78.02793637114438 - type: dot_precision value: 73.11082699683736 - type: dot_recall value: 83.65414228518632 - type: euclidean_accuracy value: 88.97620988085536 - type: euclidean_ap value: 86.08681215460771 - type: euclidean_f1 value: 78.02793637114438 - type: euclidean_precision value: 73.11082699683736 - type: euclidean_recall value: 83.65414228518632 - type: manhattan_accuracy value: 88.88888888888889 - type: manhattan_ap value: 86.02916327562438 - type: manhattan_f1 value: 78.02063045516843 - type: manhattan_precision value: 73.38851947346994 - type: manhattan_recall value: 83.2768709578072 - type: max_accuracy value: 88.97620988085536 - type: max_ap value: 86.08681215460771 - type: max_f1 value: 78.02793637114438 --- <!-- TODO: add evaluation results here --> <br><br> <p align="center"> <img src="https://huggingface.co/datasets/jinaai/documentation-images/resolve/main/logo.webp" alt="Jina AI: Your Search Foundation, Supercharged!" width="150px"> </p> <p align="center"> <b>The text embedding set trained by <a href="https://jina.ai/"><b>Jina AI</b></a>.</b> </p> ## Quick Start The easiest way to starting using `jina-embeddings-v2-base-en` is to use Jina AI's [Embedding API](https://jina.ai/embeddings/). ## Intended Usage & Model Info `jina-embeddings-v2-base-en` is an English, monolingual **embedding model** supporting **8192 sequence length**. It is based on a BERT architecture (JinaBERT) that supports the symmetric bidirectional variant of [ALiBi](https://arxiv.org/abs/2108.12409) to allow longer sequence length. The backbone `jina-bert-v2-base-en` is pretrained on the C4 dataset. The model is further trained on Jina AI's collection of more than 400 millions of sentence pairs and hard negatives. These pairs were obtained from various domains and were carefully selected through a thorough cleaning process. The embedding model was trained using 512 sequence length, but extrapolates to 8k sequence length (or even longer) thanks to ALiBi. This makes our model useful for a range of use cases, especially when processing long documents is needed, including long document retrieval, semantic textual similarity, text reranking, recommendation, RAG and LLM-based generative search, etc. With a standard size of 137 million parameters, the model enables fast inference while delivering better performance than our small model. It is recommended to use a single GPU for inference. Additionally, we provide the following embedding models: - [`jina-embeddings-v2-small-en`](https://huggingface.co/jinaai/jina-embeddings-v2-small-en): 33 million parameters. 
- [`jina-embeddings-v2-base-en`](https://huggingface.co/jinaai/jina-embeddings-v2-base-en): 137 million parameters **(you are here)**. - [`jina-embeddings-v2-base-zh`](https://huggingface.co/jinaai/jina-embeddings-v2-base-zh): Chinese-English Bilingual embeddings. - [`jina-embeddings-v2-base-de`](https://huggingface.co/jinaai/jina-embeddings-v2-base-de): German-English Bilingual embeddings. - [`jina-embeddings-v2-base-es`](https://huggingface.co/jinaai/jina-embeddings-v2-base-es): Spanish-English Bilingual embeddings. ## Data & Parameters Jina Embeddings V2 [technical report](https://arxiv.org/abs/2310.19923) ## Usage **<details><summary>Please apply mean pooling when integrating the model.</summary>** <p> ### Why mean pooling? `mean pooling` takes all token embeddings from the model output and averages them at the sentence/paragraph level. It has proven to be the most effective way to produce high-quality sentence embeddings. We offer an `encode` function to deal with this. However, if you would like to do it without using the default `encode` function: ```python import torch import torch.nn.functional as F from transformers import AutoTokenizer, AutoModel def mean_pooling(model_output, attention_mask): token_embeddings = model_output[0] input_mask_expanded = attention_mask.unsqueeze(-1).expand(token_embeddings.size()).float() return torch.sum(token_embeddings * input_mask_expanded, 1) / torch.clamp(input_mask_expanded.sum(1), min=1e-9) sentences = ['How is the weather today?', 'What is the current weather like today?'] tokenizer = AutoTokenizer.from_pretrained('jinaai/jina-embeddings-v2-small-en') model = AutoModel.from_pretrained('jinaai/jina-embeddings-v2-small-en', trust_remote_code=True) encoded_input = tokenizer(sentences, padding=True, truncation=True, return_tensors='pt') with torch.no_grad(): model_output = model(**encoded_input) embeddings = mean_pooling(model_output, encoded_input['attention_mask']) embeddings = F.normalize(embeddings, p=2, dim=1) ``` </p> </details> You can use Jina Embedding models directly from the transformers package. ```python !pip install transformers from transformers import AutoModel from numpy.linalg import norm cos_sim = lambda a,b: (a @ b.T) / (norm(a)*norm(b)) model = AutoModel.from_pretrained('jinaai/jina-embeddings-v2-base-en', trust_remote_code=True) # trust_remote_code is needed to use the encode method embeddings = model.encode(['How is the weather today?', 'What is the current weather like today?']) print(cos_sim(embeddings[0], embeddings[1])) ``` If you only want to handle shorter sequences, such as 2k, pass the `max_length` parameter to the `encode` function: ```python embeddings = model.encode( ['Very long ... document'], max_length=2048 ) ``` With its latest release (v2.3.0), sentence-transformers also supports Jina embeddings (please make sure that you are logged into Hugging Face as well): ```python !pip install -U sentence-transformers from sentence_transformers import SentenceTransformer from sentence_transformers.util import cos_sim model = SentenceTransformer( "jinaai/jina-embeddings-v2-base-en", # switch to en/zh for English or Chinese trust_remote_code=True ) # control your input sequence length up to 8192 model.max_seq_length = 1024 embeddings = model.encode([ 'How is the weather today?', 'What is the current weather like today?' ]) print(cos_sim(embeddings[0], embeddings[1])) ``` ## Alternatives to Using Transformers (or SentenceTransformers) Package 1.
_Managed SaaS_: Get started with a free key on Jina AI's [Embedding API](https://jina.ai/embeddings/). 2. _Private and high-performance deployment_: Get started by picking from our suite of models and deploying them on [AWS Sagemaker](https://aws.amazon.com/marketplace/seller-profile?id=seller-stch2ludm6vgy). ## Use Jina Embeddings for RAG According to the latest blog post from [LLamaIndex](https://blog.llamaindex.ai/boosting-rag-picking-the-best-embedding-reranker-models-42d079022e83), > In summary, to achieve the peak performance in both hit rate and MRR, the combination of OpenAI or JinaAI-Base embeddings with the CohereRerank/bge-reranker-large reranker stands out. <img src="https://miro.medium.com/v2/resize:fit:4800/format:webp/1*ZP2RVejCZovF3FDCg-Bx3A.png" width="780px"> ## Plans 1. Bilingual embedding models supporting more European & Asian languages, including Spanish, French, Italian and Japanese. 2. Multimodal embedding models enabling multimodal RAG applications. 3. High-performance rerankers. ## Troubleshooting **Loading of Model Code failed** If you forgot to pass the `trust_remote_code=True` flag when calling `AutoModel.from_pretrained` or initializing the model via the `SentenceTransformer` class, you will receive an error that the model weights could not be initialized. This is caused by transformers falling back to creating a default BERT model, instead of a jina-embedding model: ```bash Some weights of the model checkpoint at jinaai/jina-embeddings-v2-base-en were not used when initializing BertModel: ['encoder.layer.2.mlp.layernorm.weight', 'encoder.layer.3.mlp.layernorm.weight', 'encoder.layer.10.mlp.wo.bias', 'encoder.layer.5.mlp.wo.bias', 'encoder.layer.2.mlp.layernorm.bias', 'encoder.layer.1.mlp.gated_layers.weight', 'encoder.layer.5.mlp.gated_layers.weight', 'encoder.layer.8.mlp.layernorm.bias', ... ``` **User is not logged into Huggingface** The model is only available under [gated access](https://huggingface.co/docs/hub/models-gated). This means you need to be logged into Hugging Face to load it. If you receive the following error, you need to provide an access token, either by using the huggingface-cli or providing the token via an environment variable as described above: ```bash OSError: jinaai/jina-embeddings-v2-base-en is not a local folder and is not a valid model identifier listed on 'https://huggingface.co/models' If this is a private repository, make sure to pass a token having permission to this repo with `use_auth_token` or log in with `huggingface-cli login` and pass `use_auth_token=True`. ``` ## Contact Join our [Discord community](https://discord.jina.ai) and chat with other community members about ideas. ## Citation If you find Jina Embeddings useful in your research, please cite the following paper: ``` @misc{günther2023jina, title={Jina Embeddings 2: 8192-Token General-Purpose Text Embeddings for Long Documents}, author={Michael Günther and Jackmin Ong and Isabelle Mohr and Alaeddine Abdessalem and Tanguy Abel and Mohammad Kalim Akram and Susana Guzman and Georgios Mastrapas and Saba Sturua and Bo Wang and Maximilian Werk and Nan Wang and Han Xiao}, year={2023}, eprint={2310.19923}, archivePrefix={arXiv}, primaryClass={cs.CL} } ```
[ "BIOSSES", "SCIFACT" ]
EleutherAI/gpt-neo-1.3B
EleutherAI
text-generation
[ "transformers", "pytorch", "jax", "rust", "safetensors", "gpt_neo", "text-generation", "text generation", "causal-lm", "en", "dataset:EleutherAI/pile", "arxiv:2101.00027", "license:mit", "autotrain_compatible", "endpoints_compatible", "region:us" ]
"2022-03-02T23:29:04Z"
2024-01-31T20:30:21+00:00
221,024
291
--- datasets: - EleutherAI/pile language: - en license: mit tags: - text generation - pytorch - causal-lm --- # GPT-Neo 1.3B ## Model Description GPT-Neo 1.3B is a transformer model designed using EleutherAI's replication of the GPT-3 architecture. GPT-Neo refers to the class of models, while 1.3B represents the number of parameters of this particular pre-trained model. ## Training data GPT-Neo 1.3B was trained on the Pile, a large-scale curated dataset created by EleutherAI for the purpose of training this model. ## Training procedure This model was trained on the Pile for 380 billion tokens over 362,000 steps. It was trained as an autoregressive language model, using cross-entropy loss. ## Intended Use and Limitations Through this training, the model learns an inner representation of the English language that can then be used to extract features useful for downstream tasks. However, the model is best at what it was pretrained for, which is generating text from a prompt. ### How to use You can use this model directly with a pipeline for text generation. This example generates a different sequence each time it's run: ```py >>> from transformers import pipeline >>> generator = pipeline('text-generation', model='EleutherAI/gpt-neo-1.3B') >>> generator("EleutherAI has", do_sample=True, min_length=50) [{'generated_text': 'EleutherAI has made a commitment to create new software packages for each of its major clients and has'}] ``` ### Limitations and Biases GPT-Neo was trained as an autoregressive language model. This means that its core functionality is taking a string of text and predicting the next token. While language models are widely used for tasks other than this, there are a lot of unknowns with this work. GPT-Neo was trained on the Pile, a dataset known to contain profanity, lewd, and otherwise abrasive language. Depending on your use case, GPT-Neo may produce socially unacceptable text. See Sections 5 and 6 of the Pile paper for a more detailed analysis of the biases in the Pile. As with all language models, it is hard to predict in advance how GPT-Neo will respond to particular prompts, and offensive content may occur without warning. We recommend having a human curate or filter the outputs before releasing them, both to censor undesirable content and to improve the quality of the results.
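To make the curation advice above concrete, here is a minimal, hypothetical sketch of one way to hold back generations that trip a simple keyword blocklist for human review. The blocklist contents and the review step are placeholders rather than part of EleutherAI's tooling, and real content moderation needs far more than keyword matching.

```py
from transformers import pipeline

generator = pipeline('text-generation', model='EleutherAI/gpt-neo-1.3B')

# Placeholder blocklist: substitute terms relevant to your deployment.
blocklist = {"offensive-term-1", "offensive-term-2"}

outputs = generator("EleutherAI has", do_sample=True, max_length=50, num_return_sequences=3)
for out in outputs:
    text = out['generated_text']
    if any(term in text.lower() for term in blocklist):
        # Route flagged generations to a human reviewer instead of releasing them automatically.
        print("FLAGGED for review:", text)
    else:
        print("OK:", text)
```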
## Eval results ### Linguistic Reasoning | Model and Size | Pile BPB | Pile PPL | Wikitext PPL | Lambada PPL | Lambada Acc | Winogrande | Hellaswag | | ---------------- | ---------- | ---------- | ------------- | ----------- | ----------- | ---------- | ----------- | | **GPT-Neo 1.3B** | **0.7527** | **6.159** | **13.10** | **7.498** | **57.23%** | **55.01%** | **38.66%** | | GPT-2 1.5B | 1.0468 | ----- | 17.48 | 10.634 | 51.21% | 59.40% | 40.03% | | GPT-Neo 2.7B | 0.7165 | 5.646 | 11.39 | 5.626 | 62.22% | 56.50% | 42.73% | | GPT-3 Ada | 0.9631 | ----- | ----- | 9.954 | 51.60% | 52.90% | 35.93% | ### Physical and Scientific Reasoning | Model and Size | MathQA | PubMedQA | Piqa | | ---------------- | ---------- | ---------- | ----------- | | **GPT-Neo 1.3B** | **24.05%** | **54.40%** | **71.11%** | | GPT-2 1.5B | 23.64% | 58.33% | 70.78% | | GPT-Neo 2.7B | 24.72% | 57.54% | 72.14% | | GPT-3 Ada | 24.29% | 52.80% | 68.88% | ### Down-Stream Applications TBD ### BibTeX entry and citation info To cite this model, please use ```bibtex @software{gpt-neo, author = {Black, Sid and Leo, Gao and Wang, Phil and Leahy, Connor and Biderman, Stella}, title = {{GPT-Neo: Large Scale Autoregressive Language Modeling with Mesh-Tensorflow}}, month = mar, year = 2021, note = {{If you use this software, please cite it using these metadata.}}, publisher = {Zenodo}, version = {1.0}, doi = {10.5281/zenodo.5297715}, url = {https://doi.org/10.5281/zenodo.5297715} } @article{gao2020pile, title={The Pile: An 800GB Dataset of Diverse Text for Language Modeling}, author={Gao, Leo and Biderman, Stella and Black, Sid and Golding, Laurence and Hoppe, Travis and Foster, Charles and Phang, Jason and He, Horace and Thite, Anish and Nabeshima, Noa and others}, journal={arXiv preprint arXiv:2101.00027}, year={2020} } ``` # [Open LLM Leaderboard Evaluation Results](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard) Detailed results can be found [here](https://huggingface.co/datasets/open-llm-leaderboard/details_EleutherAI__gpt-neo-1.3B) | Metric | Value | |-----------------------|---------------------------| | Avg. | 29.44 | | ARC (25-shot) | 31.23 | | HellaSwag (10-shot) | 48.47 | | MMLU (5-shot) | 24.82 | | TruthfulQA (0-shot) | 39.63 | | Winogrande (5-shot) | 56.91 | | GSM8K (5-shot) | 0.45 | | DROP (3-shot) | 4.6 |
[ "PUBMEDQA" ]
BAAI/bge-large-zh-v1.5
BAAI
feature-extraction
[ "sentence-transformers", "pytorch", "bert", "feature-extraction", "sentence-similarity", "transformers", "zh", "arxiv:2401.03462", "arxiv:2312.15503", "arxiv:2311.13534", "arxiv:2310.07554", "arxiv:2309.07597", "license:mit", "autotrain_compatible", "text-embeddings-inference", "endpoints_compatible", "region:us" ]
"2023-09-12T05:22:11Z"
2024-04-02T14:00:04+00:00
210,152
490
--- language: - zh license: mit tags: - sentence-transformers - feature-extraction - sentence-similarity - transformers --- <h1 align="center">FlagEmbedding</h1> <h4 align="center"> <p> <a href=#model-list>Model List</a> | <a href=#frequently-asked-questions>FAQ</a> | <a href=#usage>Usage</a> | <a href="#evaluation">Evaluation</a> | <a href="#train">Train</a> | <a href="#contact">Contact</a> | <a href="#citation">Citation</a> | <a href="#license">License</a> <p> </h4> For more details please refer to our Github: [FlagEmbedding](https://github.com/FlagOpen/FlagEmbedding). If you are looking for a model that supports more languages, longer texts, and other retrieval methods, you can try using [bge-m3](https://huggingface.co/BAAI/bge-m3). [English](README.md) | [中文](https://github.com/FlagOpen/FlagEmbedding/blob/master/README_zh.md) FlagEmbedding focuses on retrieval-augmented LLMs, consisting of the following projects currently: - **Long-Context LLM**: [Activation Beacon](https://github.com/FlagOpen/FlagEmbedding/tree/master/Long_LLM/activation_beacon) - **Fine-tuning of LM** : [LM-Cocktail](https://github.com/FlagOpen/FlagEmbedding/tree/master/LM_Cocktail) - **Dense Retrieval**: [BGE-M3](https://github.com/FlagOpen/FlagEmbedding/tree/master/FlagEmbedding/BGE_M3), [LLM Embedder](https://github.com/FlagOpen/FlagEmbedding/tree/master/FlagEmbedding/llm_embedder), [BGE Embedding](https://github.com/FlagOpen/FlagEmbedding/tree/master/FlagEmbedding/baai_general_embedding) - **Reranker Model**: [BGE Reranker](https://github.com/FlagOpen/FlagEmbedding/tree/master/FlagEmbedding/reranker) - **Benchmark**: [C-MTEB](https://github.com/FlagOpen/FlagEmbedding/tree/master/C_MTEB) ## News - 1/30/2024: Release **BGE-M3**, a new member to BGE model series! M3 stands for **M**ulti-linguality (100+ languages), **M**ulti-granularities (input length up to 8192), **M**ulti-Functionality (unification of dense, lexical, multi-vec/colbert retrieval). It is the first embedding model which supports all three retrieval methods, achieving new SOTA on multi-lingual (MIRACL) and cross-lingual (MKQA) benchmarks. [Technical Report](https://github.com/FlagOpen/FlagEmbedding/blob/master/FlagEmbedding/BGE_M3/BGE_M3.pdf) and [Code](https://github.com/FlagOpen/FlagEmbedding/tree/master/FlagEmbedding/BGE_M3). :fire: - 1/9/2024: Release [Activation-Beacon](https://github.com/FlagOpen/FlagEmbedding/tree/master/Long_LLM/activation_beacon), an effective, efficient, compatible, and low-cost (training) method to extend the context length of LLM. [Technical Report](https://arxiv.org/abs/2401.03462) :fire: - 12/24/2023: Release **LLaRA**, a LLaMA-7B based dense retriever, leading to state-of-the-art performances on MS MARCO and BEIR. Model and code will be open-sourced. Please stay tuned. [Technical Report](https://arxiv.org/abs/2312.15503) :fire: - 11/23/2023: Release [LM-Cocktail](https://github.com/FlagOpen/FlagEmbedding/tree/master/LM_Cocktail), a method to maintain general capabilities during fine-tuning by merging multiple language models. [Technical Report](https://arxiv.org/abs/2311.13534) :fire: - 10/12/2023: Release [LLM-Embedder](https://github.com/FlagOpen/FlagEmbedding/tree/master/FlagEmbedding/llm_embedder), a unified embedding model to support diverse retrieval augmentation needs for LLMs. 
[Technical Report](https://arxiv.org/pdf/2310.07554.pdf) - 09/15/2023: The [technical report](https://arxiv.org/pdf/2309.07597.pdf) and [massive training data](https://data.baai.ac.cn/details/BAAI-MTP) of BGE has been released - 09/12/2023: New models: - **New reranker model**: release cross-encoder models `BAAI/bge-reranker-base` and `BAAI/bge-reranker-large`, which are more powerful than embedding model. We recommend to use/fine-tune them to re-rank top-k documents returned by embedding models. - **update embedding model**: release `bge-*-v1.5` embedding model to alleviate the issue of the similarity distribution, and enhance its retrieval ability without instruction. <details> <summary>More</summary> <!-- ### More --> - 09/07/2023: Update [fine-tune code](https://github.com/FlagOpen/FlagEmbedding/blob/master/FlagEmbedding/baai_general_embedding/README.md): Add script to mine hard negatives and support adding instruction during fine-tuning. - 08/09/2023: BGE Models are integrated into **Langchain**, you can use it like [this](#using-langchain); C-MTEB **leaderboard** is [available](https://huggingface.co/spaces/mteb/leaderboard). - 08/05/2023: Release base-scale and small-scale models, **best performance among the models of the same size 🤗** - 08/02/2023: Release `bge-large-*`(short for BAAI General Embedding) Models, **rank 1st on MTEB and C-MTEB benchmark!** :tada: :tada: - 08/01/2023: We release the [Chinese Massive Text Embedding Benchmark](https://github.com/FlagOpen/FlagEmbedding/blob/master/C_MTEB) (**C-MTEB**), consisting of 31 test dataset. </details> ## Model List `bge` is short for `BAAI general embedding`. | Model | Language | | Description | query instruction for retrieval [1] | |:-------------------------------|:--------:| :--------:| :--------:|:--------:| | [BAAI/bge-m3](https://huggingface.co/BAAI/bge-m3) | Multilingual | [Inference](https://github.com/FlagOpen/FlagEmbedding/tree/master/FlagEmbedding/BGE_M3#usage) [Fine-tune](https://github.com/FlagOpen/FlagEmbedding/tree/master/FlagEmbedding/BGE_M3) | Multi-Functionality(dense retrieval, sparse retrieval, multi-vector(colbert)), Multi-Linguality, and Multi-Granularity(8192 tokens) | | | [BAAI/llm-embedder](https://huggingface.co/BAAI/llm-embedder) | English | [Inference](./FlagEmbedding/llm_embedder/README.md) [Fine-tune](./FlagEmbedding/llm_embedder/README.md) | a unified embedding model to support diverse retrieval augmentation needs for LLMs | See [README](./FlagEmbedding/llm_embedder/README.md) | | [BAAI/bge-reranker-large](https://huggingface.co/BAAI/bge-reranker-large) | Chinese and English | [Inference](#usage-for-reranker) [Fine-tune](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/reranker) | a cross-encoder model which is more accurate but less efficient [2] | | | [BAAI/bge-reranker-base](https://huggingface.co/BAAI/bge-reranker-base) | Chinese and English | [Inference](#usage-for-reranker) [Fine-tune](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/reranker) | a cross-encoder model which is more accurate but less efficient [2] | | | [BAAI/bge-large-en-v1.5](https://huggingface.co/BAAI/bge-large-en-v1.5) | English | [Inference](#usage-for-embedding-model) [Fine-tune](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/finetune) | version 1.5 with more reasonable similarity distribution | `Represent this sentence for searching relevant passages: ` | | [BAAI/bge-base-en-v1.5](https://huggingface.co/BAAI/bge-base-en-v1.5) | English | [Inference](#usage-for-embedding-model) 
[Fine-tune](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/finetune) | version 1.5 with more reasonable similarity distribution | `Represent this sentence for searching relevant passages: ` | | [BAAI/bge-small-en-v1.5](https://huggingface.co/BAAI/bge-small-en-v1.5) | English | [Inference](#usage-for-embedding-model) [Fine-tune](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/finetune) | version 1.5 with more reasonable similarity distribution | `Represent this sentence for searching relevant passages: ` | | [BAAI/bge-large-zh-v1.5](https://huggingface.co/BAAI/bge-large-zh-v1.5) | Chinese | [Inference](#usage-for-embedding-model) [Fine-tune](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/finetune) | version 1.5 with more reasonable similarity distribution | `为这个句子生成表示以用于检索相关文章:` | | [BAAI/bge-base-zh-v1.5](https://huggingface.co/BAAI/bge-base-zh-v1.5) | Chinese | [Inference](#usage-for-embedding-model) [Fine-tune](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/finetune) | version 1.5 with more reasonable similarity distribution | `为这个句子生成表示以用于检索相关文章:` | | [BAAI/bge-small-zh-v1.5](https://huggingface.co/BAAI/bge-small-zh-v1.5) | Chinese | [Inference](#usage-for-embedding-model) [Fine-tune](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/finetune) | version 1.5 with more reasonable similarity distribution | `为这个句子生成表示以用于检索相关文章:` | | [BAAI/bge-large-en](https://huggingface.co/BAAI/bge-large-en) | English | [Inference](#usage-for-embedding-model) [Fine-tune](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/finetune) | :trophy: rank **1st** in [MTEB](https://huggingface.co/spaces/mteb/leaderboard) leaderboard | `Represent this sentence for searching relevant passages: ` | | [BAAI/bge-base-en](https://huggingface.co/BAAI/bge-base-en) | English | [Inference](#usage-for-embedding-model) [Fine-tune](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/finetune) | a base-scale model but with similar ability to `bge-large-en` | `Represent this sentence for searching relevant passages: ` | | [BAAI/bge-small-en](https://huggingface.co/BAAI/bge-small-en) | English | [Inference](#usage-for-embedding-model) [Fine-tune](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/finetune) |a small-scale model but with competitive performance | `Represent this sentence for searching relevant passages: ` | | [BAAI/bge-large-zh](https://huggingface.co/BAAI/bge-large-zh) | Chinese | [Inference](#usage-for-embedding-model) [Fine-tune](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/finetune) | :trophy: rank **1st** in [C-MTEB](https://github.com/FlagOpen/FlagEmbedding/tree/master/C_MTEB) benchmark | `为这个句子生成表示以用于检索相关文章:` | | [BAAI/bge-base-zh](https://huggingface.co/BAAI/bge-base-zh) | Chinese | [Inference](#usage-for-embedding-model) [Fine-tune](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/finetune) | a base-scale model but with similar ability to `bge-large-zh` | `为这个句子生成表示以用于检索相关文章:` | | [BAAI/bge-small-zh](https://huggingface.co/BAAI/bge-small-zh) | Chinese | [Inference](#usage-for-embedding-model) [Fine-tune](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/finetune) | a small-scale model but with competitive performance | `为这个句子生成表示以用于检索相关文章:` | [1\]: If you need to search the relevant passages to a query, we suggest to add the instruction to the query; in other cases, no instruction is needed, just use the original query directly. 
In all cases, **no instruction** needs to be added to passages. [2\]: Different from the embedding model, the reranker uses question and document as input and directly outputs similarity instead of an embedding. To balance accuracy and time cost, a cross-encoder is widely used to re-rank the top-k documents retrieved by other simple models. For example, use the bge embedding model to retrieve the top 100 relevant documents, and then use the bge reranker to re-rank those 100 documents to get the final top-3 results. All models have been uploaded to the Huggingface Hub, and you can see them at https://huggingface.co/BAAI. If you cannot open the Huggingface Hub, you can also download the models at https://model.baai.ac.cn/models . ## Frequently asked questions <details> <summary>1. How to fine-tune bge embedding model?</summary> <!-- ### How to fine-tune bge embedding model? --> Follow this [example](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/finetune) to prepare data and fine-tune your model. Some suggestions: - Mine hard negatives following this [example](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/finetune#hard-negatives), which can improve the retrieval performance. - If you pre-train bge on your data, the pre-trained model cannot be directly used to calculate similarity; it must be fine-tuned with contrastive learning before computing similarity. - If the accuracy of the fine-tuned model is still not high, it is recommended to use/fine-tune the cross-encoder model (bge-reranker) to re-rank top-k results. Hard negatives are also needed to fine-tune the reranker. </details> <details> <summary>2. The similarity score between two dissimilar sentences is higher than 0.5</summary> <!-- ### The similarity score between two dissimilar sentences is higher than 0.5 --> **We suggest using bge v1.5, which alleviates the issue of the similarity distribution.** Since we fine-tune the models by contrastive learning with a temperature of 0.01, the similarity distribution of the current BGE model is roughly in the interval \[0.6, 1\]. So a similarity score greater than 0.5 does not indicate that the two sentences are similar. For downstream tasks, such as passage retrieval or semantic similarity, **what matters is the relative order of the scores, not the absolute value.** If you need to filter similar sentences based on a similarity threshold, please select an appropriate threshold based on the similarity distribution on your data (such as 0.8, 0.85, or even 0.9). </details> <details> <summary>3. When does the query instruction need to be used</summary> <!-- ### When does the query instruction need to be used --> For `bge-*-v1.5`, we improved its retrieval ability when not using an instruction. Using no instruction causes only a slight degradation in retrieval performance compared with using an instruction. So you can generate embeddings without an instruction in all cases for convenience. For a retrieval task that uses short queries to find long related documents, it is recommended to add the instruction to these short queries. **The best method to decide whether to add instructions for queries is choosing the setting that achieves better performance on your task.** In all cases, the documents/passages do not need the instruction.
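As a rough way to run that comparison yourself, here is a minimal sketch with placeholder Chinese queries and passages, assuming the `sentence-transformers` usage shown later in this card. It encodes the same query with and without the instruction so you can check which setting ranks your relevant passages higher on a small labeled sample.

```python
from sentence_transformers import SentenceTransformer

model = SentenceTransformer('BAAI/bge-large-zh-v1.5')
instruction = "为这个句子生成表示以用于检索相关文章:"
query = "样例查询"                       # placeholder short query
passages = ["样例文档-1", "样例文档-2"]   # placeholder passages

p_embeddings = model.encode(passages, normalize_embeddings=True)
for prefix in ("", instruction):
    q_embedding = model.encode(prefix + query, normalize_embeddings=True)
    label = "with instruction" if prefix else "no instruction"
    print(label, q_embedding @ p_embeddings.T)  # compare similarity scores per setting
```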
</details> ## Usage ### Usage for Embedding Model Here are some examples for using `bge` models with [FlagEmbedding](#using-flagembedding), [Sentence-Transformers](#using-sentence-transformers), [Langchain](#using-langchain), or [Huggingface Transformers](#using-huggingface-transformers). #### Using FlagEmbedding ``` pip install -U FlagEmbedding ``` If it doesn't work for you, you can see [FlagEmbedding](https://github.com/FlagOpen/FlagEmbedding/blob/master/FlagEmbedding/baai_general_embedding/README.md) for more methods to install FlagEmbedding. ```python from FlagEmbedding import FlagModel sentences_1 = ["样例数据-1", "样例数据-2"] sentences_2 = ["样例数据-3", "样例数据-4"] model = FlagModel('BAAI/bge-large-zh-v1.5', query_instruction_for_retrieval="为这个句子生成表示以用于检索相关文章:", use_fp16=True) # Setting use_fp16 to True speeds up computation with a slight performance degradation embeddings_1 = model.encode(sentences_1) embeddings_2 = model.encode(sentences_2) similarity = embeddings_1 @ embeddings_2.T print(similarity) # for s2p(short query to long passage) retrieval task, suggest to use encode_queries() which will automatically add the instruction to each query # corpus in retrieval task can still use encode() or encode_corpus(), since they don't need instruction queries = ['query_1', 'query_2'] passages = ["样例文档-1", "样例文档-2"] q_embeddings = model.encode_queries(queries) p_embeddings = model.encode(passages) scores = q_embeddings @ p_embeddings.T ``` For the value of the argument `query_instruction_for_retrieval`, see [Model List](https://github.com/FlagOpen/FlagEmbedding/tree/master#model-list). By default, FlagModel will use all available GPUs when encoding. Please set `os.environ["CUDA_VISIBLE_DEVICES"]` to select specific GPUs. You also can set `os.environ["CUDA_VISIBLE_DEVICES"]=""` to make all GPUs unavailable. #### Using Sentence-Transformers You can also use the `bge` models with [sentence-transformers](https://www.SBERT.net): ``` pip install -U sentence-transformers ``` ```python from sentence_transformers import SentenceTransformer sentences_1 = ["样例数据-1", "样例数据-2"] sentences_2 = ["样例数据-3", "样例数据-4"] model = SentenceTransformer('BAAI/bge-large-zh-v1.5') embeddings_1 = model.encode(sentences_1, normalize_embeddings=True) embeddings_2 = model.encode(sentences_2, normalize_embeddings=True) similarity = embeddings_1 @ embeddings_2.T print(similarity) ``` For s2p(short query to long passage) retrieval task, each short query should start with an instruction (instructions see [Model List](https://github.com/FlagOpen/FlagEmbedding/tree/master#model-list)). But the instruction is not needed for passages. 
```python from sentence_transformers import SentenceTransformer queries = ['query_1', 'query_2'] passages = ["样例文档-1", "样例文档-2"] instruction = "为这个句子生成表示以用于检索相关文章:" model = SentenceTransformer('BAAI/bge-large-zh-v1.5') q_embeddings = model.encode([instruction+q for q in queries], normalize_embeddings=True) p_embeddings = model.encode(passages, normalize_embeddings=True) scores = q_embeddings @ p_embeddings.T ``` #### Using Langchain You can use `bge` in langchain like this: ```python from langchain.embeddings import HuggingFaceBgeEmbeddings model_name = "BAAI/bge-large-en-v1.5" model_kwargs = {'device': 'cuda'} encode_kwargs = {'normalize_embeddings': True} # set True to compute cosine similarity model = HuggingFaceBgeEmbeddings( model_name=model_name, model_kwargs=model_kwargs, encode_kwargs=encode_kwargs, query_instruction="为这个句子生成表示以用于检索相关文章:" ) model.query_instruction = "为这个句子生成表示以用于检索相关文章:" ``` #### Using HuggingFace Transformers With the transformers package, you can use the model like this: First, you pass your input through the transformer model, then you select the last hidden state of the first token (i.e., [CLS]) as the sentence embedding. ```python from transformers import AutoTokenizer, AutoModel import torch # Sentences we want sentence embeddings for sentences = ["样例数据-1", "样例数据-2"] # Load model from HuggingFace Hub tokenizer = AutoTokenizer.from_pretrained('BAAI/bge-large-zh-v1.5') model = AutoModel.from_pretrained('BAAI/bge-large-zh-v1.5') model.eval() # Tokenize sentences encoded_input = tokenizer(sentences, padding=True, truncation=True, return_tensors='pt') # for the s2p (short query to long passage) retrieval task, add an instruction to the query (do not add an instruction to passages) # encoded_input = tokenizer([instruction + q for q in queries], padding=True, truncation=True, return_tensors='pt') # Compute token embeddings with torch.no_grad(): model_output = model(**encoded_input) # Perform pooling. In this case, cls pooling. sentence_embeddings = model_output[0][:, 0] # normalize embeddings sentence_embeddings = torch.nn.functional.normalize(sentence_embeddings, p=2, dim=1) print("Sentence embeddings:", sentence_embeddings) ``` ### Usage for Reranker Different from the embedding model, the reranker uses question and document as input and directly outputs similarity instead of an embedding. You can get a relevance score by inputting a query and a passage to the reranker. The reranker is optimized based on cross-entropy loss, so the relevance score is not bounded to a specific range.
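If you do need scores in a fixed range (for example, to apply a threshold), one common option, not specific to bge, is to map the raw logits through a sigmoid, which preserves their relative order. A minimal sketch with placeholder scores:

```python
import torch

raw_scores = torch.tensor([-2.3, 0.7, 5.1])  # placeholder reranker logits (unbounded)
probabilities = torch.sigmoid(raw_scores)    # squashed into (0, 1), relative order unchanged
print(probabilities)
```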
#### Using FlagEmbedding ``` pip install -U FlagEmbedding ``` Get relevance scores (higher scores indicate more relevance): ```python from FlagEmbedding import FlagReranker reranker = FlagReranker('BAAI/bge-reranker-large', use_fp16=True) # Setting use_fp16 to True speeds up computation with a slight performance degradation score = reranker.compute_score(['query', 'passage']) print(score) scores = reranker.compute_score([['what is panda?', 'hi'], ['what is panda?', 'The giant panda (Ailuropoda melanoleuca), sometimes called a panda bear or simply panda, is a bear species endemic to China.']]) print(scores) ``` #### Using Huggingface transformers ```python import torch from transformers import AutoModelForSequenceClassification, AutoTokenizer tokenizer = AutoTokenizer.from_pretrained('BAAI/bge-reranker-large') model = AutoModelForSequenceClassification.from_pretrained('BAAI/bge-reranker-large') model.eval() pairs = [['what is panda?', 'hi'], ['what is panda?', 'The giant panda (Ailuropoda melanoleuca), sometimes called a panda bear or simply panda, is a bear species endemic to China.']] with torch.no_grad(): inputs = tokenizer(pairs, padding=True, truncation=True, return_tensors='pt', max_length=512) scores = model(**inputs, return_dict=True).logits.view(-1, ).float() print(scores) ``` ## Evaluation `baai-general-embedding` models achieve **state-of-the-art performance on both MTEB and C-MTEB leaderboard!** For more details and evaluation tools see our [scripts](https://github.com/FlagOpen/FlagEmbedding/blob/master/C_MTEB/README.md). - **MTEB**: | Model Name | Dimension | Sequence Length | Average (56) | Retrieval (15) |Clustering (11) | Pair Classification (3) | Reranking (4) | STS (10) | Summarization (1) | Classification (12) | |:----:|:---:|:---:|:---:|:---:|:---:|:---:|:---:|:---:|:---:|:---:| | [BAAI/bge-large-en-v1.5](https://huggingface.co/BAAI/bge-large-en-v1.5) | 1024 | 512 | **64.23** | **54.29** | 46.08 | 87.12 | 60.03 | 83.11 | 31.61 | 75.97 | | [BAAI/bge-base-en-v1.5](https://huggingface.co/BAAI/bge-base-en-v1.5) | 768 | 512 | 63.55 | 53.25 | 45.77 | 86.55 | 58.86 | 82.4 | 31.07 | 75.53 | | [BAAI/bge-small-en-v1.5](https://huggingface.co/BAAI/bge-small-en-v1.5) | 384 | 512 | 62.17 |51.68 | 43.82 | 84.92 | 58.36 | 81.59 | 30.12 | 74.14 | | [bge-large-en](https://huggingface.co/BAAI/bge-large-en) | 1024 | 512 | 63.98 | 53.9 | 46.98 | 85.8 | 59.48 | 81.56 | 32.06 | 76.21 | | [bge-base-en](https://huggingface.co/BAAI/bge-base-en) | 768 | 512 | 63.36 | 53.0 | 46.32 | 85.86 | 58.7 | 81.84 | 29.27 | 75.27 | | [gte-large](https://huggingface.co/thenlper/gte-large) | 1024 | 512 | 63.13 | 52.22 | 46.84 | 85.00 | 59.13 | 83.35 | 31.66 | 73.33 | | [gte-base](https://huggingface.co/thenlper/gte-base) | 768 | 512 | 62.39 | 51.14 | 46.2 | 84.57 | 58.61 | 82.3 | 31.17 | 73.01 | | [e5-large-v2](https://huggingface.co/intfloat/e5-large-v2) | 1024| 512 | 62.25 | 50.56 | 44.49 | 86.03 | 56.61 | 82.05 | 30.19 | 75.24 | | [bge-small-en](https://huggingface.co/BAAI/bge-small-en) | 384 | 512 | 62.11 | 51.82 | 44.31 | 83.78 | 57.97 | 80.72 | 30.53 | 74.37 | | [instructor-xl](https://huggingface.co/hkunlp/instructor-xl) | 768 | 512 | 61.79 | 49.26 | 44.74 | 86.62 | 57.29 | 83.06 | 32.32 | 61.79 | | [e5-base-v2](https://huggingface.co/intfloat/e5-base-v2) | 768 | 512 | 61.5 | 50.29 | 43.80 | 85.73 | 55.91 | 81.05 | 30.28 | 73.84 | | [gte-small](https://huggingface.co/thenlper/gte-small) | 384 | 512 | 61.36 | 49.46 | 44.89 | 83.54 | 57.7 | 82.07 | 30.42 | 72.31 | | 
[text-embedding-ada-002](https://platform.openai.com/docs/guides/embeddings) | 1536 | 8192 | 60.99 | 49.25 | 45.9 | 84.89 | 56.32 | 80.97 | 30.8 | 70.93 | | [e5-small-v2](https://huggingface.co/intfloat/e5-base-v2) | 384 | 512 | 59.93 | 49.04 | 39.92 | 84.67 | 54.32 | 80.39 | 31.16 | 72.94 | | [sentence-t5-xxl](https://huggingface.co/sentence-transformers/sentence-t5-xxl) | 768 | 512 | 59.51 | 42.24 | 43.72 | 85.06 | 56.42 | 82.63 | 30.08 | 73.42 | | [all-mpnet-base-v2](https://huggingface.co/sentence-transformers/all-mpnet-base-v2) | 768 | 514 | 57.78 | 43.81 | 43.69 | 83.04 | 59.36 | 80.28 | 27.49 | 65.07 | | [sgpt-bloom-7b1-msmarco](https://huggingface.co/bigscience/sgpt-bloom-7b1-msmarco) | 4096 | 2048 | 57.59 | 48.22 | 38.93 | 81.9 | 55.65 | 77.74 | 33.6 | 66.19 | - **C-MTEB**: We create the benchmark C-MTEB for Chinese text embedding which consists of 31 datasets from 6 tasks. Please refer to [C_MTEB](https://github.com/FlagOpen/FlagEmbedding/blob/master/C_MTEB/README.md) for a detailed introduction. | Model | Embedding dimension | Avg | Retrieval | STS | PairClassification | Classification | Reranking | Clustering | |:-------------------------------|:--------:|:--------:|:--------:|:--------:|:--------:|:--------:|:--------:|:--------:| | [**BAAI/bge-large-zh-v1.5**](https://huggingface.co/BAAI/bge-large-zh-v1.5) | 1024 | **64.53** | 70.46 | 56.25 | 81.6 | 69.13 | 65.84 | 48.99 | | [BAAI/bge-base-zh-v1.5](https://huggingface.co/BAAI/bge-base-zh-v1.5) | 768 | 63.13 | 69.49 | 53.72 | 79.75 | 68.07 | 65.39 | 47.53 | | [BAAI/bge-small-zh-v1.5](https://huggingface.co/BAAI/bge-small-zh-v1.5) | 512 | 57.82 | 61.77 | 49.11 | 70.41 | 63.96 | 60.92 | 44.18 | | [BAAI/bge-large-zh](https://huggingface.co/BAAI/bge-large-zh) | 1024 | 64.20 | 71.53 | 54.98 | 78.94 | 68.32 | 65.11 | 48.39 | | [bge-large-zh-noinstruct](https://huggingface.co/BAAI/bge-large-zh-noinstruct) | 1024 | 63.53 | 70.55 | 53 | 76.77 | 68.58 | 64.91 | 50.01 | | [BAAI/bge-base-zh](https://huggingface.co/BAAI/bge-base-zh) | 768 | 62.96 | 69.53 | 54.12 | 77.5 | 67.07 | 64.91 | 47.63 | | [multilingual-e5-large](https://huggingface.co/intfloat/multilingual-e5-large) | 1024 | 58.79 | 63.66 | 48.44 | 69.89 | 67.34 | 56.00 | 48.23 | | [BAAI/bge-small-zh](https://huggingface.co/BAAI/bge-small-zh) | 512 | 58.27 | 63.07 | 49.45 | 70.35 | 63.64 | 61.48 | 45.09 | | [m3e-base](https://huggingface.co/moka-ai/m3e-base) | 768 | 57.10 | 56.91 | 50.47 | 63.99 | 67.52 | 59.34 | 47.68 | | [m3e-large](https://huggingface.co/moka-ai/m3e-large) | 1024 | 57.05 | 54.75 | 50.42 | 64.3 | 68.2 | 59.66 | 48.88 | | [multilingual-e5-base](https://huggingface.co/intfloat/multilingual-e5-base) | 768 | 55.48 | 61.63 | 46.49 | 67.07 | 65.35 | 54.35 | 40.68 | | [multilingual-e5-small](https://huggingface.co/intfloat/multilingual-e5-small) | 384 | 55.38 | 59.95 | 45.27 | 66.45 | 65.85 | 53.86 | 45.26 | | [text-embedding-ada-002(OpenAI)](https://platform.openai.com/docs/guides/embeddings/what-are-embeddings) | 1536 | 53.02 | 52.0 | 43.35 | 69.56 | 64.31 | 54.28 | 45.68 | | [luotuo](https://huggingface.co/silk-road/luotuo-bert-medium) | 1024 | 49.37 | 44.4 | 42.78 | 66.62 | 61 | 49.25 | 44.39 | | [text2vec-base](https://huggingface.co/shibing624/text2vec-base-chinese) | 768 | 47.63 | 38.79 | 43.41 | 67.41 | 62.19 | 49.45 | 37.66 | | [text2vec-large](https://huggingface.co/GanymedeNil/text2vec-large-chinese) | 1024 | 47.36 | 41.94 | 44.97 | 70.86 | 60.66 | 49.16 | 30.02 | - **Reranking**: See [C_MTEB](https://github.com/FlagOpen/FlagEmbedding/blob/master/C_MTEB/) for 
evaluation script.

| Model | T2Reranking | T2RerankingZh2En\* | T2RerankingEn2Zh\* | MMarcoReranking | CMedQAv1 | CMedQAv2 | Avg |
|:-------------------------------|:--------:|:--------:|:--------:|:--------:|:--------:|:--------:|:--------:|
| text2vec-base-multilingual | 64.66 | 62.94 | 62.51 | 14.37 | 48.46 | 48.6 | 50.26 |
| multilingual-e5-small | 65.62 | 60.94 | 56.41 | 29.91 | 67.26 | 66.54 | 57.78 |
| multilingual-e5-large | 64.55 | 61.61 | 54.28 | 28.6 | 67.42 | 67.92 | 57.4 |
| multilingual-e5-base | 64.21 | 62.13 | 54.68 | 29.5 | 66.23 | 66.98 | 57.29 |
| m3e-base | 66.03 | 62.74 | 56.07 | 17.51 | 77.05 | 76.76 | 59.36 |
| m3e-large | 66.13 | 62.72 | 56.1 | 16.46 | 77.76 | 78.27 | 59.57 |
| bge-base-zh-v1.5 | 66.49 | 63.25 | 57.02 | 29.74 | 80.47 | 84.88 | 63.64 |
| bge-large-zh-v1.5 | 65.74 | 63.39 | 57.03 | 28.74 | 83.45 | 85.44 | 63.97 |
| [BAAI/bge-reranker-base](https://huggingface.co/BAAI/bge-reranker-base) | 67.28 | 63.95 | 60.45 | 35.46 | 81.26 | 84.1 | 65.42 |
| [BAAI/bge-reranker-large](https://huggingface.co/BAAI/bge-reranker-large) | 67.6 | 64.03 | 61.44 | 37.16 | 82.15 | 84.18 | 66.09 |

\* : T2RerankingZh2En and T2RerankingEn2Zh are cross-language retrieval tasks.

## Train

### BAAI Embedding

We pre-train the models using [retromae](https://github.com/staoxiao/RetroMAE) and then train them on large-scale pair data using contrastive learning.
**You can fine-tune the embedding model on your data following our [examples](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/finetune).**
We also provide a [pre-train example](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/pretrain).
Note that the goal of pre-training is to reconstruct the text; the pre-trained model cannot be used for similarity calculation directly and needs to be fine-tuned.
For more training details on bge, see [baai_general_embedding](https://github.com/FlagOpen/FlagEmbedding/blob/master/FlagEmbedding/baai_general_embedding/README.md).

### BGE Reranker

A cross-encoder performs full attention over the input pair, which is more accurate than an embedding model (i.e., a bi-encoder) but also more time-consuming. Therefore, it can be used to re-rank the top-k documents returned by an embedding model (a minimal usage sketch is included at the end of this card).
We train the cross-encoder on multilingual pair data. The data format is the same as for the embedding model, so you can fine-tune it easily following our [example](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/reranker).
For more details, please refer to [./FlagEmbedding/reranker/README.md](https://github.com/FlagOpen/FlagEmbedding/tree/master/FlagEmbedding/reranker).

## Contact

If you have any questions or suggestions related to this project, feel free to open an issue or pull request.
You can also email Shitao Xiao([email protected]) and Zheng Liu([email protected]).

## Citation

If you find this repository useful, please consider giving it a star :star: and a citation:

```
@misc{bge_embedding,
  title={C-Pack: Packaged Resources To Advance General Chinese Embedding},
  author={Shitao Xiao and Zheng Liu and Peitian Zhang and Niklas Muennighoff},
  year={2023},
  eprint={2309.07597},
  archivePrefix={arXiv},
  primaryClass={cs.CL}
}
```

## License

FlagEmbedding is licensed under the [MIT License](https://github.com/FlagOpen/FlagEmbedding/blob/master/LICENSE). The released models can be used for commercial purposes free of charge.
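As mentioned in the BGE Reranker section above, a cross-encoder can be used to re-rank the top-k passages returned by an embedding model. The sketch below is a minimal illustration of that idea rather than part of the official FlagEmbedding examples: it assumes the `sentence-transformers` package is installed and that `BAAI/bge-reranker-base` loads as a standard sequence-classification cross-encoder; the query and candidate passages are made up.

```python
from sentence_transformers import CrossEncoder

# Assumption: the reranker checkpoint works with the generic CrossEncoder wrapper.
reranker = CrossEncoder("BAAI/bge-reranker-base", max_length=512)

query = "what is a cross-encoder?"
# Candidate passages, e.g. the top-k results returned by a bi-encoder retriever.
passages = [
    "A cross-encoder jointly encodes a query and a passage with full attention.",
    "A bi-encoder encodes the query and the passage separately into dense vectors.",
    "The weather in Beijing is sunny today.",
]

# Score each (query, passage) pair; higher scores mean higher relevance.
scores = reranker.predict([(query, passage) for passage in passages])

# Order the passages by their cross-encoder score.
reranked = sorted(zip(passages, scores), key=lambda pair: pair[1], reverse=True)
for passage, score in reranked:
    print(f"{score:.4f}  {passage}")
```

For fine-tuning or production use, follow the reranker example linked in the BGE Reranker section rather than this sketch.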
[ "BEAR" ]
intfloat/e5-small-v2
intfloat
sentence-similarity
[ "sentence-transformers", "pytorch", "tf", "onnx", "safetensors", "openvino", "bert", "mteb", "Sentence Transformers", "sentence-similarity", "en", "arxiv:2212.03533", "arxiv:2104.08663", "arxiv:2210.07316", "license:mit", "model-index", "autotrain_compatible", "text-embeddings-inference", "endpoints_compatible", "region:us" ]
"2023-05-19T06:45:35Z"
2025-02-17T03:24:44+00:00
209,296
85
--- language: - en license: mit tags: - mteb - Sentence Transformers - sentence-similarity - sentence-transformers model-index: - name: e5-small-v2 results: - task: type: Classification dataset: name: MTEB AmazonCounterfactualClassification (en) type: mteb/amazon_counterfactual config: en split: test revision: e8379541af4e31359cca9fbcf4b00f2671dba205 metrics: - type: accuracy value: 77.59701492537313 - type: ap value: 41.67064885731708 - type: f1 value: 71.86465946398573 - task: type: Classification dataset: name: MTEB AmazonPolarityClassification type: mteb/amazon_polarity config: default split: test revision: e2d317d38cd51312af73b3d32a06d1a08b442046 metrics: - type: accuracy value: 91.265875 - type: ap value: 87.67633085349644 - type: f1 value: 91.24297521425744 - task: type: Classification dataset: name: MTEB AmazonReviewsClassification (en) type: mteb/amazon_reviews_multi config: en split: test revision: 1399c76144fd37290681b995c656ef9b2e06e26d metrics: - type: accuracy value: 45.882000000000005 - type: f1 value: 45.08058870381236 - task: type: Retrieval dataset: name: MTEB ArguAna type: arguana config: default split: test revision: None metrics: - type: map_at_1 value: 20.697 - type: map_at_10 value: 33.975 - type: map_at_100 value: 35.223 - type: map_at_1000 value: 35.260000000000005 - type: map_at_3 value: 29.776999999999997 - type: map_at_5 value: 32.035000000000004 - type: mrr_at_1 value: 20.982 - type: mrr_at_10 value: 34.094 - type: mrr_at_100 value: 35.343 - type: mrr_at_1000 value: 35.38 - type: mrr_at_3 value: 29.884 - type: mrr_at_5 value: 32.141999999999996 - type: ndcg_at_1 value: 20.697 - type: ndcg_at_10 value: 41.668 - type: ndcg_at_100 value: 47.397 - type: ndcg_at_1000 value: 48.305 - type: ndcg_at_3 value: 32.928000000000004 - type: ndcg_at_5 value: 36.998999999999995 - type: precision_at_1 value: 20.697 - type: precision_at_10 value: 6.636 - type: precision_at_100 value: 0.924 - type: precision_at_1000 value: 0.099 - type: precision_at_3 value: 14.035 - type: precision_at_5 value: 10.398 - type: recall_at_1 value: 20.697 - type: recall_at_10 value: 66.35799999999999 - type: recall_at_100 value: 92.39 - type: recall_at_1000 value: 99.36 - type: recall_at_3 value: 42.105 - type: recall_at_5 value: 51.991 - task: type: Clustering dataset: name: MTEB ArxivClusteringP2P type: mteb/arxiv-clustering-p2p config: default split: test revision: a122ad7f3f0291bf49cc6f4d32aa80929df69d5d metrics: - type: v_measure value: 42.1169517447068 - task: type: Clustering dataset: name: MTEB ArxivClusteringS2S type: mteb/arxiv-clustering-s2s config: default split: test revision: f910caf1a6075f7329cdf8c1a6135696f37dbd53 metrics: - type: v_measure value: 34.79553720107097 - task: type: Reranking dataset: name: MTEB AskUbuntuDupQuestions type: mteb/askubuntudupquestions-reranking config: default split: test revision: 2000358ca161889fa9c082cb41daa8dcfb161a54 metrics: - type: map value: 58.10811337308168 - type: mrr value: 71.56410763751482 - task: type: STS dataset: name: MTEB BIOSSES type: mteb/biosses-sts config: default split: test revision: d3fb88f8f02e40887cd149695127462bbcf29b4a metrics: - type: cos_sim_pearson value: 78.46834918248696 - type: cos_sim_spearman value: 79.4289182755206 - type: euclidean_pearson value: 76.26662973727008 - type: euclidean_spearman value: 78.11744260952536 - type: manhattan_pearson value: 76.08175262609434 - type: manhattan_spearman value: 78.29395265552289 - task: type: Classification dataset: name: MTEB Banking77Classification type: mteb/banking77 config: 
default split: test revision: 0fd18e25b25c072e09e0d92ab615fda904d66300 metrics: - type: accuracy value: 81.63636363636364 - type: f1 value: 81.55779952376953 - task: type: Clustering dataset: name: MTEB BiorxivClusteringP2P type: mteb/biorxiv-clustering-p2p config: default split: test revision: 65b79d1d13f80053f67aca9498d9402c2d9f1f40 metrics: - type: v_measure value: 35.88541137137571 - task: type: Clustering dataset: name: MTEB BiorxivClusteringS2S type: mteb/biorxiv-clustering-s2s config: default split: test revision: 258694dd0231531bc1fd9de6ceb52a0853c6d908 metrics: - type: v_measure value: 30.05205685274407 - task: type: Retrieval dataset: name: MTEB CQADupstackAndroidRetrieval type: BeIR/cqadupstack config: default split: test revision: None metrics: - type: map_at_1 value: 30.293999999999997 - type: map_at_10 value: 39.876 - type: map_at_100 value: 41.315000000000005 - type: map_at_1000 value: 41.451 - type: map_at_3 value: 37.194 - type: map_at_5 value: 38.728 - type: mrr_at_1 value: 37.053000000000004 - type: mrr_at_10 value: 45.281 - type: mrr_at_100 value: 46.188 - type: mrr_at_1000 value: 46.245999999999995 - type: mrr_at_3 value: 43.228 - type: mrr_at_5 value: 44.366 - type: ndcg_at_1 value: 37.053000000000004 - type: ndcg_at_10 value: 45.086 - type: ndcg_at_100 value: 50.756 - type: ndcg_at_1000 value: 53.123 - type: ndcg_at_3 value: 41.416 - type: ndcg_at_5 value: 43.098 - type: precision_at_1 value: 37.053000000000004 - type: precision_at_10 value: 8.34 - type: precision_at_100 value: 1.346 - type: precision_at_1000 value: 0.186 - type: precision_at_3 value: 19.647000000000002 - type: precision_at_5 value: 13.877 - type: recall_at_1 value: 30.293999999999997 - type: recall_at_10 value: 54.309 - type: recall_at_100 value: 78.59 - type: recall_at_1000 value: 93.82300000000001 - type: recall_at_3 value: 43.168 - type: recall_at_5 value: 48.192 - type: map_at_1 value: 28.738000000000003 - type: map_at_10 value: 36.925999999999995 - type: map_at_100 value: 38.017 - type: map_at_1000 value: 38.144 - type: map_at_3 value: 34.446 - type: map_at_5 value: 35.704 - type: mrr_at_1 value: 35.478 - type: mrr_at_10 value: 42.786 - type: mrr_at_100 value: 43.458999999999996 - type: mrr_at_1000 value: 43.507 - type: mrr_at_3 value: 40.648 - type: mrr_at_5 value: 41.804 - type: ndcg_at_1 value: 35.478 - type: ndcg_at_10 value: 42.044 - type: ndcg_at_100 value: 46.249 - type: ndcg_at_1000 value: 48.44 - type: ndcg_at_3 value: 38.314 - type: ndcg_at_5 value: 39.798 - type: precision_at_1 value: 35.478 - type: precision_at_10 value: 7.764 - type: precision_at_100 value: 1.253 - type: precision_at_1000 value: 0.174 - type: precision_at_3 value: 18.047 - type: precision_at_5 value: 12.637 - type: recall_at_1 value: 28.738000000000003 - type: recall_at_10 value: 50.659 - type: recall_at_100 value: 68.76299999999999 - type: recall_at_1000 value: 82.811 - type: recall_at_3 value: 39.536 - type: recall_at_5 value: 43.763999999999996 - type: map_at_1 value: 38.565 - type: map_at_10 value: 50.168 - type: map_at_100 value: 51.11 - type: map_at_1000 value: 51.173 - type: map_at_3 value: 47.044000000000004 - type: map_at_5 value: 48.838 - type: mrr_at_1 value: 44.201 - type: mrr_at_10 value: 53.596999999999994 - type: mrr_at_100 value: 54.211 - type: mrr_at_1000 value: 54.247 - type: mrr_at_3 value: 51.202000000000005 - type: mrr_at_5 value: 52.608999999999995 - type: ndcg_at_1 value: 44.201 - type: ndcg_at_10 value: 55.694 - type: ndcg_at_100 value: 59.518 - type: ndcg_at_1000 value: 60.907 - type: 
ndcg_at_3 value: 50.395999999999994 - type: ndcg_at_5 value: 53.022999999999996 - type: precision_at_1 value: 44.201 - type: precision_at_10 value: 8.84 - type: precision_at_100 value: 1.162 - type: precision_at_1000 value: 0.133 - type: precision_at_3 value: 22.153 - type: precision_at_5 value: 15.260000000000002 - type: recall_at_1 value: 38.565 - type: recall_at_10 value: 68.65 - type: recall_at_100 value: 85.37400000000001 - type: recall_at_1000 value: 95.37400000000001 - type: recall_at_3 value: 54.645999999999994 - type: recall_at_5 value: 60.958 - type: map_at_1 value: 23.945 - type: map_at_10 value: 30.641000000000002 - type: map_at_100 value: 31.599 - type: map_at_1000 value: 31.691000000000003 - type: map_at_3 value: 28.405 - type: map_at_5 value: 29.704000000000004 - type: mrr_at_1 value: 25.537 - type: mrr_at_10 value: 32.22 - type: mrr_at_100 value: 33.138 - type: mrr_at_1000 value: 33.214 - type: mrr_at_3 value: 30.151 - type: mrr_at_5 value: 31.298 - type: ndcg_at_1 value: 25.537 - type: ndcg_at_10 value: 34.638000000000005 - type: ndcg_at_100 value: 39.486 - type: ndcg_at_1000 value: 41.936 - type: ndcg_at_3 value: 30.333 - type: ndcg_at_5 value: 32.482 - type: precision_at_1 value: 25.537 - type: precision_at_10 value: 5.153 - type: precision_at_100 value: 0.7929999999999999 - type: precision_at_1000 value: 0.104 - type: precision_at_3 value: 12.429 - type: precision_at_5 value: 8.723 - type: recall_at_1 value: 23.945 - type: recall_at_10 value: 45.412 - type: recall_at_100 value: 67.836 - type: recall_at_1000 value: 86.467 - type: recall_at_3 value: 34.031 - type: recall_at_5 value: 39.039 - type: map_at_1 value: 14.419 - type: map_at_10 value: 20.858999999999998 - type: map_at_100 value: 22.067999999999998 - type: map_at_1000 value: 22.192 - type: map_at_3 value: 18.673000000000002 - type: map_at_5 value: 19.968 - type: mrr_at_1 value: 17.785999999999998 - type: mrr_at_10 value: 24.878 - type: mrr_at_100 value: 26.021 - type: mrr_at_1000 value: 26.095000000000002 - type: mrr_at_3 value: 22.616 - type: mrr_at_5 value: 23.785 - type: ndcg_at_1 value: 17.785999999999998 - type: ndcg_at_10 value: 25.153 - type: ndcg_at_100 value: 31.05 - type: ndcg_at_1000 value: 34.052 - type: ndcg_at_3 value: 21.117 - type: ndcg_at_5 value: 23.048 - type: precision_at_1 value: 17.785999999999998 - type: precision_at_10 value: 4.590000000000001 - type: precision_at_100 value: 0.864 - type: precision_at_1000 value: 0.125 - type: precision_at_3 value: 9.908999999999999 - type: precision_at_5 value: 7.313 - type: recall_at_1 value: 14.419 - type: recall_at_10 value: 34.477999999999994 - type: recall_at_100 value: 60.02499999999999 - type: recall_at_1000 value: 81.646 - type: recall_at_3 value: 23.515 - type: recall_at_5 value: 28.266999999999996 - type: map_at_1 value: 26.268 - type: map_at_10 value: 35.114000000000004 - type: map_at_100 value: 36.212 - type: map_at_1000 value: 36.333 - type: map_at_3 value: 32.436 - type: map_at_5 value: 33.992 - type: mrr_at_1 value: 31.761 - type: mrr_at_10 value: 40.355999999999995 - type: mrr_at_100 value: 41.125 - type: mrr_at_1000 value: 41.186 - type: mrr_at_3 value: 37.937 - type: mrr_at_5 value: 39.463 - type: ndcg_at_1 value: 31.761 - type: ndcg_at_10 value: 40.422000000000004 - type: ndcg_at_100 value: 45.458999999999996 - type: ndcg_at_1000 value: 47.951 - type: ndcg_at_3 value: 35.972 - type: ndcg_at_5 value: 38.272 - type: precision_at_1 value: 31.761 - type: precision_at_10 value: 7.103 - type: precision_at_100 value: 1.133 - type: 
precision_at_1000 value: 0.152 - type: precision_at_3 value: 16.779 - type: precision_at_5 value: 11.877 - type: recall_at_1 value: 26.268 - type: recall_at_10 value: 51.053000000000004 - type: recall_at_100 value: 72.702 - type: recall_at_1000 value: 89.521 - type: recall_at_3 value: 38.619 - type: recall_at_5 value: 44.671 - type: map_at_1 value: 25.230999999999998 - type: map_at_10 value: 34.227000000000004 - type: map_at_100 value: 35.370000000000005 - type: map_at_1000 value: 35.488 - type: map_at_3 value: 31.496000000000002 - type: map_at_5 value: 33.034 - type: mrr_at_1 value: 30.822 - type: mrr_at_10 value: 39.045 - type: mrr_at_100 value: 39.809 - type: mrr_at_1000 value: 39.873 - type: mrr_at_3 value: 36.663000000000004 - type: mrr_at_5 value: 37.964 - type: ndcg_at_1 value: 30.822 - type: ndcg_at_10 value: 39.472 - type: ndcg_at_100 value: 44.574999999999996 - type: ndcg_at_1000 value: 47.162 - type: ndcg_at_3 value: 34.929 - type: ndcg_at_5 value: 37.002 - type: precision_at_1 value: 30.822 - type: precision_at_10 value: 7.055 - type: precision_at_100 value: 1.124 - type: precision_at_1000 value: 0.152 - type: precision_at_3 value: 16.591 - type: precision_at_5 value: 11.667 - type: recall_at_1 value: 25.230999999999998 - type: recall_at_10 value: 50.42100000000001 - type: recall_at_100 value: 72.685 - type: recall_at_1000 value: 90.469 - type: recall_at_3 value: 37.503 - type: recall_at_5 value: 43.123 - type: map_at_1 value: 24.604166666666664 - type: map_at_10 value: 32.427166666666665 - type: map_at_100 value: 33.51474999999999 - type: map_at_1000 value: 33.6345 - type: map_at_3 value: 30.02366666666667 - type: map_at_5 value: 31.382333333333328 - type: mrr_at_1 value: 29.001166666666666 - type: mrr_at_10 value: 36.3315 - type: mrr_at_100 value: 37.16683333333333 - type: mrr_at_1000 value: 37.23341666666668 - type: mrr_at_3 value: 34.19916666666667 - type: mrr_at_5 value: 35.40458333333334 - type: ndcg_at_1 value: 29.001166666666666 - type: ndcg_at_10 value: 37.06883333333334 - type: ndcg_at_100 value: 41.95816666666666 - type: ndcg_at_1000 value: 44.501583333333336 - type: ndcg_at_3 value: 32.973499999999994 - type: ndcg_at_5 value: 34.90833333333334 - type: precision_at_1 value: 29.001166666666666 - type: precision_at_10 value: 6.336 - type: precision_at_100 value: 1.0282499999999999 - type: precision_at_1000 value: 0.14391666666666664 - type: precision_at_3 value: 14.932499999999996 - type: precision_at_5 value: 10.50825 - type: recall_at_1 value: 24.604166666666664 - type: recall_at_10 value: 46.9525 - type: recall_at_100 value: 68.67816666666667 - type: recall_at_1000 value: 86.59783333333334 - type: recall_at_3 value: 35.49783333333333 - type: recall_at_5 value: 40.52525000000001 - type: map_at_1 value: 23.559 - type: map_at_10 value: 29.023 - type: map_at_100 value: 29.818 - type: map_at_1000 value: 29.909000000000002 - type: map_at_3 value: 27.037 - type: map_at_5 value: 28.225 - type: mrr_at_1 value: 26.994 - type: mrr_at_10 value: 31.962000000000003 - type: mrr_at_100 value: 32.726 - type: mrr_at_1000 value: 32.800000000000004 - type: mrr_at_3 value: 30.266 - type: mrr_at_5 value: 31.208999999999996 - type: ndcg_at_1 value: 26.994 - type: ndcg_at_10 value: 32.53 - type: ndcg_at_100 value: 36.758 - type: ndcg_at_1000 value: 39.362 - type: ndcg_at_3 value: 28.985 - type: ndcg_at_5 value: 30.757 - type: precision_at_1 value: 26.994 - type: precision_at_10 value: 4.968999999999999 - type: precision_at_100 value: 0.759 - type: precision_at_1000 value: 0.106 - type: 
precision_at_3 value: 12.219 - type: precision_at_5 value: 8.527999999999999 - type: recall_at_1 value: 23.559 - type: recall_at_10 value: 40.585 - type: recall_at_100 value: 60.306000000000004 - type: recall_at_1000 value: 80.11 - type: recall_at_3 value: 30.794 - type: recall_at_5 value: 35.186 - type: map_at_1 value: 16.384999999999998 - type: map_at_10 value: 22.142 - type: map_at_100 value: 23.057 - type: map_at_1000 value: 23.177 - type: map_at_3 value: 20.29 - type: map_at_5 value: 21.332 - type: mrr_at_1 value: 19.89 - type: mrr_at_10 value: 25.771 - type: mrr_at_100 value: 26.599 - type: mrr_at_1000 value: 26.680999999999997 - type: mrr_at_3 value: 23.962 - type: mrr_at_5 value: 24.934 - type: ndcg_at_1 value: 19.89 - type: ndcg_at_10 value: 25.97 - type: ndcg_at_100 value: 30.605 - type: ndcg_at_1000 value: 33.619 - type: ndcg_at_3 value: 22.704 - type: ndcg_at_5 value: 24.199 - type: precision_at_1 value: 19.89 - type: precision_at_10 value: 4.553 - type: precision_at_100 value: 0.8049999999999999 - type: precision_at_1000 value: 0.122 - type: precision_at_3 value: 10.541 - type: precision_at_5 value: 7.46 - type: recall_at_1 value: 16.384999999999998 - type: recall_at_10 value: 34.001 - type: recall_at_100 value: 55.17100000000001 - type: recall_at_1000 value: 77.125 - type: recall_at_3 value: 24.618000000000002 - type: recall_at_5 value: 28.695999999999998 - type: map_at_1 value: 23.726 - type: map_at_10 value: 31.227 - type: map_at_100 value: 32.311 - type: map_at_1000 value: 32.419 - type: map_at_3 value: 28.765 - type: map_at_5 value: 30.229 - type: mrr_at_1 value: 27.705000000000002 - type: mrr_at_10 value: 35.085 - type: mrr_at_100 value: 35.931000000000004 - type: mrr_at_1000 value: 36 - type: mrr_at_3 value: 32.603 - type: mrr_at_5 value: 34.117999999999995 - type: ndcg_at_1 value: 27.705000000000002 - type: ndcg_at_10 value: 35.968 - type: ndcg_at_100 value: 41.197 - type: ndcg_at_1000 value: 43.76 - type: ndcg_at_3 value: 31.304 - type: ndcg_at_5 value: 33.661 - type: precision_at_1 value: 27.705000000000002 - type: precision_at_10 value: 5.942 - type: precision_at_100 value: 0.964 - type: precision_at_1000 value: 0.13 - type: precision_at_3 value: 13.868 - type: precision_at_5 value: 9.944 - type: recall_at_1 value: 23.726 - type: recall_at_10 value: 46.786 - type: recall_at_100 value: 70.072 - type: recall_at_1000 value: 88.2 - type: recall_at_3 value: 33.981 - type: recall_at_5 value: 39.893 - type: map_at_1 value: 23.344 - type: map_at_10 value: 31.636999999999997 - type: map_at_100 value: 33.065 - type: map_at_1000 value: 33.300000000000004 - type: map_at_3 value: 29.351 - type: map_at_5 value: 30.432 - type: mrr_at_1 value: 27.866000000000003 - type: mrr_at_10 value: 35.587 - type: mrr_at_100 value: 36.52 - type: mrr_at_1000 value: 36.597 - type: mrr_at_3 value: 33.696 - type: mrr_at_5 value: 34.713 - type: ndcg_at_1 value: 27.866000000000003 - type: ndcg_at_10 value: 36.61 - type: ndcg_at_100 value: 41.88 - type: ndcg_at_1000 value: 45.105000000000004 - type: ndcg_at_3 value: 33.038000000000004 - type: ndcg_at_5 value: 34.331 - type: precision_at_1 value: 27.866000000000003 - type: precision_at_10 value: 6.917 - type: precision_at_100 value: 1.3599999999999999 - type: precision_at_1000 value: 0.233 - type: precision_at_3 value: 15.547 - type: precision_at_5 value: 10.791 - type: recall_at_1 value: 23.344 - type: recall_at_10 value: 45.782000000000004 - type: recall_at_100 value: 69.503 - type: recall_at_1000 value: 90.742 - type: recall_at_3 value: 
35.160000000000004 - type: recall_at_5 value: 39.058 - type: map_at_1 value: 20.776 - type: map_at_10 value: 27.285999999999998 - type: map_at_100 value: 28.235 - type: map_at_1000 value: 28.337 - type: map_at_3 value: 25.147000000000002 - type: map_at_5 value: 26.401999999999997 - type: mrr_at_1 value: 22.921 - type: mrr_at_10 value: 29.409999999999997 - type: mrr_at_100 value: 30.275000000000002 - type: mrr_at_1000 value: 30.354999999999997 - type: mrr_at_3 value: 27.418 - type: mrr_at_5 value: 28.592000000000002 - type: ndcg_at_1 value: 22.921 - type: ndcg_at_10 value: 31.239 - type: ndcg_at_100 value: 35.965 - type: ndcg_at_1000 value: 38.602 - type: ndcg_at_3 value: 27.174 - type: ndcg_at_5 value: 29.229 - type: precision_at_1 value: 22.921 - type: precision_at_10 value: 4.806 - type: precision_at_100 value: 0.776 - type: precision_at_1000 value: 0.11 - type: precision_at_3 value: 11.459999999999999 - type: precision_at_5 value: 8.022 - type: recall_at_1 value: 20.776 - type: recall_at_10 value: 41.294 - type: recall_at_100 value: 63.111 - type: recall_at_1000 value: 82.88600000000001 - type: recall_at_3 value: 30.403000000000002 - type: recall_at_5 value: 35.455999999999996 - task: type: Retrieval dataset: name: MTEB ClimateFEVER type: climate-fever config: default split: test revision: None metrics: - type: map_at_1 value: 9.376 - type: map_at_10 value: 15.926000000000002 - type: map_at_100 value: 17.585 - type: map_at_1000 value: 17.776 - type: map_at_3 value: 13.014000000000001 - type: map_at_5 value: 14.417 - type: mrr_at_1 value: 20.195 - type: mrr_at_10 value: 29.95 - type: mrr_at_100 value: 31.052000000000003 - type: mrr_at_1000 value: 31.108000000000004 - type: mrr_at_3 value: 26.667 - type: mrr_at_5 value: 28.458 - type: ndcg_at_1 value: 20.195 - type: ndcg_at_10 value: 22.871 - type: ndcg_at_100 value: 29.921999999999997 - type: ndcg_at_1000 value: 33.672999999999995 - type: ndcg_at_3 value: 17.782999999999998 - type: ndcg_at_5 value: 19.544 - type: precision_at_1 value: 20.195 - type: precision_at_10 value: 7.394 - type: precision_at_100 value: 1.493 - type: precision_at_1000 value: 0.218 - type: precision_at_3 value: 13.073 - type: precision_at_5 value: 10.436 - type: recall_at_1 value: 9.376 - type: recall_at_10 value: 28.544999999999998 - type: recall_at_100 value: 53.147999999999996 - type: recall_at_1000 value: 74.62 - type: recall_at_3 value: 16.464000000000002 - type: recall_at_5 value: 21.004 - task: type: Retrieval dataset: name: MTEB DBPedia type: dbpedia-entity config: default split: test revision: None metrics: - type: map_at_1 value: 8.415000000000001 - type: map_at_10 value: 18.738 - type: map_at_100 value: 27.291999999999998 - type: map_at_1000 value: 28.992 - type: map_at_3 value: 13.196 - type: map_at_5 value: 15.539 - type: mrr_at_1 value: 66.5 - type: mrr_at_10 value: 74.518 - type: mrr_at_100 value: 74.86 - type: mrr_at_1000 value: 74.87 - type: mrr_at_3 value: 72.375 - type: mrr_at_5 value: 73.86200000000001 - type: ndcg_at_1 value: 54.37499999999999 - type: ndcg_at_10 value: 41.317 - type: ndcg_at_100 value: 45.845 - type: ndcg_at_1000 value: 52.92 - type: ndcg_at_3 value: 44.983000000000004 - type: ndcg_at_5 value: 42.989 - type: precision_at_1 value: 66.5 - type: precision_at_10 value: 33.6 - type: precision_at_100 value: 10.972999999999999 - type: precision_at_1000 value: 2.214 - type: precision_at_3 value: 48.583 - type: precision_at_5 value: 42.15 - type: recall_at_1 value: 8.415000000000001 - type: recall_at_10 value: 24.953 - type: 
recall_at_100 value: 52.48199999999999 - type: recall_at_1000 value: 75.093 - type: recall_at_3 value: 14.341000000000001 - type: recall_at_5 value: 18.468 - task: type: Classification dataset: name: MTEB EmotionClassification type: mteb/emotion config: default split: test revision: 4f58c6b202a23cf9a4da393831edf4f9183cad37 metrics: - type: accuracy value: 47.06499999999999 - type: f1 value: 41.439327599975385 - task: type: Retrieval dataset: name: MTEB FEVER type: fever config: default split: test revision: None metrics: - type: map_at_1 value: 66.02 - type: map_at_10 value: 76.68599999999999 - type: map_at_100 value: 76.959 - type: map_at_1000 value: 76.972 - type: map_at_3 value: 75.024 - type: map_at_5 value: 76.153 - type: mrr_at_1 value: 71.197 - type: mrr_at_10 value: 81.105 - type: mrr_at_100 value: 81.232 - type: mrr_at_1000 value: 81.233 - type: mrr_at_3 value: 79.758 - type: mrr_at_5 value: 80.69 - type: ndcg_at_1 value: 71.197 - type: ndcg_at_10 value: 81.644 - type: ndcg_at_100 value: 82.645 - type: ndcg_at_1000 value: 82.879 - type: ndcg_at_3 value: 78.792 - type: ndcg_at_5 value: 80.528 - type: precision_at_1 value: 71.197 - type: precision_at_10 value: 10.206999999999999 - type: precision_at_100 value: 1.093 - type: precision_at_1000 value: 0.11299999999999999 - type: precision_at_3 value: 30.868000000000002 - type: precision_at_5 value: 19.559 - type: recall_at_1 value: 66.02 - type: recall_at_10 value: 92.50699999999999 - type: recall_at_100 value: 96.497 - type: recall_at_1000 value: 97.956 - type: recall_at_3 value: 84.866 - type: recall_at_5 value: 89.16199999999999 - task: type: Retrieval dataset: name: MTEB FiQA2018 type: fiqa config: default split: test revision: None metrics: - type: map_at_1 value: 17.948 - type: map_at_10 value: 29.833 - type: map_at_100 value: 31.487 - type: map_at_1000 value: 31.674000000000003 - type: map_at_3 value: 26.029999999999998 - type: map_at_5 value: 28.038999999999998 - type: mrr_at_1 value: 34.721999999999994 - type: mrr_at_10 value: 44.214999999999996 - type: mrr_at_100 value: 44.994 - type: mrr_at_1000 value: 45.051 - type: mrr_at_3 value: 41.667 - type: mrr_at_5 value: 43.032 - type: ndcg_at_1 value: 34.721999999999994 - type: ndcg_at_10 value: 37.434 - type: ndcg_at_100 value: 43.702000000000005 - type: ndcg_at_1000 value: 46.993 - type: ndcg_at_3 value: 33.56 - type: ndcg_at_5 value: 34.687 - type: precision_at_1 value: 34.721999999999994 - type: precision_at_10 value: 10.401 - type: precision_at_100 value: 1.7049999999999998 - type: precision_at_1000 value: 0.22799999999999998 - type: precision_at_3 value: 22.531000000000002 - type: precision_at_5 value: 16.42 - type: recall_at_1 value: 17.948 - type: recall_at_10 value: 45.062999999999995 - type: recall_at_100 value: 68.191 - type: recall_at_1000 value: 87.954 - type: recall_at_3 value: 31.112000000000002 - type: recall_at_5 value: 36.823 - task: type: Retrieval dataset: name: MTEB HotpotQA type: hotpotqa config: default split: test revision: None metrics: - type: map_at_1 value: 36.644 - type: map_at_10 value: 57.658 - type: map_at_100 value: 58.562000000000005 - type: map_at_1000 value: 58.62500000000001 - type: map_at_3 value: 54.022999999999996 - type: map_at_5 value: 56.293000000000006 - type: mrr_at_1 value: 73.288 - type: mrr_at_10 value: 80.51700000000001 - type: mrr_at_100 value: 80.72 - type: mrr_at_1000 value: 80.728 - type: mrr_at_3 value: 79.33200000000001 - type: mrr_at_5 value: 80.085 - type: ndcg_at_1 value: 73.288 - type: ndcg_at_10 value: 66.61 - type: 
ndcg_at_100 value: 69.723 - type: ndcg_at_1000 value: 70.96000000000001 - type: ndcg_at_3 value: 61.358999999999995 - type: ndcg_at_5 value: 64.277 - type: precision_at_1 value: 73.288 - type: precision_at_10 value: 14.17 - type: precision_at_100 value: 1.659 - type: precision_at_1000 value: 0.182 - type: precision_at_3 value: 39.487 - type: precision_at_5 value: 25.999 - type: recall_at_1 value: 36.644 - type: recall_at_10 value: 70.851 - type: recall_at_100 value: 82.94399999999999 - type: recall_at_1000 value: 91.134 - type: recall_at_3 value: 59.230000000000004 - type: recall_at_5 value: 64.997 - task: type: Classification dataset: name: MTEB ImdbClassification type: mteb/imdb config: default split: test revision: 3d86128a09e091d6018b6d26cad27f2739fc2db7 metrics: - type: accuracy value: 86.00280000000001 - type: ap value: 80.46302061021223 - type: f1 value: 85.9592921596419 - task: type: Retrieval dataset: name: MTEB MSMARCO type: msmarco config: default split: dev revision: None metrics: - type: map_at_1 value: 22.541 - type: map_at_10 value: 34.625 - type: map_at_100 value: 35.785 - type: map_at_1000 value: 35.831 - type: map_at_3 value: 30.823 - type: map_at_5 value: 32.967999999999996 - type: mrr_at_1 value: 23.180999999999997 - type: mrr_at_10 value: 35.207 - type: mrr_at_100 value: 36.315 - type: mrr_at_1000 value: 36.355 - type: mrr_at_3 value: 31.483 - type: mrr_at_5 value: 33.589999999999996 - type: ndcg_at_1 value: 23.195 - type: ndcg_at_10 value: 41.461 - type: ndcg_at_100 value: 47.032000000000004 - type: ndcg_at_1000 value: 48.199999999999996 - type: ndcg_at_3 value: 33.702 - type: ndcg_at_5 value: 37.522 - type: precision_at_1 value: 23.195 - type: precision_at_10 value: 6.526999999999999 - type: precision_at_100 value: 0.932 - type: precision_at_1000 value: 0.10300000000000001 - type: precision_at_3 value: 14.308000000000002 - type: precision_at_5 value: 10.507 - type: recall_at_1 value: 22.541 - type: recall_at_10 value: 62.524 - type: recall_at_100 value: 88.228 - type: recall_at_1000 value: 97.243 - type: recall_at_3 value: 41.38 - type: recall_at_5 value: 50.55 - task: type: Classification dataset: name: MTEB MTOPDomainClassification (en) type: mteb/mtop_domain config: en split: test revision: d80d48c1eb48d3562165c59d59d0034df9fff0bf metrics: - type: accuracy value: 92.69949840401279 - type: f1 value: 92.54141471311786 - task: type: Classification dataset: name: MTEB MTOPIntentClassification (en) type: mteb/mtop_intent config: en split: test revision: ae001d0e6b1228650b7bd1c2c65fb50ad11a8aba metrics: - type: accuracy value: 72.56041951664386 - type: f1 value: 55.88499977508287 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (en) type: mteb/amazon_massive_intent config: en split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 71.62071284465365 - type: f1 value: 69.36717546572152 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (en) type: mteb/amazon_massive_scenario config: en split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 76.35843981170142 - type: f1 value: 76.15496453538884 - task: type: Clustering dataset: name: MTEB MedrxivClusteringP2P type: mteb/medrxiv-clustering-p2p config: default split: test revision: e7a26af6f3ae46b30dde8737f02c07b1505bcc73 metrics: - type: v_measure value: 31.33664956793118 - task: type: Clustering dataset: name: MTEB MedrxivClusteringS2S type: mteb/medrxiv-clustering-s2s config: default 
split: test revision: 35191c8c0dca72d8ff3efcd72aa802307d469663 metrics: - type: v_measure value: 27.883839621715524 - task: type: Reranking dataset: name: MTEB MindSmallReranking type: mteb/mind_small config: default split: test revision: 3bdac13927fdc888b903db93b2ffdbd90b295a69 metrics: - type: map value: 30.096874986740758 - type: mrr value: 30.97300481932132 - task: type: Retrieval dataset: name: MTEB NFCorpus type: nfcorpus config: default split: test revision: None metrics: - type: map_at_1 value: 5.4 - type: map_at_10 value: 11.852 - type: map_at_100 value: 14.758 - type: map_at_1000 value: 16.134 - type: map_at_3 value: 8.558 - type: map_at_5 value: 10.087 - type: mrr_at_1 value: 44.272 - type: mrr_at_10 value: 52.05800000000001 - type: mrr_at_100 value: 52.689 - type: mrr_at_1000 value: 52.742999999999995 - type: mrr_at_3 value: 50.205999999999996 - type: mrr_at_5 value: 51.367 - type: ndcg_at_1 value: 42.57 - type: ndcg_at_10 value: 32.449 - type: ndcg_at_100 value: 29.596 - type: ndcg_at_1000 value: 38.351 - type: ndcg_at_3 value: 37.044 - type: ndcg_at_5 value: 35.275 - type: precision_at_1 value: 44.272 - type: precision_at_10 value: 23.87 - type: precision_at_100 value: 7.625 - type: precision_at_1000 value: 2.045 - type: precision_at_3 value: 34.365 - type: precision_at_5 value: 30.341 - type: recall_at_1 value: 5.4 - type: recall_at_10 value: 15.943999999999999 - type: recall_at_100 value: 29.805 - type: recall_at_1000 value: 61.695 - type: recall_at_3 value: 9.539 - type: recall_at_5 value: 12.127 - task: type: Retrieval dataset: name: MTEB NQ type: nq config: default split: test revision: None metrics: - type: map_at_1 value: 36.047000000000004 - type: map_at_10 value: 51.6 - type: map_at_100 value: 52.449999999999996 - type: map_at_1000 value: 52.476 - type: map_at_3 value: 47.452 - type: map_at_5 value: 49.964 - type: mrr_at_1 value: 40.382 - type: mrr_at_10 value: 54.273 - type: mrr_at_100 value: 54.859 - type: mrr_at_1000 value: 54.876000000000005 - type: mrr_at_3 value: 51.014 - type: mrr_at_5 value: 52.983999999999995 - type: ndcg_at_1 value: 40.353 - type: ndcg_at_10 value: 59.11300000000001 - type: ndcg_at_100 value: 62.604000000000006 - type: ndcg_at_1000 value: 63.187000000000005 - type: ndcg_at_3 value: 51.513 - type: ndcg_at_5 value: 55.576 - type: precision_at_1 value: 40.353 - type: precision_at_10 value: 9.418 - type: precision_at_100 value: 1.1440000000000001 - type: precision_at_1000 value: 0.12 - type: precision_at_3 value: 23.078000000000003 - type: precision_at_5 value: 16.250999999999998 - type: recall_at_1 value: 36.047000000000004 - type: recall_at_10 value: 79.22200000000001 - type: recall_at_100 value: 94.23 - type: recall_at_1000 value: 98.51100000000001 - type: recall_at_3 value: 59.678 - type: recall_at_5 value: 68.967 - task: type: Retrieval dataset: name: MTEB QuoraRetrieval type: quora config: default split: test revision: None metrics: - type: map_at_1 value: 68.232 - type: map_at_10 value: 81.674 - type: map_at_100 value: 82.338 - type: map_at_1000 value: 82.36099999999999 - type: map_at_3 value: 78.833 - type: map_at_5 value: 80.58 - type: mrr_at_1 value: 78.64 - type: mrr_at_10 value: 85.164 - type: mrr_at_100 value: 85.317 - type: mrr_at_1000 value: 85.319 - type: mrr_at_3 value: 84.127 - type: mrr_at_5 value: 84.789 - type: ndcg_at_1 value: 78.63 - type: ndcg_at_10 value: 85.711 - type: ndcg_at_100 value: 87.238 - type: ndcg_at_1000 value: 87.444 - type: ndcg_at_3 value: 82.788 - type: ndcg_at_5 value: 84.313 - type: precision_at_1 
value: 78.63 - type: precision_at_10 value: 12.977 - type: precision_at_100 value: 1.503 - type: precision_at_1000 value: 0.156 - type: precision_at_3 value: 36.113 - type: precision_at_5 value: 23.71 - type: recall_at_1 value: 68.232 - type: recall_at_10 value: 93.30199999999999 - type: recall_at_100 value: 98.799 - type: recall_at_1000 value: 99.885 - type: recall_at_3 value: 84.827 - type: recall_at_5 value: 89.188 - task: type: Clustering dataset: name: MTEB RedditClustering type: mteb/reddit-clustering config: default split: test revision: 24640382cdbf8abc73003fb0fa6d111a705499eb metrics: - type: v_measure value: 45.71879170816294 - task: type: Clustering dataset: name: MTEB RedditClusteringP2P type: mteb/reddit-clustering-p2p config: default split: test revision: 282350215ef01743dc01b456c7f5241fa8937f16 metrics: - type: v_measure value: 59.65866311751794 - task: type: Retrieval dataset: name: MTEB SCIDOCS type: scidocs config: default split: test revision: None metrics: - type: map_at_1 value: 4.218 - type: map_at_10 value: 10.337 - type: map_at_100 value: 12.131 - type: map_at_1000 value: 12.411 - type: map_at_3 value: 7.4270000000000005 - type: map_at_5 value: 8.913 - type: mrr_at_1 value: 20.8 - type: mrr_at_10 value: 30.868000000000002 - type: mrr_at_100 value: 31.903 - type: mrr_at_1000 value: 31.972 - type: mrr_at_3 value: 27.367 - type: mrr_at_5 value: 29.372 - type: ndcg_at_1 value: 20.8 - type: ndcg_at_10 value: 17.765 - type: ndcg_at_100 value: 24.914 - type: ndcg_at_1000 value: 30.206 - type: ndcg_at_3 value: 16.64 - type: ndcg_at_5 value: 14.712 - type: precision_at_1 value: 20.8 - type: precision_at_10 value: 9.24 - type: precision_at_100 value: 1.9560000000000002 - type: precision_at_1000 value: 0.32299999999999995 - type: precision_at_3 value: 15.467 - type: precision_at_5 value: 12.94 - type: recall_at_1 value: 4.218 - type: recall_at_10 value: 18.752 - type: recall_at_100 value: 39.7 - type: recall_at_1000 value: 65.57300000000001 - type: recall_at_3 value: 9.428 - type: recall_at_5 value: 13.133000000000001 - task: type: STS dataset: name: MTEB SICK-R type: mteb/sickr-sts config: default split: test revision: a6ea5a8cab320b040a23452cc28066d9beae2cee metrics: - type: cos_sim_pearson value: 83.04338850207233 - type: cos_sim_spearman value: 78.5054651430423 - type: euclidean_pearson value: 80.30739451228612 - type: euclidean_spearman value: 78.48377464299097 - type: manhattan_pearson value: 80.40795049052781 - type: manhattan_spearman value: 78.49506205443114 - task: type: STS dataset: name: MTEB STS12 type: mteb/sts12-sts config: default split: test revision: a0d554a64d88156834ff5ae9920b964011b16384 metrics: - type: cos_sim_pearson value: 84.11596224442962 - type: cos_sim_spearman value: 76.20997388935461 - type: euclidean_pearson value: 80.56858451349109 - type: euclidean_spearman value: 75.92659183871186 - type: manhattan_pearson value: 80.60246102203844 - type: manhattan_spearman value: 76.03018971432664 - task: type: STS dataset: name: MTEB STS13 type: mteb/sts13-sts config: default split: test revision: 7e90230a92c190f1bf69ae9002b8cea547a64cca metrics: - type: cos_sim_pearson value: 81.34691640755737 - type: cos_sim_spearman value: 82.4018369631579 - type: euclidean_pearson value: 81.87673092245366 - type: euclidean_spearman value: 82.3671489960678 - type: manhattan_pearson value: 81.88222387719948 - type: manhattan_spearman value: 82.3816590344736 - task: type: STS dataset: name: MTEB STS14 type: mteb/sts14-sts config: default split: test revision: 
6031580fec1f6af667f0bd2da0a551cf4f0b2375 metrics: - type: cos_sim_pearson value: 81.2836092579524 - type: cos_sim_spearman value: 78.99982781772064 - type: euclidean_pearson value: 80.5184271010527 - type: euclidean_spearman value: 78.89777392101904 - type: manhattan_pearson value: 80.53585705018664 - type: manhattan_spearman value: 78.92898405472994 - task: type: STS dataset: name: MTEB STS15 type: mteb/sts15-sts config: default split: test revision: ae752c7c21bf194d8b67fd573edf7ae58183cbe3 metrics: - type: cos_sim_pearson value: 86.7349907750784 - type: cos_sim_spearman value: 87.7611234446225 - type: euclidean_pearson value: 86.98759326731624 - type: euclidean_spearman value: 87.58321319424618 - type: manhattan_pearson value: 87.03483090370842 - type: manhattan_spearman value: 87.63278333060288 - task: type: STS dataset: name: MTEB STS16 type: mteb/sts16-sts config: default split: test revision: 4d8694f8f0e0100860b497b999b3dbed754a0513 metrics: - type: cos_sim_pearson value: 81.75873694924825 - type: cos_sim_spearman value: 83.80237999094724 - type: euclidean_pearson value: 83.55023725861537 - type: euclidean_spearman value: 84.12744338577744 - type: manhattan_pearson value: 83.58816983036232 - type: manhattan_spearman value: 84.18520748676501 - task: type: STS dataset: name: MTEB STS17 (en-en) type: mteb/sts17-crosslingual-sts config: en-en split: test revision: af5e6fb845001ecf41f4c1e033ce921939a2a68d metrics: - type: cos_sim_pearson value: 87.21630882940174 - type: cos_sim_spearman value: 87.72382883437031 - type: euclidean_pearson value: 88.69933350930333 - type: euclidean_spearman value: 88.24660814383081 - type: manhattan_pearson value: 88.77331018833499 - type: manhattan_spearman value: 88.26109989380632 - task: type: STS dataset: name: MTEB STS22 (en) type: mteb/sts22-crosslingual-sts config: en split: test revision: 6d1ba47164174a496b7fa5d3569dae26a6813b80 metrics: - type: cos_sim_pearson value: 61.11854063060489 - type: cos_sim_spearman value: 63.14678634195072 - type: euclidean_pearson value: 61.679090067000864 - type: euclidean_spearman value: 62.28876589509653 - type: manhattan_pearson value: 62.082324165511004 - type: manhattan_spearman value: 62.56030932816679 - task: type: STS dataset: name: MTEB STSBenchmark type: mteb/stsbenchmark-sts config: default split: test revision: b0fddb56ed78048fa8b90373c8a3cfc37b684831 metrics: - type: cos_sim_pearson value: 84.00319882832645 - type: cos_sim_spearman value: 85.94529772647257 - type: euclidean_pearson value: 85.6661390122756 - type: euclidean_spearman value: 85.97747815545827 - type: manhattan_pearson value: 85.58422770541893 - type: manhattan_spearman value: 85.9237139181532 - task: type: Reranking dataset: name: MTEB SciDocsRR type: mteb/scidocs-reranking config: default split: test revision: d3c5e1fc0b855ab6097bf1cda04dd73947d7caab metrics: - type: map value: 79.16198731863916 - type: mrr value: 94.25202702163487 - task: type: Retrieval dataset: name: MTEB SciFact type: scifact config: default split: test revision: None metrics: - type: map_at_1 value: 54.761 - type: map_at_10 value: 64.396 - type: map_at_100 value: 65.07 - type: map_at_1000 value: 65.09899999999999 - type: map_at_3 value: 61.846000000000004 - type: map_at_5 value: 63.284 - type: mrr_at_1 value: 57.667 - type: mrr_at_10 value: 65.83099999999999 - type: mrr_at_100 value: 66.36800000000001 - type: mrr_at_1000 value: 66.39399999999999 - type: mrr_at_3 value: 64.056 - type: mrr_at_5 value: 65.206 - type: ndcg_at_1 value: 57.667 - type: ndcg_at_10 value: 68.854 
- type: ndcg_at_100 value: 71.59100000000001 - type: ndcg_at_1000 value: 72.383 - type: ndcg_at_3 value: 64.671 - type: ndcg_at_5 value: 66.796 - type: precision_at_1 value: 57.667 - type: precision_at_10 value: 9.167 - type: precision_at_100 value: 1.053 - type: precision_at_1000 value: 0.11199999999999999 - type: precision_at_3 value: 25.444 - type: precision_at_5 value: 16.667 - type: recall_at_1 value: 54.761 - type: recall_at_10 value: 80.9 - type: recall_at_100 value: 92.767 - type: recall_at_1000 value: 99 - type: recall_at_3 value: 69.672 - type: recall_at_5 value: 75.083 - task: type: PairClassification dataset: name: MTEB SprintDuplicateQuestions type: mteb/sprintduplicatequestions-pairclassification config: default split: test revision: d66bd1f72af766a5cc4b0ca5e00c162f89e8cc46 metrics: - type: cos_sim_accuracy value: 99.8079207920792 - type: cos_sim_ap value: 94.88470927617445 - type: cos_sim_f1 value: 90.08179959100204 - type: cos_sim_precision value: 92.15481171548117 - type: cos_sim_recall value: 88.1 - type: dot_accuracy value: 99.58613861386138 - type: dot_ap value: 82.94822578881316 - type: dot_f1 value: 77.33333333333333 - type: dot_precision value: 79.36842105263158 - type: dot_recall value: 75.4 - type: euclidean_accuracy value: 99.8069306930693 - type: euclidean_ap value: 94.81367858031837 - type: euclidean_f1 value: 90.01009081735621 - type: euclidean_precision value: 90.83503054989816 - type: euclidean_recall value: 89.2 - type: manhattan_accuracy value: 99.81188118811882 - type: manhattan_ap value: 94.91405337220161 - type: manhattan_f1 value: 90.2763561924258 - type: manhattan_precision value: 92.45283018867924 - type: manhattan_recall value: 88.2 - type: max_accuracy value: 99.81188118811882 - type: max_ap value: 94.91405337220161 - type: max_f1 value: 90.2763561924258 - task: type: Clustering dataset: name: MTEB StackExchangeClustering type: mteb/stackexchange-clustering config: default split: test revision: 6cbc1f7b2bc0622f2e39d2c77fa502909748c259 metrics: - type: v_measure value: 58.511599500053094 - task: type: Clustering dataset: name: MTEB StackExchangeClusteringP2P type: mteb/stackexchange-clustering-p2p config: default split: test revision: 815ca46b2622cec33ccafc3735d572c266efdb44 metrics: - type: v_measure value: 31.984728147814707 - task: type: Reranking dataset: name: MTEB StackOverflowDupQuestions type: mteb/stackoverflowdupquestions-reranking config: default split: test revision: e185fbe320c72810689fc5848eb6114e1ef5ec69 metrics: - type: map value: 49.93428193939015 - type: mrr value: 50.916557911043206 - task: type: Summarization dataset: name: MTEB SummEval type: mteb/summeval config: default split: test revision: cda12ad7615edc362dbf25a00fdd61d3b1eaf93c metrics: - type: cos_sim_pearson value: 31.562500894537145 - type: cos_sim_spearman value: 31.162587976726307 - type: dot_pearson value: 22.633662187735762 - type: dot_spearman value: 22.723000282378962 - task: type: Retrieval dataset: name: MTEB TRECCOVID type: trec-covid config: default split: test revision: None metrics: - type: map_at_1 value: 0.219 - type: map_at_10 value: 1.871 - type: map_at_100 value: 10.487 - type: map_at_1000 value: 25.122 - type: map_at_3 value: 0.657 - type: map_at_5 value: 1.0699999999999998 - type: mrr_at_1 value: 84 - type: mrr_at_10 value: 89.567 - type: mrr_at_100 value: 89.748 - type: mrr_at_1000 value: 89.748 - type: mrr_at_3 value: 88.667 - type: mrr_at_5 value: 89.567 - type: ndcg_at_1 value: 80 - type: ndcg_at_10 value: 74.533 - type: ndcg_at_100 value: 
55.839000000000006 - type: ndcg_at_1000 value: 49.748 - type: ndcg_at_3 value: 79.53099999999999 - type: ndcg_at_5 value: 78.245 - type: precision_at_1 value: 84 - type: precision_at_10 value: 78.4 - type: precision_at_100 value: 56.99999999999999 - type: precision_at_1000 value: 21.98 - type: precision_at_3 value: 85.333 - type: precision_at_5 value: 84.8 - type: recall_at_1 value: 0.219 - type: recall_at_10 value: 2.02 - type: recall_at_100 value: 13.555 - type: recall_at_1000 value: 46.739999999999995 - type: recall_at_3 value: 0.685 - type: recall_at_5 value: 1.13 - task: type: Retrieval dataset: name: MTEB Touche2020 type: webis-touche2020 config: default split: test revision: None metrics: - type: map_at_1 value: 3.5029999999999997 - type: map_at_10 value: 11.042 - type: map_at_100 value: 16.326999999999998 - type: map_at_1000 value: 17.836 - type: map_at_3 value: 6.174 - type: map_at_5 value: 7.979 - type: mrr_at_1 value: 42.857 - type: mrr_at_10 value: 52.617000000000004 - type: mrr_at_100 value: 53.351000000000006 - type: mrr_at_1000 value: 53.351000000000006 - type: mrr_at_3 value: 46.939 - type: mrr_at_5 value: 50.714000000000006 - type: ndcg_at_1 value: 38.775999999999996 - type: ndcg_at_10 value: 27.125 - type: ndcg_at_100 value: 35.845 - type: ndcg_at_1000 value: 47.377 - type: ndcg_at_3 value: 29.633 - type: ndcg_at_5 value: 28.378999999999998 - type: precision_at_1 value: 42.857 - type: precision_at_10 value: 24.082 - type: precision_at_100 value: 6.877999999999999 - type: precision_at_1000 value: 1.463 - type: precision_at_3 value: 29.932 - type: precision_at_5 value: 28.571 - type: recall_at_1 value: 3.5029999999999997 - type: recall_at_10 value: 17.068 - type: recall_at_100 value: 43.361 - type: recall_at_1000 value: 78.835 - type: recall_at_3 value: 6.821000000000001 - type: recall_at_5 value: 10.357 - task: type: Classification dataset: name: MTEB ToxicConversationsClassification type: mteb/toxic_conversations_50k config: default split: test revision: d7c0de2777da35d6aae2200a62c6e0e5af397c4c metrics: - type: accuracy value: 71.0954 - type: ap value: 14.216844153511959 - type: f1 value: 54.63687418565117 - task: type: Classification dataset: name: MTEB TweetSentimentExtractionClassification type: mteb/tweet_sentiment_extraction config: default split: test revision: d604517c81ca91fe16a244d1248fc021f9ecee7a metrics: - type: accuracy value: 61.46293152235427 - type: f1 value: 61.744177921638645 - task: type: Clustering dataset: name: MTEB TwentyNewsgroupsClustering type: mteb/twentynewsgroups-clustering config: default split: test revision: 6125ec4e24fa026cec8a478383ee943acfbd5449 metrics: - type: v_measure value: 41.12708617788644 - task: type: PairClassification dataset: name: MTEB TwitterSemEval2015 type: mteb/twittersemeval2015-pairclassification config: default split: test revision: 70970daeab8776df92f5ea462b6173c0b46fd2d1 metrics: - type: cos_sim_accuracy value: 85.75430649102938 - type: cos_sim_ap value: 73.34252536948081 - type: cos_sim_f1 value: 67.53758935173774 - type: cos_sim_precision value: 63.3672525439408 - type: cos_sim_recall value: 72.29551451187335 - type: dot_accuracy value: 81.71305954580676 - type: dot_ap value: 59.5532209082386 - type: dot_f1 value: 56.18466898954705 - type: dot_precision value: 47.830923248053395 - type: dot_recall value: 68.07387862796834 - type: euclidean_accuracy value: 85.81987244441795 - type: euclidean_ap value: 73.34325409809446 - type: euclidean_f1 value: 67.83451360417443 - type: euclidean_precision value: 
64.09955388588871 - type: euclidean_recall value: 72.0316622691293 - type: manhattan_accuracy value: 85.68277999642368 - type: manhattan_ap value: 73.1535450121903 - type: manhattan_f1 value: 67.928237896289 - type: manhattan_precision value: 63.56945722171113 - type: manhattan_recall value: 72.9287598944591 - type: max_accuracy value: 85.81987244441795 - type: max_ap value: 73.34325409809446 - type: max_f1 value: 67.928237896289 - task: type: PairClassification dataset: name: MTEB TwitterURLCorpus type: mteb/twitterurlcorpus-pairclassification config: default split: test revision: 8b6510b0b1fa4e4c4f879467980e9be563ec1cdf metrics: - type: cos_sim_accuracy value: 88.90441262079403 - type: cos_sim_ap value: 85.79331880741438 - type: cos_sim_f1 value: 78.31563529842548 - type: cos_sim_precision value: 74.6683424102779 - type: cos_sim_recall value: 82.33754234678165 - type: dot_accuracy value: 84.89928978926534 - type: dot_ap value: 75.25819218316 - type: dot_f1 value: 69.88730119720536 - type: dot_precision value: 64.23362374959665 - type: dot_recall value: 76.63227594702803 - type: euclidean_accuracy value: 89.01695967710637 - type: euclidean_ap value: 85.98986606038852 - type: euclidean_f1 value: 78.5277880014722 - type: euclidean_precision value: 75.22211253701876 - type: euclidean_recall value: 82.13735756082538 - type: manhattan_accuracy value: 88.99561454573679 - type: manhattan_ap value: 85.92262421793953 - type: manhattan_f1 value: 78.38866094740769 - type: manhattan_precision value: 76.02373028505282 - type: manhattan_recall value: 80.9054511857099 - type: max_accuracy value: 89.01695967710637 - type: max_ap value: 85.98986606038852 - type: max_f1 value: 78.5277880014722
---

# E5-small-v2

[Text Embeddings by Weakly-Supervised Contrastive Pre-training](https://arxiv.org/pdf/2212.03533.pdf).
Liang Wang, Nan Yang, Xiaolong Huang, Binxing Jiao, Linjun Yang, Daxin Jiang, Rangan Majumder, Furu Wei, arXiv 2022

This model has 12 layers and the embedding size is 384.

## Usage

Below is an example of how to encode queries and passages from the MS-MARCO passage ranking dataset.

```python
import torch.nn.functional as F

from torch import Tensor
from transformers import AutoTokenizer, AutoModel


def average_pool(last_hidden_states: Tensor,
                 attention_mask: Tensor) -> Tensor:
    last_hidden = last_hidden_states.masked_fill(~attention_mask[..., None].bool(), 0.0)
    return last_hidden.sum(dim=1) / attention_mask.sum(dim=1)[..., None]


# Each input text should start with "query: " or "passage: ".
# For tasks other than retrieval, you can simply use the "query: " prefix.
input_texts = ['query: how much protein should a female eat',
               'query: summit define',
               "passage: As a general guideline, the CDC's average requirement of protein for women ages 19 to 70 is 46 grams per day. But, as you can see from this chart, you'll need to increase that if you're expecting or training for a marathon. Check out the chart below to see how much protein you should be eating each day.",
               "passage: Definition of summit for English Language Learners. : 1 the highest point of a mountain : the top of a mountain. : 2 the highest level. : 3 a meeting or series of meetings between the leaders of two or more governments."]

tokenizer = AutoTokenizer.from_pretrained('intfloat/e5-small-v2')
model = AutoModel.from_pretrained('intfloat/e5-small-v2')

# Tokenize the input texts
batch_dict = tokenizer(input_texts, max_length=512, padding=True, truncation=True, return_tensors='pt')

outputs = model(**batch_dict)
embeddings = average_pool(outputs.last_hidden_state, batch_dict['attention_mask'])

# normalize embeddings
embeddings = F.normalize(embeddings, p=2, dim=1)
scores = (embeddings[:2] @ embeddings[2:].T) * 100
print(scores.tolist())
```

## Training Details

Please refer to our paper at [https://arxiv.org/pdf/2212.03533.pdf](https://arxiv.org/pdf/2212.03533.pdf).

## Benchmark Evaluation

Check out [unilm/e5](https://github.com/microsoft/unilm/tree/master/e5) to reproduce evaluation results on the [BEIR](https://arxiv.org/abs/2104.08663) and [MTEB benchmark](https://arxiv.org/abs/2210.07316).

## Support for Sentence Transformers

Below is an example of usage with `sentence_transformers`.

```python
from sentence_transformers import SentenceTransformer
model = SentenceTransformer('intfloat/e5-small-v2')
input_texts = [
    'query: how much protein should a female eat',
    'query: summit define',
    "passage: As a general guideline, the CDC's average requirement of protein for women ages 19 to 70 is 46 grams per day. But, as you can see from this chart, you'll need to increase that if you're expecting or training for a marathon. Check out the chart below to see how much protein you should be eating each day.",
    "passage: Definition of summit for English Language Learners. : 1 the highest point of a mountain : the top of a mountain. : 2 the highest level. : 3 a meeting or series of meetings between the leaders of two or more governments."
]
embeddings = model.encode(input_texts, normalize_embeddings=True)
```

Package requirements: `pip install sentence_transformers~=2.2.2`

Contributors: [michaelfeil](https://huggingface.co/michaelfeil)

## FAQ

**1. Do I need to add the prefixes "query: " and "passage: " to input texts?**

Yes, this is how the model is trained; otherwise you will see a performance degradation.

Here are some rules of thumb:
- Use "query: " and "passage: " correspondingly for asymmetric tasks such as passage retrieval in open QA and ad-hoc information retrieval.
- Use the "query: " prefix for symmetric tasks such as semantic similarity and paraphrase retrieval (see the short sketch after this FAQ).
- Use the "query: " prefix if you want to use embeddings as features, such as for linear probing classification or clustering.

**2. Why are my reproduced results slightly different from those reported in the model card?**

Different versions of `transformers` and `pytorch` could cause negligible but non-zero performance differences.

**3. Why do the cosine similarity scores concentrate around 0.7 to 1.0?**

This is known and expected behavior, as we use a low temperature of 0.01 for the InfoNCE contrastive loss.

For text embedding tasks like text retrieval or semantic similarity, what matters is the relative order of the scores rather than the absolute values, so this should not be an issue.
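To make the rule of thumb for symmetric tasks concrete, here is a short sketch (not part of the original card) that scores semantic similarity with the "query: " prefix on both sides, reusing the `sentence_transformers` setup shown above; the example sentences are invented.

```python
from sentence_transformers import SentenceTransformer, util

model = SentenceTransformer('intfloat/e5-small-v2')

# Symmetric task (semantic similarity): both sides use the "query: " prefix.
sentences_a = ['query: A man is playing a guitar on stage.']
sentences_b = ['query: Someone performs music with a guitar.',
               'query: The stock market fell sharply today.']

emb_a = model.encode(sentences_a, normalize_embeddings=True)
emb_b = model.encode(sentences_b, normalize_embeddings=True)

# With L2-normalized embeddings, cosine similarity equals the dot product.
scores = util.cos_sim(emb_a, emb_b)
print(scores)  # the paraphrase should score clearly higher than the unrelated sentence
```

Consistent with FAQ 3, expect both scores to fall in a fairly narrow high range; what matters is their relative order.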
## Citation

If you find our paper or models helpful, please consider citing as follows:

```
@article{wang2022text,
  title={Text Embeddings by Weakly-Supervised Contrastive Pre-training},
  author={Wang, Liang and Yang, Nan and Huang, Xiaolong and Jiao, Binxing and Yang, Linjun and Jiang, Daxin and Majumder, Rangan and Wei, Furu},
  journal={arXiv preprint arXiv:2212.03533},
  year={2022}
}
```

## Limitations

This model only works for English texts. Long texts will be truncated to at most 512 tokens.
[ "BIOSSES", "SCIFACT" ]
hkunlp/instructor-large
hkunlp
sentence-similarity
[ "sentence-transformers", "pytorch", "t5", "text-embedding", "embeddings", "information-retrieval", "beir", "text-classification", "language-model", "text-clustering", "text-semantic-similarity", "text-evaluation", "prompt-retrieval", "text-reranking", "feature-extraction", "sentence-similarity", "transformers", "English", "Sentence Similarity", "natural_questions", "ms_marco", "fever", "hotpot_qa", "mteb", "en", "arxiv:2212.09741", "license:apache-2.0", "model-index", "autotrain_compatible", "text-generation-inference", "region:us" ]
"2022-12-20T05:31:06Z"
2023-04-21T06:04:33+00:00
200,795
506
--- language: en license: apache-2.0 pipeline_tag: sentence-similarity tags: - text-embedding - embeddings - information-retrieval - beir - text-classification - language-model - text-clustering - text-semantic-similarity - text-evaluation - prompt-retrieval - text-reranking - sentence-transformers - feature-extraction - sentence-similarity - transformers - t5 - English - Sentence Similarity - natural_questions - ms_marco - fever - hotpot_qa - mteb inference: false model-index: - name: INSTRUCTOR results: - task: type: Classification dataset: name: MTEB AmazonCounterfactualClassification (en) type: mteb/amazon_counterfactual config: en split: test revision: e8379541af4e31359cca9fbcf4b00f2671dba205 metrics: - type: accuracy value: 88.13432835820896 - type: ap value: 59.298209334395665 - type: f1 value: 83.31769058643586 - task: type: Classification dataset: name: MTEB AmazonPolarityClassification type: mteb/amazon_polarity config: default split: test revision: e2d317d38cd51312af73b3d32a06d1a08b442046 metrics: - type: accuracy value: 91.526375 - type: ap value: 88.16327709705504 - type: f1 value: 91.51095801287843 - task: type: Classification dataset: name: MTEB AmazonReviewsClassification (en) type: mteb/amazon_reviews_multi config: en split: test revision: 1399c76144fd37290681b995c656ef9b2e06e26d metrics: - type: accuracy value: 47.856 - type: f1 value: 45.41490917650942 - task: type: Retrieval dataset: name: MTEB ArguAna type: arguana config: default split: test revision: None metrics: - type: map_at_1 value: 31.223 - type: map_at_10 value: 47.947 - type: map_at_100 value: 48.742000000000004 - type: map_at_1000 value: 48.745 - type: map_at_3 value: 43.137 - type: map_at_5 value: 45.992 - type: mrr_at_1 value: 32.432 - type: mrr_at_10 value: 48.4 - type: mrr_at_100 value: 49.202 - type: mrr_at_1000 value: 49.205 - type: mrr_at_3 value: 43.551 - type: mrr_at_5 value: 46.467999999999996 - type: ndcg_at_1 value: 31.223 - type: ndcg_at_10 value: 57.045 - type: ndcg_at_100 value: 60.175 - type: ndcg_at_1000 value: 60.233000000000004 - type: ndcg_at_3 value: 47.171 - type: ndcg_at_5 value: 52.322 - type: precision_at_1 value: 31.223 - type: precision_at_10 value: 8.599 - type: precision_at_100 value: 0.991 - type: precision_at_1000 value: 0.1 - type: precision_at_3 value: 19.63 - type: precision_at_5 value: 14.282 - type: recall_at_1 value: 31.223 - type: recall_at_10 value: 85.989 - type: recall_at_100 value: 99.075 - type: recall_at_1000 value: 99.502 - type: recall_at_3 value: 58.89 - type: recall_at_5 value: 71.408 - task: type: Clustering dataset: name: MTEB ArxivClusteringP2P type: mteb/arxiv-clustering-p2p config: default split: test revision: a122ad7f3f0291bf49cc6f4d32aa80929df69d5d metrics: - type: v_measure value: 43.1621946393635 - task: type: Clustering dataset: name: MTEB ArxivClusteringS2S type: mteb/arxiv-clustering-s2s config: default split: test revision: f910caf1a6075f7329cdf8c1a6135696f37dbd53 metrics: - type: v_measure value: 32.56417132407894 - task: type: Reranking dataset: name: MTEB AskUbuntuDupQuestions type: mteb/askubuntudupquestions-reranking config: default split: test revision: 2000358ca161889fa9c082cb41daa8dcfb161a54 metrics: - type: map value: 64.29539304390207 - type: mrr value: 76.44484017060196 - task: type: STS dataset: name: MTEB BIOSSES type: mteb/biosses-sts config: default split: test revision: d3fb88f8f02e40887cd149695127462bbcf29b4a metrics: - type: cos_sim_spearman value: 84.38746499431112 - task: type: Classification dataset: name: MTEB 
Banking77Classification type: mteb/banking77 config: default split: test revision: 0fd18e25b25c072e09e0d92ab615fda904d66300 metrics: - type: accuracy value: 78.51298701298701 - type: f1 value: 77.49041754069235 - task: type: Clustering dataset: name: MTEB BiorxivClusteringP2P type: mteb/biorxiv-clustering-p2p config: default split: test revision: 65b79d1d13f80053f67aca9498d9402c2d9f1f40 metrics: - type: v_measure value: 37.61848554098577 - task: type: Clustering dataset: name: MTEB BiorxivClusteringS2S type: mteb/biorxiv-clustering-s2s config: default split: test revision: 258694dd0231531bc1fd9de6ceb52a0853c6d908 metrics: - type: v_measure value: 31.32623280148178 - task: type: Retrieval dataset: name: MTEB CQADupstackAndroidRetrieval type: BeIR/cqadupstack config: default split: test revision: None metrics: - type: map_at_1 value: 35.803000000000004 - type: map_at_10 value: 48.848 - type: map_at_100 value: 50.5 - type: map_at_1000 value: 50.602999999999994 - type: map_at_3 value: 45.111000000000004 - type: map_at_5 value: 47.202 - type: mrr_at_1 value: 44.635000000000005 - type: mrr_at_10 value: 55.593 - type: mrr_at_100 value: 56.169999999999995 - type: mrr_at_1000 value: 56.19499999999999 - type: mrr_at_3 value: 53.361999999999995 - type: mrr_at_5 value: 54.806999999999995 - type: ndcg_at_1 value: 44.635000000000005 - type: ndcg_at_10 value: 55.899 - type: ndcg_at_100 value: 60.958 - type: ndcg_at_1000 value: 62.302 - type: ndcg_at_3 value: 51.051 - type: ndcg_at_5 value: 53.351000000000006 - type: precision_at_1 value: 44.635000000000005 - type: precision_at_10 value: 10.786999999999999 - type: precision_at_100 value: 1.6580000000000001 - type: precision_at_1000 value: 0.213 - type: precision_at_3 value: 24.893 - type: precision_at_5 value: 17.740000000000002 - type: recall_at_1 value: 35.803000000000004 - type: recall_at_10 value: 68.657 - type: recall_at_100 value: 89.77199999999999 - type: recall_at_1000 value: 97.67 - type: recall_at_3 value: 54.066 - type: recall_at_5 value: 60.788 - type: map_at_1 value: 33.706 - type: map_at_10 value: 44.896 - type: map_at_100 value: 46.299 - type: map_at_1000 value: 46.44 - type: map_at_3 value: 41.721000000000004 - type: map_at_5 value: 43.486000000000004 - type: mrr_at_1 value: 41.592 - type: mrr_at_10 value: 50.529 - type: mrr_at_100 value: 51.22 - type: mrr_at_1000 value: 51.258 - type: mrr_at_3 value: 48.205999999999996 - type: mrr_at_5 value: 49.528 - type: ndcg_at_1 value: 41.592 - type: ndcg_at_10 value: 50.77199999999999 - type: ndcg_at_100 value: 55.383 - type: ndcg_at_1000 value: 57.288 - type: ndcg_at_3 value: 46.324 - type: ndcg_at_5 value: 48.346000000000004 - type: precision_at_1 value: 41.592 - type: precision_at_10 value: 9.516 - type: precision_at_100 value: 1.541 - type: precision_at_1000 value: 0.2 - type: precision_at_3 value: 22.399 - type: precision_at_5 value: 15.770999999999999 - type: recall_at_1 value: 33.706 - type: recall_at_10 value: 61.353 - type: recall_at_100 value: 80.182 - type: recall_at_1000 value: 91.896 - type: recall_at_3 value: 48.204 - type: recall_at_5 value: 53.89699999999999 - type: map_at_1 value: 44.424 - type: map_at_10 value: 57.169000000000004 - type: map_at_100 value: 58.202 - type: map_at_1000 value: 58.242000000000004 - type: map_at_3 value: 53.825 - type: map_at_5 value: 55.714 - type: mrr_at_1 value: 50.470000000000006 - type: mrr_at_10 value: 60.489000000000004 - type: mrr_at_100 value: 61.096 - type: mrr_at_1000 value: 61.112 - type: mrr_at_3 value: 58.192 - type: mrr_at_5 value: 
59.611999999999995 - type: ndcg_at_1 value: 50.470000000000006 - type: ndcg_at_10 value: 63.071999999999996 - type: ndcg_at_100 value: 66.964 - type: ndcg_at_1000 value: 67.659 - type: ndcg_at_3 value: 57.74399999999999 - type: ndcg_at_5 value: 60.367000000000004 - type: precision_at_1 value: 50.470000000000006 - type: precision_at_10 value: 10.019 - type: precision_at_100 value: 1.29 - type: precision_at_1000 value: 0.13899999999999998 - type: precision_at_3 value: 25.558999999999997 - type: precision_at_5 value: 17.467 - type: recall_at_1 value: 44.424 - type: recall_at_10 value: 77.02 - type: recall_at_100 value: 93.738 - type: recall_at_1000 value: 98.451 - type: recall_at_3 value: 62.888 - type: recall_at_5 value: 69.138 - type: map_at_1 value: 26.294 - type: map_at_10 value: 34.503 - type: map_at_100 value: 35.641 - type: map_at_1000 value: 35.724000000000004 - type: map_at_3 value: 31.753999999999998 - type: map_at_5 value: 33.190999999999995 - type: mrr_at_1 value: 28.362 - type: mrr_at_10 value: 36.53 - type: mrr_at_100 value: 37.541000000000004 - type: mrr_at_1000 value: 37.602000000000004 - type: mrr_at_3 value: 33.917 - type: mrr_at_5 value: 35.358000000000004 - type: ndcg_at_1 value: 28.362 - type: ndcg_at_10 value: 39.513999999999996 - type: ndcg_at_100 value: 44.815 - type: ndcg_at_1000 value: 46.839 - type: ndcg_at_3 value: 34.02 - type: ndcg_at_5 value: 36.522 - type: precision_at_1 value: 28.362 - type: precision_at_10 value: 6.101999999999999 - type: precision_at_100 value: 0.9129999999999999 - type: precision_at_1000 value: 0.11399999999999999 - type: precision_at_3 value: 14.161999999999999 - type: precision_at_5 value: 9.966 - type: recall_at_1 value: 26.294 - type: recall_at_10 value: 53.098 - type: recall_at_100 value: 76.877 - type: recall_at_1000 value: 91.834 - type: recall_at_3 value: 38.266 - type: recall_at_5 value: 44.287 - type: map_at_1 value: 16.407 - type: map_at_10 value: 25.185999999999996 - type: map_at_100 value: 26.533 - type: map_at_1000 value: 26.657999999999998 - type: map_at_3 value: 22.201999999999998 - type: map_at_5 value: 23.923 - type: mrr_at_1 value: 20.522000000000002 - type: mrr_at_10 value: 29.522 - type: mrr_at_100 value: 30.644 - type: mrr_at_1000 value: 30.713 - type: mrr_at_3 value: 26.679000000000002 - type: mrr_at_5 value: 28.483000000000004 - type: ndcg_at_1 value: 20.522000000000002 - type: ndcg_at_10 value: 30.656 - type: ndcg_at_100 value: 36.864999999999995 - type: ndcg_at_1000 value: 39.675 - type: ndcg_at_3 value: 25.319000000000003 - type: ndcg_at_5 value: 27.992 - type: precision_at_1 value: 20.522000000000002 - type: precision_at_10 value: 5.795999999999999 - type: precision_at_100 value: 1.027 - type: precision_at_1000 value: 0.13999999999999999 - type: precision_at_3 value: 12.396 - type: precision_at_5 value: 9.328 - type: recall_at_1 value: 16.407 - type: recall_at_10 value: 43.164 - type: recall_at_100 value: 69.695 - type: recall_at_1000 value: 89.41900000000001 - type: recall_at_3 value: 28.634999999999998 - type: recall_at_5 value: 35.308 - type: map_at_1 value: 30.473 - type: map_at_10 value: 41.676 - type: map_at_100 value: 43.120999999999995 - type: map_at_1000 value: 43.230000000000004 - type: map_at_3 value: 38.306000000000004 - type: map_at_5 value: 40.355999999999995 - type: mrr_at_1 value: 37.536 - type: mrr_at_10 value: 47.643 - type: mrr_at_100 value: 48.508 - type: mrr_at_1000 value: 48.551 - type: mrr_at_3 value: 45.348 - type: mrr_at_5 value: 46.744 - type: ndcg_at_1 value: 37.536 - type: ndcg_at_10 
value: 47.823 - type: ndcg_at_100 value: 53.395 - type: ndcg_at_1000 value: 55.271 - type: ndcg_at_3 value: 42.768 - type: ndcg_at_5 value: 45.373000000000005 - type: precision_at_1 value: 37.536 - type: precision_at_10 value: 8.681 - type: precision_at_100 value: 1.34 - type: precision_at_1000 value: 0.165 - type: precision_at_3 value: 20.468 - type: precision_at_5 value: 14.495 - type: recall_at_1 value: 30.473 - type: recall_at_10 value: 60.092999999999996 - type: recall_at_100 value: 82.733 - type: recall_at_1000 value: 94.875 - type: recall_at_3 value: 45.734 - type: recall_at_5 value: 52.691 - type: map_at_1 value: 29.976000000000003 - type: map_at_10 value: 41.097 - type: map_at_100 value: 42.547000000000004 - type: map_at_1000 value: 42.659000000000006 - type: map_at_3 value: 37.251 - type: map_at_5 value: 39.493 - type: mrr_at_1 value: 37.557 - type: mrr_at_10 value: 46.605000000000004 - type: mrr_at_100 value: 47.487 - type: mrr_at_1000 value: 47.54 - type: mrr_at_3 value: 43.721 - type: mrr_at_5 value: 45.411 - type: ndcg_at_1 value: 37.557 - type: ndcg_at_10 value: 47.449000000000005 - type: ndcg_at_100 value: 53.052 - type: ndcg_at_1000 value: 55.010999999999996 - type: ndcg_at_3 value: 41.439 - type: ndcg_at_5 value: 44.292 - type: precision_at_1 value: 37.557 - type: precision_at_10 value: 8.847 - type: precision_at_100 value: 1.357 - type: precision_at_1000 value: 0.16999999999999998 - type: precision_at_3 value: 20.091 - type: precision_at_5 value: 14.384 - type: recall_at_1 value: 29.976000000000003 - type: recall_at_10 value: 60.99099999999999 - type: recall_at_100 value: 84.245 - type: recall_at_1000 value: 96.97200000000001 - type: recall_at_3 value: 43.794 - type: recall_at_5 value: 51.778999999999996 - type: map_at_1 value: 28.099166666666665 - type: map_at_10 value: 38.1365 - type: map_at_100 value: 39.44491666666667 - type: map_at_1000 value: 39.55858333333334 - type: map_at_3 value: 35.03641666666666 - type: map_at_5 value: 36.79833333333334 - type: mrr_at_1 value: 33.39966666666667 - type: mrr_at_10 value: 42.42583333333333 - type: mrr_at_100 value: 43.28575 - type: mrr_at_1000 value: 43.33741666666667 - type: mrr_at_3 value: 39.94975 - type: mrr_at_5 value: 41.41633333333334 - type: ndcg_at_1 value: 33.39966666666667 - type: ndcg_at_10 value: 43.81741666666667 - type: ndcg_at_100 value: 49.08166666666667 - type: ndcg_at_1000 value: 51.121166666666674 - type: ndcg_at_3 value: 38.73575 - type: ndcg_at_5 value: 41.18158333333333 - type: precision_at_1 value: 33.39966666666667 - type: precision_at_10 value: 7.738916666666667 - type: precision_at_100 value: 1.2265833333333331 - type: precision_at_1000 value: 0.15983333333333336 - type: precision_at_3 value: 17.967416666666665 - type: precision_at_5 value: 12.78675 - type: recall_at_1 value: 28.099166666666665 - type: recall_at_10 value: 56.27049999999999 - type: recall_at_100 value: 78.93291666666667 - type: recall_at_1000 value: 92.81608333333334 - type: recall_at_3 value: 42.09775 - type: recall_at_5 value: 48.42533333333334 - type: map_at_1 value: 23.663 - type: map_at_10 value: 30.377 - type: map_at_100 value: 31.426 - type: map_at_1000 value: 31.519000000000002 - type: map_at_3 value: 28.069 - type: map_at_5 value: 29.256999999999998 - type: mrr_at_1 value: 26.687 - type: mrr_at_10 value: 33.107 - type: mrr_at_100 value: 34.055 - type: mrr_at_1000 value: 34.117999999999995 - type: mrr_at_3 value: 31.058000000000003 - type: mrr_at_5 value: 32.14 - type: ndcg_at_1 value: 26.687 - type: ndcg_at_10 value: 34.615 - 
type: ndcg_at_100 value: 39.776 - type: ndcg_at_1000 value: 42.05 - type: ndcg_at_3 value: 30.322 - type: ndcg_at_5 value: 32.157000000000004 - type: precision_at_1 value: 26.687 - type: precision_at_10 value: 5.491 - type: precision_at_100 value: 0.877 - type: precision_at_1000 value: 0.11499999999999999 - type: precision_at_3 value: 13.139000000000001 - type: precision_at_5 value: 9.049 - type: recall_at_1 value: 23.663 - type: recall_at_10 value: 45.035 - type: recall_at_100 value: 68.554 - type: recall_at_1000 value: 85.077 - type: recall_at_3 value: 32.982 - type: recall_at_5 value: 37.688 - type: map_at_1 value: 17.403 - type: map_at_10 value: 25.197000000000003 - type: map_at_100 value: 26.355 - type: map_at_1000 value: 26.487 - type: map_at_3 value: 22.733 - type: map_at_5 value: 24.114 - type: mrr_at_1 value: 21.37 - type: mrr_at_10 value: 29.091 - type: mrr_at_100 value: 30.018 - type: mrr_at_1000 value: 30.096 - type: mrr_at_3 value: 26.887 - type: mrr_at_5 value: 28.157 - type: ndcg_at_1 value: 21.37 - type: ndcg_at_10 value: 30.026000000000003 - type: ndcg_at_100 value: 35.416 - type: ndcg_at_1000 value: 38.45 - type: ndcg_at_3 value: 25.764 - type: ndcg_at_5 value: 27.742 - type: precision_at_1 value: 21.37 - type: precision_at_10 value: 5.609 - type: precision_at_100 value: 0.9860000000000001 - type: precision_at_1000 value: 0.14300000000000002 - type: precision_at_3 value: 12.423 - type: precision_at_5 value: 9.009 - type: recall_at_1 value: 17.403 - type: recall_at_10 value: 40.573 - type: recall_at_100 value: 64.818 - type: recall_at_1000 value: 86.53699999999999 - type: recall_at_3 value: 28.493000000000002 - type: recall_at_5 value: 33.660000000000004 - type: map_at_1 value: 28.639 - type: map_at_10 value: 38.951 - type: map_at_100 value: 40.238 - type: map_at_1000 value: 40.327 - type: map_at_3 value: 35.842 - type: map_at_5 value: 37.617 - type: mrr_at_1 value: 33.769 - type: mrr_at_10 value: 43.088 - type: mrr_at_100 value: 44.03 - type: mrr_at_1000 value: 44.072 - type: mrr_at_3 value: 40.656 - type: mrr_at_5 value: 42.138999999999996 - type: ndcg_at_1 value: 33.769 - type: ndcg_at_10 value: 44.676 - type: ndcg_at_100 value: 50.416000000000004 - type: ndcg_at_1000 value: 52.227999999999994 - type: ndcg_at_3 value: 39.494 - type: ndcg_at_5 value: 42.013 - type: precision_at_1 value: 33.769 - type: precision_at_10 value: 7.668 - type: precision_at_100 value: 1.18 - type: precision_at_1000 value: 0.145 - type: precision_at_3 value: 18.221 - type: precision_at_5 value: 12.966 - type: recall_at_1 value: 28.639 - type: recall_at_10 value: 57.687999999999995 - type: recall_at_100 value: 82.541 - type: recall_at_1000 value: 94.896 - type: recall_at_3 value: 43.651 - type: recall_at_5 value: 49.925999999999995 - type: map_at_1 value: 29.57 - type: map_at_10 value: 40.004 - type: map_at_100 value: 41.75 - type: map_at_1000 value: 41.97 - type: map_at_3 value: 36.788 - type: map_at_5 value: 38.671 - type: mrr_at_1 value: 35.375 - type: mrr_at_10 value: 45.121 - type: mrr_at_100 value: 45.994 - type: mrr_at_1000 value: 46.04 - type: mrr_at_3 value: 42.227 - type: mrr_at_5 value: 43.995 - type: ndcg_at_1 value: 35.375 - type: ndcg_at_10 value: 46.392 - type: ndcg_at_100 value: 52.196 - type: ndcg_at_1000 value: 54.274 - type: ndcg_at_3 value: 41.163 - type: ndcg_at_5 value: 43.813 - type: precision_at_1 value: 35.375 - type: precision_at_10 value: 8.676 - type: precision_at_100 value: 1.678 - type: precision_at_1000 value: 0.253 - type: precision_at_3 value: 19.104 - type: 
precision_at_5 value: 13.913 - type: recall_at_1 value: 29.57 - type: recall_at_10 value: 58.779 - type: recall_at_100 value: 83.337 - type: recall_at_1000 value: 95.979 - type: recall_at_3 value: 44.005 - type: recall_at_5 value: 50.975 - type: map_at_1 value: 20.832 - type: map_at_10 value: 29.733999999999998 - type: map_at_100 value: 30.727 - type: map_at_1000 value: 30.843999999999998 - type: map_at_3 value: 26.834999999999997 - type: map_at_5 value: 28.555999999999997 - type: mrr_at_1 value: 22.921 - type: mrr_at_10 value: 31.791999999999998 - type: mrr_at_100 value: 32.666000000000004 - type: mrr_at_1000 value: 32.751999999999995 - type: mrr_at_3 value: 29.144 - type: mrr_at_5 value: 30.622 - type: ndcg_at_1 value: 22.921 - type: ndcg_at_10 value: 34.915 - type: ndcg_at_100 value: 39.744 - type: ndcg_at_1000 value: 42.407000000000004 - type: ndcg_at_3 value: 29.421000000000003 - type: ndcg_at_5 value: 32.211 - type: precision_at_1 value: 22.921 - type: precision_at_10 value: 5.675 - type: precision_at_100 value: 0.872 - type: precision_at_1000 value: 0.121 - type: precision_at_3 value: 12.753999999999998 - type: precision_at_5 value: 9.353 - type: recall_at_1 value: 20.832 - type: recall_at_10 value: 48.795 - type: recall_at_100 value: 70.703 - type: recall_at_1000 value: 90.187 - type: recall_at_3 value: 34.455000000000005 - type: recall_at_5 value: 40.967 - task: type: Retrieval dataset: name: MTEB ClimateFEVER type: climate-fever config: default split: test revision: None metrics: - type: map_at_1 value: 10.334 - type: map_at_10 value: 19.009999999999998 - type: map_at_100 value: 21.129 - type: map_at_1000 value: 21.328 - type: map_at_3 value: 15.152 - type: map_at_5 value: 17.084 - type: mrr_at_1 value: 23.453 - type: mrr_at_10 value: 36.099 - type: mrr_at_100 value: 37.069 - type: mrr_at_1000 value: 37.104 - type: mrr_at_3 value: 32.096000000000004 - type: mrr_at_5 value: 34.451 - type: ndcg_at_1 value: 23.453 - type: ndcg_at_10 value: 27.739000000000004 - type: ndcg_at_100 value: 35.836 - type: ndcg_at_1000 value: 39.242 - type: ndcg_at_3 value: 21.263 - type: ndcg_at_5 value: 23.677 - type: precision_at_1 value: 23.453 - type: precision_at_10 value: 9.199 - type: precision_at_100 value: 1.791 - type: precision_at_1000 value: 0.242 - type: precision_at_3 value: 16.2 - type: precision_at_5 value: 13.147 - type: recall_at_1 value: 10.334 - type: recall_at_10 value: 35.177 - type: recall_at_100 value: 63.009 - type: recall_at_1000 value: 81.938 - type: recall_at_3 value: 19.914 - type: recall_at_5 value: 26.077 - task: type: Retrieval dataset: name: MTEB DBPedia type: dbpedia-entity config: default split: test revision: None metrics: - type: map_at_1 value: 8.212 - type: map_at_10 value: 17.386 - type: map_at_100 value: 24.234 - type: map_at_1000 value: 25.724999999999998 - type: map_at_3 value: 12.727 - type: map_at_5 value: 14.785 - type: mrr_at_1 value: 59.25 - type: mrr_at_10 value: 68.687 - type: mrr_at_100 value: 69.133 - type: mrr_at_1000 value: 69.14099999999999 - type: mrr_at_3 value: 66.917 - type: mrr_at_5 value: 67.742 - type: ndcg_at_1 value: 48.625 - type: ndcg_at_10 value: 36.675999999999995 - type: ndcg_at_100 value: 41.543 - type: ndcg_at_1000 value: 49.241 - type: ndcg_at_3 value: 41.373 - type: ndcg_at_5 value: 38.707 - type: precision_at_1 value: 59.25 - type: precision_at_10 value: 28.525 - type: precision_at_100 value: 9.027000000000001 - type: precision_at_1000 value: 1.8339999999999999 - type: precision_at_3 value: 44.833 - type: precision_at_5 value: 37.35 
- type: recall_at_1 value: 8.212 - type: recall_at_10 value: 23.188 - type: recall_at_100 value: 48.613 - type: recall_at_1000 value: 73.093 - type: recall_at_3 value: 14.419 - type: recall_at_5 value: 17.798 - task: type: Classification dataset: name: MTEB EmotionClassification type: mteb/emotion config: default split: test revision: 4f58c6b202a23cf9a4da393831edf4f9183cad37 metrics: - type: accuracy value: 52.725 - type: f1 value: 46.50743309855908 - task: type: Retrieval dataset: name: MTEB FEVER type: fever config: default split: test revision: None metrics: - type: map_at_1 value: 55.086 - type: map_at_10 value: 66.914 - type: map_at_100 value: 67.321 - type: map_at_1000 value: 67.341 - type: map_at_3 value: 64.75800000000001 - type: map_at_5 value: 66.189 - type: mrr_at_1 value: 59.28600000000001 - type: mrr_at_10 value: 71.005 - type: mrr_at_100 value: 71.304 - type: mrr_at_1000 value: 71.313 - type: mrr_at_3 value: 69.037 - type: mrr_at_5 value: 70.35 - type: ndcg_at_1 value: 59.28600000000001 - type: ndcg_at_10 value: 72.695 - type: ndcg_at_100 value: 74.432 - type: ndcg_at_1000 value: 74.868 - type: ndcg_at_3 value: 68.72200000000001 - type: ndcg_at_5 value: 71.081 - type: precision_at_1 value: 59.28600000000001 - type: precision_at_10 value: 9.499 - type: precision_at_100 value: 1.052 - type: precision_at_1000 value: 0.11100000000000002 - type: precision_at_3 value: 27.503 - type: precision_at_5 value: 17.854999999999997 - type: recall_at_1 value: 55.086 - type: recall_at_10 value: 86.453 - type: recall_at_100 value: 94.028 - type: recall_at_1000 value: 97.052 - type: recall_at_3 value: 75.821 - type: recall_at_5 value: 81.6 - task: type: Retrieval dataset: name: MTEB FiQA2018 type: fiqa config: default split: test revision: None metrics: - type: map_at_1 value: 22.262999999999998 - type: map_at_10 value: 37.488 - type: map_at_100 value: 39.498 - type: map_at_1000 value: 39.687 - type: map_at_3 value: 32.529 - type: map_at_5 value: 35.455 - type: mrr_at_1 value: 44.907000000000004 - type: mrr_at_10 value: 53.239000000000004 - type: mrr_at_100 value: 54.086 - type: mrr_at_1000 value: 54.122 - type: mrr_at_3 value: 51.235 - type: mrr_at_5 value: 52.415 - type: ndcg_at_1 value: 44.907000000000004 - type: ndcg_at_10 value: 45.446 - type: ndcg_at_100 value: 52.429 - type: ndcg_at_1000 value: 55.169000000000004 - type: ndcg_at_3 value: 41.882000000000005 - type: ndcg_at_5 value: 43.178 - type: precision_at_1 value: 44.907000000000004 - type: precision_at_10 value: 12.931999999999999 - type: precision_at_100 value: 2.025 - type: precision_at_1000 value: 0.248 - type: precision_at_3 value: 28.652 - type: precision_at_5 value: 21.204 - type: recall_at_1 value: 22.262999999999998 - type: recall_at_10 value: 52.447 - type: recall_at_100 value: 78.045 - type: recall_at_1000 value: 94.419 - type: recall_at_3 value: 38.064 - type: recall_at_5 value: 44.769 - task: type: Retrieval dataset: name: MTEB HotpotQA type: hotpotqa config: default split: test revision: None metrics: - type: map_at_1 value: 32.519 - type: map_at_10 value: 45.831 - type: map_at_100 value: 46.815 - type: map_at_1000 value: 46.899 - type: map_at_3 value: 42.836 - type: map_at_5 value: 44.65 - type: mrr_at_1 value: 65.037 - type: mrr_at_10 value: 72.16 - type: mrr_at_100 value: 72.51100000000001 - type: mrr_at_1000 value: 72.53 - type: mrr_at_3 value: 70.682 - type: mrr_at_5 value: 71.54599999999999 - type: ndcg_at_1 value: 65.037 - type: ndcg_at_10 value: 55.17999999999999 - type: ndcg_at_100 value: 58.888 - type: 
ndcg_at_1000 value: 60.648 - type: ndcg_at_3 value: 50.501 - type: ndcg_at_5 value: 52.977 - type: precision_at_1 value: 65.037 - type: precision_at_10 value: 11.530999999999999 - type: precision_at_100 value: 1.4460000000000002 - type: precision_at_1000 value: 0.168 - type: precision_at_3 value: 31.483 - type: precision_at_5 value: 20.845 - type: recall_at_1 value: 32.519 - type: recall_at_10 value: 57.657000000000004 - type: recall_at_100 value: 72.30199999999999 - type: recall_at_1000 value: 84.024 - type: recall_at_3 value: 47.225 - type: recall_at_5 value: 52.113 - task: type: Classification dataset: name: MTEB ImdbClassification type: mteb/imdb config: default split: test revision: 3d86128a09e091d6018b6d26cad27f2739fc2db7 metrics: - type: accuracy value: 88.3168 - type: ap value: 83.80165516037135 - type: f1 value: 88.29942471066407 - task: type: Retrieval dataset: name: MTEB MSMARCO type: msmarco config: default split: dev revision: None metrics: - type: map_at_1 value: 20.724999999999998 - type: map_at_10 value: 32.736 - type: map_at_100 value: 33.938 - type: map_at_1000 value: 33.991 - type: map_at_3 value: 28.788000000000004 - type: map_at_5 value: 31.016 - type: mrr_at_1 value: 21.361 - type: mrr_at_10 value: 33.323 - type: mrr_at_100 value: 34.471000000000004 - type: mrr_at_1000 value: 34.518 - type: mrr_at_3 value: 29.453000000000003 - type: mrr_at_5 value: 31.629 - type: ndcg_at_1 value: 21.361 - type: ndcg_at_10 value: 39.649 - type: ndcg_at_100 value: 45.481 - type: ndcg_at_1000 value: 46.775 - type: ndcg_at_3 value: 31.594 - type: ndcg_at_5 value: 35.543 - type: precision_at_1 value: 21.361 - type: precision_at_10 value: 6.3740000000000006 - type: precision_at_100 value: 0.931 - type: precision_at_1000 value: 0.104 - type: precision_at_3 value: 13.514999999999999 - type: precision_at_5 value: 10.100000000000001 - type: recall_at_1 value: 20.724999999999998 - type: recall_at_10 value: 61.034 - type: recall_at_100 value: 88.062 - type: recall_at_1000 value: 97.86399999999999 - type: recall_at_3 value: 39.072 - type: recall_at_5 value: 48.53 - task: type: Classification dataset: name: MTEB MTOPDomainClassification (en) type: mteb/mtop_domain config: en split: test revision: d80d48c1eb48d3562165c59d59d0034df9fff0bf metrics: - type: accuracy value: 93.8919288645691 - type: f1 value: 93.57059586398059 - task: type: Classification dataset: name: MTEB MTOPIntentClassification (en) type: mteb/mtop_intent config: en split: test revision: ae001d0e6b1228650b7bd1c2c65fb50ad11a8aba metrics: - type: accuracy value: 67.97993616051072 - type: f1 value: 48.244319183606535 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (en) type: mteb/amazon_massive_intent config: en split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 68.90047074646941 - type: f1 value: 66.48999056063725 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (en) type: mteb/amazon_massive_scenario config: en split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 73.34566240753195 - type: f1 value: 73.54164154290658 - task: type: Clustering dataset: name: MTEB MedrxivClusteringP2P type: mteb/medrxiv-clustering-p2p config: default split: test revision: e7a26af6f3ae46b30dde8737f02c07b1505bcc73 metrics: - type: v_measure value: 34.21866934757011 - task: type: Clustering dataset: name: MTEB MedrxivClusteringS2S type: mteb/medrxiv-clustering-s2s config: default split: test revision: 
35191c8c0dca72d8ff3efcd72aa802307d469663 metrics: - type: v_measure value: 32.000936217235534 - task: type: Reranking dataset: name: MTEB MindSmallReranking type: mteb/mind_small config: default split: test revision: 3bdac13927fdc888b903db93b2ffdbd90b295a69 metrics: - type: map value: 31.68189362520352 - type: mrr value: 32.69603637784303 - task: type: Retrieval dataset: name: MTEB NFCorpus type: nfcorpus config: default split: test revision: None metrics: - type: map_at_1 value: 6.078 - type: map_at_10 value: 12.671 - type: map_at_100 value: 16.291 - type: map_at_1000 value: 17.855999999999998 - type: map_at_3 value: 9.610000000000001 - type: map_at_5 value: 11.152 - type: mrr_at_1 value: 43.963 - type: mrr_at_10 value: 53.173 - type: mrr_at_100 value: 53.718999999999994 - type: mrr_at_1000 value: 53.756 - type: mrr_at_3 value: 50.980000000000004 - type: mrr_at_5 value: 52.42 - type: ndcg_at_1 value: 42.415000000000006 - type: ndcg_at_10 value: 34.086 - type: ndcg_at_100 value: 32.545 - type: ndcg_at_1000 value: 41.144999999999996 - type: ndcg_at_3 value: 39.434999999999995 - type: ndcg_at_5 value: 37.888 - type: precision_at_1 value: 43.653 - type: precision_at_10 value: 25.014999999999997 - type: precision_at_100 value: 8.594 - type: precision_at_1000 value: 2.169 - type: precision_at_3 value: 37.049 - type: precision_at_5 value: 33.065 - type: recall_at_1 value: 6.078 - type: recall_at_10 value: 16.17 - type: recall_at_100 value: 34.512 - type: recall_at_1000 value: 65.447 - type: recall_at_3 value: 10.706 - type: recall_at_5 value: 13.158 - task: type: Retrieval dataset: name: MTEB NQ type: nq config: default split: test revision: None metrics: - type: map_at_1 value: 27.378000000000004 - type: map_at_10 value: 42.178 - type: map_at_100 value: 43.32 - type: map_at_1000 value: 43.358000000000004 - type: map_at_3 value: 37.474000000000004 - type: map_at_5 value: 40.333000000000006 - type: mrr_at_1 value: 30.823 - type: mrr_at_10 value: 44.626 - type: mrr_at_100 value: 45.494 - type: mrr_at_1000 value: 45.519 - type: mrr_at_3 value: 40.585 - type: mrr_at_5 value: 43.146 - type: ndcg_at_1 value: 30.794 - type: ndcg_at_10 value: 50.099000000000004 - type: ndcg_at_100 value: 54.900999999999996 - type: ndcg_at_1000 value: 55.69499999999999 - type: ndcg_at_3 value: 41.238 - type: ndcg_at_5 value: 46.081 - type: precision_at_1 value: 30.794 - type: precision_at_10 value: 8.549 - type: precision_at_100 value: 1.124 - type: precision_at_1000 value: 0.12 - type: precision_at_3 value: 18.926000000000002 - type: precision_at_5 value: 14.16 - type: recall_at_1 value: 27.378000000000004 - type: recall_at_10 value: 71.842 - type: recall_at_100 value: 92.565 - type: recall_at_1000 value: 98.402 - type: recall_at_3 value: 49.053999999999995 - type: recall_at_5 value: 60.207 - task: type: Retrieval dataset: name: MTEB QuoraRetrieval type: quora config: default split: test revision: None metrics: - type: map_at_1 value: 70.557 - type: map_at_10 value: 84.729 - type: map_at_100 value: 85.369 - type: map_at_1000 value: 85.382 - type: map_at_3 value: 81.72 - type: map_at_5 value: 83.613 - type: mrr_at_1 value: 81.3 - type: mrr_at_10 value: 87.488 - type: mrr_at_100 value: 87.588 - type: mrr_at_1000 value: 87.589 - type: mrr_at_3 value: 86.53 - type: mrr_at_5 value: 87.18599999999999 - type: ndcg_at_1 value: 81.28999999999999 - type: ndcg_at_10 value: 88.442 - type: ndcg_at_100 value: 89.637 - type: ndcg_at_1000 value: 89.70700000000001 - type: ndcg_at_3 value: 85.55199999999999 - type: ndcg_at_5 value: 
87.154 - type: precision_at_1 value: 81.28999999999999 - type: precision_at_10 value: 13.489999999999998 - type: precision_at_100 value: 1.54 - type: precision_at_1000 value: 0.157 - type: precision_at_3 value: 37.553 - type: precision_at_5 value: 24.708 - type: recall_at_1 value: 70.557 - type: recall_at_10 value: 95.645 - type: recall_at_100 value: 99.693 - type: recall_at_1000 value: 99.995 - type: recall_at_3 value: 87.359 - type: recall_at_5 value: 91.89699999999999 - task: type: Clustering dataset: name: MTEB RedditClustering type: mteb/reddit-clustering config: default split: test revision: 24640382cdbf8abc73003fb0fa6d111a705499eb metrics: - type: v_measure value: 63.65060114776209 - task: type: Clustering dataset: name: MTEB RedditClusteringP2P type: mteb/reddit-clustering-p2p config: default split: test revision: 282350215ef01743dc01b456c7f5241fa8937f16 metrics: - type: v_measure value: 64.63271250680617 - task: type: Retrieval dataset: name: MTEB SCIDOCS type: scidocs config: default split: test revision: None metrics: - type: map_at_1 value: 4.263 - type: map_at_10 value: 10.801 - type: map_at_100 value: 12.888 - type: map_at_1000 value: 13.224 - type: map_at_3 value: 7.362 - type: map_at_5 value: 9.149000000000001 - type: mrr_at_1 value: 21 - type: mrr_at_10 value: 31.416 - type: mrr_at_100 value: 32.513 - type: mrr_at_1000 value: 32.58 - type: mrr_at_3 value: 28.116999999999997 - type: mrr_at_5 value: 29.976999999999997 - type: ndcg_at_1 value: 21 - type: ndcg_at_10 value: 18.551000000000002 - type: ndcg_at_100 value: 26.657999999999998 - type: ndcg_at_1000 value: 32.485 - type: ndcg_at_3 value: 16.834 - type: ndcg_at_5 value: 15.204999999999998 - type: precision_at_1 value: 21 - type: precision_at_10 value: 9.84 - type: precision_at_100 value: 2.16 - type: precision_at_1000 value: 0.35500000000000004 - type: precision_at_3 value: 15.667 - type: precision_at_5 value: 13.62 - type: recall_at_1 value: 4.263 - type: recall_at_10 value: 19.922 - type: recall_at_100 value: 43.808 - type: recall_at_1000 value: 72.14500000000001 - type: recall_at_3 value: 9.493 - type: recall_at_5 value: 13.767999999999999 - task: type: STS dataset: name: MTEB SICK-R type: mteb/sickr-sts config: default split: test revision: a6ea5a8cab320b040a23452cc28066d9beae2cee metrics: - type: cos_sim_spearman value: 81.27446313317233 - task: type: STS dataset: name: MTEB STS12 type: mteb/sts12-sts config: default split: test revision: a0d554a64d88156834ff5ae9920b964011b16384 metrics: - type: cos_sim_spearman value: 76.27963301217527 - task: type: STS dataset: name: MTEB STS13 type: mteb/sts13-sts config: default split: test revision: 7e90230a92c190f1bf69ae9002b8cea547a64cca metrics: - type: cos_sim_spearman value: 88.18495048450949 - task: type: STS dataset: name: MTEB STS14 type: mteb/sts14-sts config: default split: test revision: 6031580fec1f6af667f0bd2da0a551cf4f0b2375 metrics: - type: cos_sim_spearman value: 81.91982338692046 - task: type: STS dataset: name: MTEB STS15 type: mteb/sts15-sts config: default split: test revision: ae752c7c21bf194d8b67fd573edf7ae58183cbe3 metrics: - type: cos_sim_spearman value: 89.00896818385291 - task: type: STS dataset: name: MTEB STS16 type: mteb/sts16-sts config: default split: test revision: 4d8694f8f0e0100860b497b999b3dbed754a0513 metrics: - type: cos_sim_spearman value: 85.48814644586132 - task: type: STS dataset: name: MTEB STS17 (en-en) type: mteb/sts17-crosslingual-sts config: en-en split: test revision: af5e6fb845001ecf41f4c1e033ce921939a2a68d metrics: - type: 
cos_sim_spearman value: 90.30116926966582 - task: type: STS dataset: name: MTEB STS22 (en) type: mteb/sts22-crosslingual-sts config: en split: test revision: 6d1ba47164174a496b7fa5d3569dae26a6813b80 metrics: - type: cos_sim_spearman value: 67.74132963032342 - task: type: STS dataset: name: MTEB STSBenchmark type: mteb/stsbenchmark-sts config: default split: test revision: b0fddb56ed78048fa8b90373c8a3cfc37b684831 metrics: - type: cos_sim_spearman value: 86.87741355780479 - task: type: Reranking dataset: name: MTEB SciDocsRR type: mteb/scidocs-reranking config: default split: test revision: d3c5e1fc0b855ab6097bf1cda04dd73947d7caab metrics: - type: map value: 82.0019012295875 - type: mrr value: 94.70267024188593 - task: type: Retrieval dataset: name: MTEB SciFact type: scifact config: default split: test revision: None metrics: - type: map_at_1 value: 50.05 - type: map_at_10 value: 59.36 - type: map_at_100 value: 59.967999999999996 - type: map_at_1000 value: 60.023 - type: map_at_3 value: 56.515 - type: map_at_5 value: 58.272999999999996 - type: mrr_at_1 value: 53 - type: mrr_at_10 value: 61.102000000000004 - type: mrr_at_100 value: 61.476 - type: mrr_at_1000 value: 61.523 - type: mrr_at_3 value: 58.778 - type: mrr_at_5 value: 60.128 - type: ndcg_at_1 value: 53 - type: ndcg_at_10 value: 64.43100000000001 - type: ndcg_at_100 value: 66.73599999999999 - type: ndcg_at_1000 value: 68.027 - type: ndcg_at_3 value: 59.279 - type: ndcg_at_5 value: 61.888 - type: precision_at_1 value: 53 - type: precision_at_10 value: 8.767 - type: precision_at_100 value: 1.01 - type: precision_at_1000 value: 0.11100000000000002 - type: precision_at_3 value: 23.444000000000003 - type: precision_at_5 value: 15.667 - type: recall_at_1 value: 50.05 - type: recall_at_10 value: 78.511 - type: recall_at_100 value: 88.5 - type: recall_at_1000 value: 98.333 - type: recall_at_3 value: 64.117 - type: recall_at_5 value: 70.867 - task: type: PairClassification dataset: name: MTEB SprintDuplicateQuestions type: mteb/sprintduplicatequestions-pairclassification config: default split: test revision: d66bd1f72af766a5cc4b0ca5e00c162f89e8cc46 metrics: - type: cos_sim_accuracy value: 99.72178217821782 - type: cos_sim_ap value: 93.0728601593541 - type: cos_sim_f1 value: 85.6727976766699 - type: cos_sim_precision value: 83.02063789868667 - type: cos_sim_recall value: 88.5 - type: dot_accuracy value: 99.72178217821782 - type: dot_ap value: 93.07287396168348 - type: dot_f1 value: 85.6727976766699 - type: dot_precision value: 83.02063789868667 - type: dot_recall value: 88.5 - type: euclidean_accuracy value: 99.72178217821782 - type: euclidean_ap value: 93.07285657982895 - type: euclidean_f1 value: 85.6727976766699 - type: euclidean_precision value: 83.02063789868667 - type: euclidean_recall value: 88.5 - type: manhattan_accuracy value: 99.72475247524753 - type: manhattan_ap value: 93.02792973059809 - type: manhattan_f1 value: 85.7727737973388 - type: manhattan_precision value: 87.84067085953879 - type: manhattan_recall value: 83.8 - type: max_accuracy value: 99.72475247524753 - type: max_ap value: 93.07287396168348 - type: max_f1 value: 85.7727737973388 - task: type: Clustering dataset: name: MTEB StackExchangeClustering type: mteb/stackexchange-clustering config: default split: test revision: 6cbc1f7b2bc0622f2e39d2c77fa502909748c259 metrics: - type: v_measure value: 68.77583615550819 - task: type: Clustering dataset: name: MTEB StackExchangeClusteringP2P type: mteb/stackexchange-clustering-p2p config: default split: test revision: 
815ca46b2622cec33ccafc3735d572c266efdb44 metrics: - type: v_measure value: 36.151636938606956 - task: type: Reranking dataset: name: MTEB StackOverflowDupQuestions type: mteb/stackoverflowdupquestions-reranking config: default split: test revision: e185fbe320c72810689fc5848eb6114e1ef5ec69 metrics: - type: map value: 52.16607939471187 - type: mrr value: 52.95172046091163 - task: type: Summarization dataset: name: MTEB SummEval type: mteb/summeval config: default split: test revision: cda12ad7615edc362dbf25a00fdd61d3b1eaf93c metrics: - type: cos_sim_pearson value: 31.314646669495666 - type: cos_sim_spearman value: 31.83562491439455 - type: dot_pearson value: 31.314590842874157 - type: dot_spearman value: 31.83363065810437 - task: type: Retrieval dataset: name: MTEB TRECCOVID type: trec-covid config: default split: test revision: None metrics: - type: map_at_1 value: 0.198 - type: map_at_10 value: 1.3010000000000002 - type: map_at_100 value: 7.2139999999999995 - type: map_at_1000 value: 20.179 - type: map_at_3 value: 0.528 - type: map_at_5 value: 0.8019999999999999 - type: mrr_at_1 value: 72 - type: mrr_at_10 value: 83.39999999999999 - type: mrr_at_100 value: 83.39999999999999 - type: mrr_at_1000 value: 83.39999999999999 - type: mrr_at_3 value: 81.667 - type: mrr_at_5 value: 83.06700000000001 - type: ndcg_at_1 value: 66 - type: ndcg_at_10 value: 58.059000000000005 - type: ndcg_at_100 value: 44.316 - type: ndcg_at_1000 value: 43.147000000000006 - type: ndcg_at_3 value: 63.815999999999995 - type: ndcg_at_5 value: 63.005 - type: precision_at_1 value: 72 - type: precision_at_10 value: 61.4 - type: precision_at_100 value: 45.62 - type: precision_at_1000 value: 19.866 - type: precision_at_3 value: 70 - type: precision_at_5 value: 68.8 - type: recall_at_1 value: 0.198 - type: recall_at_10 value: 1.517 - type: recall_at_100 value: 10.587 - type: recall_at_1000 value: 41.233 - type: recall_at_3 value: 0.573 - type: recall_at_5 value: 0.907 - task: type: Retrieval dataset: name: MTEB Touche2020 type: webis-touche2020 config: default split: test revision: None metrics: - type: map_at_1 value: 1.894 - type: map_at_10 value: 8.488999999999999 - type: map_at_100 value: 14.445 - type: map_at_1000 value: 16.078 - type: map_at_3 value: 4.589 - type: map_at_5 value: 6.019 - type: mrr_at_1 value: 22.448999999999998 - type: mrr_at_10 value: 39.82 - type: mrr_at_100 value: 40.752 - type: mrr_at_1000 value: 40.771 - type: mrr_at_3 value: 34.354 - type: mrr_at_5 value: 37.721 - type: ndcg_at_1 value: 19.387999999999998 - type: ndcg_at_10 value: 21.563 - type: ndcg_at_100 value: 33.857 - type: ndcg_at_1000 value: 46.199 - type: ndcg_at_3 value: 22.296 - type: ndcg_at_5 value: 21.770999999999997 - type: precision_at_1 value: 22.448999999999998 - type: precision_at_10 value: 19.796 - type: precision_at_100 value: 7.142999999999999 - type: precision_at_1000 value: 1.541 - type: precision_at_3 value: 24.490000000000002 - type: precision_at_5 value: 22.448999999999998 - type: recall_at_1 value: 1.894 - type: recall_at_10 value: 14.931 - type: recall_at_100 value: 45.524 - type: recall_at_1000 value: 83.243 - type: recall_at_3 value: 5.712 - type: recall_at_5 value: 8.386000000000001 - task: type: Classification dataset: name: MTEB ToxicConversationsClassification type: mteb/toxic_conversations_50k config: default split: test revision: d7c0de2777da35d6aae2200a62c6e0e5af397c4c metrics: - type: accuracy value: 71.049 - type: ap value: 13.85116971310922 - type: f1 value: 54.37504302487686 - task: type: Classification 
dataset: name: MTEB TweetSentimentExtractionClassification type: mteb/tweet_sentiment_extraction config: default split: test revision: d604517c81ca91fe16a244d1248fc021f9ecee7a metrics: - type: accuracy value: 64.1312959818902 - type: f1 value: 64.11413877009383 - task: type: Clustering dataset: name: MTEB TwentyNewsgroupsClustering type: mteb/twentynewsgroups-clustering config: default split: test revision: 6125ec4e24fa026cec8a478383ee943acfbd5449 metrics: - type: v_measure value: 54.13103431861502 - task: type: PairClassification dataset: name: MTEB TwitterSemEval2015 type: mteb/twittersemeval2015-pairclassification config: default split: test revision: 70970daeab8776df92f5ea462b6173c0b46fd2d1 metrics: - type: cos_sim_accuracy value: 87.327889372355 - type: cos_sim_ap value: 77.42059895975699 - type: cos_sim_f1 value: 71.02706903250873 - type: cos_sim_precision value: 69.75324344950394 - type: cos_sim_recall value: 72.34828496042216 - type: dot_accuracy value: 87.327889372355 - type: dot_ap value: 77.4209479346677 - type: dot_f1 value: 71.02706903250873 - type: dot_precision value: 69.75324344950394 - type: dot_recall value: 72.34828496042216 - type: euclidean_accuracy value: 87.327889372355 - type: euclidean_ap value: 77.42096495861037 - type: euclidean_f1 value: 71.02706903250873 - type: euclidean_precision value: 69.75324344950394 - type: euclidean_recall value: 72.34828496042216 - type: manhattan_accuracy value: 87.31000774870358 - type: manhattan_ap value: 77.38930750711619 - type: manhattan_f1 value: 71.07935314027831 - type: manhattan_precision value: 67.70957726295677 - type: manhattan_recall value: 74.80211081794195 - type: max_accuracy value: 87.327889372355 - type: max_ap value: 77.42096495861037 - type: max_f1 value: 71.07935314027831 - task: type: PairClassification dataset: name: MTEB TwitterURLCorpus type: mteb/twitterurlcorpus-pairclassification config: default split: test revision: 8b6510b0b1fa4e4c4f879467980e9be563ec1cdf metrics: - type: cos_sim_accuracy value: 89.58939729110878 - type: cos_sim_ap value: 87.17594155025475 - type: cos_sim_f1 value: 79.21146953405018 - type: cos_sim_precision value: 76.8918527109307 - type: cos_sim_recall value: 81.67539267015707 - type: dot_accuracy value: 89.58939729110878 - type: dot_ap value: 87.17593963273593 - type: dot_f1 value: 79.21146953405018 - type: dot_precision value: 76.8918527109307 - type: dot_recall value: 81.67539267015707 - type: euclidean_accuracy value: 89.58939729110878 - type: euclidean_ap value: 87.17592466925834 - type: euclidean_f1 value: 79.21146953405018 - type: euclidean_precision value: 76.8918527109307 - type: euclidean_recall value: 81.67539267015707 - type: manhattan_accuracy value: 89.62626615438352 - type: manhattan_ap value: 87.16589873161546 - type: manhattan_f1 value: 79.25143598295348 - type: manhattan_precision value: 76.39494177323712 - type: manhattan_recall value: 82.32984293193716 - type: max_accuracy value: 89.62626615438352 - type: max_ap value: 87.17594155025475 - type: max_f1 value: 79.25143598295348 --- # hkunlp/instructor-large We introduce **Instructor**👨‍🏫, an instruction-finetuned text embedding model that can generate text embeddings tailored to any task (e.g., classification, retrieval, clustering, text evaluation, etc.) and domains (e.g., science, finance, etc.) ***by simply providing the task instruction, without any finetuning***. Instructor👨‍ achieves sota on 70 diverse embedding tasks ([MTEB leaderboard](https://huggingface.co/spaces/mteb/leaderboard))! 
The model is easy to use with **our customized** `sentence-transformer` library. For more details, check out [our paper](https://arxiv.org/abs/2212.09741) and [project page](https://instructor-embedding.github.io/)! **************************** **Updates** **************************** * 12/28: We released a new [checkpoint](https://huggingface.co/hkunlp/instructor-large) trained with hard negatives, which gives better performance. * 12/21: We released our [paper](https://arxiv.org/abs/2212.09741), [code](https://github.com/HKUNLP/instructor-embedding), [checkpoint](https://huggingface.co/hkunlp/instructor-large) and [project page](https://instructor-embedding.github.io/)! Check them out! ## Quick start <hr /> ## Installation ```bash pip install InstructorEmbedding ``` ## Compute your customized embeddings Then you can use the model like this to calculate domain-specific and task-aware embeddings: ```python from InstructorEmbedding import INSTRUCTOR model = INSTRUCTOR('hkunlp/instructor-large') sentence = "3D ActionSLAM: wearable person tracking in multi-floor environments" instruction = "Represent the Science title:" embeddings = model.encode([[instruction,sentence]]) print(embeddings) ``` ## Use cases <hr /> ## Calculate embeddings for your customized texts If you want to calculate customized embeddings for specific sentences, you may follow the unified template to write instructions: &nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;Represent the `domain` `text_type` for `task_objective`: * `domain` is optional, and it specifies the domain of the text, e.g., science, finance, medicine, etc. * `text_type` is required, and it specifies the encoding unit, e.g., sentence, document, paragraph, etc. * `task_objective` is optional, and it specifies the objective of embedding, e.g., retrieve a document, classify the sentence, etc. ## Calculate Sentence similarities You can further use the model to compute similarities between two groups of sentences, with **customized embeddings**. ```python from sklearn.metrics.pairwise import cosine_similarity sentences_a = [['Represent the Science sentence: ','Parton energy loss in QCD matter'], ['Represent the Financial statement: ','The Federal Reserve on Wednesday raised its benchmark interest rate.']] sentences_b = [['Represent the Science sentence: ','The Chiral Phase Transition in Dissipative Dynamics'], ['Represent the Financial statement: ','The funds rose less than 0.5 per cent on Friday']] embeddings_a = model.encode(sentences_a) embeddings_b = model.encode(sentences_b) similarities = cosine_similarity(embeddings_a,embeddings_b) print(similarities) ``` ## Information Retrieval You can also use **customized embeddings** for information retrieval. ```python import numpy as np from sklearn.metrics.pairwise import cosine_similarity query = [['Represent the Wikipedia question for retrieving supporting documents: ','where is the food stored in a yam plant']] corpus = [['Represent the Wikipedia document for retrieval: ','Capitalism has been dominant in the Western world since the end of feudalism, but most feel[who?] that the term "mixed economies" more precisely describes most contemporary economies, due to their containing both private-owned and state-owned enterprises. In capitalism, prices determine the demand-supply scale. 
For example, higher demand for certain goods and services lead to higher prices and lower demand for certain goods lead to lower prices.'], ['Represent the Wikipedia document for retrieval: ',"The disparate impact theory is especially controversial under the Fair Housing Act because the Act regulates many activities relating to housing, insurance, and mortgage loans—and some scholars have argued that the theory's use under the Fair Housing Act, combined with extensions of the Community Reinvestment Act, contributed to rise of sub-prime lending and the crash of the U.S. housing market and ensuing global economic recession"], ['Represent the Wikipedia document for retrieval: ','Disparate impact in United States labor law refers to practices in employment, housing, and other areas that adversely affect one group of people of a protected characteristic more than another, even though rules applied by employers or landlords are formally neutral. Although the protected classes vary by statute, most federal civil rights laws protect based on race, color, religion, national origin, and sex as protected traits, and some laws include disability status and other traits as well.']] query_embeddings = model.encode(query) corpus_embeddings = model.encode(corpus) similarities = cosine_similarity(query_embeddings,corpus_embeddings) retrieved_doc_id = np.argmax(similarities) print(retrieved_doc_id) ``` ## Clustering Use **customized embeddings** for clustering texts in groups. ```python import sklearn.cluster sentences = [['Represent the Medicine sentence for clustering: ','Dynamical Scalar Degree of Freedom in Horava-Lifshitz Gravity'], ['Represent the Medicine sentence for clustering: ','Comparison of Atmospheric Neutrino Flux Calculations at Low Energies'], ['Represent the Medicine sentence for clustering: ','Fermion Bags in the Massive Gross-Neveu Model'], ['Represent the Medicine sentence for clustering: ',"QCD corrections to Associated t-tbar-H production at the Tevatron"], ['Represent the Medicine sentence for clustering: ','A New Analysis of the R Measurements: Resonance Parameters of the Higher, Vector States of Charmonium']] embeddings = model.encode(sentences) clustering_model = sklearn.cluster.MiniBatchKMeans(n_clusters=2) clustering_model.fit(embeddings) cluster_assignment = clustering_model.labels_ print(cluster_assignment) ```
[ "BIOSSES", "SCIFACT" ]
EleutherAI/pythia-70m-deduped
EleutherAI
text-generation
[ "transformers", "pytorch", "safetensors", "gpt_neox", "text-generation", "causal-lm", "pythia", "en", "dataset:EleutherAI/the_pile_deduplicated", "arxiv:2304.01373", "arxiv:2101.00027", "arxiv:2201.07311", "license:apache-2.0", "autotrain_compatible", "text-generation-inference", "endpoints_compatible", "region:us" ]
"2023-02-13T16:01:41Z"
2023-07-09T16:07:33+00:00
191,714
25
--- datasets: - EleutherAI/the_pile_deduplicated language: - en license: apache-2.0 tags: - pytorch - causal-lm - pythia --- The *Pythia Scaling Suite* is a collection of models developed to facilitate interpretability research [(see paper)](https://arxiv.org/pdf/2304.01373.pdf). It contains two sets of eight models of sizes 70M, 160M, 410M, 1B, 1.4B, 2.8B, 6.9B, and 12B. For each size, there are two models: one trained on the Pile, and one trained on the Pile after the dataset has been globally deduplicated. All 8 model sizes are trained on the exact same data, in the exact same order. We also provide 154 intermediate checkpoints per model, hosted on Hugging Face as branches. The Pythia model suite was designed to promote scientific research on large language models, especially interpretability research. Despite not centering downstream performance as a design goal, we find the models <a href="#evaluations">match or exceed</a> the performance of similar and same-sized models, such as those in the OPT and GPT-Neo suites. <details> <summary style="font-weight:600">Details on previous early release and naming convention.</summary> Previously, we released an early version of the Pythia suite to the public. However, we decided to retrain the model suite to address a few hyperparameter discrepancies. This model card <a href="#changelog">lists the changes</a>; see appendix B in the Pythia paper for further discussion. We found no difference in benchmark performance between the two Pythia versions. The old models are [still available](https://huggingface.co/models?other=pythia_v0), but we suggest the retrained suite if you are just starting to use Pythia.<br> **This is the current release.** Please note that all models in the *Pythia* suite were renamed in January 2023. For clarity, a <a href="#naming-convention-and-parameter-count">table comparing the old and new names</a> is provided in this model card, together with exact parameter counts. </details> <br> # Pythia-70M-deduped ## Model Details - Developed by: [EleutherAI](http://eleuther.ai) - Model type: Transformer-based Language Model - Language: English - Learn more: [Pythia's GitHub repository](https://github.com/EleutherAI/pythia) for training procedure, config files, and details on how to use them. [See paper](https://arxiv.org/pdf/2304.01373.pdf) for more evals and implementation details. - Library: [GPT-NeoX](https://github.com/EleutherAI/gpt-neox) - License: Apache 2.0 - Contact: to ask questions about this model, join the [EleutherAI Discord](https://discord.gg/zBGx3azzUn), and post them in `#release-discussion`. Please read the existing *Pythia* documentation before asking about it in the EleutherAI Discord. For general correspondence: [[email protected]](mailto:[email protected]).
<figure> | Pythia model | Non-Embedding Params | Layers | Model Dim | Heads | Batch Size | Learning Rate | Equivalent Models | | -----------: | -------------------: | :----: | :-------: | :---: | :--------: | :-------------------: | :--------------------: | | 70M | 18,915,328 | 6 | 512 | 8 | 2M | 1.0 x 10<sup>-3</sup> | — | | 160M | 85,056,000 | 12 | 768 | 12 | 2M | 6.0 x 10<sup>-4</sup> | GPT-Neo 125M, OPT-125M | | 410M | 302,311,424 | 24 | 1024 | 16 | 2M | 3.0 x 10<sup>-4</sup> | OPT-350M | | 1.0B | 805,736,448 | 16 | 2048 | 8 | 2M | 3.0 x 10<sup>-4</sup> | — | | 1.4B | 1,208,602,624 | 24 | 2048 | 16 | 2M | 2.0 x 10<sup>-4</sup> | GPT-Neo 1.3B, OPT-1.3B | | 2.8B | 2,517,652,480 | 32 | 2560 | 32 | 2M | 1.6 x 10<sup>-4</sup> | GPT-Neo 2.7B, OPT-2.7B | | 6.9B | 6,444,163,072 | 32 | 4096 | 32 | 2M | 1.2 x 10<sup>-4</sup> | OPT-6.7B | | 12B | 11,327,027,200 | 36 | 5120 | 40 | 2M | 1.2 x 10<sup>-4</sup> | — | <figcaption>Engineering details for the <i>Pythia Suite</i>. Deduped and non-deduped models of a given size have the same hyperparameters. “Equivalent” models have <b>exactly</b> the same architecture, and the same number of non-embedding parameters.</figcaption> </figure> ## Uses and Limitations ### Intended Use The primary intended use of Pythia is research on the behavior, functionality, and limitations of large language models. This suite is intended to provide a controlled setting for performing scientific experiments. We also provide 154 checkpoints per model: initial `step0`, 10 log-spaced checkpoints `step{1,2,4...512}`, and 143 evenly-spaced checkpoints from `step1000` to `step143000`. These checkpoints are hosted on Hugging Face as branches. Note that branch `143000` corresponds exactly to the model checkpoint on the `main` branch of each model. You may also further fine-tune and adapt Pythia-70M-deduped for deployment, as long as your use is in accordance with the Apache 2.0 license. Pythia models work with the Hugging Face [Transformers Library](https://huggingface.co/docs/transformers/index). If you decide to use pre-trained Pythia-70M-deduped as a basis for your fine-tuned model, please conduct your own risk and bias assessment. ### Out-of-scope use The Pythia Suite is **not** intended for deployment. It is not in itself a product and cannot be used for human-facing interactions. For example, the model may generate harmful or offensive text. Please evaluate the risks associated with your particular use case. Pythia models are English-language only, and are not suitable for translation or generating text in other languages. Pythia-70M-deduped has not been fine-tuned for downstream contexts in which language models are commonly deployed, such as writing genre prose or commercial chatbots. This means Pythia-70M-deduped will **not** respond to a given prompt the way a product like ChatGPT does. This is because, unlike this model, ChatGPT was fine-tuned using methods such as Reinforcement Learning from Human Feedback (RLHF) to better “follow” human instructions. ### Limitations and biases The core functionality of a large language model is to take a string of text and predict the next token. The tokens the model predicts will not necessarily produce the most “accurate” text. Never rely on Pythia-70M-deduped to produce factually accurate output. This model was trained on [the Pile](https://pile.eleuther.ai/), a dataset known to contain profanity and texts that are lewd or otherwise offensive.
See [Section 6 of the Pile paper](https://arxiv.org/abs/2101.00027) for a discussion of documented biases with regard to gender, religion, and race. Pythia-70M-deduped may produce socially unacceptable or undesirable text, *even if* the prompt itself does not include anything explicitly offensive.

If you plan on using text generated through, for example, the Hosted Inference API, we recommend having a human curate the outputs of this language model before presenting it to other people. Please inform your audience that the text was generated by Pythia-70M-deduped.

### Quickstart

Pythia models can be loaded and used via the following code, demonstrated here for the third `pythia-70m-deduped` checkpoint:

```python
from transformers import GPTNeoXForCausalLM, AutoTokenizer

model = GPTNeoXForCausalLM.from_pretrained(
  "EleutherAI/pythia-70m-deduped",
  revision="step3000",
  cache_dir="./pythia-70m-deduped/step3000",
)

tokenizer = AutoTokenizer.from_pretrained(
  "EleutherAI/pythia-70m-deduped",
  revision="step3000",
  cache_dir="./pythia-70m-deduped/step3000",
)

inputs = tokenizer("Hello, I am", return_tensors="pt")
tokens = model.generate(**inputs)
tokenizer.decode(tokens[0])
```

Revision/branch `step143000` corresponds exactly to the model checkpoint on the `main` branch of each model.<br>
For more information on how to use all Pythia models, see [documentation on GitHub](https://github.com/EleutherAI/pythia).

## Training

### Training data

Pythia-70M-deduped was trained on the Pile **after the dataset has been globally deduplicated**.<br>
[The Pile](https://pile.eleuther.ai/) is an 825GiB general-purpose dataset in English. It was created by EleutherAI specifically for training large language models. It contains texts from 22 diverse sources, roughly broken down into five categories: academic writing (e.g. arXiv), internet (e.g. CommonCrawl), prose (e.g. Project Gutenberg), dialogue (e.g. YouTube subtitles), and miscellaneous (e.g. GitHub, Enron Emails). See [the Pile paper](https://arxiv.org/abs/2101.00027) for a breakdown of all data sources, methodology, and a discussion of ethical implications. Consult [the datasheet](https://arxiv.org/abs/2201.07311) for more detailed documentation about the Pile and its component datasets. The Pile can be downloaded from the [official website](https://pile.eleuther.ai/), or from a [community mirror](https://the-eye.eu/public/AI/pile/).

### Training procedure

All models were trained on the exact same data, in the exact same order. Each model saw 299,892,736,000 tokens during training, and 143 checkpoints for each model are saved every 2,097,152,000 tokens, spaced evenly throughout training, from `step1000` to `step143000` (which is the same as `main`). In addition, we also provide frequent early checkpoints: `step0` and `step{1,2,4...512}`. This corresponds to training for just under 1 epoch on the Pile for non-deduplicated models, and about 1.5 epochs on the deduplicated Pile.

All *Pythia* models trained for 143000 steps at a batch size of 2M (2,097,152 tokens).<br>
See [GitHub](https://github.com/EleutherAI/pythia) for more details on training procedure, including [how to reproduce it](https://github.com/EleutherAI/pythia/blob/main/README.md#reproducing-training).<br>
Pythia uses the same tokenizer as [GPT-NeoX-20B](https://huggingface.co/EleutherAI/gpt-neox-20b).

## Evaluations

All 16 *Pythia* models were evaluated using the [LM Evaluation Harness](https://github.com/EleutherAI/lm-evaluation-harness).
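A single checkpoint can be scored in the same framework. The following is a minimal sketch, assuming a recent (v0.4-style) harness release that exposes `lm_eval.simple_evaluate`; exact arguments and task names may vary between harness versions, and the results discussed below were produced with the configuration in the Pythia repository rather than this snippet:

```python
import lm_eval  # pip install lm-eval

# Score one Pythia checkpoint on two of the benchmarks plotted below.
results = lm_eval.simple_evaluate(
    model="hf",
    model_args="pretrained=EleutherAI/pythia-70m-deduped,revision=step143000",
    tasks=["lambada_openai", "piqa"],
    batch_size=16,
)
print(results["results"])
```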
You can access the results by model and step at `results/json/*` in the [GitHub repository](https://github.com/EleutherAI/pythia/tree/main/results/json/).<br>
Expand the sections below to see plots of evaluation results for all Pythia and Pythia-deduped models compared with OPT and BLOOM.

<details>
<summary>LAMBADA – OpenAI</summary>
<img src="/EleutherAI/pythia-12b/resolve/main/eval_plots/lambada_openai_v1.png" style="width:auto"/>
</details>

<details>
<summary>Physical Interaction: Question Answering (PIQA)</summary>
<img src="/EleutherAI/pythia-12b/resolve/main/eval_plots/piqa_v1.png" style="width:auto"/>
</details>

<details>
<summary>WinoGrande</summary>
<img src="/EleutherAI/pythia-12b/resolve/main/eval_plots/winogrande_v1.png" style="width:auto"/>
</details>

<details>
<summary>AI2 Reasoning Challenge—Easy Set</summary>
<img src="/EleutherAI/pythia-12b/resolve/main/eval_plots/arc_easy_v1.png" style="width:auto"/>
</details>

<details>
<summary>SciQ</summary>
<img src="/EleutherAI/pythia-12b/resolve/main/eval_plots/sciq_v1.png" style="width:auto"/>
</details>

## Changelog

This section compares differences between the previously released [Pythia v0](https://huggingface.co/models?other=pythia_v0) and the current models. See Appendix B of the Pythia paper for further discussion of these changes and the motivation behind them. We found that retraining Pythia had no impact on benchmark performance.

- All model sizes are now trained with a uniform batch size of 2M tokens. Previously, the models of size 160M, 410M, and 1.4B parameters were trained with batch sizes of 4M tokens.
- We added checkpoints at initialization (step 0) and steps {1,2,4,8,16,32,64,128,256,512} in addition to every 1000 training steps.
- Flash Attention was used in the new retrained suite.
- We remedied a minor inconsistency that existed in the original suite: all models of size 2.8B parameters or smaller had a learning rate (LR) schedule which decayed to a minimum LR of 10% of the starting LR, but the 6.9B and 12B models all used an LR schedule which decayed to a minimum LR of 0. In the redone training runs, we rectified this inconsistency: all models were now trained with LR decaying to a minimum of 0.1× their maximum LR.

### Naming convention and parameter count

*Pythia* models were renamed in January 2023. It is possible that the old naming convention still persists in some documentation by accident. The current naming convention (70M, 160M, etc.) is based on total parameter count.

<figure style="width:32em">

| current Pythia suffix | old suffix | total params | non-embedding params |
| --------------------: | ---------: | -------------: | -------------------: |
| 70M | 19M | 70,426,624 | 18,915,328 |
| 160M | 125M | 162,322,944 | 85,056,000 |
| 410M | 350M | 405,334,016 | 302,311,424 |
| 1B | 800M | 1,011,781,632 | 805,736,448 |
| 1.4B | 1.3B | 1,414,647,808 | 1,208,602,624 |
| 2.8B | 2.7B | 2,775,208,960 | 2,517,652,480 |
| 6.9B | 6.7B | 6,857,302,016 | 6,444,163,072 |
| 12B | 13B | 11,846,072,320 | 11,327,027,200 |
</figure>
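The split between total and non-embedding parameters in the table above can be checked directly from a loaded checkpoint. The following is a minimal sketch, assuming the GPT-NeoX module naming in Transformers where the input embedding (`embed_in`) and output projection (`embed_out`) are the only parameters with "embed" in their names:

```python
from transformers import GPTNeoXForCausalLM

model = GPTNeoXForCausalLM.from_pretrained("EleutherAI/pythia-70m-deduped")

total = sum(p.numel() for p in model.parameters())
# Count the untied input embedding and output projection as "embedding" params,
# matching how the table above defines non-embedding parameters.
embedding = sum(
    p.numel() for name, p in model.named_parameters() if "embed" in name
)

print(f"total params:         {total:,}")
print(f"non-embedding params: {total - embedding:,}")
```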
[ "SCIQ" ]
intfloat/e5-mistral-7b-instruct
intfloat
feature-extraction
[ "sentence-transformers", "pytorch", "safetensors", "mistral", "feature-extraction", "mteb", "transformers", "en", "arxiv:2401.00368", "arxiv:2104.08663", "arxiv:2210.07316", "license:mit", "model-index", "autotrain_compatible", "text-generation-inference", "text-embeddings-inference", "endpoints_compatible", "region:us" ]
"2023-12-20T10:17:02Z"
2024-04-23T08:03:51+00:00
183,947
502
--- language: - en license: mit tags: - mteb - sentence-transformers - transformers model-index: - name: e5-mistral-7b-instruct results: - task: type: STS dataset: name: MTEB AFQMC type: C-MTEB/AFQMC config: default split: validation revision: None metrics: - type: cos_sim_pearson value: 37.863226091673866 - type: cos_sim_spearman value: 38.98733013335281 - type: euclidean_pearson value: 37.51783380497874 - type: euclidean_spearman value: 38.98733012753365 - type: manhattan_pearson value: 37.26706888081721 - type: manhattan_spearman value: 38.709750161903834 - task: type: STS dataset: name: MTEB ATEC type: C-MTEB/ATEC config: default split: test revision: None metrics: - type: cos_sim_pearson value: 43.33924583134623 - type: cos_sim_spearman value: 42.84316155158754 - type: euclidean_pearson value: 45.62709879515238 - type: euclidean_spearman value: 42.843155921732404 - type: manhattan_pearson value: 45.4786950991229 - type: manhattan_spearman value: 42.657334751855984 - task: type: Classification dataset: name: MTEB AmazonCounterfactualClassification (en) type: mteb/amazon_counterfactual config: en split: test revision: e8379541af4e31359cca9fbcf4b00f2671dba205 metrics: - type: accuracy value: 78.68656716417911 - type: ap value: 41.71522322900398 - type: f1 value: 72.37207703532552 - task: type: Classification dataset: name: MTEB AmazonCounterfactualClassification (de) type: mteb/amazon_counterfactual config: de split: test revision: e8379541af4e31359cca9fbcf4b00f2671dba205 metrics: - type: accuracy value: 74.04710920770879 - type: ap value: 83.42622221864045 - type: f1 value: 72.14388257905772 - task: type: Classification dataset: name: MTEB AmazonCounterfactualClassification (en-ext) type: mteb/amazon_counterfactual config: en-ext split: test revision: e8379541af4e31359cca9fbcf4b00f2671dba205 metrics: - type: accuracy value: 77.93103448275862 - type: ap value: 26.039284760509513 - type: f1 value: 64.81092954450712 - task: type: Classification dataset: name: MTEB AmazonCounterfactualClassification (ja) type: mteb/amazon_counterfactual config: ja split: test revision: e8379541af4e31359cca9fbcf4b00f2671dba205 metrics: - type: accuracy value: 77.21627408993577 - type: ap value: 24.876490553983036 - type: f1 value: 63.8773359684989 - task: type: Classification dataset: name: MTEB AmazonPolarityClassification type: mteb/amazon_polarity config: default split: test revision: e2d317d38cd51312af73b3d32a06d1a08b442046 metrics: - type: accuracy value: 95.90679999999999 - type: ap value: 94.32357863164454 - type: f1 value: 95.90485634708557 - task: type: Classification dataset: name: MTEB AmazonReviewsClassification (en) type: mteb/amazon_reviews_multi config: en split: test revision: 1399c76144fd37290681b995c656ef9b2e06e26d metrics: - type: accuracy value: 55.786 - type: f1 value: 55.31211995815146 - task: type: Classification dataset: name: MTEB AmazonReviewsClassification (de) type: mteb/amazon_reviews_multi config: de split: test revision: 1399c76144fd37290681b995c656ef9b2e06e26d metrics: - type: accuracy value: 53.26 - type: f1 value: 52.156230111544986 - task: type: Classification dataset: name: MTEB AmazonReviewsClassification (es) type: mteb/amazon_reviews_multi config: es split: test revision: 1399c76144fd37290681b995c656ef9b2e06e26d metrics: - type: accuracy value: 50.33 - type: f1 value: 49.195023008878145 - task: type: Classification dataset: name: MTEB AmazonReviewsClassification (fr) type: mteb/amazon_reviews_multi config: fr split: test revision: 
1399c76144fd37290681b995c656ef9b2e06e26d metrics: - type: accuracy value: 49.3 - type: f1 value: 48.434470184108 - task: type: Classification dataset: name: MTEB AmazonReviewsClassification (ja) type: mteb/amazon_reviews_multi config: ja split: test revision: 1399c76144fd37290681b995c656ef9b2e06e26d metrics: - type: accuracy value: 48.68599999999999 - type: f1 value: 47.62681775202072 - task: type: Classification dataset: name: MTEB AmazonReviewsClassification (zh) type: mteb/amazon_reviews_multi config: zh split: test revision: 1399c76144fd37290681b995c656ef9b2e06e26d metrics: - type: accuracy value: 46.238 - type: f1 value: 45.014030559653705 - task: type: Retrieval dataset: name: MTEB ArguAna type: arguana config: default split: test revision: None metrics: - type: map_at_1 value: 36.486000000000004 - type: map_at_10 value: 53.076 - type: map_at_100 value: 53.657999999999994 - type: map_at_1000 value: 53.659 - type: map_at_3 value: 48.234 - type: map_at_5 value: 51.121 - type: mrr_at_1 value: 37.269000000000005 - type: mrr_at_10 value: 53.335 - type: mrr_at_100 value: 53.916 - type: mrr_at_1000 value: 53.918 - type: mrr_at_3 value: 48.518 - type: mrr_at_5 value: 51.406 - type: ndcg_at_1 value: 36.486000000000004 - type: ndcg_at_10 value: 61.882000000000005 - type: ndcg_at_100 value: 64.165 - type: ndcg_at_1000 value: 64.203 - type: ndcg_at_3 value: 52.049 - type: ndcg_at_5 value: 57.199 - type: precision_at_1 value: 36.486000000000004 - type: precision_at_10 value: 8.982999999999999 - type: precision_at_100 value: 0.9939999999999999 - type: precision_at_1000 value: 0.1 - type: precision_at_3 value: 21.029 - type: precision_at_5 value: 15.092 - type: recall_at_1 value: 36.486000000000004 - type: recall_at_10 value: 89.82900000000001 - type: recall_at_100 value: 99.36 - type: recall_at_1000 value: 99.644 - type: recall_at_3 value: 63.087 - type: recall_at_5 value: 75.46199999999999 - task: type: Clustering dataset: name: MTEB ArxivClusteringP2P type: mteb/arxiv-clustering-p2p config: default split: test revision: a122ad7f3f0291bf49cc6f4d32aa80929df69d5d metrics: - type: v_measure value: 50.45119266859667 - task: type: Clustering dataset: name: MTEB ArxivClusteringS2S type: mteb/arxiv-clustering-s2s config: default split: test revision: f910caf1a6075f7329cdf8c1a6135696f37dbd53 metrics: - type: v_measure value: 45.4958298992051 - task: type: Reranking dataset: name: MTEB AskUbuntuDupQuestions type: mteb/askubuntudupquestions-reranking config: default split: test revision: 2000358ca161889fa9c082cb41daa8dcfb161a54 metrics: - type: map value: 66.98177472838887 - type: mrr value: 79.91854636591478 - task: type: STS dataset: name: MTEB BIOSSES type: mteb/biosses-sts config: default split: test revision: d3fb88f8f02e40887cd149695127462bbcf29b4a metrics: - type: cos_sim_pearson value: 87.67086498650698 - type: cos_sim_spearman value: 85.54773239564638 - type: euclidean_pearson value: 86.48229161588425 - type: euclidean_spearman value: 85.54773239564638 - type: manhattan_pearson value: 86.67533327742343 - type: manhattan_spearman value: 85.76099026691983 - task: type: STS dataset: name: MTEB BQ type: C-MTEB/BQ config: default split: test revision: None metrics: - type: cos_sim_pearson value: 50.31998888922809 - type: cos_sim_spearman value: 50.6369940530675 - type: euclidean_pearson value: 50.055544636296055 - type: euclidean_spearman value: 50.63699405154838 - type: manhattan_pearson value: 50.00739378036807 - type: manhattan_spearman value: 50.607237418676945 - task: type: BitextMining dataset: 
name: MTEB BUCC (de-en) type: mteb/bucc-bitext-mining config: de-en split: test revision: d51519689f32196a32af33b075a01d0e7c51e252 metrics: - type: accuracy value: 99.5615866388309 - type: f1 value: 99.49895615866389 - type: precision value: 99.46764091858039 - type: recall value: 99.5615866388309 - task: type: BitextMining dataset: name: MTEB BUCC (fr-en) type: mteb/bucc-bitext-mining config: fr-en split: test revision: d51519689f32196a32af33b075a01d0e7c51e252 metrics: - type: accuracy value: 99.19656614571869 - type: f1 value: 99.08650671362535 - type: precision value: 99.0314769975787 - type: recall value: 99.19656614571869 - task: type: BitextMining dataset: name: MTEB BUCC (ru-en) type: mteb/bucc-bitext-mining config: ru-en split: test revision: d51519689f32196a32af33b075a01d0e7c51e252 metrics: - type: accuracy value: 98.0256321440942 - type: f1 value: 97.83743216718624 - type: precision value: 97.74390947927492 - type: recall value: 98.0256321440942 - task: type: BitextMining dataset: name: MTEB BUCC (zh-en) type: mteb/bucc-bitext-mining config: zh-en split: test revision: d51519689f32196a32af33b075a01d0e7c51e252 metrics: - type: accuracy value: 99.26276987888363 - type: f1 value: 99.22766368264 - type: precision value: 99.21011058451816 - type: recall value: 99.26276987888363 - task: type: Classification dataset: name: MTEB Banking77Classification type: mteb/banking77 config: default split: test revision: 0fd18e25b25c072e09e0d92ab615fda904d66300 metrics: - type: accuracy value: 88.22727272727272 - type: f1 value: 88.17411732496673 - task: type: Clustering dataset: name: MTEB BiorxivClusteringP2P type: mteb/biorxiv-clustering-p2p config: default split: test revision: 65b79d1d13f80053f67aca9498d9402c2d9f1f40 metrics: - type: v_measure value: 43.530637846246975 - task: type: Clustering dataset: name: MTEB BiorxivClusteringS2S type: mteb/biorxiv-clustering-s2s config: default split: test revision: 258694dd0231531bc1fd9de6ceb52a0853c6d908 metrics: - type: v_measure value: 40.23505728593893 - task: type: Clustering dataset: name: MTEB CLSClusteringP2P type: C-MTEB/CLSClusteringP2P config: default split: test revision: None metrics: - type: v_measure value: 44.419028279451275 - task: type: Clustering dataset: name: MTEB CLSClusteringS2S type: C-MTEB/CLSClusteringS2S config: default split: test revision: None metrics: - type: v_measure value: 42.5820277929776 - task: type: Reranking dataset: name: MTEB CMedQAv1 type: C-MTEB/CMedQAv1-reranking config: default split: test revision: None metrics: - type: map value: 77.67811726152972 - type: mrr value: 80.99003968253969 - task: type: Reranking dataset: name: MTEB CMedQAv2 type: C-MTEB/CMedQAv2-reranking config: default split: test revision: None metrics: - type: map value: 78.66055354534922 - type: mrr value: 81.66119047619047 - task: type: Retrieval dataset: name: MTEB CQADupstackRetrieval type: BeIR/cqadupstack config: default split: test revision: None metrics: - type: map_at_1 value: 27.162333333333333 - type: map_at_10 value: 37.22291666666667 - type: map_at_100 value: 38.56733333333333 - type: map_at_1000 value: 38.684250000000006 - type: map_at_3 value: 34.22858333333333 - type: map_at_5 value: 35.852500000000006 - type: mrr_at_1 value: 32.459833333333336 - type: mrr_at_10 value: 41.65358333333333 - type: mrr_at_100 value: 42.566916666666664 - type: mrr_at_1000 value: 42.61766666666667 - type: mrr_at_3 value: 39.210499999999996 - type: mrr_at_5 value: 40.582166666666666 - type: ndcg_at_1 value: 32.459833333333336 - type: ndcg_at_10 
value: 42.96758333333333 - type: ndcg_at_100 value: 48.5065 - type: ndcg_at_1000 value: 50.556583333333336 - type: ndcg_at_3 value: 38.004416666666664 - type: ndcg_at_5 value: 40.25916666666667 - type: precision_at_1 value: 32.459833333333336 - type: precision_at_10 value: 7.664583333333333 - type: precision_at_100 value: 1.2349999999999999 - type: precision_at_1000 value: 0.15966666666666668 - type: precision_at_3 value: 17.731166666666663 - type: precision_at_5 value: 12.575333333333335 - type: recall_at_1 value: 27.162333333333333 - type: recall_at_10 value: 55.44158333333334 - type: recall_at_100 value: 79.56966666666666 - type: recall_at_1000 value: 93.45224999999999 - type: recall_at_3 value: 41.433083333333336 - type: recall_at_5 value: 47.31108333333333 - task: type: Retrieval dataset: name: MTEB ClimateFEVER type: climate-fever config: default split: test revision: None metrics: - type: map_at_1 value: 16.539 - type: map_at_10 value: 28.494999999999997 - type: map_at_100 value: 30.568 - type: map_at_1000 value: 30.741000000000003 - type: map_at_3 value: 23.846999999999998 - type: map_at_5 value: 26.275 - type: mrr_at_1 value: 37.394 - type: mrr_at_10 value: 50.068 - type: mrr_at_100 value: 50.727 - type: mrr_at_1000 value: 50.751000000000005 - type: mrr_at_3 value: 46.938 - type: mrr_at_5 value: 48.818 - type: ndcg_at_1 value: 37.394 - type: ndcg_at_10 value: 38.349 - type: ndcg_at_100 value: 45.512 - type: ndcg_at_1000 value: 48.321 - type: ndcg_at_3 value: 32.172 - type: ndcg_at_5 value: 34.265 - type: precision_at_1 value: 37.394 - type: precision_at_10 value: 11.927999999999999 - type: precision_at_100 value: 1.966 - type: precision_at_1000 value: 0.25 - type: precision_at_3 value: 24.126 - type: precision_at_5 value: 18.306 - type: recall_at_1 value: 16.539 - type: recall_at_10 value: 44.504 - type: recall_at_100 value: 68.605 - type: recall_at_1000 value: 84.1 - type: recall_at_3 value: 29.008 - type: recall_at_5 value: 35.58 - task: type: Retrieval dataset: name: MTEB CmedqaRetrieval type: C-MTEB/CmedqaRetrieval config: default split: dev revision: None metrics: - type: map_at_1 value: 19.482 - type: map_at_10 value: 28.622999999999998 - type: map_at_100 value: 30.262 - type: map_at_1000 value: 30.432 - type: map_at_3 value: 25.647 - type: map_at_5 value: 27.128000000000004 - type: mrr_at_1 value: 30.408 - type: mrr_at_10 value: 37.188 - type: mrr_at_100 value: 38.196000000000005 - type: mrr_at_1000 value: 38.273 - type: mrr_at_3 value: 35.067 - type: mrr_at_5 value: 36.124 - type: ndcg_at_1 value: 30.408 - type: ndcg_at_10 value: 34.215 - type: ndcg_at_100 value: 41.349999999999994 - type: ndcg_at_1000 value: 44.689 - type: ndcg_at_3 value: 30.264999999999997 - type: ndcg_at_5 value: 31.572 - type: precision_at_1 value: 30.408 - type: precision_at_10 value: 7.6770000000000005 - type: precision_at_100 value: 1.352 - type: precision_at_1000 value: 0.178 - type: precision_at_3 value: 17.213 - type: precision_at_5 value: 12.198 - type: recall_at_1 value: 19.482 - type: recall_at_10 value: 42.368 - type: recall_at_100 value: 72.694 - type: recall_at_1000 value: 95.602 - type: recall_at_3 value: 30.101 - type: recall_at_5 value: 34.708 - task: type: PairClassification dataset: name: MTEB Cmnli type: C-MTEB/CMNLI config: default split: validation revision: None metrics: - type: cos_sim_accuracy value: 71.16055321707758 - type: cos_sim_ap value: 80.21073839711723 - type: cos_sim_f1 value: 72.9740932642487 - type: cos_sim_precision value: 65.53136050623488 - type: cos_sim_recall 
value: 82.3240589198036 - type: dot_accuracy value: 71.16055321707758 - type: dot_ap value: 80.212299264122 - type: dot_f1 value: 72.9740932642487 - type: dot_precision value: 65.53136050623488 - type: dot_recall value: 82.3240589198036 - type: euclidean_accuracy value: 71.16055321707758 - type: euclidean_ap value: 80.21076298680417 - type: euclidean_f1 value: 72.9740932642487 - type: euclidean_precision value: 65.53136050623488 - type: euclidean_recall value: 82.3240589198036 - type: manhattan_accuracy value: 70.71557426337944 - type: manhattan_ap value: 79.93448977199749 - type: manhattan_f1 value: 72.83962726826877 - type: manhattan_precision value: 62.7407908077053 - type: manhattan_recall value: 86.81318681318682 - type: max_accuracy value: 71.16055321707758 - type: max_ap value: 80.212299264122 - type: max_f1 value: 72.9740932642487 - task: type: Retrieval dataset: name: MTEB CovidRetrieval type: C-MTEB/CovidRetrieval config: default split: dev revision: None metrics: - type: map_at_1 value: 60.643 - type: map_at_10 value: 69.011 - type: map_at_100 value: 69.533 - type: map_at_1000 value: 69.545 - type: map_at_3 value: 67.167 - type: map_at_5 value: 68.12700000000001 - type: mrr_at_1 value: 60.801 - type: mrr_at_10 value: 69.111 - type: mrr_at_100 value: 69.6 - type: mrr_at_1000 value: 69.611 - type: mrr_at_3 value: 67.229 - type: mrr_at_5 value: 68.214 - type: ndcg_at_1 value: 60.801 - type: ndcg_at_10 value: 73.128 - type: ndcg_at_100 value: 75.614 - type: ndcg_at_1000 value: 75.92 - type: ndcg_at_3 value: 69.261 - type: ndcg_at_5 value: 70.973 - type: precision_at_1 value: 60.801 - type: precision_at_10 value: 8.662 - type: precision_at_100 value: 0.9860000000000001 - type: precision_at_1000 value: 0.101 - type: precision_at_3 value: 25.149 - type: precision_at_5 value: 15.953999999999999 - type: recall_at_1 value: 60.643 - type: recall_at_10 value: 85.959 - type: recall_at_100 value: 97.576 - type: recall_at_1000 value: 100.0 - type: recall_at_3 value: 75.184 - type: recall_at_5 value: 79.32000000000001 - task: type: Retrieval dataset: name: MTEB DBPedia type: dbpedia-entity config: default split: test revision: None metrics: - type: map_at_1 value: 10.183 - type: map_at_10 value: 23.958 - type: map_at_100 value: 34.354 - type: map_at_1000 value: 36.442 - type: map_at_3 value: 16.345000000000002 - type: map_at_5 value: 19.647000000000002 - type: mrr_at_1 value: 74.25 - type: mrr_at_10 value: 80.976 - type: mrr_at_100 value: 81.256 - type: mrr_at_1000 value: 81.262 - type: mrr_at_3 value: 79.958 - type: mrr_at_5 value: 80.37100000000001 - type: ndcg_at_1 value: 62.0 - type: ndcg_at_10 value: 48.894999999999996 - type: ndcg_at_100 value: 53.867 - type: ndcg_at_1000 value: 61.304 - type: ndcg_at_3 value: 53.688 - type: ndcg_at_5 value: 50.900999999999996 - type: precision_at_1 value: 74.25 - type: precision_at_10 value: 39.525 - type: precision_at_100 value: 12.323 - type: precision_at_1000 value: 2.539 - type: precision_at_3 value: 57.49999999999999 - type: precision_at_5 value: 49.1 - type: recall_at_1 value: 10.183 - type: recall_at_10 value: 29.296 - type: recall_at_100 value: 60.394999999999996 - type: recall_at_1000 value: 83.12 - type: recall_at_3 value: 17.495 - type: recall_at_5 value: 22.235 - task: type: Retrieval dataset: name: MTEB DuRetrieval type: C-MTEB/DuRetrieval config: default split: dev revision: None metrics: - type: map_at_1 value: 26.613999999999997 - type: map_at_10 value: 79.77300000000001 - type: map_at_100 value: 82.71 - type: map_at_1000 value: 82.75 - 
type: map_at_3 value: 55.92700000000001 - type: map_at_5 value: 70.085 - type: mrr_at_1 value: 90.7 - type: mrr_at_10 value: 93.438 - type: mrr_at_100 value: 93.504 - type: mrr_at_1000 value: 93.50699999999999 - type: mrr_at_3 value: 93.125 - type: mrr_at_5 value: 93.34 - type: ndcg_at_1 value: 90.7 - type: ndcg_at_10 value: 87.023 - type: ndcg_at_100 value: 90.068 - type: ndcg_at_1000 value: 90.43299999999999 - type: ndcg_at_3 value: 86.339 - type: ndcg_at_5 value: 85.013 - type: precision_at_1 value: 90.7 - type: precision_at_10 value: 41.339999999999996 - type: precision_at_100 value: 4.806 - type: precision_at_1000 value: 0.48900000000000005 - type: precision_at_3 value: 76.983 - type: precision_at_5 value: 64.69 - type: recall_at_1 value: 26.613999999999997 - type: recall_at_10 value: 87.681 - type: recall_at_100 value: 97.44699999999999 - type: recall_at_1000 value: 99.348 - type: recall_at_3 value: 57.809999999999995 - type: recall_at_5 value: 74.258 - task: type: Retrieval dataset: name: MTEB EcomRetrieval type: C-MTEB/EcomRetrieval config: default split: dev revision: None metrics: - type: map_at_1 value: 30.9 - type: map_at_10 value: 40.467 - type: map_at_100 value: 41.423 - type: map_at_1000 value: 41.463 - type: map_at_3 value: 37.25 - type: map_at_5 value: 39.31 - type: mrr_at_1 value: 30.9 - type: mrr_at_10 value: 40.467 - type: mrr_at_100 value: 41.423 - type: mrr_at_1000 value: 41.463 - type: mrr_at_3 value: 37.25 - type: mrr_at_5 value: 39.31 - type: ndcg_at_1 value: 30.9 - type: ndcg_at_10 value: 45.957 - type: ndcg_at_100 value: 50.735 - type: ndcg_at_1000 value: 51.861999999999995 - type: ndcg_at_3 value: 39.437 - type: ndcg_at_5 value: 43.146 - type: precision_at_1 value: 30.9 - type: precision_at_10 value: 6.35 - type: precision_at_100 value: 0.861 - type: precision_at_1000 value: 0.095 - type: precision_at_3 value: 15.267 - type: precision_at_5 value: 10.96 - type: recall_at_1 value: 30.9 - type: recall_at_10 value: 63.5 - type: recall_at_100 value: 86.1 - type: recall_at_1000 value: 95.1 - type: recall_at_3 value: 45.800000000000004 - type: recall_at_5 value: 54.800000000000004 - task: type: Classification dataset: name: MTEB EmotionClassification type: mteb/emotion config: default split: test revision: 4f58c6b202a23cf9a4da393831edf4f9183cad37 metrics: - type: accuracy value: 49.765 - type: f1 value: 45.93242203574485 - task: type: Retrieval dataset: name: MTEB FEVER type: fever config: default split: test revision: None metrics: - type: map_at_1 value: 75.138 - type: map_at_10 value: 84.21300000000001 - type: map_at_100 value: 84.43 - type: map_at_1000 value: 84.441 - type: map_at_3 value: 83.071 - type: map_at_5 value: 83.853 - type: mrr_at_1 value: 80.948 - type: mrr_at_10 value: 88.175 - type: mrr_at_100 value: 88.24 - type: mrr_at_1000 value: 88.241 - type: mrr_at_3 value: 87.516 - type: mrr_at_5 value: 87.997 - type: ndcg_at_1 value: 80.948 - type: ndcg_at_10 value: 87.84100000000001 - type: ndcg_at_100 value: 88.576 - type: ndcg_at_1000 value: 88.75699999999999 - type: ndcg_at_3 value: 86.176 - type: ndcg_at_5 value: 87.214 - type: precision_at_1 value: 80.948 - type: precision_at_10 value: 10.632 - type: precision_at_100 value: 1.123 - type: precision_at_1000 value: 0.11499999999999999 - type: precision_at_3 value: 33.193 - type: precision_at_5 value: 20.663 - type: recall_at_1 value: 75.138 - type: recall_at_10 value: 94.89699999999999 - type: recall_at_100 value: 97.751 - type: recall_at_1000 value: 98.833 - type: recall_at_3 value: 90.455 - type: 
recall_at_5 value: 93.085 - task: type: Retrieval dataset: name: MTEB FiQA2018 type: fiqa config: default split: test revision: None metrics: - type: map_at_1 value: 29.45 - type: map_at_10 value: 48.596000000000004 - type: map_at_100 value: 50.70400000000001 - type: map_at_1000 value: 50.83800000000001 - type: map_at_3 value: 42.795 - type: map_at_5 value: 46.085 - type: mrr_at_1 value: 56.172999999999995 - type: mrr_at_10 value: 64.35300000000001 - type: mrr_at_100 value: 64.947 - type: mrr_at_1000 value: 64.967 - type: mrr_at_3 value: 62.653999999999996 - type: mrr_at_5 value: 63.534 - type: ndcg_at_1 value: 56.172999999999995 - type: ndcg_at_10 value: 56.593 - type: ndcg_at_100 value: 62.942 - type: ndcg_at_1000 value: 64.801 - type: ndcg_at_3 value: 53.024 - type: ndcg_at_5 value: 53.986999999999995 - type: precision_at_1 value: 56.172999999999995 - type: precision_at_10 value: 15.494 - type: precision_at_100 value: 2.222 - type: precision_at_1000 value: 0.254 - type: precision_at_3 value: 35.185 - type: precision_at_5 value: 25.556 - type: recall_at_1 value: 29.45 - type: recall_at_10 value: 62.882000000000005 - type: recall_at_100 value: 85.56099999999999 - type: recall_at_1000 value: 96.539 - type: recall_at_3 value: 47.911 - type: recall_at_5 value: 54.52 - task: type: Retrieval dataset: name: MTEB HotpotQA type: hotpotqa config: default split: test revision: None metrics: - type: map_at_1 value: 39.581 - type: map_at_10 value: 68.401 - type: map_at_100 value: 69.207 - type: map_at_1000 value: 69.25200000000001 - type: map_at_3 value: 64.689 - type: map_at_5 value: 67.158 - type: mrr_at_1 value: 79.163 - type: mrr_at_10 value: 85.22999999999999 - type: mrr_at_100 value: 85.386 - type: mrr_at_1000 value: 85.39099999999999 - type: mrr_at_3 value: 84.432 - type: mrr_at_5 value: 84.952 - type: ndcg_at_1 value: 79.163 - type: ndcg_at_10 value: 75.721 - type: ndcg_at_100 value: 78.411 - type: ndcg_at_1000 value: 79.23599999999999 - type: ndcg_at_3 value: 70.68799999999999 - type: ndcg_at_5 value: 73.694 - type: precision_at_1 value: 79.163 - type: precision_at_10 value: 16.134 - type: precision_at_100 value: 1.821 - type: precision_at_1000 value: 0.193 - type: precision_at_3 value: 46.446 - type: precision_at_5 value: 30.242 - type: recall_at_1 value: 39.581 - type: recall_at_10 value: 80.66799999999999 - type: recall_at_100 value: 91.033 - type: recall_at_1000 value: 96.408 - type: recall_at_3 value: 69.669 - type: recall_at_5 value: 75.604 - task: type: Classification dataset: name: MTEB IFlyTek type: C-MTEB/IFlyTek-classification config: default split: validation revision: None metrics: - type: accuracy value: 45.04809542131589 - type: f1 value: 37.01181779071118 - task: type: Classification dataset: name: MTEB ImdbClassification type: mteb/imdb config: default split: test revision: 3d86128a09e091d6018b6d26cad27f2739fc2db7 metrics: - type: accuracy value: 94.78120000000001 - type: ap value: 92.52931921594387 - type: f1 value: 94.77902110732532 - task: type: Classification dataset: name: MTEB JDReview type: C-MTEB/JDReview-classification config: default split: test revision: None metrics: - type: accuracy value: 85.81613508442777 - type: ap value: 52.430320593468394 - type: f1 value: 79.95467268178068 - task: type: STS dataset: name: MTEB LCQMC type: C-MTEB/LCQMC config: default split: test revision: None metrics: - type: cos_sim_pearson value: 71.05801751913393 - type: cos_sim_spearman value: 75.47954644971965 - type: euclidean_pearson value: 74.27472296759713 - type: 
euclidean_spearman value: 75.47954201369866 - type: manhattan_pearson value: 74.30508190186474 - type: manhattan_spearman value: 75.51326518159436 - task: type: Reranking dataset: name: MTEB MMarcoReranking type: C-MTEB/Mmarco-reranking config: default split: dev revision: None metrics: - type: map value: 24.21110921666315 - type: mrr value: 22.863492063492064 - task: type: Retrieval dataset: name: MTEB MMarcoRetrieval type: C-MTEB/MMarcoRetrieval config: default split: dev revision: None metrics: - type: map_at_1 value: 61.38400000000001 - type: map_at_10 value: 70.895 - type: map_at_100 value: 71.314 - type: map_at_1000 value: 71.331 - type: map_at_3 value: 69.016 - type: map_at_5 value: 70.179 - type: mrr_at_1 value: 63.481 - type: mrr_at_10 value: 71.543 - type: mrr_at_100 value: 71.91300000000001 - type: mrr_at_1000 value: 71.928 - type: mrr_at_3 value: 69.90899999999999 - type: mrr_at_5 value: 70.907 - type: ndcg_at_1 value: 63.481 - type: ndcg_at_10 value: 74.833 - type: ndcg_at_100 value: 76.705 - type: ndcg_at_1000 value: 77.13600000000001 - type: ndcg_at_3 value: 71.236 - type: ndcg_at_5 value: 73.199 - type: precision_at_1 value: 63.481 - type: precision_at_10 value: 9.179 - type: precision_at_100 value: 1.011 - type: precision_at_1000 value: 0.105 - type: precision_at_3 value: 27.044 - type: precision_at_5 value: 17.272000000000002 - type: recall_at_1 value: 61.38400000000001 - type: recall_at_10 value: 86.318 - type: recall_at_100 value: 94.786 - type: recall_at_1000 value: 98.14500000000001 - type: recall_at_3 value: 76.717 - type: recall_at_5 value: 81.416 - task: type: Retrieval dataset: name: MTEB MSMARCO type: msmarco config: default split: dev revision: None metrics: - type: map_at_1 value: 23.363999999999997 - type: map_at_10 value: 36.022 - type: map_at_100 value: 37.229 - type: map_at_1000 value: 37.274 - type: map_at_3 value: 32.131 - type: map_at_5 value: 34.391 - type: mrr_at_1 value: 24.069 - type: mrr_at_10 value: 36.620000000000005 - type: mrr_at_100 value: 37.769999999999996 - type: mrr_at_1000 value: 37.809 - type: mrr_at_3 value: 32.846 - type: mrr_at_5 value: 35.02 - type: ndcg_at_1 value: 24.069 - type: ndcg_at_10 value: 43.056 - type: ndcg_at_100 value: 48.754 - type: ndcg_at_1000 value: 49.829 - type: ndcg_at_3 value: 35.167 - type: ndcg_at_5 value: 39.168 - type: precision_at_1 value: 24.069 - type: precision_at_10 value: 6.762 - type: precision_at_100 value: 0.96 - type: precision_at_1000 value: 0.105 - type: precision_at_3 value: 14.957 - type: precision_at_5 value: 11.023 - type: recall_at_1 value: 23.363999999999997 - type: recall_at_10 value: 64.696 - type: recall_at_100 value: 90.795 - type: recall_at_1000 value: 98.892 - type: recall_at_3 value: 43.247 - type: recall_at_5 value: 52.86300000000001 - task: type: Classification dataset: name: MTEB MTOPDomainClassification (en) type: mteb/mtop_domain config: en split: test revision: d80d48c1eb48d3562165c59d59d0034df9fff0bf metrics: - type: accuracy value: 96.11947104423166 - type: f1 value: 95.89561841159332 - task: type: Classification dataset: name: MTEB MTOPDomainClassification (de) type: mteb/mtop_domain config: de split: test revision: d80d48c1eb48d3562165c59d59d0034df9fff0bf metrics: - type: accuracy value: 92.97548605240912 - type: f1 value: 92.17133696717212 - task: type: Classification dataset: name: MTEB MTOPDomainClassification (es) type: mteb/mtop_domain config: es split: test revision: d80d48c1eb48d3562165c59d59d0034df9fff0bf metrics: - type: accuracy value: 93.37224816544364 - type: f1 
value: 93.19978829237863 - task: type: Classification dataset: name: MTEB MTOPDomainClassification (fr) type: mteb/mtop_domain config: fr split: test revision: d80d48c1eb48d3562165c59d59d0034df9fff0bf metrics: - type: accuracy value: 91.28719072972127 - type: f1 value: 91.28448045979604 - task: type: Classification dataset: name: MTEB MTOPDomainClassification (hi) type: mteb/mtop_domain config: hi split: test revision: d80d48c1eb48d3562165c59d59d0034df9fff0bf metrics: - type: accuracy value: 88.8131946934385 - type: f1 value: 88.27883019362747 - task: type: Classification dataset: name: MTEB MTOPDomainClassification (th) type: mteb/mtop_domain config: th split: test revision: d80d48c1eb48d3562165c59d59d0034df9fff0bf metrics: - type: accuracy value: 85.52260397830018 - type: f1 value: 85.15528226728568 - task: type: Classification dataset: name: MTEB MTOPIntentClassification (en) type: mteb/mtop_intent config: en split: test revision: ae001d0e6b1228650b7bd1c2c65fb50ad11a8aba metrics: - type: accuracy value: 86.10807113543093 - type: f1 value: 70.88498219072167 - task: type: Classification dataset: name: MTEB MTOPIntentClassification (de) type: mteb/mtop_intent config: de split: test revision: ae001d0e6b1228650b7bd1c2c65fb50ad11a8aba metrics: - type: accuracy value: 77.77120315581854 - type: f1 value: 57.97153920153224 - task: type: Classification dataset: name: MTEB MTOPIntentClassification (es) type: mteb/mtop_intent config: es split: test revision: ae001d0e6b1228650b7bd1c2c65fb50ad11a8aba metrics: - type: accuracy value: 79.93995997331554 - type: f1 value: 58.839203810064866 - task: type: Classification dataset: name: MTEB MTOPIntentClassification (fr) type: mteb/mtop_intent config: fr split: test revision: ae001d0e6b1228650b7bd1c2c65fb50ad11a8aba metrics: - type: accuracy value: 77.801440651425 - type: f1 value: 58.68009647839332 - task: type: Classification dataset: name: MTEB MTOPIntentClassification (hi) type: mteb/mtop_intent config: hi split: test revision: ae001d0e6b1228650b7bd1c2c65fb50ad11a8aba metrics: - type: accuracy value: 72.90785227680172 - type: f1 value: 49.83760954655788 - task: type: Classification dataset: name: MTEB MTOPIntentClassification (th) type: mteb/mtop_intent config: th split: test revision: ae001d0e6b1228650b7bd1c2c65fb50ad11a8aba metrics: - type: accuracy value: 73.24050632911391 - type: f1 value: 52.0562553541082 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (af) type: mteb/amazon_massive_intent config: af split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 66.47948890383321 - type: f1 value: 63.334877563135485 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (am) type: mteb/amazon_massive_intent config: am split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 44.2871553463349 - type: f1 value: 43.17658050605427 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (ar) type: mteb/amazon_massive_intent config: ar split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 63.174176193678555 - type: f1 value: 59.236659587042425 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (az) type: mteb/amazon_massive_intent config: az split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 64.226630800269 - type: f1 value: 60.951842696956184 - task: type: Classification dataset: name: MTEB 
MassiveIntentClassification (bn) type: mteb/amazon_massive_intent config: bn split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 64.94283792871555 - type: f1 value: 61.40057652844215 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (cy) type: mteb/amazon_massive_intent config: cy split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 55.480833893745796 - type: f1 value: 52.5298332072816 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (da) type: mteb/amazon_massive_intent config: da split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 72.52858103564223 - type: f1 value: 69.3770851919204 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (de) type: mteb/amazon_massive_intent config: de split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 74.09213180901143 - type: f1 value: 71.13518469365879 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (el) type: mteb/amazon_massive_intent config: el split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 68.31203765971756 - type: f1 value: 66.05906970865144 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (en) type: mteb/amazon_massive_intent config: en split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 80.57162071284465 - type: f1 value: 77.7866172598823 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (es) type: mteb/amazon_massive_intent config: es split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 75.09414929388029 - type: f1 value: 72.5712594833695 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (fa) type: mteb/amazon_massive_intent config: fa split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 72.20914593140553 - type: f1 value: 68.90619124909186 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (fi) type: mteb/amazon_massive_intent config: fi split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 68.74243443174176 - type: f1 value: 64.72743141749955 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (fr) type: mteb/amazon_massive_intent config: fr split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 75.11096166778749 - type: f1 value: 72.61849933064694 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (he) type: mteb/amazon_massive_intent config: he split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 66.22394082044384 - type: f1 value: 62.43648797607235 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (hi) type: mteb/amazon_massive_intent config: hi split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 69.44855413584399 - type: f1 value: 66.56851670913659 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (hu) type: mteb/amazon_massive_intent config: hu split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 69.4149293880296 - type: f1 value: 66.12960877904776 - task: type: 
Classification dataset: name: MTEB MassiveIntentClassification (hy) type: mteb/amazon_massive_intent config: hy split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 56.916610625420304 - type: f1 value: 54.02534600927991 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (id) type: mteb/amazon_massive_intent config: id split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 72.71351714862138 - type: f1 value: 69.70227985126316 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (is) type: mteb/amazon_massive_intent config: is split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 59.91257565568257 - type: f1 value: 57.06811572144974 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (it) type: mteb/amazon_massive_intent config: it split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 75.25218560860793 - type: f1 value: 72.48057563104247 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (ja) type: mteb/amazon_massive_intent config: ja split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 76.35507733691998 - type: f1 value: 73.03024649541128 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (jv) type: mteb/amazon_massive_intent config: jv split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 57.918628110289184 - type: f1 value: 54.75590124456177 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (ka) type: mteb/amazon_massive_intent config: ka split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 52.548755884330866 - type: f1 value: 51.5356975360209 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (km) type: mteb/amazon_massive_intent config: km split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 46.44922663080027 - type: f1 value: 44.561114416830975 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (kn) type: mteb/amazon_massive_intent config: kn split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 53.95763281775386 - type: f1 value: 50.68367245122476 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (ko) type: mteb/amazon_massive_intent config: ko split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 74.20645595158035 - type: f1 value: 71.78450093258185 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (lv) type: mteb/amazon_massive_intent config: lv split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 59.226630800269 - type: f1 value: 57.53988988993337 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (ml) type: mteb/amazon_massive_intent config: ml split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 51.44922663080027 - type: f1 value: 48.58809018065056 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (mn) type: mteb/amazon_massive_intent config: mn split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 51.3752521856086 - type: f1 
value: 49.91373941436425 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (ms) type: mteb/amazon_massive_intent config: ms split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 69.85205110961668 - type: f1 value: 67.05660019588582 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (my) type: mteb/amazon_massive_intent config: my split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 49.1492938802959 - type: f1 value: 46.717578025393195 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (nb) type: mteb/amazon_massive_intent config: nb split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 70.93140551445865 - type: f1 value: 67.45406609372205 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (nl) type: mteb/amazon_massive_intent config: nl split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 74.82851378614662 - type: f1 value: 71.15951964393868 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (pl) type: mteb/amazon_massive_intent config: pl split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 74.84868863483524 - type: f1 value: 71.76056802364877 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (pt) type: mteb/amazon_massive_intent config: pt split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 75.27236045729657 - type: f1 value: 72.48733090101163 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (ro) type: mteb/amazon_massive_intent config: ro split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 69.63012777404168 - type: f1 value: 66.56444015346203 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (ru) type: mteb/amazon_massive_intent config: ru split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 76.62743779421655 - type: f1 value: 73.82720656992142 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (sl) type: mteb/amazon_massive_intent config: sl split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 67.15198386012105 - type: f1 value: 64.41418309797744 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (sq) type: mteb/amazon_massive_intent config: sq split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 58.8399462004035 - type: f1 value: 56.050989519693886 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (sv) type: mteb/amazon_massive_intent config: sv split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 73.86684599865501 - type: f1 value: 70.80682480844303 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (sw) type: mteb/amazon_massive_intent config: sw split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 57.36718224613316 - type: f1 value: 54.998746471013774 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (ta) type: mteb/amazon_massive_intent config: ta split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy 
value: 53.150638870208475 - type: f1 value: 49.79179342620099 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (te) type: mteb/amazon_massive_intent config: te split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 51.50638870208473 - type: f1 value: 49.778960742003555 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (th) type: mteb/amazon_massive_intent config: th split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 66.906523201076 - type: f1 value: 66.75784022138245 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (tl) type: mteb/amazon_massive_intent config: tl split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 68.73234700739744 - type: f1 value: 65.75016141148413 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (tr) type: mteb/amazon_massive_intent config: tr split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 72.06792199058508 - type: f1 value: 67.90334782594083 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (ur) type: mteb/amazon_massive_intent config: ur split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 62.09145931405515 - type: f1 value: 58.88703095210731 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (vi) type: mteb/amazon_massive_intent config: vi split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 71.17014122394083 - type: f1 value: 68.43676277921544 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (zh-CN) type: mteb/amazon_massive_intent config: zh-CN split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 74.99327505043712 - type: f1 value: 72.26813373392943 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (zh-TW) type: mteb/amazon_massive_intent config: zh-TW split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 71.13987895090787 - type: f1 value: 70.29309514467575 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (af) type: mteb/amazon_massive_scenario config: af split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 73.37256220578345 - type: f1 value: 72.56456170538992 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (am) type: mteb/amazon_massive_scenario config: am split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 47.205783456624076 - type: f1 value: 45.905999859074434 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (ar) type: mteb/amazon_massive_scenario config: ar split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 69.8352387357095 - type: f1 value: 69.43553987525273 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (az) type: mteb/amazon_massive_scenario config: az split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 67.00403496973773 - type: f1 value: 65.97477215779143 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (bn) type: mteb/amazon_massive_scenario config: bn split: test revision: 
7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 68.04976462676531 - type: f1 value: 67.24581993778398 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (cy) type: mteb/amazon_massive_scenario config: cy split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 61.882985877605925 - type: f1 value: 59.995293199988794 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (da) type: mteb/amazon_massive_scenario config: da split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 76.75857431069267 - type: f1 value: 76.52031675299841 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (de) type: mteb/amazon_massive_scenario config: de split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 79.03496973772697 - type: f1 value: 79.25548063175344 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (el) type: mteb/amazon_massive_scenario config: el split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 72.96570275722931 - type: f1 value: 72.19110435289122 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (en) type: mteb/amazon_massive_scenario config: en split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 82.38735709482178 - type: f1 value: 82.34495627619785 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (es) type: mteb/amazon_massive_scenario config: es split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 78.83994620040352 - type: f1 value: 78.91526355393667 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (fa) type: mteb/amazon_massive_scenario config: fa split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 76.7350369872226 - type: f1 value: 75.919437344927 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (fi) type: mteb/amazon_massive_scenario config: fi split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 71.21721587088096 - type: f1 value: 70.82973286243262 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (fr) type: mteb/amazon_massive_scenario config: fr split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 78.59784801613988 - type: f1 value: 78.47383161087423 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (he) type: mteb/amazon_massive_scenario config: he split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 69.64021519838602 - type: f1 value: 68.45118053027653 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (hi) type: mteb/amazon_massive_scenario config: hi split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 73.51042367182245 - type: f1 value: 72.90013022879003 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (hu) type: mteb/amazon_massive_scenario config: hu split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 74.0551445864156 - type: f1 value: 73.45871761713292 - task: type: Classification dataset: name: MTEB 
MassiveScenarioClassification (hy) type: mteb/amazon_massive_scenario config: hy split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 59.54606590450571 - type: f1 value: 57.72711794953869 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (id) type: mteb/amazon_massive_scenario config: id split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 77.40753194351042 - type: f1 value: 76.8157455506521 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (is) type: mteb/amazon_massive_scenario config: is split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 66.58372562205783 - type: f1 value: 65.2654868709758 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (it) type: mteb/amazon_massive_scenario config: it split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 78.39273705447208 - type: f1 value: 78.3592956594837 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (ja) type: mteb/amazon_massive_scenario config: ja split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 79.62004034969739 - type: f1 value: 79.78673754501855 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (jv) type: mteb/amazon_massive_scenario config: jv split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 64.29051782111634 - type: f1 value: 63.12502587609454 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (ka) type: mteb/amazon_massive_scenario config: ka split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 57.51849361129791 - type: f1 value: 56.32320906403241 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (km) type: mteb/amazon_massive_scenario config: km split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 52.41761936785474 - type: f1 value: 49.113762010098306 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (kn) type: mteb/amazon_massive_scenario config: kn split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 58.547410894418284 - type: f1 value: 56.87580674198118 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (ko) type: mteb/amazon_massive_scenario config: ko split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 78.89038332212507 - type: f1 value: 79.09210140529848 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (lv) type: mteb/amazon_massive_scenario config: lv split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 63.503698722259585 - type: f1 value: 61.45718858568352 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (ml) type: mteb/amazon_massive_scenario config: ml split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 54.02824478816408 - type: f1 value: 52.732738981386504 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (mn) type: mteb/amazon_massive_scenario config: mn split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 
54.23671822461331 - type: f1 value: 52.688080372545286 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (ms) type: mteb/amazon_massive_scenario config: ms split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 75.5312710154674 - type: f1 value: 74.59368478550698 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (my) type: mteb/amazon_massive_scenario config: my split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 52.192333557498316 - type: f1 value: 50.18302290152229 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (nb) type: mteb/amazon_massive_scenario config: nb split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 75.6960322797579 - type: f1 value: 75.25331182714856 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (nl) type: mteb/amazon_massive_scenario config: nl split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 78.47679892400808 - type: f1 value: 78.24044732352424 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (pl) type: mteb/amazon_massive_scenario config: pl split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 77.36718224613315 - type: f1 value: 77.2714452985389 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (pt) type: mteb/amazon_massive_scenario config: pt split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 77.96234028244788 - type: f1 value: 78.21282127011372 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (ro) type: mteb/amazon_massive_scenario config: ro split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 73.19435104236717 - type: f1 value: 73.1963711292812 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (ru) type: mteb/amazon_massive_scenario config: ru split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 80.52118359112306 - type: f1 value: 80.4179964390288 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (sl) type: mteb/amazon_massive_scenario config: sl split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 73.65837256220577 - type: f1 value: 73.07156989634905 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (sq) type: mteb/amazon_massive_scenario config: sq split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 64.02824478816409 - type: f1 value: 62.972399027713664 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (sv) type: mteb/amazon_massive_scenario config: sv split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 78.87020847343645 - type: f1 value: 78.224240866849 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (sw) type: mteb/amazon_massive_scenario config: sw split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 64.6570275722932 - type: f1 value: 63.274871811412545 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (ta) type: mteb/amazon_massive_scenario config: ta split: test 
revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 57.760591795561524 - type: f1 value: 56.73711528075771 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (te) type: mteb/amazon_massive_scenario config: te split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 57.26967047747142 - type: f1 value: 55.74735330863165 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (th) type: mteb/amazon_massive_scenario config: th split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 72.46133154001345 - type: f1 value: 71.9644168952811 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (tl) type: mteb/amazon_massive_scenario config: tl split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 73.70880968392737 - type: f1 value: 73.61543141070884 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (tr) type: mteb/amazon_massive_scenario config: tr split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 75.0437121721587 - type: f1 value: 74.83359868879921 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (ur) type: mteb/amazon_massive_scenario config: ur split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 67.05110961667788 - type: f1 value: 66.25869819274315 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (vi) type: mteb/amazon_massive_scenario config: vi split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 75.52118359112306 - type: f1 value: 75.92098546052303 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (zh-CN) type: mteb/amazon_massive_scenario config: zh-CN split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 79.92938802958977 - type: f1 value: 79.79833572573796 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (zh-TW) type: mteb/amazon_massive_scenario config: zh-TW split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 76.86617350369872 - type: f1 value: 77.42645654909516 - task: type: Retrieval dataset: name: MTEB MedicalRetrieval type: C-MTEB/MedicalRetrieval config: default split: dev revision: None metrics: - type: map_at_1 value: 44.6 - type: map_at_10 value: 50.019000000000005 - type: map_at_100 value: 50.611 - type: map_at_1000 value: 50.67 - type: map_at_3 value: 48.699999999999996 - type: map_at_5 value: 49.455 - type: mrr_at_1 value: 44.800000000000004 - type: mrr_at_10 value: 50.119 - type: mrr_at_100 value: 50.711 - type: mrr_at_1000 value: 50.77 - type: mrr_at_3 value: 48.8 - type: mrr_at_5 value: 49.555 - type: ndcg_at_1 value: 44.6 - type: ndcg_at_10 value: 52.754 - type: ndcg_at_100 value: 55.935 - type: ndcg_at_1000 value: 57.607 - type: ndcg_at_3 value: 50.012 - type: ndcg_at_5 value: 51.393 - type: precision_at_1 value: 44.6 - type: precision_at_10 value: 6.140000000000001 - type: precision_at_100 value: 0.77 - type: precision_at_1000 value: 0.09 - type: precision_at_3 value: 17.933 - type: precision_at_5 value: 11.44 - type: recall_at_1 value: 44.6 - type: recall_at_10 value: 61.4 - type: recall_at_100 value: 77.0 - type: recall_at_1000 value: 90.4 - type: recall_at_3 value: 53.800000000000004 - 
type: recall_at_5 value: 57.199999999999996 - task: type: Clustering dataset: name: MTEB MedrxivClusteringP2P type: mteb/medrxiv-clustering-p2p config: default split: test revision: e7a26af6f3ae46b30dde8737f02c07b1505bcc73 metrics: - type: v_measure value: 38.192667527616315 - task: type: Clustering dataset: name: MTEB MedrxivClusteringS2S type: mteb/medrxiv-clustering-s2s config: default split: test revision: 35191c8c0dca72d8ff3efcd72aa802307d469663 metrics: - type: v_measure value: 37.44738902946689 - task: type: Reranking dataset: name: MTEB MindSmallReranking type: mteb/mind_small config: default split: test revision: 3bdac13927fdc888b903db93b2ffdbd90b295a69 metrics: - type: map value: 32.59661273103955 - type: mrr value: 33.82024242497473 - task: type: Classification dataset: name: MTEB MultilingualSentiment type: C-MTEB/MultilingualSentiment-classification config: default split: validation revision: None metrics: - type: accuracy value: 73.31333333333335 - type: f1 value: 73.0873466527602 - task: type: Retrieval dataset: name: MTEB NFCorpus type: nfcorpus config: default split: test revision: None metrics: - type: map_at_1 value: 5.471 - type: map_at_10 value: 14.142 - type: map_at_100 value: 18.179000000000002 - type: map_at_1000 value: 19.772000000000002 - type: map_at_3 value: 9.716 - type: map_at_5 value: 11.763 - type: mrr_at_1 value: 51.393 - type: mrr_at_10 value: 58.814 - type: mrr_at_100 value: 59.330000000000005 - type: mrr_at_1000 value: 59.35 - type: mrr_at_3 value: 56.398 - type: mrr_at_5 value: 58.038999999999994 - type: ndcg_at_1 value: 49.69 - type: ndcg_at_10 value: 38.615 - type: ndcg_at_100 value: 35.268 - type: ndcg_at_1000 value: 43.745 - type: ndcg_at_3 value: 43.187 - type: ndcg_at_5 value: 41.528999999999996 - type: precision_at_1 value: 51.083999999999996 - type: precision_at_10 value: 29.474 - type: precision_at_100 value: 9.167 - type: precision_at_1000 value: 2.2089999999999996 - type: precision_at_3 value: 40.351 - type: precision_at_5 value: 36.285000000000004 - type: recall_at_1 value: 5.471 - type: recall_at_10 value: 19.242 - type: recall_at_100 value: 37.14 - type: recall_at_1000 value: 68.35900000000001 - type: recall_at_3 value: 10.896 - type: recall_at_5 value: 14.75 - task: type: Retrieval dataset: name: MTEB NQ type: nq config: default split: test revision: None metrics: - type: map_at_1 value: 39.499 - type: map_at_10 value: 55.862 - type: map_at_100 value: 56.667 - type: map_at_1000 value: 56.684999999999995 - type: map_at_3 value: 51.534 - type: map_at_5 value: 54.2 - type: mrr_at_1 value: 44.351 - type: mrr_at_10 value: 58.567 - type: mrr_at_100 value: 59.099000000000004 - type: mrr_at_1000 value: 59.109 - type: mrr_at_3 value: 55.218999999999994 - type: mrr_at_5 value: 57.391999999999996 - type: ndcg_at_1 value: 44.322 - type: ndcg_at_10 value: 63.535 - type: ndcg_at_100 value: 66.654 - type: ndcg_at_1000 value: 66.991 - type: ndcg_at_3 value: 55.701 - type: ndcg_at_5 value: 60.06700000000001 - type: precision_at_1 value: 44.322 - type: precision_at_10 value: 10.026 - type: precision_at_100 value: 1.18 - type: precision_at_1000 value: 0.121 - type: precision_at_3 value: 24.865000000000002 - type: precision_at_5 value: 17.48 - type: recall_at_1 value: 39.499 - type: recall_at_10 value: 84.053 - type: recall_at_100 value: 97.11 - type: recall_at_1000 value: 99.493 - type: recall_at_3 value: 64.091 - type: recall_at_5 value: 74.063 - task: type: PairClassification dataset: name: MTEB Ocnli type: C-MTEB/OCNLI config: default split: validation 
revision: None metrics: - type: cos_sim_accuracy value: 61.18029236599891 - type: cos_sim_ap value: 64.18398769398412 - type: cos_sim_f1 value: 67.96347757046446 - type: cos_sim_precision value: 54.4529262086514 - type: cos_sim_recall value: 90.3907074973601 - type: dot_accuracy value: 61.18029236599891 - type: dot_ap value: 64.18393484706077 - type: dot_f1 value: 67.96347757046446 - type: dot_precision value: 54.4529262086514 - type: dot_recall value: 90.3907074973601 - type: euclidean_accuracy value: 61.18029236599891 - type: euclidean_ap value: 64.18395024821486 - type: euclidean_f1 value: 67.96347757046446 - type: euclidean_precision value: 54.4529262086514 - type: euclidean_recall value: 90.3907074973601 - type: manhattan_accuracy value: 61.451001624255554 - type: manhattan_ap value: 64.38232708763513 - type: manhattan_f1 value: 68.05860805860804 - type: manhattan_precision value: 52.10319685922602 - type: manhattan_recall value: 98.09926082365365 - type: max_accuracy value: 61.451001624255554 - type: max_ap value: 64.38232708763513 - type: max_f1 value: 68.05860805860804 - task: type: Classification dataset: name: MTEB OnlineShopping type: C-MTEB/OnlineShopping-classification config: default split: test revision: None metrics: - type: accuracy value: 92.19000000000001 - type: ap value: 89.73918431886767 - type: f1 value: 92.17175032574507 - task: type: STS dataset: name: MTEB PAWSX type: C-MTEB/PAWSX config: default split: test revision: None metrics: - type: cos_sim_pearson value: 15.079320253752224 - type: cos_sim_spearman value: 16.813772504404263 - type: euclidean_pearson value: 19.476541162041762 - type: euclidean_spearman value: 16.813772498098782 - type: manhattan_pearson value: 19.497429832915277 - type: manhattan_spearman value: 16.869600674180607 - task: type: STS dataset: name: MTEB QBQTC type: C-MTEB/QBQTC config: default split: test revision: None metrics: - type: cos_sim_pearson value: 30.36139599797913 - type: cos_sim_spearman value: 31.80296402851347 - type: euclidean_pearson value: 30.10387888252793 - type: euclidean_spearman value: 31.80297780103808 - type: manhattan_pearson value: 30.86720382849436 - type: manhattan_spearman value: 32.70491131366606 - task: type: Retrieval dataset: name: MTEB QuoraRetrieval type: quora config: default split: test revision: None metrics: - type: map_at_1 value: 71.911 - type: map_at_10 value: 86.087 - type: map_at_100 value: 86.701 - type: map_at_1000 value: 86.715 - type: map_at_3 value: 83.231 - type: map_at_5 value: 85.051 - type: mrr_at_1 value: 82.75 - type: mrr_at_10 value: 88.759 - type: mrr_at_100 value: 88.844 - type: mrr_at_1000 value: 88.844 - type: mrr_at_3 value: 87.935 - type: mrr_at_5 value: 88.504 - type: ndcg_at_1 value: 82.75 - type: ndcg_at_10 value: 89.605 - type: ndcg_at_100 value: 90.664 - type: ndcg_at_1000 value: 90.733 - type: ndcg_at_3 value: 87.03 - type: ndcg_at_5 value: 88.473 - type: precision_at_1 value: 82.75 - type: precision_at_10 value: 13.575000000000001 - type: precision_at_100 value: 1.539 - type: precision_at_1000 value: 0.157 - type: precision_at_3 value: 38.153 - type: precision_at_5 value: 25.008000000000003 - type: recall_at_1 value: 71.911 - type: recall_at_10 value: 96.261 - type: recall_at_100 value: 99.72800000000001 - type: recall_at_1000 value: 99.993 - type: recall_at_3 value: 88.762 - type: recall_at_5 value: 92.949 - task: type: Clustering dataset: name: MTEB RedditClustering type: mteb/reddit-clustering config: default split: test revision: 24640382cdbf8abc73003fb0fa6d111a705499eb 
metrics: - type: v_measure value: 57.711581165572376 - task: type: Clustering dataset: name: MTEB RedditClusteringP2P type: mteb/reddit-clustering-p2p config: default split: test revision: 282350215ef01743dc01b456c7f5241fa8937f16 metrics: - type: v_measure value: 66.48938885750297 - task: type: Retrieval dataset: name: MTEB SCIDOCS type: scidocs config: default split: test revision: None metrics: - type: map_at_1 value: 3.7379999999999995 - type: map_at_10 value: 9.261 - type: map_at_100 value: 11.001 - type: map_at_1000 value: 11.262 - type: map_at_3 value: 6.816 - type: map_at_5 value: 8.0 - type: mrr_at_1 value: 18.4 - type: mrr_at_10 value: 28.755999999999997 - type: mrr_at_100 value: 29.892000000000003 - type: mrr_at_1000 value: 29.961 - type: mrr_at_3 value: 25.467000000000002 - type: mrr_at_5 value: 27.332 - type: ndcg_at_1 value: 18.4 - type: ndcg_at_10 value: 16.296 - type: ndcg_at_100 value: 23.52 - type: ndcg_at_1000 value: 28.504 - type: ndcg_at_3 value: 15.485 - type: ndcg_at_5 value: 13.471 - type: precision_at_1 value: 18.4 - type: precision_at_10 value: 8.469999999999999 - type: precision_at_100 value: 1.8950000000000002 - type: precision_at_1000 value: 0.309 - type: precision_at_3 value: 14.6 - type: precision_at_5 value: 11.84 - type: recall_at_1 value: 3.7379999999999995 - type: recall_at_10 value: 17.185 - type: recall_at_100 value: 38.397 - type: recall_at_1000 value: 62.798 - type: recall_at_3 value: 8.896999999999998 - type: recall_at_5 value: 12.021999999999998 - task: type: STS dataset: name: MTEB SICK-R type: mteb/sickr-sts config: default split: test revision: a6ea5a8cab320b040a23452cc28066d9beae2cee metrics: - type: cos_sim_pearson value: 86.43977757480083 - type: cos_sim_spearman value: 82.64182475199533 - type: euclidean_pearson value: 83.71756009999591 - type: euclidean_spearman value: 82.64182331395057 - type: manhattan_pearson value: 83.8028936913025 - type: manhattan_spearman value: 82.71024597804252 - task: type: STS dataset: name: MTEB STS12 type: mteb/sts12-sts config: default split: test revision: a0d554a64d88156834ff5ae9920b964011b16384 metrics: - type: cos_sim_pearson value: 86.85653060698912 - type: cos_sim_spearman value: 79.65598885228324 - type: euclidean_pearson value: 83.1205137628455 - type: euclidean_spearman value: 79.65629387709038 - type: manhattan_pearson value: 83.71108853545837 - type: manhattan_spearman value: 80.25617619716708 - task: type: STS dataset: name: MTEB STS13 type: mteb/sts13-sts config: default split: test revision: 7e90230a92c190f1bf69ae9002b8cea547a64cca metrics: - type: cos_sim_pearson value: 88.22921688565664 - type: cos_sim_spearman value: 88.42662103041957 - type: euclidean_pearson value: 87.91679798473325 - type: euclidean_spearman value: 88.42662103041957 - type: manhattan_pearson value: 88.16927537961303 - type: manhattan_spearman value: 88.81581680062541 - task: type: STS dataset: name: MTEB STS14 type: mteb/sts14-sts config: default split: test revision: 6031580fec1f6af667f0bd2da0a551cf4f0b2375 metrics: - type: cos_sim_pearson value: 86.77261424554293 - type: cos_sim_spearman value: 84.53930146434155 - type: euclidean_pearson value: 85.67420491389697 - type: euclidean_spearman value: 84.53929771783851 - type: manhattan_pearson value: 85.74306784515618 - type: manhattan_spearman value: 84.7399304675314 - task: type: STS dataset: name: MTEB STS15 type: mteb/sts15-sts config: default split: test revision: ae752c7c21bf194d8b67fd573edf7ae58183cbe3 metrics: - type: cos_sim_pearson value: 89.86138395166455 - type: 
cos_sim_spearman value: 90.42577823022054 - type: euclidean_pearson value: 89.8787763797515 - type: euclidean_spearman value: 90.42577823022054 - type: manhattan_pearson value: 89.9592937492158 - type: manhattan_spearman value: 90.63535505335524 - task: type: STS dataset: name: MTEB STS16 type: mteb/sts16-sts config: default split: test revision: 4d8694f8f0e0100860b497b999b3dbed754a0513 metrics: - type: cos_sim_pearson value: 86.5176674585941 - type: cos_sim_spearman value: 87.6842917085397 - type: euclidean_pearson value: 86.70213081520711 - type: euclidean_spearman value: 87.6842917085397 - type: manhattan_pearson value: 86.83702628983627 - type: manhattan_spearman value: 87.87791000374443 - task: type: STS dataset: name: MTEB STS17 (ko-ko) type: mteb/sts17-crosslingual-sts config: ko-ko split: test revision: af5e6fb845001ecf41f4c1e033ce921939a2a68d metrics: - type: cos_sim_pearson value: 83.86395454805867 - type: cos_sim_spearman value: 83.69454595252267 - type: euclidean_pearson value: 83.04743892608313 - type: euclidean_spearman value: 83.69454026433006 - type: manhattan_pearson value: 83.4032095553322 - type: manhattan_spearman value: 84.11527379013802 - task: type: STS dataset: name: MTEB STS17 (ar-ar) type: mteb/sts17-crosslingual-sts config: ar-ar split: test revision: af5e6fb845001ecf41f4c1e033ce921939a2a68d metrics: - type: cos_sim_pearson value: 81.80249894729546 - type: cos_sim_spearman value: 81.87004960533409 - type: euclidean_pearson value: 80.0392760044179 - type: euclidean_spearman value: 81.87004960533409 - type: manhattan_pearson value: 80.38096542355912 - type: manhattan_spearman value: 82.40774679630341 - task: type: STS dataset: name: MTEB STS17 (en-ar) type: mteb/sts17-crosslingual-sts config: en-ar split: test revision: af5e6fb845001ecf41f4c1e033ce921939a2a68d metrics: - type: cos_sim_pearson value: 77.6158201787172 - type: cos_sim_spearman value: 77.934651044009 - type: euclidean_pearson value: 77.7874683895269 - type: euclidean_spearman value: 77.934651044009 - type: manhattan_pearson value: 78.36151849193052 - type: manhattan_spearman value: 78.52439586349938 - task: type: STS dataset: name: MTEB STS17 (en-de) type: mteb/sts17-crosslingual-sts config: en-de split: test revision: af5e6fb845001ecf41f4c1e033ce921939a2a68d metrics: - type: cos_sim_pearson value: 87.04363311392207 - type: cos_sim_spearman value: 87.30483659369973 - type: euclidean_pearson value: 87.62634489502616 - type: euclidean_spearman value: 87.30483659369973 - type: manhattan_pearson value: 88.02340837141445 - type: manhattan_spearman value: 87.55012003294 - task: type: STS dataset: name: MTEB STS17 (en-en) type: mteb/sts17-crosslingual-sts config: en-en split: test revision: af5e6fb845001ecf41f4c1e033ce921939a2a68d metrics: - type: cos_sim_pearson value: 91.69172851958248 - type: cos_sim_spearman value: 91.7546879482416 - type: euclidean_pearson value: 91.84843039183963 - type: euclidean_spearman value: 91.7546879482416 - type: manhattan_pearson value: 91.72325753804357 - type: manhattan_spearman value: 91.55330259513397 - task: type: STS dataset: name: MTEB STS17 (en-tr) type: mteb/sts17-crosslingual-sts config: en-tr split: test revision: af5e6fb845001ecf41f4c1e033ce921939a2a68d metrics: - type: cos_sim_pearson value: 73.95572901084864 - type: cos_sim_spearman value: 72.56217821552626 - type: euclidean_pearson value: 74.24242980323574 - type: euclidean_spearman value: 72.56217821552626 - type: manhattan_pearson value: 74.57473362519922 - type: manhattan_spearman value: 72.76048826648497 - 
task: type: STS dataset: name: MTEB STS17 (es-en) type: mteb/sts17-crosslingual-sts config: es-en split: test revision: af5e6fb845001ecf41f4c1e033ce921939a2a68d metrics: - type: cos_sim_pearson value: 86.93329396008296 - type: cos_sim_spearman value: 88.2406635486219 - type: euclidean_pearson value: 87.49687343908533 - type: euclidean_spearman value: 88.2406635486219 - type: manhattan_pearson value: 88.14088309231084 - type: manhattan_spearman value: 88.93314020908534 - task: type: STS dataset: name: MTEB STS17 (es-es) type: mteb/sts17-crosslingual-sts config: es-es split: test revision: af5e6fb845001ecf41f4c1e033ce921939a2a68d metrics: - type: cos_sim_pearson value: 88.70124451546057 - type: cos_sim_spearman value: 87.45988160052252 - type: euclidean_pearson value: 88.44395505247728 - type: euclidean_spearman value: 87.45988160052252 - type: manhattan_pearson value: 88.69269783495425 - type: manhattan_spearman value: 87.65383425621 - task: type: STS dataset: name: MTEB STS17 (fr-en) type: mteb/sts17-crosslingual-sts config: fr-en split: test revision: af5e6fb845001ecf41f4c1e033ce921939a2a68d metrics: - type: cos_sim_pearson value: 87.64109149761346 - type: cos_sim_spearman value: 88.06459637689733 - type: euclidean_pearson value: 88.02313315797703 - type: euclidean_spearman value: 88.06459637689733 - type: manhattan_pearson value: 88.28328539133253 - type: manhattan_spearman value: 88.06605708379142 - task: type: STS dataset: name: MTEB STS17 (it-en) type: mteb/sts17-crosslingual-sts config: it-en split: test revision: af5e6fb845001ecf41f4c1e033ce921939a2a68d metrics: - type: cos_sim_pearson value: 88.9040028177525 - type: cos_sim_spearman value: 89.68152202933464 - type: euclidean_pearson value: 89.23684469601253 - type: euclidean_spearman value: 89.68152202933464 - type: manhattan_pearson value: 89.59504307277454 - type: manhattan_spearman value: 89.88060100313582 - task: type: STS dataset: name: MTEB STS17 (nl-en) type: mteb/sts17-crosslingual-sts config: nl-en split: test revision: af5e6fb845001ecf41f4c1e033ce921939a2a68d metrics: - type: cos_sim_pearson value: 87.69891585325125 - type: cos_sim_spearman value: 88.25252785071736 - type: euclidean_pearson value: 87.99932873748662 - type: euclidean_spearman value: 88.25252785071736 - type: manhattan_pearson value: 88.26959683009446 - type: manhattan_spearman value: 88.32583227300715 - task: type: STS dataset: name: MTEB STS22 (en) type: mteb/sts22-crosslingual-sts config: en split: test revision: 6d1ba47164174a496b7fa5d3569dae26a6813b80 metrics: - type: cos_sim_pearson value: 67.53235909794135 - type: cos_sim_spearman value: 66.97521740529574 - type: euclidean_pearson value: 68.19502223613912 - type: euclidean_spearman value: 66.97521740529574 - type: manhattan_pearson value: 68.39070714774539 - type: manhattan_spearman value: 67.1072812364868 - task: type: STS dataset: name: MTEB STS22 (de) type: mteb/sts22-crosslingual-sts config: de split: test revision: 6d1ba47164174a496b7fa5d3569dae26a6813b80 metrics: - type: cos_sim_pearson value: 43.715742021204775 - type: cos_sim_spearman value: 49.12255971271453 - type: euclidean_pearson value: 40.76848562610837 - type: euclidean_spearman value: 49.12255971271453 - type: manhattan_pearson value: 40.92204625614112 - type: manhattan_spearman value: 49.23333793661129 - task: type: STS dataset: name: MTEB STS22 (es) type: mteb/sts22-crosslingual-sts config: es split: test revision: 6d1ba47164174a496b7fa5d3569dae26a6813b80 metrics: - type: cos_sim_pearson value: 63.35268345563588 - type: 
cos_sim_spearman value: 66.99661626042061 - type: euclidean_pearson value: 65.85589122857066 - type: euclidean_spearman value: 66.99661626042061 - type: manhattan_pearson value: 66.78454301512294 - type: manhattan_spearman value: 67.17570330149233 - task: type: STS dataset: name: MTEB STS22 (pl) type: mteb/sts22-crosslingual-sts config: pl split: test revision: 6d1ba47164174a496b7fa5d3569dae26a6813b80 metrics: - type: cos_sim_pearson value: 33.36599908204445 - type: cos_sim_spearman value: 39.20768331939503 - type: euclidean_pearson value: 22.16066769530468 - type: euclidean_spearman value: 39.20768331939503 - type: manhattan_pearson value: 22.386053195546022 - type: manhattan_spearman value: 39.70172817465986 - task: type: STS dataset: name: MTEB STS22 (tr) type: mteb/sts22-crosslingual-sts config: tr split: test revision: 6d1ba47164174a496b7fa5d3569dae26a6813b80 metrics: - type: cos_sim_pearson value: 63.06813956986753 - type: cos_sim_spearman value: 68.72065117995668 - type: euclidean_pearson value: 66.97373456344194 - type: euclidean_spearman value: 68.72065117995668 - type: manhattan_pearson value: 67.34907265771595 - type: manhattan_spearman value: 68.73705769957843 - task: type: STS dataset: name: MTEB STS22 (ar) type: mteb/sts22-crosslingual-sts config: ar split: test revision: 6d1ba47164174a496b7fa5d3569dae26a6813b80 metrics: - type: cos_sim_pearson value: 47.17664865207108 - type: cos_sim_spearman value: 54.115568323148864 - type: euclidean_pearson value: 48.56418162879182 - type: euclidean_spearman value: 54.115568323148864 - type: manhattan_pearson value: 48.85951643453165 - type: manhattan_spearman value: 54.13599784169052 - task: type: STS dataset: name: MTEB STS22 (ru) type: mteb/sts22-crosslingual-sts config: ru split: test revision: 6d1ba47164174a496b7fa5d3569dae26a6813b80 metrics: - type: cos_sim_pearson value: 55.87514136275987 - type: cos_sim_spearman value: 60.82923573674973 - type: euclidean_pearson value: 53.724183308215615 - type: euclidean_spearman value: 60.82923573674973 - type: manhattan_pearson value: 53.954305573102445 - type: manhattan_spearman value: 60.957483900644526 - task: type: STS dataset: name: MTEB STS22 (zh) type: mteb/sts22-crosslingual-sts config: zh split: test revision: 6d1ba47164174a496b7fa5d3569dae26a6813b80 metrics: - type: cos_sim_pearson value: 59.55001413648593 - type: cos_sim_spearman value: 63.395777040381276 - type: euclidean_pearson value: 59.869972550293305 - type: euclidean_spearman value: 63.395777040381276 - type: manhattan_pearson value: 61.16195496847885 - type: manhattan_spearman value: 63.41968682525581 - task: type: STS dataset: name: MTEB STS22 (fr) type: mteb/sts22-crosslingual-sts config: fr split: test revision: 6d1ba47164174a496b7fa5d3569dae26a6813b80 metrics: - type: cos_sim_pearson value: 79.13334972675852 - type: cos_sim_spearman value: 79.86263136371802 - type: euclidean_pearson value: 78.2433603592541 - type: euclidean_spearman value: 79.86263136371802 - type: manhattan_pearson value: 78.87337106318412 - type: manhattan_spearman value: 80.31230584758441 - task: type: STS dataset: name: MTEB STS22 (de-en) type: mteb/sts22-crosslingual-sts config: de-en split: test revision: 6d1ba47164174a496b7fa5d3569dae26a6813b80 metrics: - type: cos_sim_pearson value: 63.559700748242356 - type: cos_sim_spearman value: 60.92342109509558 - type: euclidean_pearson value: 66.07256437521119 - type: euclidean_spearman value: 60.92342109509558 - type: manhattan_pearson value: 67.72769744612663 - type: manhattan_spearman value: 
59.64714507774168 - task: type: STS dataset: name: MTEB STS22 (es-en) type: mteb/sts22-crosslingual-sts config: es-en split: test revision: 6d1ba47164174a496b7fa5d3569dae26a6813b80 metrics: - type: cos_sim_pearson value: 73.93491616145891 - type: cos_sim_spearman value: 75.84242594400156 - type: euclidean_pearson value: 74.87279745626121 - type: euclidean_spearman value: 75.84242594400156 - type: manhattan_pearson value: 76.47764144677505 - type: manhattan_spearman value: 77.08411157845183 - task: type: STS dataset: name: MTEB STS22 (it) type: mteb/sts22-crosslingual-sts config: it split: test revision: 6d1ba47164174a496b7fa5d3569dae26a6813b80 metrics: - type: cos_sim_pearson value: 72.75624124540954 - type: cos_sim_spearman value: 75.8667941654703 - type: euclidean_pearson value: 73.74314588451925 - type: euclidean_spearman value: 75.8667941654703 - type: manhattan_pearson value: 73.99641425871518 - type: manhattan_spearman value: 76.1982840205817 - task: type: STS dataset: name: MTEB STS22 (pl-en) type: mteb/sts22-crosslingual-sts config: pl-en split: test revision: 6d1ba47164174a496b7fa5d3569dae26a6813b80 metrics: - type: cos_sim_pearson value: 75.20898141298767 - type: cos_sim_spearman value: 73.18060375331436 - type: euclidean_pearson value: 75.44489280944619 - type: euclidean_spearman value: 73.18060375331436 - type: manhattan_pearson value: 75.65451039552286 - type: manhattan_spearman value: 72.97744006123156 - task: type: STS dataset: name: MTEB STS22 (zh-en) type: mteb/sts22-crosslingual-sts config: zh-en split: test revision: 6d1ba47164174a496b7fa5d3569dae26a6813b80 metrics: - type: cos_sim_pearson value: 72.04278252247816 - type: cos_sim_spearman value: 71.8846446821539 - type: euclidean_pearson value: 73.16043307050612 - type: euclidean_spearman value: 71.8846446821539 - type: manhattan_pearson value: 74.76905116839777 - type: manhattan_spearman value: 72.66237093518471 - task: type: STS dataset: name: MTEB STS22 (es-it) type: mteb/sts22-crosslingual-sts config: es-it split: test revision: 6d1ba47164174a496b7fa5d3569dae26a6813b80 metrics: - type: cos_sim_pearson value: 71.71033173838558 - type: cos_sim_spearman value: 75.043122881885 - type: euclidean_pearson value: 72.77579680345087 - type: euclidean_spearman value: 75.043122881885 - type: manhattan_pearson value: 72.99901534854922 - type: manhattan_spearman value: 75.15418335015957 - task: type: STS dataset: name: MTEB STS22 (de-fr) type: mteb/sts22-crosslingual-sts config: de-fr split: test revision: 6d1ba47164174a496b7fa5d3569dae26a6813b80 metrics: - type: cos_sim_pearson value: 55.75733447190482 - type: cos_sim_spearman value: 61.38968334176681 - type: euclidean_pearson value: 55.479231520643744 - type: euclidean_spearman value: 61.38968334176681 - type: manhattan_pearson value: 56.05230571465244 - type: manhattan_spearman value: 62.69383054007398 - task: type: STS dataset: name: MTEB STS22 (de-pl) type: mteb/sts22-crosslingual-sts config: de-pl split: test revision: 6d1ba47164174a496b7fa5d3569dae26a6813b80 metrics: - type: cos_sim_pearson value: 41.72244325050302 - type: cos_sim_spearman value: 54.47476909084119 - type: euclidean_pearson value: 43.94629756436873 - type: euclidean_spearman value: 54.47476909084119 - type: manhattan_pearson value: 46.36533046394657 - type: manhattan_spearman value: 54.87509243633636 - task: type: STS dataset: name: MTEB STS22 (fr-pl) type: mteb/sts22-crosslingual-sts config: fr-pl split: test revision: 6d1ba47164174a496b7fa5d3569dae26a6813b80 metrics: - type: cos_sim_pearson value: 
70.75183711835146 - type: cos_sim_spearman value: 84.51542547285167 - type: euclidean_pearson value: 71.84188960126669 - type: euclidean_spearman value: 84.51542547285167 - type: manhattan_pearson value: 73.94847166379994 - type: manhattan_spearman value: 84.51542547285167 - task: type: STS dataset: name: MTEB STSB type: C-MTEB/STSB config: default split: test revision: None metrics: - type: cos_sim_pearson value: 81.78690149086131 - type: cos_sim_spearman value: 81.81202616916873 - type: euclidean_pearson value: 80.98792254251062 - type: euclidean_spearman value: 81.81202616916873 - type: manhattan_pearson value: 81.46953021346732 - type: manhattan_spearman value: 82.34259562492315 - task: type: STS dataset: name: MTEB STSBenchmark type: mteb/stsbenchmark-sts config: default split: test revision: b0fddb56ed78048fa8b90373c8a3cfc37b684831 metrics: - type: cos_sim_pearson value: 87.68273341294419 - type: cos_sim_spearman value: 88.59927164210958 - type: euclidean_pearson value: 88.10745681818025 - type: euclidean_spearman value: 88.59927164210958 - type: manhattan_pearson value: 88.25166703784649 - type: manhattan_spearman value: 88.85343247873482 - task: type: Reranking dataset: name: MTEB SciDocsRR type: mteb/scidocs-reranking config: default split: test revision: d3c5e1fc0b855ab6097bf1cda04dd73947d7caab metrics: - type: map value: 86.3340463345719 - type: mrr value: 96.5182611506141 - task: type: Retrieval dataset: name: MTEB SciFact type: scifact config: default split: test revision: None metrics: - type: map_at_1 value: 60.967000000000006 - type: map_at_10 value: 71.873 - type: map_at_100 value: 72.271 - type: map_at_1000 value: 72.292 - type: map_at_3 value: 69.006 - type: map_at_5 value: 70.856 - type: mrr_at_1 value: 63.666999999999994 - type: mrr_at_10 value: 72.929 - type: mrr_at_100 value: 73.26 - type: mrr_at_1000 value: 73.282 - type: mrr_at_3 value: 71.111 - type: mrr_at_5 value: 72.328 - type: ndcg_at_1 value: 63.666999999999994 - type: ndcg_at_10 value: 76.414 - type: ndcg_at_100 value: 78.152 - type: ndcg_at_1000 value: 78.604 - type: ndcg_at_3 value: 71.841 - type: ndcg_at_5 value: 74.435 - type: precision_at_1 value: 63.666999999999994 - type: precision_at_10 value: 10.067 - type: precision_at_100 value: 1.097 - type: precision_at_1000 value: 0.11299999999999999 - type: precision_at_3 value: 27.667 - type: precision_at_5 value: 18.467 - type: recall_at_1 value: 60.967000000000006 - type: recall_at_10 value: 88.922 - type: recall_at_100 value: 96.667 - type: recall_at_1000 value: 100.0 - type: recall_at_3 value: 77.228 - type: recall_at_5 value: 83.428 - task: type: PairClassification dataset: name: MTEB SprintDuplicateQuestions type: mteb/sprintduplicatequestions-pairclassification config: default split: test revision: d66bd1f72af766a5cc4b0ca5e00c162f89e8cc46 metrics: - type: cos_sim_accuracy value: 99.82277227722773 - type: cos_sim_ap value: 95.66279851444406 - type: cos_sim_f1 value: 90.9367088607595 - type: cos_sim_precision value: 92.1025641025641 - type: cos_sim_recall value: 89.8 - type: dot_accuracy value: 99.82277227722773 - type: dot_ap value: 95.66279851444406 - type: dot_f1 value: 90.9367088607595 - type: dot_precision value: 92.1025641025641 - type: dot_recall value: 89.8 - type: euclidean_accuracy value: 99.82277227722773 - type: euclidean_ap value: 95.66279851444406 - type: euclidean_f1 value: 90.9367088607595 - type: euclidean_precision value: 92.1025641025641 - type: euclidean_recall value: 89.8 - type: manhattan_accuracy value: 99.82673267326733 - type: 
manhattan_ap value: 95.86094873177069 - type: manhattan_f1 value: 91.26788357178096 - type: manhattan_precision value: 90.06815968841285 - type: manhattan_recall value: 92.5 - type: max_accuracy value: 99.82673267326733 - type: max_ap value: 95.86094873177069 - type: max_f1 value: 91.26788357178096 - task: type: Clustering dataset: name: MTEB StackExchangeClustering type: mteb/stackexchange-clustering config: default split: test revision: 6cbc1f7b2bc0622f2e39d2c77fa502909748c259 metrics: - type: v_measure value: 73.09533925852372 - task: type: Clustering dataset: name: MTEB StackExchangeClusteringP2P type: mteb/stackexchange-clustering-p2p config: default split: test revision: 815ca46b2622cec33ccafc3735d572c266efdb44 metrics: - type: v_measure value: 45.90745648090035 - task: type: Reranking dataset: name: MTEB StackOverflowDupQuestions type: mteb/stackoverflowdupquestions-reranking config: default split: test revision: e185fbe320c72810689fc5848eb6114e1ef5ec69 metrics: - type: map value: 54.91147686504404 - type: mrr value: 56.03900082760377 - task: type: Summarization dataset: name: MTEB SummEval type: mteb/summeval config: default split: test revision: cda12ad7615edc362dbf25a00fdd61d3b1eaf93c metrics: - type: cos_sim_pearson value: 31.46908662038217 - type: cos_sim_spearman value: 31.40325730367437 - type: dot_pearson value: 31.469083969291894 - type: dot_spearman value: 31.40325730367437 - task: type: Reranking dataset: name: MTEB T2Reranking type: C-MTEB/T2Reranking config: default split: dev revision: None metrics: - type: map value: 66.90300783402137 - type: mrr value: 77.06451972574179 - task: type: Retrieval dataset: name: MTEB T2Retrieval type: C-MTEB/T2Retrieval config: default split: dev revision: None metrics: - type: map_at_1 value: 25.82 - type: map_at_10 value: 72.32300000000001 - type: map_at_100 value: 76.198 - type: map_at_1000 value: 76.281 - type: map_at_3 value: 50.719 - type: map_at_5 value: 62.326 - type: mrr_at_1 value: 86.599 - type: mrr_at_10 value: 89.751 - type: mrr_at_100 value: 89.876 - type: mrr_at_1000 value: 89.88000000000001 - type: mrr_at_3 value: 89.151 - type: mrr_at_5 value: 89.519 - type: ndcg_at_1 value: 86.599 - type: ndcg_at_10 value: 80.676 - type: ndcg_at_100 value: 85.03 - type: ndcg_at_1000 value: 85.854 - type: ndcg_at_3 value: 82.057 - type: ndcg_at_5 value: 80.537 - type: precision_at_1 value: 86.599 - type: precision_at_10 value: 40.373 - type: precision_at_100 value: 4.95 - type: precision_at_1000 value: 0.514 - type: precision_at_3 value: 71.918 - type: precision_at_5 value: 60.246 - type: recall_at_1 value: 25.82 - type: recall_at_10 value: 79.905 - type: recall_at_100 value: 93.88499999999999 - type: recall_at_1000 value: 98.073 - type: recall_at_3 value: 52.623 - type: recall_at_5 value: 66.233 - task: type: Classification dataset: name: MTEB TNews type: C-MTEB/TNews-classification config: default split: validation revision: None metrics: - type: accuracy value: 47.050000000000004 - type: f1 value: 45.704071498353294 - task: type: Retrieval dataset: name: MTEB TRECCOVID type: trec-covid config: default split: test revision: None metrics: - type: map_at_1 value: 0.243 - type: map_at_10 value: 2.278 - type: map_at_100 value: 14.221 - type: map_at_1000 value: 33.474 - type: map_at_3 value: 0.7270000000000001 - type: map_at_5 value: 1.183 - type: mrr_at_1 value: 94.0 - type: mrr_at_10 value: 97.0 - type: mrr_at_100 value: 97.0 - type: mrr_at_1000 value: 97.0 - type: mrr_at_3 value: 97.0 - type: mrr_at_5 value: 97.0 - type: ndcg_at_1 value: 
90.0 - type: ndcg_at_10 value: 87.249 - type: ndcg_at_100 value: 67.876 - type: ndcg_at_1000 value: 59.205 - type: ndcg_at_3 value: 90.12299999999999 - type: ndcg_at_5 value: 89.126 - type: precision_at_1 value: 94.0 - type: precision_at_10 value: 90.8 - type: precision_at_100 value: 69.28 - type: precision_at_1000 value: 25.85 - type: precision_at_3 value: 94.667 - type: precision_at_5 value: 92.80000000000001 - type: recall_at_1 value: 0.243 - type: recall_at_10 value: 2.392 - type: recall_at_100 value: 16.982 - type: recall_at_1000 value: 55.214 - type: recall_at_3 value: 0.745 - type: recall_at_5 value: 1.2229999999999999 - task: type: BitextMining dataset: name: MTEB Tatoeba (sqi-eng) type: mteb/tatoeba-bitext-mining config: sqi-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 70.5 - type: f1 value: 67.05501804646966 - type: precision value: 65.73261904761904 - type: recall value: 70.5 - task: type: BitextMining dataset: name: MTEB Tatoeba (fry-eng) type: mteb/tatoeba-bitext-mining config: fry-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 75.14450867052022 - type: f1 value: 70.98265895953759 - type: precision value: 69.26782273603082 - type: recall value: 75.14450867052022 - task: type: BitextMining dataset: name: MTEB Tatoeba (kur-eng) type: mteb/tatoeba-bitext-mining config: kur-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 33.170731707317074 - type: f1 value: 29.92876500193573 - type: precision value: 28.669145894755648 - type: recall value: 33.170731707317074 - task: type: BitextMining dataset: name: MTEB Tatoeba (tur-eng) type: mteb/tatoeba-bitext-mining config: tur-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 95.5 - type: f1 value: 94.13333333333333 - type: precision value: 93.46666666666667 - type: recall value: 95.5 - task: type: BitextMining dataset: name: MTEB Tatoeba (deu-eng) type: mteb/tatoeba-bitext-mining config: deu-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 99.6 - type: f1 value: 99.46666666666665 - type: precision value: 99.4 - type: recall value: 99.6 - task: type: BitextMining dataset: name: MTEB Tatoeba (nld-eng) type: mteb/tatoeba-bitext-mining config: nld-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 97.2 - type: f1 value: 96.39999999999999 - type: precision value: 96.0 - type: recall value: 97.2 - task: type: BitextMining dataset: name: MTEB Tatoeba (ron-eng) type: mteb/tatoeba-bitext-mining config: ron-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 94.5 - type: f1 value: 92.99666666666667 - type: precision value: 92.31666666666666 - type: recall value: 94.5 - task: type: BitextMining dataset: name: MTEB Tatoeba (ang-eng) type: mteb/tatoeba-bitext-mining config: ang-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 85.82089552238806 - type: f1 value: 81.59203980099502 - type: precision value: 79.60199004975124 - type: recall value: 85.82089552238806 - task: type: BitextMining dataset: name: MTEB Tatoeba (ido-eng) type: mteb/tatoeba-bitext-mining config: ido-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 79.5 - type: f1 value: 75.11246031746032 - type: precision value: 73.38734126984127 - 
type: recall value: 79.5 - task: type: BitextMining dataset: name: MTEB Tatoeba (jav-eng) type: mteb/tatoeba-bitext-mining config: jav-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 44.390243902439025 - type: f1 value: 38.48896631823461 - type: precision value: 36.57220286488579 - type: recall value: 44.390243902439025 - task: type: BitextMining dataset: name: MTEB Tatoeba (isl-eng) type: mteb/tatoeba-bitext-mining config: isl-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 90.2 - type: f1 value: 87.57333333333334 - type: precision value: 86.34166666666665 - type: recall value: 90.2 - task: type: BitextMining dataset: name: MTEB Tatoeba (slv-eng) type: mteb/tatoeba-bitext-mining config: slv-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 88.82138517618469 - type: f1 value: 85.98651854423423 - type: precision value: 84.79257073424753 - type: recall value: 88.82138517618469 - task: type: BitextMining dataset: name: MTEB Tatoeba (cym-eng) type: mteb/tatoeba-bitext-mining config: cym-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 77.04347826086956 - type: f1 value: 72.32108147606868 - type: precision value: 70.37207357859532 - type: recall value: 77.04347826086956 - task: type: BitextMining dataset: name: MTEB Tatoeba (kaz-eng) type: mteb/tatoeba-bitext-mining config: kaz-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 53.04347826086957 - type: f1 value: 46.88868184955141 - type: precision value: 44.71730105643149 - type: recall value: 53.04347826086957 - task: type: BitextMining dataset: name: MTEB Tatoeba (est-eng) type: mteb/tatoeba-bitext-mining config: est-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 68.0 - type: f1 value: 62.891813186813195 - type: precision value: 61.037906162464985 - type: recall value: 68.0 - task: type: BitextMining dataset: name: MTEB Tatoeba (heb-eng) type: mteb/tatoeba-bitext-mining config: heb-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 86.3 - type: f1 value: 82.82000000000001 - type: precision value: 81.25690476190475 - type: recall value: 86.3 - task: type: BitextMining dataset: name: MTEB Tatoeba (gla-eng) type: mteb/tatoeba-bitext-mining config: gla-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 68.87816646562122 - type: f1 value: 63.53054933272062 - type: precision value: 61.47807816331196 - type: recall value: 68.87816646562122 - task: type: BitextMining dataset: name: MTEB Tatoeba (mar-eng) type: mteb/tatoeba-bitext-mining config: mar-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 74.4 - type: f1 value: 68.99388888888889 - type: precision value: 66.81035714285713 - type: recall value: 74.4 - task: type: BitextMining dataset: name: MTEB Tatoeba (lat-eng) type: mteb/tatoeba-bitext-mining config: lat-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 90.5 - type: f1 value: 87.93666666666667 - type: precision value: 86.825 - type: recall value: 90.5 - task: type: BitextMining dataset: name: MTEB Tatoeba (bel-eng) type: mteb/tatoeba-bitext-mining config: bel-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: 
accuracy value: 90.7 - type: f1 value: 88.09 - type: precision value: 86.85833333333333 - type: recall value: 90.7 - task: type: BitextMining dataset: name: MTEB Tatoeba (pms-eng) type: mteb/tatoeba-bitext-mining config: pms-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 67.61904761904762 - type: f1 value: 62.30239247214037 - type: precision value: 60.340702947845806 - type: recall value: 67.61904761904762 - task: type: BitextMining dataset: name: MTEB Tatoeba (gle-eng) type: mteb/tatoeba-bitext-mining config: gle-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 77.9 - type: f1 value: 73.81285714285714 - type: precision value: 72.21570818070818 - type: recall value: 77.9 - task: type: BitextMining dataset: name: MTEB Tatoeba (pes-eng) type: mteb/tatoeba-bitext-mining config: pes-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 91.8 - type: f1 value: 89.66666666666667 - type: precision value: 88.66666666666666 - type: recall value: 91.8 - task: type: BitextMining dataset: name: MTEB Tatoeba (nob-eng) type: mteb/tatoeba-bitext-mining config: nob-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 97.6 - type: f1 value: 96.85666666666665 - type: precision value: 96.50833333333333 - type: recall value: 97.6 - task: type: BitextMining dataset: name: MTEB Tatoeba (bul-eng) type: mteb/tatoeba-bitext-mining config: bul-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 95.39999999999999 - type: f1 value: 93.98333333333333 - type: precision value: 93.30000000000001 - type: recall value: 95.39999999999999 - task: type: BitextMining dataset: name: MTEB Tatoeba (cbk-eng) type: mteb/tatoeba-bitext-mining config: cbk-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 85.0 - type: f1 value: 81.31538461538462 - type: precision value: 79.70666666666666 - type: recall value: 85.0 - task: type: BitextMining dataset: name: MTEB Tatoeba (hun-eng) type: mteb/tatoeba-bitext-mining config: hun-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 91.60000000000001 - type: f1 value: 89.81888888888888 - type: precision value: 89.08583333333333 - type: recall value: 91.60000000000001 - task: type: BitextMining dataset: name: MTEB Tatoeba (uig-eng) type: mteb/tatoeba-bitext-mining config: uig-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 44.3 - type: f1 value: 38.8623088023088 - type: precision value: 37.03755623461505 - type: recall value: 44.3 - task: type: BitextMining dataset: name: MTEB Tatoeba (rus-eng) type: mteb/tatoeba-bitext-mining config: rus-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 95.19999999999999 - type: f1 value: 93.75 - type: precision value: 93.05 - type: recall value: 95.19999999999999 - task: type: BitextMining dataset: name: MTEB Tatoeba (spa-eng) type: mteb/tatoeba-bitext-mining config: spa-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 99.1 - type: f1 value: 98.8 - type: precision value: 98.65 - type: recall value: 99.1 - task: type: BitextMining dataset: name: MTEB Tatoeba (hye-eng) type: mteb/tatoeba-bitext-mining config: hye-eng split: test revision: 
9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 69.6765498652291 - type: f1 value: 63.991785393402644 - type: precision value: 61.7343729944808 - type: recall value: 69.6765498652291 - task: type: BitextMining dataset: name: MTEB Tatoeba (tel-eng) type: mteb/tatoeba-bitext-mining config: tel-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 50.0 - type: f1 value: 42.79341029341029 - type: precision value: 40.25098358431692 - type: recall value: 50.0 - task: type: BitextMining dataset: name: MTEB Tatoeba (afr-eng) type: mteb/tatoeba-bitext-mining config: afr-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 89.7 - type: f1 value: 87.19023809523809 - type: precision value: 86.12595238095237 - type: recall value: 89.7 - task: type: BitextMining dataset: name: MTEB Tatoeba (mon-eng) type: mteb/tatoeba-bitext-mining config: mon-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 42.72727272727273 - type: f1 value: 37.78789518562245 - type: precision value: 36.24208471267295 - type: recall value: 42.72727272727273 - task: type: BitextMining dataset: name: MTEB Tatoeba (arz-eng) type: mteb/tatoeba-bitext-mining config: arz-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 75.26205450733752 - type: f1 value: 70.72842833849123 - type: precision value: 68.93256464011182 - type: recall value: 75.26205450733752 - task: type: BitextMining dataset: name: MTEB Tatoeba (hrv-eng) type: mteb/tatoeba-bitext-mining config: hrv-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 95.19999999999999 - type: f1 value: 93.96666666666668 - type: precision value: 93.42 - type: recall value: 95.19999999999999 - task: type: BitextMining dataset: name: MTEB Tatoeba (nov-eng) type: mteb/tatoeba-bitext-mining config: nov-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 76.26459143968872 - type: f1 value: 72.40190419178747 - type: precision value: 70.84954604409856 - type: recall value: 76.26459143968872 - task: type: BitextMining dataset: name: MTEB Tatoeba (gsw-eng) type: mteb/tatoeba-bitext-mining config: gsw-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 59.82905982905983 - type: f1 value: 52.2100122100122 - type: precision value: 49.52516619183286 - type: recall value: 59.82905982905983 - task: type: BitextMining dataset: name: MTEB Tatoeba (nds-eng) type: mteb/tatoeba-bitext-mining config: nds-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 81.69999999999999 - type: f1 value: 77.41714285714286 - type: precision value: 75.64833333333334 - type: recall value: 81.69999999999999 - task: type: BitextMining dataset: name: MTEB Tatoeba (ukr-eng) type: mteb/tatoeba-bitext-mining config: ukr-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 95.5 - type: f1 value: 94.45 - type: precision value: 93.93333333333334 - type: recall value: 95.5 - task: type: BitextMining dataset: name: MTEB Tatoeba (uzb-eng) type: mteb/tatoeba-bitext-mining config: uzb-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 58.41121495327103 - type: f1 value: 52.73495974430554 - type: precision value: 50.717067200712066 - type: recall 
value: 58.41121495327103 - task: type: BitextMining dataset: name: MTEB Tatoeba (lit-eng) type: mteb/tatoeba-bitext-mining config: lit-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 73.3 - type: f1 value: 69.20371794871795 - type: precision value: 67.6597557997558 - type: recall value: 73.3 - task: type: BitextMining dataset: name: MTEB Tatoeba (ina-eng) type: mteb/tatoeba-bitext-mining config: ina-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 96.5 - type: f1 value: 95.51666666666667 - type: precision value: 95.05 - type: recall value: 96.5 - task: type: BitextMining dataset: name: MTEB Tatoeba (lfn-eng) type: mteb/tatoeba-bitext-mining config: lfn-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 78.4 - type: f1 value: 73.88856643356644 - type: precision value: 72.01373015873016 - type: recall value: 78.4 - task: type: BitextMining dataset: name: MTEB Tatoeba (zsm-eng) type: mteb/tatoeba-bitext-mining config: zsm-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 95.3 - type: f1 value: 94.09666666666668 - type: precision value: 93.53333333333332 - type: recall value: 95.3 - task: type: BitextMining dataset: name: MTEB Tatoeba (ita-eng) type: mteb/tatoeba-bitext-mining config: ita-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 93.7 - type: f1 value: 91.94 - type: precision value: 91.10833333333333 - type: recall value: 93.7 - task: type: BitextMining dataset: name: MTEB Tatoeba (cmn-eng) type: mteb/tatoeba-bitext-mining config: cmn-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 96.8 - type: f1 value: 95.89999999999999 - type: precision value: 95.46666666666668 - type: recall value: 96.8 - task: type: BitextMining dataset: name: MTEB Tatoeba (lvs-eng) type: mteb/tatoeba-bitext-mining config: lvs-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 70.5 - type: f1 value: 66.00635642135641 - type: precision value: 64.36345238095238 - type: recall value: 70.5 - task: type: BitextMining dataset: name: MTEB Tatoeba (glg-eng) type: mteb/tatoeba-bitext-mining config: glg-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 92.4 - type: f1 value: 90.44388888888889 - type: precision value: 89.5767857142857 - type: recall value: 92.4 - task: type: BitextMining dataset: name: MTEB Tatoeba (ceb-eng) type: mteb/tatoeba-bitext-mining config: ceb-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 48.0 - type: f1 value: 43.15372775372776 - type: precision value: 41.53152510162313 - type: recall value: 48.0 - task: type: BitextMining dataset: name: MTEB Tatoeba (bre-eng) type: mteb/tatoeba-bitext-mining config: bre-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 16.7 - type: f1 value: 14.198431372549017 - type: precision value: 13.411765873015872 - type: recall value: 16.7 - task: type: BitextMining dataset: name: MTEB Tatoeba (ben-eng) type: mteb/tatoeba-bitext-mining config: ben-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 85.7 - type: f1 value: 81.81666666666666 - type: precision value: 80.10833333333332 - type: recall value: 85.7 - task: type: 
BitextMining dataset: name: MTEB Tatoeba (swg-eng) type: mteb/tatoeba-bitext-mining config: swg-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 69.64285714285714 - type: f1 value: 64.745670995671 - type: precision value: 62.916666666666664 - type: recall value: 69.64285714285714 - task: type: BitextMining dataset: name: MTEB Tatoeba (arq-eng) type: mteb/tatoeba-bitext-mining config: arq-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 54.665203073545555 - type: f1 value: 48.55366630916923 - type: precision value: 46.35683318998357 - type: recall value: 54.665203073545555 - task: type: BitextMining dataset: name: MTEB Tatoeba (kab-eng) type: mteb/tatoeba-bitext-mining config: kab-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 4.8 - type: f1 value: 3.808587223587223 - type: precision value: 3.5653174603174604 - type: recall value: 4.8 - task: type: BitextMining dataset: name: MTEB Tatoeba (fra-eng) type: mteb/tatoeba-bitext-mining config: fra-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 96.6 - type: f1 value: 95.77333333333333 - type: precision value: 95.39166666666667 - type: recall value: 96.6 - task: type: BitextMining dataset: name: MTEB Tatoeba (por-eng) type: mteb/tatoeba-bitext-mining config: por-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 95.39999999999999 - type: f1 value: 94.44 - type: precision value: 93.975 - type: recall value: 95.39999999999999 - task: type: BitextMining dataset: name: MTEB Tatoeba (tat-eng) type: mteb/tatoeba-bitext-mining config: tat-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 42.0 - type: f1 value: 37.024908424908425 - type: precision value: 35.365992063492065 - type: recall value: 42.0 - task: type: BitextMining dataset: name: MTEB Tatoeba (oci-eng) type: mteb/tatoeba-bitext-mining config: oci-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 66.7 - type: f1 value: 62.20460835058661 - type: precision value: 60.590134587634594 - type: recall value: 66.7 - task: type: BitextMining dataset: name: MTEB Tatoeba (pol-eng) type: mteb/tatoeba-bitext-mining config: pol-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 97.3 - type: f1 value: 96.46666666666667 - type: precision value: 96.06666666666668 - type: recall value: 97.3 - task: type: BitextMining dataset: name: MTEB Tatoeba (war-eng) type: mteb/tatoeba-bitext-mining config: war-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 47.3 - type: f1 value: 41.96905408317173 - type: precision value: 40.18741402116402 - type: recall value: 47.3 - task: type: BitextMining dataset: name: MTEB Tatoeba (aze-eng) type: mteb/tatoeba-bitext-mining config: aze-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 80.2 - type: f1 value: 76.22690476190476 - type: precision value: 74.63539682539682 - type: recall value: 80.2 - task: type: BitextMining dataset: name: MTEB Tatoeba (vie-eng) type: mteb/tatoeba-bitext-mining config: vie-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 96.0 - type: f1 value: 94.83333333333333 - type: precision value: 94.26666666666668 - 
type: recall value: 96.0 - task: type: BitextMining dataset: name: MTEB Tatoeba (nno-eng) type: mteb/tatoeba-bitext-mining config: nno-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 89.7 - type: f1 value: 87.24333333333334 - type: precision value: 86.17 - type: recall value: 89.7 - task: type: BitextMining dataset: name: MTEB Tatoeba (cha-eng) type: mteb/tatoeba-bitext-mining config: cha-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 50.36496350364964 - type: f1 value: 44.795520780922246 - type: precision value: 43.09002433090024 - type: recall value: 50.36496350364964 - task: type: BitextMining dataset: name: MTEB Tatoeba (mhr-eng) type: mteb/tatoeba-bitext-mining config: mhr-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 18.8 - type: f1 value: 16.242864357864356 - type: precision value: 15.466596638655464 - type: recall value: 18.8 - task: type: BitextMining dataset: name: MTEB Tatoeba (dan-eng) type: mteb/tatoeba-bitext-mining config: dan-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 95.19999999999999 - type: f1 value: 93.92333333333333 - type: precision value: 93.30833333333332 - type: recall value: 95.19999999999999 - task: type: BitextMining dataset: name: MTEB Tatoeba (ell-eng) type: mteb/tatoeba-bitext-mining config: ell-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 93.4 - type: f1 value: 91.42333333333333 - type: precision value: 90.50833333333334 - type: recall value: 93.4 - task: type: BitextMining dataset: name: MTEB Tatoeba (amh-eng) type: mteb/tatoeba-bitext-mining config: amh-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 26.190476190476193 - type: f1 value: 22.05208151636723 - type: precision value: 21.09292328042328 - type: recall value: 26.190476190476193 - task: type: BitextMining dataset: name: MTEB Tatoeba (pam-eng) type: mteb/tatoeba-bitext-mining config: pam-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 17.2 - type: f1 value: 14.021009731460952 - type: precision value: 13.1389886698243 - type: recall value: 17.2 - task: type: BitextMining dataset: name: MTEB Tatoeba (hsb-eng) type: mteb/tatoeba-bitext-mining config: hsb-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 78.67494824016563 - type: f1 value: 74.24430641821947 - type: precision value: 72.50747642051991 - type: recall value: 78.67494824016563 - task: type: BitextMining dataset: name: MTEB Tatoeba (srp-eng) type: mteb/tatoeba-bitext-mining config: srp-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 94.19999999999999 - type: f1 value: 92.54 - type: precision value: 91.75833333333334 - type: recall value: 94.19999999999999 - task: type: BitextMining dataset: name: MTEB Tatoeba (epo-eng) type: mteb/tatoeba-bitext-mining config: epo-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 90.2 - type: f1 value: 87.78666666666666 - type: precision value: 86.69833333333334 - type: recall value: 90.2 - task: type: BitextMining dataset: name: MTEB Tatoeba (kzj-eng) type: mteb/tatoeba-bitext-mining config: kzj-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy 
value: 14.7 - type: f1 value: 12.19206214842218 - type: precision value: 11.526261904761904 - type: recall value: 14.7 - task: type: BitextMining dataset: name: MTEB Tatoeba (awa-eng) type: mteb/tatoeba-bitext-mining config: awa-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 73.16017316017316 - type: f1 value: 67.44858316286889 - type: precision value: 65.23809523809523 - type: recall value: 73.16017316017316 - task: type: BitextMining dataset: name: MTEB Tatoeba (fao-eng) type: mteb/tatoeba-bitext-mining config: fao-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 75.19083969465649 - type: f1 value: 70.33078880407125 - type: precision value: 68.3969465648855 - type: recall value: 75.19083969465649 - task: type: BitextMining dataset: name: MTEB Tatoeba (mal-eng) type: mteb/tatoeba-bitext-mining config: mal-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 62.154294032023294 - type: f1 value: 55.86030821838681 - type: precision value: 53.53509623160277 - type: recall value: 62.154294032023294 - task: type: BitextMining dataset: name: MTEB Tatoeba (ile-eng) type: mteb/tatoeba-bitext-mining config: ile-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 86.8 - type: f1 value: 83.9652380952381 - type: precision value: 82.84242424242424 - type: recall value: 86.8 - task: type: BitextMining dataset: name: MTEB Tatoeba (bos-eng) type: mteb/tatoeba-bitext-mining config: bos-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 93.50282485875707 - type: f1 value: 91.54425612052731 - type: precision value: 90.65442561205272 - type: recall value: 93.50282485875707 - task: type: BitextMining dataset: name: MTEB Tatoeba (cor-eng) type: mteb/tatoeba-bitext-mining config: cor-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 11.4 - type: f1 value: 9.189775870222714 - type: precision value: 8.66189886502811 - type: recall value: 11.4 - task: type: BitextMining dataset: name: MTEB Tatoeba (cat-eng) type: mteb/tatoeba-bitext-mining config: cat-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 93.4 - type: f1 value: 91.88666666666666 - type: precision value: 91.21444444444444 - type: recall value: 93.4 - task: type: BitextMining dataset: name: MTEB Tatoeba (eus-eng) type: mteb/tatoeba-bitext-mining config: eus-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 46.0 - type: f1 value: 40.51069226095542 - type: precision value: 38.57804926010808 - type: recall value: 46.0 - task: type: BitextMining dataset: name: MTEB Tatoeba (yue-eng) type: mteb/tatoeba-bitext-mining config: yue-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 91.0 - type: f1 value: 89.11333333333333 - type: precision value: 88.27000000000001 - type: recall value: 91.0 - task: type: BitextMining dataset: name: MTEB Tatoeba (swe-eng) type: mteb/tatoeba-bitext-mining config: swe-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 94.39999999999999 - type: f1 value: 92.95 - type: precision value: 92.27000000000001 - type: recall value: 94.39999999999999 - task: type: BitextMining dataset: name: MTEB Tatoeba (dtp-eng) type: mteb/tatoeba-bitext-mining config: 
dtp-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 14.2 - type: f1 value: 11.73701698770113 - type: precision value: 11.079207014736676 - type: recall value: 14.2 - task: type: BitextMining dataset: name: MTEB Tatoeba (kat-eng) type: mteb/tatoeba-bitext-mining config: kat-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 65.14745308310992 - type: f1 value: 59.665707393589415 - type: precision value: 57.560853653346946 - type: recall value: 65.14745308310992 - task: type: BitextMining dataset: name: MTEB Tatoeba (jpn-eng) type: mteb/tatoeba-bitext-mining config: jpn-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 95.39999999999999 - type: f1 value: 94.0 - type: precision value: 93.33333333333333 - type: recall value: 95.39999999999999 - task: type: BitextMining dataset: name: MTEB Tatoeba (csb-eng) type: mteb/tatoeba-bitext-mining config: csb-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 69.56521739130434 - type: f1 value: 62.92490118577074 - type: precision value: 60.27009222661397 - type: recall value: 69.56521739130434 - task: type: BitextMining dataset: name: MTEB Tatoeba (xho-eng) type: mteb/tatoeba-bitext-mining config: xho-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 40.140845070422536 - type: f1 value: 35.96411804158283 - type: precision value: 34.89075869357559 - type: recall value: 40.140845070422536 - task: type: BitextMining dataset: name: MTEB Tatoeba (orv-eng) type: mteb/tatoeba-bitext-mining config: orv-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 65.86826347305389 - type: f1 value: 59.646248628284546 - type: precision value: 57.22982606216139 - type: recall value: 65.86826347305389 - task: type: BitextMining dataset: name: MTEB Tatoeba (ind-eng) type: mteb/tatoeba-bitext-mining config: ind-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 94.89999999999999 - type: f1 value: 93.48333333333333 - type: precision value: 92.83666666666667 - type: recall value: 94.89999999999999 - task: type: BitextMining dataset: name: MTEB Tatoeba (tuk-eng) type: mteb/tatoeba-bitext-mining config: tuk-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 47.783251231527096 - type: f1 value: 42.006447302013804 - type: precision value: 40.12747105111637 - type: recall value: 47.783251231527096 - task: type: BitextMining dataset: name: MTEB Tatoeba (max-eng) type: mteb/tatoeba-bitext-mining config: max-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 69.71830985915493 - type: f1 value: 64.80266212660578 - type: precision value: 63.08098591549296 - type: recall value: 69.71830985915493 - task: type: BitextMining dataset: name: MTEB Tatoeba (swh-eng) type: mteb/tatoeba-bitext-mining config: swh-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 67.94871794871796 - type: f1 value: 61.59912309912309 - type: precision value: 59.17338217338218 - type: recall value: 67.94871794871796 - task: type: BitextMining dataset: name: MTEB Tatoeba (hin-eng) type: mteb/tatoeba-bitext-mining config: hin-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 
96.39999999999999 - type: f1 value: 95.28333333333335 - type: precision value: 94.75 - type: recall value: 96.39999999999999 - task: type: BitextMining dataset: name: MTEB Tatoeba (dsb-eng) type: mteb/tatoeba-bitext-mining config: dsb-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 70.14613778705638 - type: f1 value: 65.4349338900487 - type: precision value: 63.57599255302805 - type: recall value: 70.14613778705638 - task: type: BitextMining dataset: name: MTEB Tatoeba (ber-eng) type: mteb/tatoeba-bitext-mining config: ber-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 9.2 - type: f1 value: 7.622184434339607 - type: precision value: 7.287048159682417 - type: recall value: 9.2 - task: type: BitextMining dataset: name: MTEB Tatoeba (tam-eng) type: mteb/tatoeba-bitext-mining config: tam-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 77.85016286644951 - type: f1 value: 72.83387622149837 - type: precision value: 70.58450959102424 - type: recall value: 77.85016286644951 - task: type: BitextMining dataset: name: MTEB Tatoeba (slk-eng) type: mteb/tatoeba-bitext-mining config: slk-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 90.8 - type: f1 value: 88.84333333333333 - type: precision value: 87.96666666666665 - type: recall value: 90.8 - task: type: BitextMining dataset: name: MTEB Tatoeba (tgl-eng) type: mteb/tatoeba-bitext-mining config: tgl-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 94.6 - type: f1 value: 93.14 - type: precision value: 92.49833333333333 - type: recall value: 94.6 - task: type: BitextMining dataset: name: MTEB Tatoeba (ast-eng) type: mteb/tatoeba-bitext-mining config: ast-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 84.25196850393701 - type: f1 value: 80.94488188976378 - type: precision value: 79.65879265091863 - type: recall value: 84.25196850393701 - task: type: BitextMining dataset: name: MTEB Tatoeba (mkd-eng) type: mteb/tatoeba-bitext-mining config: mkd-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 89.5 - type: f1 value: 86.89666666666666 - type: precision value: 85.7 - type: recall value: 89.5 - task: type: BitextMining dataset: name: MTEB Tatoeba (khm-eng) type: mteb/tatoeba-bitext-mining config: khm-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 42.797783933518005 - type: f1 value: 37.30617360155193 - type: precision value: 35.34933825792552 - type: recall value: 42.797783933518005 - task: type: BitextMining dataset: name: MTEB Tatoeba (ces-eng) type: mteb/tatoeba-bitext-mining config: ces-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 96.1 - type: f1 value: 94.93333333333332 - type: precision value: 94.38333333333333 - type: recall value: 96.1 - task: type: BitextMining dataset: name: MTEB Tatoeba (tzl-eng) type: mteb/tatoeba-bitext-mining config: tzl-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 54.807692307692314 - type: f1 value: 49.506903353057204 - type: precision value: 47.54807692307693 - type: recall value: 54.807692307692314 - task: type: BitextMining dataset: name: MTEB Tatoeba (urd-eng) type: mteb/tatoeba-bitext-mining config: 
urd-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 87.1 - type: f1 value: 83.61857142857143 - type: precision value: 81.975 - type: recall value: 87.1 - task: type: BitextMining dataset: name: MTEB Tatoeba (ara-eng) type: mteb/tatoeba-bitext-mining config: ara-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 91.10000000000001 - type: f1 value: 88.76333333333332 - type: precision value: 87.67 - type: recall value: 91.10000000000001 - task: type: BitextMining dataset: name: MTEB Tatoeba (kor-eng) type: mteb/tatoeba-bitext-mining config: kor-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 93.10000000000001 - type: f1 value: 91.28999999999999 - type: precision value: 90.44500000000001 - type: recall value: 93.10000000000001 - task: type: BitextMining dataset: name: MTEB Tatoeba (yid-eng) type: mteb/tatoeba-bitext-mining config: yid-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 39.97641509433962 - type: f1 value: 33.12271889998028 - type: precision value: 30.95185381542554 - type: recall value: 39.97641509433962 - task: type: BitextMining dataset: name: MTEB Tatoeba (fin-eng) type: mteb/tatoeba-bitext-mining config: fin-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 92.60000000000001 - type: f1 value: 90.69 - type: precision value: 89.84500000000001 - type: recall value: 92.60000000000001 - task: type: BitextMining dataset: name: MTEB Tatoeba (tha-eng) type: mteb/tatoeba-bitext-mining config: tha-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 95.07299270072993 - type: f1 value: 93.64355231143554 - type: precision value: 92.94403892944038 - type: recall value: 95.07299270072993 - task: type: BitextMining dataset: name: MTEB Tatoeba (wuu-eng) type: mteb/tatoeba-bitext-mining config: wuu-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 91.9 - type: f1 value: 89.61333333333333 - type: precision value: 88.53333333333333 - type: recall value: 91.9 - task: type: Clustering dataset: name: MTEB ThuNewsClusteringP2P type: C-MTEB/ThuNewsClusteringP2P config: default split: test revision: None metrics: - type: v_measure value: 64.68478289806511 - task: type: Clustering dataset: name: MTEB ThuNewsClusteringS2S type: C-MTEB/ThuNewsClusteringS2S config: default split: test revision: None metrics: - type: v_measure value: 57.53010296184097 - task: type: Retrieval dataset: name: MTEB Touche2020 type: webis-touche2020 config: default split: test revision: None metrics: - type: map_at_1 value: 2.519 - type: map_at_10 value: 10.31 - type: map_at_100 value: 16.027 - type: map_at_1000 value: 17.827 - type: map_at_3 value: 5.721 - type: map_at_5 value: 7.7829999999999995 - type: mrr_at_1 value: 34.694 - type: mrr_at_10 value: 52.642999999999994 - type: mrr_at_100 value: 53.366 - type: mrr_at_1000 value: 53.366 - type: mrr_at_3 value: 48.638999999999996 - type: mrr_at_5 value: 50.578 - type: ndcg_at_1 value: 31.633 - type: ndcg_at_10 value: 26.394000000000002 - type: ndcg_at_100 value: 36.41 - type: ndcg_at_1000 value: 49.206 - type: ndcg_at_3 value: 31.694 - type: ndcg_at_5 value: 29.529 - type: precision_at_1 value: 34.694 - type: precision_at_10 value: 23.469 - type: precision_at_100 value: 7.286 - type: precision_at_1000 value: 1.5610000000000002 - type: 
precision_at_3 value: 34.014 - type: precision_at_5 value: 29.796 - type: recall_at_1 value: 2.519 - type: recall_at_10 value: 17.091 - type: recall_at_100 value: 45.429 - type: recall_at_1000 value: 84.621 - type: recall_at_3 value: 7.208 - type: recall_at_5 value: 10.523 - task: type: Classification dataset: name: MTEB ToxicConversationsClassification type: mteb/toxic_conversations_50k config: default split: test revision: d7c0de2777da35d6aae2200a62c6e0e5af397c4c metrics: - type: accuracy value: 69.58659999999999 - type: ap value: 14.735696532619 - type: f1 value: 54.23517220069903 - task: type: Classification dataset: name: MTEB TweetSentimentExtractionClassification type: mteb/tweet_sentiment_extraction config: default split: test revision: d604517c81ca91fe16a244d1248fc021f9ecee7a metrics: - type: accuracy value: 63.723825693265425 - type: f1 value: 64.02405729449103 - task: type: Clustering dataset: name: MTEB TwentyNewsgroupsClustering type: mteb/twentynewsgroups-clustering config: default split: test revision: 6125ec4e24fa026cec8a478383ee943acfbd5449 metrics: - type: v_measure value: 54.310161547491006 - task: type: PairClassification dataset: name: MTEB TwitterSemEval2015 type: mteb/twittersemeval2015-pairclassification config: default split: test revision: 70970daeab8776df92f5ea462b6173c0b46fd2d1 metrics: - type: cos_sim_accuracy value: 88.77630088812064 - type: cos_sim_ap value: 81.61725457333809 - type: cos_sim_f1 value: 74.91373801916932 - type: cos_sim_precision value: 72.63940520446097 - type: cos_sim_recall value: 77.33509234828496 - type: dot_accuracy value: 88.77630088812064 - type: dot_ap value: 81.61725317476251 - type: dot_f1 value: 74.91373801916932 - type: dot_precision value: 72.63940520446097 - type: dot_recall value: 77.33509234828496 - type: euclidean_accuracy value: 88.77630088812064 - type: euclidean_ap value: 81.61724596869566 - type: euclidean_f1 value: 74.91373801916932 - type: euclidean_precision value: 72.63940520446097 - type: euclidean_recall value: 77.33509234828496 - type: manhattan_accuracy value: 88.67497168742922 - type: manhattan_ap value: 81.430251048948 - type: manhattan_f1 value: 74.79593118171543 - type: manhattan_precision value: 71.3635274382938 - type: manhattan_recall value: 78.57519788918206 - type: max_accuracy value: 88.77630088812064 - type: max_ap value: 81.61725457333809 - type: max_f1 value: 74.91373801916932 - task: type: PairClassification dataset: name: MTEB TwitterURLCorpus type: mteb/twitterurlcorpus-pairclassification config: default split: test revision: 8b6510b0b1fa4e4c4f879467980e9be563ec1cdf metrics: - type: cos_sim_accuracy value: 89.85136026700819 - type: cos_sim_ap value: 87.74656687446567 - type: cos_sim_f1 value: 80.3221673073403 - type: cos_sim_precision value: 76.56871640957633 - type: cos_sim_recall value: 84.46258084385587 - type: dot_accuracy value: 89.85136026700819 - type: dot_ap value: 87.74656471395072 - type: dot_f1 value: 80.3221673073403 - type: dot_precision value: 76.56871640957633 - type: dot_recall value: 84.46258084385587 - type: euclidean_accuracy value: 89.85136026700819 - type: euclidean_ap value: 87.74656885754466 - type: euclidean_f1 value: 80.3221673073403 - type: euclidean_precision value: 76.56871640957633 - type: euclidean_recall value: 84.46258084385587 - type: manhattan_accuracy value: 89.86300306593705 - type: manhattan_ap value: 87.78807479093082 - type: manhattan_f1 value: 80.31663429471911 - type: manhattan_precision value: 76.63472970137772 - type: manhattan_recall value: 
84.3701878657222 - type: max_accuracy value: 89.86300306593705 - type: max_ap value: 87.78807479093082 - type: max_f1 value: 80.3221673073403 - task: type: Retrieval dataset: name: MTEB VideoRetrieval type: C-MTEB/VideoRetrieval config: default split: dev revision: None metrics: - type: map_at_1 value: 32.4 - type: map_at_10 value: 40.961999999999996 - type: map_at_100 value: 41.660000000000004 - type: map_at_1000 value: 41.721000000000004 - type: map_at_3 value: 38.550000000000004 - type: map_at_5 value: 40.06 - type: mrr_at_1 value: 32.4 - type: mrr_at_10 value: 40.961999999999996 - type: mrr_at_100 value: 41.660000000000004 - type: mrr_at_1000 value: 41.721000000000004 - type: mrr_at_3 value: 38.550000000000004 - type: mrr_at_5 value: 40.06 - type: ndcg_at_1 value: 32.4 - type: ndcg_at_10 value: 45.388 - type: ndcg_at_100 value: 49.012 - type: ndcg_at_1000 value: 50.659 - type: ndcg_at_3 value: 40.47 - type: ndcg_at_5 value: 43.232 - type: precision_at_1 value: 32.4 - type: precision_at_10 value: 5.94 - type: precision_at_100 value: 0.769 - type: precision_at_1000 value: 0.09 - type: precision_at_3 value: 15.333 - type: precision_at_5 value: 10.56 - type: recall_at_1 value: 32.4 - type: recall_at_10 value: 59.4 - type: recall_at_100 value: 76.9 - type: recall_at_1000 value: 90.0 - type: recall_at_3 value: 46.0 - type: recall_at_5 value: 52.800000000000004 - task: type: Classification dataset: name: MTEB Waimai type: C-MTEB/waimai-classification config: default split: test revision: None metrics: - type: accuracy value: 86.94000000000001 - type: ap value: 70.57373468481975 - type: f1 value: 85.26264784928323
---

## E5-mistral-7b-instruct

[Improving Text Embeddings with Large Language Models](https://arxiv.org/pdf/2401.00368.pdf). Liang Wang, Nan Yang, Xiaolong Huang, Linjun Yang, Rangan Majumder, Furu Wei, arXiv 2024

This model has 32 layers and the embedding size is 4096.

## Usage

Below is an example of how to encode queries and passages from the MS-MARCO passage ranking dataset.

### Sentence Transformers

```python
from sentence_transformers import SentenceTransformer

model = SentenceTransformer("intfloat/e5-mistral-7b-instruct")
# In case you want to reduce the maximum sequence length:
model.max_seq_length = 4096

queries = [
    "how much protein should a female eat",
    "summit define",
]
documents = [
    "As a general guideline, the CDC's average requirement of protein for women ages 19 to 70 is 46 grams per day. But, as you can see from this chart, you'll need to increase that if you're expecting or training for a marathon. Check out the chart below to see how much protein you should be eating each day.",
    "Definition of summit for English Language Learners. : 1 the highest point of a mountain : the top of a mountain. : 2 the highest level. : 3 a meeting or series of meetings between the leaders of two or more governments."
]
query_embeddings = model.encode(queries, prompt_name="web_search_query")
document_embeddings = model.encode(documents)

scores = (query_embeddings @ document_embeddings.T) * 100
print(scores.tolist())
```

Have a look at [config_sentence_transformers.json](config_sentence_transformers.json) for the prompts that are pre-configured, such as `web_search_query`, `sts_query`, and `summarization_query`. Additionally, check out [unilm/e5/utils.py](https://github.com/microsoft/unilm/blob/9c0f1ff7ca53431fe47d2637dfe253643d94185b/e5/utils.py#L106) for the prompts we used for evaluation. You can use these via e.g.
`model.encode(queries, prompt="Instruct: Given a claim, find documents that refute the claim\nQuery: ")`.

### Transformers

```python
import torch
import torch.nn.functional as F
from torch import Tensor
from transformers import AutoTokenizer, AutoModel


def last_token_pool(last_hidden_states: Tensor,
                    attention_mask: Tensor) -> Tensor:
    left_padding = (attention_mask[:, -1].sum() == attention_mask.shape[0])
    if left_padding:
        return last_hidden_states[:, -1]
    else:
        sequence_lengths = attention_mask.sum(dim=1) - 1
        batch_size = last_hidden_states.shape[0]
        return last_hidden_states[torch.arange(batch_size, device=last_hidden_states.device), sequence_lengths]


def get_detailed_instruct(task_description: str, query: str) -> str:
    return f'Instruct: {task_description}\nQuery: {query}'


# Each query must come with a one-sentence instruction that describes the task
task = 'Given a web search query, retrieve relevant passages that answer the query'
queries = [
    get_detailed_instruct(task, 'how much protein should a female eat'),
    get_detailed_instruct(task, 'summit define')
]
# No need to add instruction for retrieval documents
documents = [
    "As a general guideline, the CDC's average requirement of protein for women ages 19 to 70 is 46 grams per day. But, as you can see from this chart, you'll need to increase that if you're expecting or training for a marathon. Check out the chart below to see how much protein you should be eating each day.",
    "Definition of summit for English Language Learners. : 1 the highest point of a mountain : the top of a mountain. : 2 the highest level. : 3 a meeting or series of meetings between the leaders of two or more governments."
]
input_texts = queries + documents

tokenizer = AutoTokenizer.from_pretrained('intfloat/e5-mistral-7b-instruct')
model = AutoModel.from_pretrained('intfloat/e5-mistral-7b-instruct')

max_length = 4096
# Tokenize the input texts
batch_dict = tokenizer(input_texts, max_length=max_length, padding=True, truncation=True, return_tensors='pt')

outputs = model(**batch_dict)
embeddings = last_token_pool(outputs.last_hidden_state, batch_dict['attention_mask'])

# normalize embeddings
embeddings = F.normalize(embeddings, p=2, dim=1)
scores = (embeddings[:2] @ embeddings[2:].T) * 100
print(scores.tolist())
```

## Supported Languages

This model is initialized from [Mistral-7B-v0.1](https://huggingface.co/mistralai/Mistral-7B-v0.1) and fine-tuned on a mixture of multilingual datasets. As a result, it has some multilingual capability. However, since Mistral-7B-v0.1 is mainly trained on English data, we recommend using this model for English only. For multilingual use cases, please refer to [multilingual-e5-large](https://huggingface.co/intfloat/multilingual-e5-large).

## MTEB Benchmark Evaluation

Check out [unilm/e5](https://github.com/microsoft/unilm/tree/master/e5) to reproduce the evaluation results on the [BEIR](https://arxiv.org/abs/2104.08663) and [MTEB](https://arxiv.org/abs/2210.07316) benchmarks.

## FAQ

**1. Do I need to add instructions to the query?**

Yes, this is how the model is trained; otherwise you will see a performance degradation. The task definition should be a one-sentence instruction that describes the task. This is a way to customize text embeddings for different scenarios through natural language instructions.

Please check out [unilm/e5/utils.py](https://github.com/microsoft/unilm/blob/9c0f1ff7ca53431fe47d2637dfe253643d94185b/e5/utils.py#L106) for the instructions we used for evaluation.
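For instance, building on the Sentence Transformers snippet from the Usage section, a query can be encoded either with one of the pre-configured prompt names or with a custom one-sentence instruction. The sketch below is only an illustration (the example sentences are arbitrary; the custom prompt reuses the `Instruct: ...\nQuery: ` format shown above):

```python
from sentence_transformers import SentenceTransformer

model = SentenceTransformer("intfloat/e5-mistral-7b-instruct")

# Option 1: use a prompt that ships with the model (see config_sentence_transformers.json)
sts_embedding = model.encode("A man is playing a guitar.", prompt_name="sts_query")

# Option 2: write the one-sentence task description yourself (illustrative wording)
custom_prompt = "Instruct: Given a claim, find documents that refute the claim\nQuery: "
claim_embedding = model.encode("The Great Wall of China is visible from space.", prompt=custom_prompt)

print(sts_embedding.shape, claim_embedding.shape)  # each is a 4096-dimensional vector
```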
On the other hand, there is no need to add instructions to the document side.

**2. Why are my reproduced results slightly different from those reported in the model card?**

Different versions of `transformers` and `pytorch` could cause negligible but non-zero performance differences.

**3. Where are the LoRA-only weights?**

You can find the LoRA-only weights at [https://huggingface.co/intfloat/e5-mistral-7b-instruct/tree/main/lora](https://huggingface.co/intfloat/e5-mistral-7b-instruct/tree/main/lora).

## Citation

If you find our paper or models helpful, please consider citing them as follows:

```bibtex
@article{wang2023improving,
  title={Improving Text Embeddings with Large Language Models},
  author={Wang, Liang and Yang, Nan and Huang, Xiaolong and Yang, Linjun and Majumder, Rangan and Wei, Furu},
  journal={arXiv preprint arXiv:2401.00368},
  year={2023}
}

@article{wang2022text,
  title={Text Embeddings by Weakly-Supervised Contrastive Pre-training},
  author={Wang, Liang and Yang, Nan and Huang, Xiaolong and Jiao, Binxing and Yang, Linjun and Jiang, Daxin and Majumder, Rangan and Wei, Furu},
  journal={arXiv preprint arXiv:2212.03533},
  year={2022}
}
```

## Limitations

Using this model for inputs longer than 4096 tokens is not recommended. This model's multilingual capability is still inferior to [multilingual-e5-large](https://huggingface.co/intfloat/multilingual-e5-large) in some cases.
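If silent truncation of over-length inputs is a concern, one option is to check the token count before encoding and shorten or split the offending texts yourself. This is only a minimal sketch (the `within_budget` helper is hypothetical, not part of this repository; the 4096 budget comes from the note above):

```python
from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("intfloat/e5-mistral-7b-instruct")
MAX_TOKENS = 4096  # recommended upper bound from the limitation above


def within_budget(text: str, max_tokens: int = MAX_TOKENS) -> bool:
    """Return True if `text` fits within the recommended token budget (illustrative helper)."""
    n_tokens = len(tokenizer(text, add_special_tokens=False)["input_ids"])
    return n_tokens <= max_tokens


texts = ["a short passage", "a very long report " * 2000]
too_long = [t for t in texts if not within_budget(t)]
print(f"{len(too_long)} input(s) exceed {MAX_TOKENS} tokens and should be shortened or split.")
```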
[ "BIOSSES", "SCIFACT" ]
NovaSearch/stella_en_1.5B_v5
NovaSearch
sentence-similarity
[ "sentence-transformers", "pytorch", "onnx", "safetensors", "qwen2", "text-generation", "mteb", "transformers", "sentence-similarity", "custom_code", "arxiv:2412.19048", "arxiv:2205.13147", "license:mit", "model-index", "autotrain_compatible", "text-generation-inference", "text-embeddings-inference", "endpoints_compatible", "region:us" ]
"2024-07-12T15:52:09Z"
2025-03-05T13:58:41+00:00
179,630
234
--- license: mit tags: - mteb - sentence-transformers - transformers - sentence-similarity model-index: - name: stella_en_1.5B_v5 results: - task: type: Classification dataset: name: MTEB AmazonCounterfactualClassification (en) type: mteb/amazon_counterfactual config: en split: test revision: e8379541af4e31359cca9fbcf4b00f2671dba205 metrics: - type: accuracy value: 92.86567164179104 - type: ap value: 72.13503907102613 - type: ap_weighted value: 72.13503907102613 - type: f1 value: 89.5586886376355 - type: f1_weighted value: 93.13621183004571 - type: main_score value: 92.86567164179104 - task: type: Classification dataset: name: MTEB AmazonPolarityClassification type: mteb/amazon_polarity config: default split: test revision: e2d317d38cd51312af73b3d32a06d1a08b442046 metrics: - type: accuracy value: 97.16485 - type: ap value: 96.05546315415225 - type: ap_weighted value: 96.05546315415225 - type: f1 value: 97.16351087403213 - type: f1_weighted value: 97.16351087403213 - type: main_score value: 97.16485 - task: type: Classification dataset: name: MTEB AmazonReviewsClassification (en) type: mteb/amazon_reviews_multi config: en split: test revision: 1399c76144fd37290681b995c656ef9b2e06e26d metrics: - type: accuracy value: 59.358 - type: f1 value: 59.0264615883114 - type: f1_weighted value: 59.0264615883114 - type: main_score value: 59.358 - task: type: Retrieval dataset: name: MTEB ArguAna type: mteb/arguana config: default split: test revision: c22ab2a51041ffd869aaddef7af8d8215647e41a metrics: - type: main_score value: 65.269 - type: map_at_1 value: 41.607 - type: map_at_10 value: 57.104 - type: map_at_100 value: 57.621 - type: map_at_1000 value: 57.621 - type: map_at_20 value: 57.533 - type: map_at_3 value: 52.891999999999996 - type: map_at_5 value: 55.371 - type: mrr_at_1 value: 42.318634423897585 - type: mrr_at_10 value: 57.353970511865406 - type: mrr_at_100 value: 57.88398078476526 - type: mrr_at_1000 value: 57.88467807648422 - type: mrr_at_20 value: 57.796730533206166 - type: mrr_at_3 value: 53.200568990042775 - type: mrr_at_5 value: 55.6330014224753 - type: nauc_map_at_1000_diff1 value: 24.54414600428287 - type: nauc_map_at_1000_max value: -8.389738078358459 - type: nauc_map_at_1000_std value: -18.188787645801366 - type: nauc_map_at_100_diff1 value: 24.543138576462308 - type: nauc_map_at_100_max value: -8.390896839752044 - type: nauc_map_at_100_std value: -18.192549240185247 - type: nauc_map_at_10_diff1 value: 24.219607088995822 - type: nauc_map_at_10_max value: -8.245734391254308 - type: nauc_map_at_10_std value: -18.229706566466447 - type: nauc_map_at_1_diff1 value: 29.325201664812788 - type: nauc_map_at_1_max value: -11.742800494823971 - type: nauc_map_at_1_std value: -18.610215769702528 - type: nauc_map_at_20_diff1 value: 24.471097562798803 - type: nauc_map_at_20_max value: -8.318035874000799 - type: nauc_map_at_20_std value: -18.171541096773108 - type: nauc_map_at_3_diff1 value: 24.275846107642824 - type: nauc_map_at_3_max value: -8.212242049581894 - type: nauc_map_at_3_std value: -17.920379368937496 - type: nauc_map_at_5_diff1 value: 23.873692493209255 - type: nauc_map_at_5_max value: -8.110347163828767 - type: nauc_map_at_5_std value: -18.20863325596931 - type: nauc_mrr_at_1000_diff1 value: 22.656410956419975 - type: nauc_mrr_at_1000_max value: -8.924888102233243 - type: nauc_mrr_at_1000_std value: -18.103674384502526 - type: nauc_mrr_at_100_diff1 value: 22.655448817140968 - type: nauc_mrr_at_100_max value: -8.926034318499038 - type: nauc_mrr_at_100_std value: -18.10743930104164 - 
type: nauc_mrr_at_10_diff1 value: 22.297536272996872 - type: nauc_mrr_at_10_max value: -8.836407556658274 - type: nauc_mrr_at_10_std value: -18.1598393044477 - type: nauc_mrr_at_1_diff1 value: 27.419572424489708 - type: nauc_mrr_at_1_max value: -11.42241314820691 - type: nauc_mrr_at_1_std value: -18.54893865856313 - type: nauc_mrr_at_20_diff1 value: 22.590227214657418 - type: nauc_mrr_at_20_max value: -8.849986456376993 - type: nauc_mrr_at_20_std value: -18.0862391777352 - type: nauc_mrr_at_3_diff1 value: 22.415270167774988 - type: nauc_mrr_at_3_max value: -8.692871854156435 - type: nauc_mrr_at_3_std value: -17.6740102891955 - type: nauc_mrr_at_5_diff1 value: 21.96284578521464 - type: nauc_mrr_at_5_max value: -8.757031535546025 - type: nauc_mrr_at_5_std value: -18.210766964081294 - type: nauc_ndcg_at_1000_diff1 value: 23.939400161569115 - type: nauc_ndcg_at_1000_max value: -7.866999120512983 - type: nauc_ndcg_at_1000_std value: -17.981457019643617 - type: nauc_ndcg_at_100_diff1 value: 23.920033349619317 - type: nauc_ndcg_at_100_max value: -7.889849409678031 - type: nauc_ndcg_at_100_std value: -18.054931990360537 - type: nauc_ndcg_at_10_diff1 value: 22.543020461303534 - type: nauc_ndcg_at_10_max value: -7.072111788010867 - type: nauc_ndcg_at_10_std value: -18.26397604573537 - type: nauc_ndcg_at_1_diff1 value: 29.325201664812788 - type: nauc_ndcg_at_1_max value: -11.742800494823971 - type: nauc_ndcg_at_1_std value: -18.610215769702528 - type: nauc_ndcg_at_20_diff1 value: 23.551587021207972 - type: nauc_ndcg_at_20_max value: -7.298056222649139 - type: nauc_ndcg_at_20_std value: -18.056004880930608 - type: nauc_ndcg_at_3_diff1 value: 22.669089506345273 - type: nauc_ndcg_at_3_max value: -7.278024373570137 - type: nauc_ndcg_at_3_std value: -17.816657759914193 - type: nauc_ndcg_at_5_diff1 value: 21.72619728226575 - type: nauc_ndcg_at_5_max value: -6.959741647471228 - type: nauc_ndcg_at_5_std value: -18.35173705190235 - type: nauc_precision_at_1000_diff1 value: 5.0388241058076995 - type: nauc_precision_at_1000_max value: 34.439879624882145 - type: nauc_precision_at_1000_std value: 77.22610895194498 - type: nauc_precision_at_100_diff1 value: 1.340670767252794 - type: nauc_precision_at_100_max value: 19.30870025961241 - type: nauc_precision_at_100_std value: 35.37688289157788 - type: nauc_precision_at_10_diff1 value: 7.734227153124332 - type: nauc_precision_at_10_max value: 4.202399088422237 - type: nauc_precision_at_10_std value: -18.383890254046698 - type: nauc_precision_at_1_diff1 value: 29.325201664812788 - type: nauc_precision_at_1_max value: -11.742800494823971 - type: nauc_precision_at_1_std value: -18.610215769702528 - type: nauc_precision_at_20_diff1 value: 9.48070999361637 - type: nauc_precision_at_20_max value: 19.056709637253025 - type: nauc_precision_at_20_std value: -13.266821166159485 - type: nauc_precision_at_3_diff1 value: 17.245260303409747 - type: nauc_precision_at_3_max value: -4.202455033452335 - type: nauc_precision_at_3_std value: -17.514264039955332 - type: nauc_precision_at_5_diff1 value: 12.074628162049974 - type: nauc_precision_at_5_max value: -1.9145501461107832 - type: nauc_precision_at_5_std value: -19.162525528916344 - type: nauc_recall_at_1000_diff1 value: 5.038824105805915 - type: nauc_recall_at_1000_max value: 34.43987962487738 - type: nauc_recall_at_1000_std value: 77.22610895193765 - type: nauc_recall_at_100_diff1 value: 1.3406707672497025 - type: nauc_recall_at_100_max value: 19.30870025960776 - type: nauc_recall_at_100_std value: 35.37688289157515 - type: 
nauc_recall_at_10_diff1 value: 7.734227153124366 - type: nauc_recall_at_10_max value: 4.202399088421976 - type: nauc_recall_at_10_std value: -18.38389025404673 - type: nauc_recall_at_1_diff1 value: 29.325201664812788 - type: nauc_recall_at_1_max value: -11.742800494823971 - type: nauc_recall_at_1_std value: -18.610215769702528 - type: nauc_recall_at_20_diff1 value: 9.480709993616845 - type: nauc_recall_at_20_max value: 19.05670963725301 - type: nauc_recall_at_20_std value: -13.266821166158651 - type: nauc_recall_at_3_diff1 value: 17.24526030340978 - type: nauc_recall_at_3_max value: -4.202455033452323 - type: nauc_recall_at_3_std value: -17.51426403995538 - type: nauc_recall_at_5_diff1 value: 12.074628162049992 - type: nauc_recall_at_5_max value: -1.914550146110865 - type: nauc_recall_at_5_std value: -19.162525528916362 - type: ndcg_at_1 value: 41.607 - type: ndcg_at_10 value: 65.269 - type: ndcg_at_100 value: 67.289 - type: ndcg_at_1000 value: 67.29899999999999 - type: ndcg_at_20 value: 66.76299999999999 - type: ndcg_at_3 value: 56.604 - type: ndcg_at_5 value: 61.07900000000001 - type: precision_at_1 value: 41.607 - type: precision_at_10 value: 9.118 - type: precision_at_100 value: 0.996 - type: precision_at_1000 value: 0.1 - type: precision_at_20 value: 4.8469999999999995 - type: precision_at_3 value: 22.451 - type: precision_at_5 value: 15.647 - type: recall_at_1 value: 41.607 - type: recall_at_10 value: 91.181 - type: recall_at_100 value: 99.57300000000001 - type: recall_at_1000 value: 99.644 - type: recall_at_20 value: 96.942 - type: recall_at_3 value: 67.354 - type: recall_at_5 value: 78.236 - task: type: Clustering dataset: name: MTEB ArxivClusteringP2P type: mteb/arxiv-clustering-p2p config: default split: test revision: a122ad7f3f0291bf49cc6f4d32aa80929df69d5d metrics: - type: main_score value: 55.437138353189994 - type: v_measure value: 55.437138353189994 - type: v_measure_std value: 14.718556601335491 - task: type: Clustering dataset: name: MTEB ArxivClusteringS2S type: mteb/arxiv-clustering-s2s config: default split: test revision: f910caf1a6075f7329cdf8c1a6135696f37dbd53 metrics: - type: main_score value: 50.65858459544658 - type: v_measure value: 50.65858459544658 - type: v_measure_std value: 14.887033747525146 - task: type: Reranking dataset: name: MTEB AskUbuntuDupQuestions type: mteb/askubuntudupquestions-reranking config: default split: test revision: 2000358ca161889fa9c082cb41daa8dcfb161a54 metrics: - type: main_score value: 67.32597152838535 - type: map value: 67.32597152838535 - type: mrr value: 78.98683111286988 - type: nAUC_map_diff1 value: 16.8624639710487 - type: nAUC_map_max value: 24.91996491142433 - type: nAUC_map_std value: 17.91865808793225 - type: nAUC_mrr_diff1 value: 25.03766425631947 - type: nAUC_mrr_max value: 41.64561939958336 - type: nAUC_mrr_std value: 23.179909345891968 - task: type: STS dataset: name: MTEB BIOSSES type: mteb/biosses-sts config: default split: test revision: d3fb88f8f02e40887cd149695127462bbcf29b4a metrics: - type: cosine_pearson value: 85.790820496042 - type: cosine_spearman value: 83.10731534330517 - type: euclidean_pearson value: 84.61741304343133 - type: euclidean_spearman value: 83.17297949010973 - type: main_score value: 83.10731534330517 - type: manhattan_pearson value: 85.2137696526676 - type: manhattan_spearman value: 84.39168195786738 - type: pearson value: 85.790820496042 - type: spearman value: 83.10731534330517 - task: type: Classification dataset: name: MTEB Banking77Classification type: mteb/banking77 config: default 
split: test revision: 0fd18e25b25c072e09e0d92ab615fda904d66300 metrics: - type: accuracy value: 89.78896103896105 - type: f1 value: 89.76107366333488 - type: f1_weighted value: 89.76107366333488 - type: main_score value: 89.78896103896105 - task: type: Clustering dataset: name: MTEB BiorxivClusteringP2P type: mteb/biorxiv-clustering-p2p config: default split: test revision: 65b79d1d13f80053f67aca9498d9402c2d9f1f40 metrics: - type: main_score value: 50.68092296236376 - type: v_measure value: 50.68092296236376 - type: v_measure_std value: 0.7832640983085436 - task: type: Clustering dataset: name: MTEB BiorxivClusteringS2S type: mteb/biorxiv-clustering-s2s config: default split: test revision: 258694dd0231531bc1fd9de6ceb52a0853c6d908 metrics: - type: main_score value: 46.86629236732983 - type: v_measure value: 46.86629236732983 - type: v_measure_std value: 0.8784322236350974 - task: type: Retrieval dataset: name: MTEB CQADupstackRetrieval type: mteb/cqadupstack config: default split: test revision: 4ffe81d471b1924886b33c7567bfb200e9eec5c4 metrics: - type: main_score value: 47.74883333333334 - type: map_at_1 value: 30.179249999999996 - type: map_at_10 value: 41.60824999999999 - type: map_at_100 value: 42.94008333333332 - type: map_at_1000 value: 43.04666666666667 - type: map_at_20 value: 42.36833333333334 - type: map_at_3 value: 38.23491666666666 - type: map_at_5 value: 40.10183333333333 - type: mrr_at_1 value: 36.47676085808166 - type: mrr_at_10 value: 46.300991916437155 - type: mrr_at_100 value: 47.12155753713262 - type: mrr_at_1000 value: 47.168033610799945 - type: mrr_at_20 value: 46.80405724560391 - type: mrr_at_3 value: 43.77000352801797 - type: mrr_at_5 value: 45.22295361704542 - type: nauc_map_at_1000_diff1 value: 46.953671666941524 - type: nauc_map_at_1000_max value: 32.260396316089675 - type: nauc_map_at_1000_std value: 0.6657766120094878 - type: nauc_map_at_100_diff1 value: 46.94717463394555 - type: nauc_map_at_100_max value: 32.25088350678177 - type: nauc_map_at_100_std value: 0.6257017014549283 - type: nauc_map_at_10_diff1 value: 46.974678429336464 - type: nauc_map_at_10_max value: 31.862230807295504 - type: nauc_map_at_10_std value: -0.14758828549579284 - type: nauc_map_at_1_diff1 value: 52.48913346466124 - type: nauc_map_at_1_max value: 29.874374024967725 - type: nauc_map_at_1_std value: -2.433547569836134 - type: nauc_map_at_20_diff1 value: 46.96088684217651 - type: nauc_map_at_20_max value: 32.08954208613205 - type: nauc_map_at_20_std value: 0.25946321113436527 - type: nauc_map_at_3_diff1 value: 47.703230121518345 - type: nauc_map_at_3_max value: 30.977880095983107 - type: nauc_map_at_3_std value: -1.342777563991804 - type: nauc_map_at_5_diff1 value: 47.1615010199957 - type: nauc_map_at_5_max value: 31.420885812683284 - type: nauc_map_at_5_std value: -0.8789297099444306 - type: nauc_mrr_at_1000_diff1 value: 46.69178645962615 - type: nauc_mrr_at_1000_max value: 34.392807413340655 - type: nauc_mrr_at_1000_std value: 1.6155464863667934 - type: nauc_mrr_at_100_diff1 value: 46.67417236349189 - type: nauc_mrr_at_100_max value: 34.384607045512624 - type: nauc_mrr_at_100_std value: 1.6259917384109652 - type: nauc_mrr_at_10_diff1 value: 46.60497560446239 - type: nauc_mrr_at_10_max value: 34.32918897817958 - type: nauc_mrr_at_10_std value: 1.39387793769014 - type: nauc_mrr_at_1_diff1 value: 51.61608573254137 - type: nauc_mrr_at_1_max value: 35.18105023234596 - type: nauc_mrr_at_1_std value: 0.17943702145478177 - type: nauc_mrr_at_20_diff1 value: 46.635943069860254 - type: 
nauc_mrr_at_20_max value: 34.37050973118794 - type: nauc_mrr_at_20_std value: 1.5346464678860607 - type: nauc_mrr_at_3_diff1 value: 47.154389369038334 - type: nauc_mrr_at_3_max value: 34.41036411855465 - type: nauc_mrr_at_3_std value: 0.924551812357872 - type: nauc_mrr_at_5_diff1 value: 46.6690101691763 - type: nauc_mrr_at_5_max value: 34.29740388138466 - type: nauc_mrr_at_5_std value: 1.0567184149139792 - type: nauc_ndcg_at_1000_diff1 value: 45.375448289173264 - type: nauc_ndcg_at_1000_max value: 33.47957083714482 - type: nauc_ndcg_at_1000_std value: 3.192251100225568 - type: nauc_ndcg_at_100_diff1 value: 44.93601014699499 - type: nauc_ndcg_at_100_max value: 33.21249888295249 - type: nauc_ndcg_at_100_std value: 3.609842852934217 - type: nauc_ndcg_at_10_diff1 value: 44.87893284011915 - type: nauc_ndcg_at_10_max value: 32.384885249478515 - type: nauc_ndcg_at_10_std value: 1.454493065035396 - type: nauc_ndcg_at_1_diff1 value: 51.61608573254137 - type: nauc_ndcg_at_1_max value: 35.18105023234596 - type: nauc_ndcg_at_1_std value: 0.17943702145478177 - type: nauc_ndcg_at_20_diff1 value: 44.867752179050605 - type: nauc_ndcg_at_20_max value: 32.689535921840196 - type: nauc_ndcg_at_20_std value: 2.337765158573901 - type: nauc_ndcg_at_3_diff1 value: 45.87485821381341 - type: nauc_ndcg_at_3_max value: 32.33282450558947 - type: nauc_ndcg_at_3_std value: 0.0681643829273283 - type: nauc_ndcg_at_5_diff1 value: 45.202902131892394 - type: nauc_ndcg_at_5_max value: 32.1026971523917 - type: nauc_ndcg_at_5_std value: 0.3565572833774486 - type: nauc_precision_at_1000_diff1 value: -8.935267931198956 - type: nauc_precision_at_1000_max value: 6.464981960169269 - type: nauc_precision_at_1000_std value: 10.662786182234633 - type: nauc_precision_at_100_diff1 value: -1.64091517847155 - type: nauc_precision_at_100_max value: 15.175617871025024 - type: nauc_precision_at_100_std value: 16.924256989248075 - type: nauc_precision_at_10_diff1 value: 15.676651966277047 - type: nauc_precision_at_10_max value: 26.243734188847117 - type: nauc_precision_at_10_std value: 10.601741034956333 - type: nauc_precision_at_1_diff1 value: 51.61608573254137 - type: nauc_precision_at_1_max value: 35.18105023234596 - type: nauc_precision_at_1_std value: 0.17943702145478177 - type: nauc_precision_at_20_diff1 value: 9.447267260198654 - type: nauc_precision_at_20_max value: 23.024130858142723 - type: nauc_precision_at_20_std value: 13.739145648899603 - type: nauc_precision_at_3_diff1 value: 30.11583572134629 - type: nauc_precision_at_3_max value: 31.37321080069495 - type: nauc_precision_at_3_std value: 4.705512374126024 - type: nauc_precision_at_5_diff1 value: 23.192015335996093 - type: nauc_precision_at_5_max value: 29.415746835998764 - type: nauc_precision_at_5_std value: 6.843498772798558 - type: nauc_recall_at_1000_diff1 value: 25.36573313426033 - type: nauc_recall_at_1000_max value: 43.06672256524168 - type: nauc_recall_at_1000_std value: 47.93664853815292 - type: nauc_recall_at_100_diff1 value: 31.222880916617406 - type: nauc_recall_at_100_max value: 31.761159904172658 - type: nauc_recall_at_100_std value: 23.034218976635877 - type: nauc_recall_at_10_diff1 value: 36.23439028915225 - type: nauc_recall_at_10_max value: 28.473458977606438 - type: nauc_recall_at_10_std value: 3.7797969934159 - type: nauc_recall_at_1_diff1 value: 52.48913346466124 - type: nauc_recall_at_1_max value: 29.874374024967725 - type: nauc_recall_at_1_std value: -2.433547569836134 - type: nauc_recall_at_20_diff1 value: 34.678676952584766 - type: nauc_recall_at_20_max 
value: 29.04638392522168 - type: nauc_recall_at_20_std value: 8.148894982082549 - type: nauc_recall_at_3_diff1 value: 41.31029996231311 - type: nauc_recall_at_3_max value: 28.44199443414157 - type: nauc_recall_at_3_std value: -0.747324057600377 - type: nauc_recall_at_5_diff1 value: 38.535873899920674 - type: nauc_recall_at_5_max value: 27.942667805948375 - type: nauc_recall_at_5_std value: 0.30652206930973686 - type: ndcg_at_1 value: 36.47675 - type: ndcg_at_10 value: 47.74883333333334 - type: ndcg_at_100 value: 52.902416666666674 - type: ndcg_at_1000 value: 54.69116666666667 - type: ndcg_at_20 value: 49.89758333333333 - type: ndcg_at_3 value: 42.462250000000004 - type: ndcg_at_5 value: 44.91841666666667 - type: precision_at_1 value: 36.47675 - type: precision_at_10 value: 8.582416666666665 - type: precision_at_100 value: 1.31475 - type: precision_at_1000 value: 0.16458333333333333 - type: precision_at_20 value: 5.021833333333333 - type: precision_at_3 value: 20.004499999999997 - type: precision_at_5 value: 14.178666666666665 - type: recall_at_1 value: 30.179249999999996 - type: recall_at_10 value: 60.950166666666675 - type: recall_at_100 value: 83.19025 - type: recall_at_1000 value: 95.27774999999998 - type: recall_at_20 value: 68.80175 - type: recall_at_3 value: 46.01841666666666 - type: recall_at_5 value: 52.482416666666666 - task: type: Retrieval dataset: name: MTEB ClimateFEVER type: mteb/climate-fever config: default split: test revision: 47f2ac6acb640fc46020b02a5b59fdda04d39380 metrics: - type: main_score value: 46.113 - type: map_at_1 value: 20.122999999999998 - type: map_at_10 value: 35.474 - type: map_at_100 value: 37.592 - type: map_at_1000 value: 37.773 - type: map_at_20 value: 36.637 - type: map_at_3 value: 29.731 - type: map_at_5 value: 32.964 - type: mrr_at_1 value: 46.71009771986971 - type: mrr_at_10 value: 58.855669303552105 - type: mrr_at_100 value: 59.389249674038425 - type: mrr_at_1000 value: 59.408448104362364 - type: mrr_at_20 value: 59.23881203149016 - type: mrr_at_3 value: 56.18892508143328 - type: mrr_at_5 value: 57.85342019543985 - type: nauc_map_at_1000_diff1 value: 27.047031037721958 - type: nauc_map_at_1000_max value: 43.25240279148033 - type: nauc_map_at_1000_std value: 20.795849418696037 - type: nauc_map_at_100_diff1 value: 27.044739015116452 - type: nauc_map_at_100_max value: 43.24042159787812 - type: nauc_map_at_100_std value: 20.799952124137683 - type: nauc_map_at_10_diff1 value: 27.372696854670338 - type: nauc_map_at_10_max value: 43.054456574721684 - type: nauc_map_at_10_std value: 19.537162110136645 - type: nauc_map_at_1_diff1 value: 43.65424623953092 - type: nauc_map_at_1_max value: 45.17986509998762 - type: nauc_map_at_1_std value: 8.497107052335414 - type: nauc_map_at_20_diff1 value: 27.224535846566074 - type: nauc_map_at_20_max value: 43.12222854561229 - type: nauc_map_at_20_std value: 20.29982972202669 - type: nauc_map_at_3_diff1 value: 30.87847002319001 - type: nauc_map_at_3_max value: 42.890027891707575 - type: nauc_map_at_3_std value: 13.857451947580929 - type: nauc_map_at_5_diff1 value: 27.966867093591542 - type: nauc_map_at_5_max value: 42.35826637592201 - type: nauc_map_at_5_std value: 16.993102524058624 - type: nauc_mrr_at_1000_diff1 value: 30.191544077608164 - type: nauc_mrr_at_1000_max value: 44.959438920351644 - type: nauc_mrr_at_1000_std value: 24.065801376465114 - type: nauc_mrr_at_100_diff1 value: 30.170368115494 - type: nauc_mrr_at_100_max value: 44.955868115761156 - type: nauc_mrr_at_100_std value: 24.093510767847707 - type: 
nauc_mrr_at_10_diff1 value: 30.128430637520175 - type: nauc_mrr_at_10_max value: 44.97689261350708 - type: nauc_mrr_at_10_std value: 24.037049561818897 - type: nauc_mrr_at_1_diff1 value: 35.323351939108214 - type: nauc_mrr_at_1_max value: 43.85026244855636 - type: nauc_mrr_at_1_std value: 17.040662141218974 - type: nauc_mrr_at_20_diff1 value: 30.192006556160443 - type: nauc_mrr_at_20_max value: 45.02814530774032 - type: nauc_mrr_at_20_std value: 24.20885865448696 - type: nauc_mrr_at_3_diff1 value: 29.88250163424518 - type: nauc_mrr_at_3_max value: 44.25768944883186 - type: nauc_mrr_at_3_std value: 22.804183393364198 - type: nauc_mrr_at_5_diff1 value: 30.269824490420767 - type: nauc_mrr_at_5_max value: 44.97443265796657 - type: nauc_mrr_at_5_std value: 23.894159916141177 - type: nauc_ndcg_at_1000_diff1 value: 24.533764005407356 - type: nauc_ndcg_at_1000_max value: 44.50902713386608 - type: nauc_ndcg_at_1000_std value: 27.589506980238404 - type: nauc_ndcg_at_100_diff1 value: 24.209785073940353 - type: nauc_ndcg_at_100_max value: 44.18257063893669 - type: nauc_ndcg_at_100_std value: 27.963150866401943 - type: nauc_ndcg_at_10_diff1 value: 25.168069201989486 - type: nauc_ndcg_at_10_max value: 43.84940910683214 - type: nauc_ndcg_at_10_std value: 24.810707270956435 - type: nauc_ndcg_at_1_diff1 value: 35.323351939108214 - type: nauc_ndcg_at_1_max value: 43.85026244855636 - type: nauc_ndcg_at_1_std value: 17.040662141218974 - type: nauc_ndcg_at_20_diff1 value: 24.829924800466834 - type: nauc_ndcg_at_20_max value: 43.738574327059716 - type: nauc_ndcg_at_20_std value: 26.252370278684072 - type: nauc_ndcg_at_3_diff1 value: 27.321943393906274 - type: nauc_ndcg_at_3_max value: 42.16584786993447 - type: nauc_ndcg_at_3_std value: 18.24775079455969 - type: nauc_ndcg_at_5_diff1 value: 26.043785418347998 - type: nauc_ndcg_at_5_max value: 42.874593895388344 - type: nauc_ndcg_at_5_std value: 21.294004555506117 - type: nauc_precision_at_1000_diff1 value: -22.073027615308582 - type: nauc_precision_at_1000_max value: -6.549723766317357 - type: nauc_precision_at_1000_std value: 18.301749191241306 - type: nauc_precision_at_100_diff1 value: -15.654286887593619 - type: nauc_precision_at_100_max value: 6.401516251421999 - type: nauc_precision_at_100_std value: 29.170680324929805 - type: nauc_precision_at_10_diff1 value: -4.362381972892247 - type: nauc_precision_at_10_max value: 22.10943515872447 - type: nauc_precision_at_10_std value: 31.869699459530022 - type: nauc_precision_at_1_diff1 value: 35.323351939108214 - type: nauc_precision_at_1_max value: 43.85026244855636 - type: nauc_precision_at_1_std value: 17.040662141218974 - type: nauc_precision_at_20_diff1 value: -7.50749661117875 - type: nauc_precision_at_20_max value: 16.80584016023257 - type: nauc_precision_at_20_std value: 31.976755897112437 - type: nauc_precision_at_3_diff1 value: 7.402667538773083 - type: nauc_precision_at_3_max value: 31.2088401330676 - type: nauc_precision_at_3_std value: 24.287905698405662 - type: nauc_precision_at_5_diff1 value: 0.7479172565343901 - type: nauc_precision_at_5_max value: 26.28427734237825 - type: nauc_precision_at_5_std value: 28.246947120310317 - type: nauc_recall_at_1000_diff1 value: 2.4778431086370496 - type: nauc_recall_at_1000_max value: 40.2231995797509 - type: nauc_recall_at_1000_std value: 52.62124052183862 - type: nauc_recall_at_100_diff1 value: 8.960962419741463 - type: nauc_recall_at_100_max value: 35.81132850291491 - type: nauc_recall_at_100_std value: 40.020903251786166 - type: nauc_recall_at_10_diff1 value: 
15.603400751376636 - type: nauc_recall_at_10_max value: 37.570127529136485 - type: nauc_recall_at_10_std value: 28.07128410238545 - type: nauc_recall_at_1_diff1 value: 43.65424623953092 - type: nauc_recall_at_1_max value: 45.17986509998762 - type: nauc_recall_at_1_std value: 8.497107052335414 - type: nauc_recall_at_20_diff1 value: 13.844820282832346 - type: nauc_recall_at_20_max value: 36.0106148516309 - type: nauc_recall_at_20_std value: 31.453103910565254 - type: nauc_recall_at_3_diff1 value: 24.359328154117748 - type: nauc_recall_at_3_max value: 39.93774251377568 - type: nauc_recall_at_3_std value: 16.214921517509648 - type: nauc_recall_at_5_diff1 value: 18.75788451360292 - type: nauc_recall_at_5_max value: 38.177646107055516 - type: nauc_recall_at_5_std value: 22.17196825834675 - type: ndcg_at_1 value: 46.71 - type: ndcg_at_10 value: 46.113 - type: ndcg_at_100 value: 53.035 - type: ndcg_at_1000 value: 55.724 - type: ndcg_at_20 value: 48.929 - type: ndcg_at_3 value: 39.501999999999995 - type: ndcg_at_5 value: 41.792 - type: precision_at_1 value: 46.71 - type: precision_at_10 value: 14.274000000000001 - type: precision_at_100 value: 2.1870000000000003 - type: precision_at_1000 value: 0.269 - type: precision_at_20 value: 8.375 - type: precision_at_3 value: 29.881 - type: precision_at_5 value: 22.697 - type: recall_at_1 value: 20.122999999999998 - type: recall_at_10 value: 52.22 - type: recall_at_100 value: 75.388 - type: recall_at_1000 value: 89.938 - type: recall_at_20 value: 60.077000000000005 - type: recall_at_3 value: 35.150999999999996 - type: recall_at_5 value: 42.748000000000005 - task: type: Retrieval dataset: name: MTEB DBPedia type: mteb/dbpedia config: default split: test revision: c0f706b76e590d620bd6618b3ca8efdd34e2d659 metrics: - type: main_score value: 52.276999999999994 - type: map_at_1 value: 9.949 - type: map_at_10 value: 24.891 - type: map_at_100 value: 37.111 - type: map_at_1000 value: 39.266 - type: map_at_20 value: 29.685 - type: map_at_3 value: 16.586000000000002 - type: map_at_5 value: 19.982 - type: mrr_at_1 value: 76.25 - type: mrr_at_10 value: 82.4518849206349 - type: mrr_at_100 value: 82.70302194564499 - type: mrr_at_1000 value: 82.70909729942254 - type: mrr_at_20 value: 82.60492765962964 - type: mrr_at_3 value: 81.33333333333331 - type: mrr_at_5 value: 82.14583333333331 - type: nauc_map_at_1000_diff1 value: 21.427201262456556 - type: nauc_map_at_1000_max value: 35.357361590816076 - type: nauc_map_at_1000_std value: 24.785419223353717 - type: nauc_map_at_100_diff1 value: 22.82358692021537 - type: nauc_map_at_100_max value: 35.07399692072945 - type: nauc_map_at_100_std value: 22.679878828987025 - type: nauc_map_at_10_diff1 value: 26.491769223479643 - type: nauc_map_at_10_max value: 20.78079385443902 - type: nauc_map_at_10_std value: -4.910406292079661 - type: nauc_map_at_1_diff1 value: 35.20851030208876 - type: nauc_map_at_1_max value: 5.783003346365858 - type: nauc_map_at_1_std value: -21.11679133835354 - type: nauc_map_at_20_diff1 value: 24.80097499300491 - type: nauc_map_at_20_max value: 26.807021360774975 - type: nauc_map_at_20_std value: 4.793103995429955 - type: nauc_map_at_3_diff1 value: 29.238193458890173 - type: nauc_map_at_3_max value: 10.300839972189456 - type: nauc_map_at_3_std value: -17.889666731981592 - type: nauc_map_at_5_diff1 value: 28.773624870573926 - type: nauc_map_at_5_max value: 14.951435645422887 - type: nauc_map_at_5_std value: -13.319697827173565 - type: nauc_mrr_at_1000_diff1 value: 55.232544856708785 - type: nauc_mrr_at_1000_max 
value: 64.73225637682637 - type: nauc_mrr_at_1000_std value: 37.57480399594188 - type: nauc_mrr_at_100_diff1 value: 55.219251601773735 - type: nauc_mrr_at_100_max value: 64.73305063663611 - type: nauc_mrr_at_100_std value: 37.56458562909293 - type: nauc_mrr_at_10_diff1 value: 55.123463838253464 - type: nauc_mrr_at_10_max value: 64.91914041040233 - type: nauc_mrr_at_10_std value: 37.76482503851598 - type: nauc_mrr_at_1_diff1 value: 56.45461238513347 - type: nauc_mrr_at_1_max value: 63.11782510293676 - type: nauc_mrr_at_1_std value: 33.592561284868985 - type: nauc_mrr_at_20_diff1 value: 55.15401961460458 - type: nauc_mrr_at_20_max value: 64.77145835613156 - type: nauc_mrr_at_20_std value: 37.471561418305804 - type: nauc_mrr_at_3_diff1 value: 54.64387438697658 - type: nauc_mrr_at_3_max value: 64.27618995019164 - type: nauc_mrr_at_3_std value: 39.391637295269014 - type: nauc_mrr_at_5_diff1 value: 55.08702591239485 - type: nauc_mrr_at_5_max value: 64.6071475650635 - type: nauc_mrr_at_5_std value: 37.97185134269896 - type: nauc_ndcg_at_1000_diff1 value: 31.696698876400387 - type: nauc_ndcg_at_1000_max value: 52.12183760001191 - type: nauc_ndcg_at_1000_std value: 40.197596211778716 - type: nauc_ndcg_at_100_diff1 value: 33.253120193433666 - type: nauc_ndcg_at_100_max value: 49.47167758554746 - type: nauc_ndcg_at_100_std value: 32.643833139756204 - type: nauc_ndcg_at_10_diff1 value: 27.065541392580013 - type: nauc_ndcg_at_10_max value: 45.83504281289289 - type: nauc_ndcg_at_10_std value: 27.11739500732328 - type: nauc_ndcg_at_1_diff1 value: 49.42808250022517 - type: nauc_ndcg_at_1_max value: 53.502615048520354 - type: nauc_ndcg_at_1_std value: 27.17555908836708 - type: nauc_ndcg_at_20_diff1 value: 29.374791382330308 - type: nauc_ndcg_at_20_max value: 43.91246842479055 - type: nauc_ndcg_at_20_std value: 23.419410620550316 - type: nauc_ndcg_at_3_diff1 value: 26.71550354496204 - type: nauc_ndcg_at_3_max value: 43.9641457892003 - type: nauc_ndcg_at_3_std value: 27.320024167947686 - type: nauc_ndcg_at_5_diff1 value: 27.020654974589487 - type: nauc_ndcg_at_5_max value: 46.130417266030584 - type: nauc_ndcg_at_5_std value: 28.392009019010068 - type: nauc_precision_at_1000_diff1 value: -21.47455482181002 - type: nauc_precision_at_1000_max value: -9.721907229236024 - type: nauc_precision_at_1000_std value: -1.061132062651487 - type: nauc_precision_at_100_diff1 value: -12.35759246101943 - type: nauc_precision_at_100_max value: 15.509512444892168 - type: nauc_precision_at_100_std value: 36.21183578592014 - type: nauc_precision_at_10_diff1 value: -6.136998947343125 - type: nauc_precision_at_10_max value: 32.30037906748288 - type: nauc_precision_at_10_std value: 41.4500302476981 - type: nauc_precision_at_1_diff1 value: 56.45461238513347 - type: nauc_precision_at_1_max value: 63.11782510293676 - type: nauc_precision_at_1_std value: 33.592561284868985 - type: nauc_precision_at_20_diff1 value: -7.335890123683174 - type: nauc_precision_at_20_max value: 28.31417075291312 - type: nauc_precision_at_20_std value: 41.405935715061815 - type: nauc_precision_at_3_diff1 value: 7.117255890225942 - type: nauc_precision_at_3_max value: 39.19894132683829 - type: nauc_precision_at_3_std value: 38.48255841994843 - type: nauc_precision_at_5_diff1 value: 1.861523090114206 - type: nauc_precision_at_5_max value: 38.11649223007208 - type: nauc_precision_at_5_std value: 40.52993530374645 - type: nauc_recall_at_1000_diff1 value: 26.497648584314636 - type: nauc_recall_at_1000_max value: 44.48069746734414 - type: nauc_recall_at_1000_std 
value: 53.16438130228715 - type: nauc_recall_at_100_diff1 value: 26.353456899511446 - type: nauc_recall_at_100_max value: 37.57379787884197 - type: nauc_recall_at_100_std value: 29.197468295989548 - type: nauc_recall_at_10_diff1 value: 22.80445738351114 - type: nauc_recall_at_10_max value: 15.895630778449046 - type: nauc_recall_at_10_std value: -8.746224797644501 - type: nauc_recall_at_1_diff1 value: 35.20851030208876 - type: nauc_recall_at_1_max value: 5.783003346365858 - type: nauc_recall_at_1_std value: -21.11679133835354 - type: nauc_recall_at_20_diff1 value: 22.34028867678706 - type: nauc_recall_at_20_max value: 21.42373427646772 - type: nauc_recall_at_20_std value: 0.4533036151015875 - type: nauc_recall_at_3_diff1 value: 24.96853445599229 - type: nauc_recall_at_3_max value: 6.245185375804208 - type: nauc_recall_at_3_std value: -20.200240127099622 - type: nauc_recall_at_5_diff1 value: 24.749259476710623 - type: nauc_recall_at_5_max value: 11.024592845995942 - type: nauc_recall_at_5_std value: -16.15683085641543 - type: ndcg_at_1 value: 64.125 - type: ndcg_at_10 value: 52.276999999999994 - type: ndcg_at_100 value: 57.440000000000005 - type: ndcg_at_1000 value: 64.082 - type: ndcg_at_20 value: 51.383 - type: ndcg_at_3 value: 55.769000000000005 - type: ndcg_at_5 value: 53.978 - type: precision_at_1 value: 76.25 - type: precision_at_10 value: 43.05 - type: precision_at_100 value: 14.09 - type: precision_at_1000 value: 2.662 - type: precision_at_20 value: 33.112 - type: precision_at_3 value: 59.833000000000006 - type: precision_at_5 value: 53.05 - type: recall_at_1 value: 9.949 - type: recall_at_10 value: 30.424 - type: recall_at_100 value: 64.062 - type: recall_at_1000 value: 85.916 - type: recall_at_20 value: 39.895 - type: recall_at_3 value: 17.876 - type: recall_at_5 value: 22.536 - task: type: Classification dataset: name: MTEB EmotionClassification type: mteb/emotion config: default split: test revision: 4f58c6b202a23cf9a4da393831edf4f9183cad37 metrics: - type: accuracy value: 84.29499999999999 - type: f1 value: 79.76188258172078 - type: f1_weighted value: 84.96026012933847 - type: main_score value: 84.29499999999999 - task: type: Retrieval dataset: name: MTEB FEVER type: mteb/fever config: default split: test revision: bea83ef9e8fb933d90a2f1d5515737465d613e12 metrics: - type: main_score value: 94.83200000000001 - type: map_at_1 value: 87.339 - type: map_at_10 value: 92.92099999999999 - type: map_at_100 value: 93.108 - type: map_at_1000 value: 93.116 - type: map_at_20 value: 93.041 - type: map_at_3 value: 92.219 - type: map_at_5 value: 92.664 - type: mrr_at_1 value: 93.99939993999399 - type: mrr_at_10 value: 96.55188137861403 - type: mrr_at_100 value: 96.5652366009286 - type: mrr_at_1000 value: 96.5652625550811 - type: mrr_at_20 value: 96.5601781754844 - type: mrr_at_3 value: 96.45714571457142 - type: mrr_at_5 value: 96.544904490449 - type: nauc_map_at_1000_diff1 value: 51.81676454961933 - type: nauc_map_at_1000_max value: 24.904822914926118 - type: nauc_map_at_1000_std value: -3.8110347821630404 - type: nauc_map_at_100_diff1 value: 51.77514975011158 - type: nauc_map_at_100_max value: 24.912497341800094 - type: nauc_map_at_100_std value: -3.76229517662447 - type: nauc_map_at_10_diff1 value: 51.29608296382479 - type: nauc_map_at_10_max value: 24.78704970246707 - type: nauc_map_at_10_std value: -3.723130815783328 - type: nauc_map_at_1_diff1 value: 59.90813138005125 - type: nauc_map_at_1_max value: 24.58479295693794 - type: nauc_map_at_1_std value: -8.056152492777027 - type: 
nauc_map_at_20_diff1 value: 51.428639331678326 - type: nauc_map_at_20_max value: 24.849214517705086 - type: nauc_map_at_20_std value: -3.685550123874596 - type: nauc_map_at_3_diff1 value: 50.94399923719279 - type: nauc_map_at_3_max value: 24.359700180006207 - type: nauc_map_at_3_std value: -5.407767408816422 - type: nauc_map_at_5_diff1 value: 50.767302682959546 - type: nauc_map_at_5_max value: 24.491113461892215 - type: nauc_map_at_5_std value: -4.058336127339082 - type: nauc_mrr_at_1000_diff1 value: 79.86042313551833 - type: nauc_mrr_at_1000_max value: 23.20960445633933 - type: nauc_mrr_at_1000_std value: -23.54334295120471 - type: nauc_mrr_at_100_diff1 value: 79.85991247027636 - type: nauc_mrr_at_100_max value: 23.210085926780106 - type: nauc_mrr_at_100_std value: -23.542508200789197 - type: nauc_mrr_at_10_diff1 value: 79.71095155563415 - type: nauc_mrr_at_10_max value: 23.24128650883908 - type: nauc_mrr_at_10_std value: -23.408502781834102 - type: nauc_mrr_at_1_diff1 value: 82.6349900233902 - type: nauc_mrr_at_1_max value: 21.994548214014227 - type: nauc_mrr_at_1_std value: -22.549769792179262 - type: nauc_mrr_at_20_diff1 value: 79.76465012873038 - type: nauc_mrr_at_20_max value: 23.17575026523213 - type: nauc_mrr_at_20_std value: -23.492660166315048 - type: nauc_mrr_at_3_diff1 value: 79.91074933379953 - type: nauc_mrr_at_3_max value: 24.14246499097892 - type: nauc_mrr_at_3_std value: -25.22601708389664 - type: nauc_mrr_at_5_diff1 value: 79.62092651565847 - type: nauc_mrr_at_5_max value: 23.315937737034425 - type: nauc_mrr_at_5_std value: -23.317659360058403 - type: nauc_ndcg_at_1000_diff1 value: 54.404537986779225 - type: nauc_ndcg_at_1000_max value: 25.38408304128995 - type: nauc_ndcg_at_1000_std value: -4.916709117696968 - type: nauc_ndcg_at_100_diff1 value: 53.2448598868241 - type: nauc_ndcg_at_100_max value: 25.75325255295546 - type: nauc_ndcg_at_100_std value: -3.680507005630751 - type: nauc_ndcg_at_10_diff1 value: 50.81057355170232 - type: nauc_ndcg_at_10_max value: 25.006448273343807 - type: nauc_ndcg_at_10_std value: -2.8979899112515577 - type: nauc_ndcg_at_1_diff1 value: 82.6349900233902 - type: nauc_ndcg_at_1_max value: 21.994548214014227 - type: nauc_ndcg_at_1_std value: -22.549769792179262 - type: nauc_ndcg_at_20_diff1 value: 51.205023097166304 - type: nauc_ndcg_at_20_max value: 25.22133626556826 - type: nauc_ndcg_at_20_std value: -2.9506328244150155 - type: nauc_ndcg_at_3_diff1 value: 51.79780256736321 - type: nauc_ndcg_at_3_max value: 24.81137324438439 - type: nauc_ndcg_at_3_std value: -6.881223858227807 - type: nauc_ndcg_at_5_diff1 value: 50.290038260564565 - type: nauc_ndcg_at_5_max value: 24.57250792165796 - type: nauc_ndcg_at_5_std value: -3.5124628344654596 - type: nauc_precision_at_1000_diff1 value: -20.215211396894333 - type: nauc_precision_at_1000_max value: -14.165452298769171 - type: nauc_precision_at_1000_std value: -2.0952871214470816 - type: nauc_precision_at_100_diff1 value: -22.340257474494607 - type: nauc_precision_at_100_max value: -12.697885641360282 - type: nauc_precision_at_100_std value: 1.0688624940286244 - type: nauc_precision_at_10_diff1 value: -24.78271817420798 - type: nauc_precision_at_10_max value: -12.625257500222656 - type: nauc_precision_at_10_std value: 3.223250450607087 - type: nauc_precision_at_1_diff1 value: 82.6349900233902 - type: nauc_precision_at_1_max value: 21.994548214014227 - type: nauc_precision_at_1_std value: -22.549769792179262 - type: nauc_precision_at_20_diff1 value: -24.375756227194177 - type: nauc_precision_at_20_max 
value: -12.341015011563536 - type: nauc_precision_at_20_std value: 2.7475274619387955 - type: nauc_precision_at_3_diff1 value: -24.8251306777365 - type: nauc_precision_at_3_max value: -13.109579709589042 - type: nauc_precision_at_3_std value: -1.2233442335420748 - type: nauc_precision_at_5_diff1 value: -26.955418583344894 - type: nauc_precision_at_5_max value: -13.598630838071015 - type: nauc_precision_at_5_std value: 2.545780631940738 - type: nauc_recall_at_1000_diff1 value: 0.2542680835344437 - type: nauc_recall_at_1000_max value: 49.38194243035277 - type: nauc_recall_at_1000_std value: 57.021502715846026 - type: nauc_recall_at_100_diff1 value: 5.062154815367015 - type: nauc_recall_at_100_max value: 45.41178380188437 - type: nauc_recall_at_100_std value: 50.78382225901813 - type: nauc_recall_at_10_diff1 value: 20.429153629007818 - type: nauc_recall_at_10_max value: 27.516855026155508 - type: nauc_recall_at_10_std value: 21.367491371755467 - type: nauc_recall_at_1_diff1 value: 59.90813138005125 - type: nauc_recall_at_1_max value: 24.58479295693794 - type: nauc_recall_at_1_std value: -8.056152492777027 - type: nauc_recall_at_20_diff1 value: 13.072430858896942 - type: nauc_recall_at_20_max value: 29.5522659183247 - type: nauc_recall_at_20_std value: 28.70569974090291 - type: nauc_recall_at_3_diff1 value: 30.419084482663617 - type: nauc_recall_at_3_max value: 25.627389580252835 - type: nauc_recall_at_3_std value: 2.5557690877637054 - type: nauc_recall_at_5_diff1 value: 22.92561435069869 - type: nauc_recall_at_5_max value: 25.545265063475455 - type: nauc_recall_at_5_std value: 14.736172663072786 - type: ndcg_at_1 value: 93.999 - type: ndcg_at_10 value: 94.83200000000001 - type: ndcg_at_100 value: 95.363 - type: ndcg_at_1000 value: 95.478 - type: ndcg_at_20 value: 95.077 - type: ndcg_at_3 value: 94.143 - type: ndcg_at_5 value: 94.525 - type: precision_at_1 value: 93.999 - type: precision_at_10 value: 11.029 - type: precision_at_100 value: 1.1560000000000001 - type: precision_at_1000 value: 0.11800000000000001 - type: precision_at_20 value: 5.62 - type: precision_at_3 value: 35.219 - type: precision_at_5 value: 21.584 - type: recall_at_1 value: 87.339 - type: recall_at_10 value: 97.026 - type: recall_at_100 value: 98.936 - type: recall_at_1000 value: 99.599 - type: recall_at_20 value: 97.744 - type: recall_at_3 value: 95.069 - type: recall_at_5 value: 96.177 - task: type: Retrieval dataset: name: MTEB FiQA2018 type: mteb/fiqa config: default split: test revision: 27a168819829fe9bcd655c2df245fb19452e8e06 metrics: - type: main_score value: 60.480000000000004 - type: map_at_1 value: 31.529 - type: map_at_10 value: 52.081 - type: map_at_100 value: 54.342 - type: map_at_1000 value: 54.449000000000005 - type: map_at_20 value: 53.479 - type: map_at_3 value: 45.471000000000004 - type: map_at_5 value: 49.164 - type: mrr_at_1 value: 60.03086419753087 - type: mrr_at_10 value: 67.73754409171075 - type: mrr_at_100 value: 68.332432152368 - type: mrr_at_1000 value: 68.34150941774908 - type: mrr_at_20 value: 68.14780993838725 - type: mrr_at_3 value: 65.6378600823045 - type: mrr_at_5 value: 66.88014403292176 - type: nauc_map_at_1000_diff1 value: 45.36598134579052 - type: nauc_map_at_1000_max value: 31.891451119906943 - type: nauc_map_at_1000_std value: -15.41454384137943 - type: nauc_map_at_100_diff1 value: 45.31268291874018 - type: nauc_map_at_100_max value: 31.811055683002092 - type: nauc_map_at_100_std value: -15.348503855591417 - type: nauc_map_at_10_diff1 value: 45.22606983565892 - type: nauc_map_at_10_max 
value: 30.46108534749699 - type: nauc_map_at_10_std value: -16.618086029682555 - type: nauc_map_at_1_diff1 value: 49.94952823753276 - type: nauc_map_at_1_max value: 13.770377574254548 - type: nauc_map_at_1_std value: -14.946357968858653 - type: nauc_map_at_20_diff1 value: 45.29274207897926 - type: nauc_map_at_20_max value: 31.27332015148257 - type: nauc_map_at_20_std value: -15.782946115613129 - type: nauc_map_at_3_diff1 value: 47.94248233566038 - type: nauc_map_at_3_max value: 24.022838776825456 - type: nauc_map_at_3_std value: -17.103518542262208 - type: nauc_map_at_5_diff1 value: 45.85345590031722 - type: nauc_map_at_5_max value: 27.78341379004547 - type: nauc_map_at_5_std value: -17.490850791756326 - type: nauc_mrr_at_1000_diff1 value: 58.225141047822824 - type: nauc_mrr_at_1000_max value: 43.39606904140525 - type: nauc_mrr_at_1000_std value: -14.64093518199122 - type: nauc_mrr_at_100_diff1 value: 58.22137274179545 - type: nauc_mrr_at_100_max value: 43.39567568136935 - type: nauc_mrr_at_100_std value: -14.62512313985582 - type: nauc_mrr_at_10_diff1 value: 58.03217329957151 - type: nauc_mrr_at_10_max value: 43.633561683075186 - type: nauc_mrr_at_10_std value: -14.563703576023808 - type: nauc_mrr_at_1_diff1 value: 61.48979902647692 - type: nauc_mrr_at_1_max value: 43.1938079066948 - type: nauc_mrr_at_1_std value: -15.808138277440465 - type: nauc_mrr_at_20_diff1 value: 58.13185370150794 - type: nauc_mrr_at_20_max value: 43.35607721183147 - type: nauc_mrr_at_20_std value: -14.635812702971263 - type: nauc_mrr_at_3_diff1 value: 58.698963168321264 - type: nauc_mrr_at_3_max value: 43.633129249785405 - type: nauc_mrr_at_3_std value: -15.733246346983854 - type: nauc_mrr_at_5_diff1 value: 57.94156745229547 - type: nauc_mrr_at_5_max value: 43.14152462640525 - type: nauc_mrr_at_5_std value: -15.318685307750895 - type: nauc_ndcg_at_1000_diff1 value: 47.871896043731496 - type: nauc_ndcg_at_1000_max value: 37.159845167533426 - type: nauc_ndcg_at_1000_std value: -13.067288160833485 - type: nauc_ndcg_at_100_diff1 value: 47.046171407204426 - type: nauc_ndcg_at_100_max value: 36.422514360855835 - type: nauc_ndcg_at_100_std value: -11.636859259571441 - type: nauc_ndcg_at_10_diff1 value: 46.232628149078096 - type: nauc_ndcg_at_10_max value: 34.82402625088358 - type: nauc_ndcg_at_10_std value: -14.768545542980114 - type: nauc_ndcg_at_1_diff1 value: 61.48979902647692 - type: nauc_ndcg_at_1_max value: 43.1938079066948 - type: nauc_ndcg_at_1_std value: -15.808138277440465 - type: nauc_ndcg_at_20_diff1 value: 46.51116172390955 - type: nauc_ndcg_at_20_max value: 35.36362650568298 - type: nauc_ndcg_at_20_std value: -12.849406209182826 - type: nauc_ndcg_at_3_diff1 value: 47.39832263785871 - type: nauc_ndcg_at_3_max value: 35.67466264628456 - type: nauc_ndcg_at_3_std value: -17.257717349296943 - type: nauc_ndcg_at_5_diff1 value: 45.91049493804232 - type: nauc_ndcg_at_5_max value: 33.8405091138445 - type: nauc_ndcg_at_5_std value: -17.477069902735895 - type: nauc_precision_at_1000_diff1 value: -12.037873000917767 - type: nauc_precision_at_1000_max value: 26.043220150002295 - type: nauc_precision_at_1000_std value: 6.84910668321572 - type: nauc_precision_at_100_diff1 value: -9.383403459051864 - type: nauc_precision_at_100_max value: 29.68713170610003 - type: nauc_precision_at_100_std value: 10.079531587056152 - type: nauc_precision_at_10_diff1 value: 3.3433323353925135 - type: nauc_precision_at_10_max value: 38.31790111725993 - type: nauc_precision_at_10_std value: 0.7888123304710856 - type: nauc_precision_at_1_diff1 
value: 61.48979902647692 - type: nauc_precision_at_1_max value: 43.1938079066948 - type: nauc_precision_at_1_std value: -15.808138277440465 - type: nauc_precision_at_20_diff1 value: -2.083500986294448 - type: nauc_precision_at_20_max value: 35.77143835726343 - type: nauc_precision_at_20_std value: 5.318547021874003 - type: nauc_precision_at_3_diff1 value: 23.335617788912586 - type: nauc_precision_at_3_max value: 39.81973275320871 - type: nauc_precision_at_3_std value: -8.442769390555561 - type: nauc_precision_at_5_diff1 value: 11.521087842589482 - type: nauc_precision_at_5_max value: 39.527792539828255 - type: nauc_precision_at_5_std value: -5.412729503701626 - type: nauc_recall_at_1000_diff1 value: 10.6830893047453 - type: nauc_recall_at_1000_max value: 8.834504311238423 - type: nauc_recall_at_1000_std value: 24.670754304859692 - type: nauc_recall_at_100_diff1 value: 20.646020385527358 - type: nauc_recall_at_100_max value: 20.121595011523294 - type: nauc_recall_at_100_std value: 19.42307459311791 - type: nauc_recall_at_10_diff1 value: 33.01029313733417 - type: nauc_recall_at_10_max value: 27.948634980368702 - type: nauc_recall_at_10_std value: -10.239767371462975 - type: nauc_recall_at_1_diff1 value: 49.94952823753276 - type: nauc_recall_at_1_max value: 13.770377574254548 - type: nauc_recall_at_1_std value: -14.946357968858653 - type: nauc_recall_at_20_diff1 value: 30.040111045267963 - type: nauc_recall_at_20_max value: 25.984919302418184 - type: nauc_recall_at_20_std value: -1.4998001817460804 - type: nauc_recall_at_3_diff1 value: 42.24410559113653 - type: nauc_recall_at_3_max value: 20.269503583626914 - type: nauc_recall_at_3_std value: -17.09578532600584 - type: nauc_recall_at_5_diff1 value: 36.124149735848945 - type: nauc_recall_at_5_max value: 22.708022306002622 - type: nauc_recall_at_5_std value: -16.966976847236193 - type: ndcg_at_1 value: 60.031 - type: ndcg_at_10 value: 60.480000000000004 - type: ndcg_at_100 value: 66.94099999999999 - type: ndcg_at_1000 value: 68.303 - type: ndcg_at_20 value: 63.536 - type: ndcg_at_3 value: 55.903999999999996 - type: ndcg_at_5 value: 57.387 - type: precision_at_1 value: 60.031 - type: precision_at_10 value: 16.682 - type: precision_at_100 value: 2.336 - type: precision_at_1000 value: 0.259 - type: precision_at_20 value: 9.66 - type: precision_at_3 value: 37.191 - type: precision_at_5 value: 27.253 - type: recall_at_1 value: 31.529 - type: recall_at_10 value: 68.035 - type: recall_at_100 value: 90.925 - type: recall_at_1000 value: 98.688 - type: recall_at_20 value: 77.453 - type: recall_at_3 value: 50.221000000000004 - type: recall_at_5 value: 58.209999999999994 - task: type: Retrieval dataset: name: MTEB HotpotQA type: mteb/hotpotqa config: default split: test revision: ab518f4d6fcca38d87c25209f94beba119d02014 metrics: - type: main_score value: 76.67399999999999 - type: map_at_1 value: 43.822 - type: map_at_10 value: 68.82000000000001 - type: map_at_100 value: 69.659 - type: map_at_1000 value: 69.714 - type: map_at_20 value: 69.305 - type: map_at_3 value: 65.517 - type: map_at_5 value: 67.633 - type: mrr_at_1 value: 87.643484132343 - type: mrr_at_10 value: 91.28134679485098 - type: mrr_at_100 value: 91.37985230614755 - type: mrr_at_1000 value: 91.38202467630681 - type: mrr_at_20 value: 91.34718855278429 - type: mrr_at_3 value: 90.75849651136599 - type: mrr_at_5 value: 91.10961062345235 - type: nauc_map_at_1000_diff1 value: 3.7670405082837477 - type: nauc_map_at_1000_max value: 14.410594409695182 - type: nauc_map_at_1000_std value: 
7.94738583292685 - type: nauc_map_at_100_diff1 value: 3.738796209193936 - type: nauc_map_at_100_max value: 14.408029101534694 - type: nauc_map_at_100_std value: 7.979641077687816 - type: nauc_map_at_10_diff1 value: 3.334917978089454 - type: nauc_map_at_10_max value: 13.975255289147748 - type: nauc_map_at_10_std value: 7.491959628012161 - type: nauc_map_at_1_diff1 value: 75.35066482050009 - type: nauc_map_at_1_max value: 53.573503488571475 - type: nauc_map_at_1_std value: -6.542030594426993 - type: nauc_map_at_20_diff1 value: 3.5197129341582083 - type: nauc_map_at_20_max value: 14.159880698006816 - type: nauc_map_at_20_std value: 7.856574384998483 - type: nauc_map_at_3_diff1 value: 3.0992333232864064 - type: nauc_map_at_3_max value: 12.513959281222112 - type: nauc_map_at_3_std value: 4.352912866014865 - type: nauc_map_at_5_diff1 value: 3.0351688998572537 - type: nauc_map_at_5_max value: 13.21599457624529 - type: nauc_map_at_5_std value: 6.246882983214777 - type: nauc_mrr_at_1000_diff1 value: 75.23953736361132 - type: nauc_mrr_at_1000_max value: 56.64260717262164 - type: nauc_mrr_at_1000_std value: -4.865932053762276 - type: nauc_mrr_at_100_diff1 value: 75.24091372816497 - type: nauc_mrr_at_100_max value: 56.64831104504846 - type: nauc_mrr_at_100_std value: -4.850966297943324 - type: nauc_mrr_at_10_diff1 value: 75.26540178053416 - type: nauc_mrr_at_10_max value: 56.828755673428965 - type: nauc_mrr_at_10_std value: -4.8401126970944635 - type: nauc_mrr_at_1_diff1 value: 75.35066482050009 - type: nauc_mrr_at_1_max value: 53.573503488571475 - type: nauc_mrr_at_1_std value: -6.542030594426993 - type: nauc_mrr_at_20_diff1 value: 75.24453050729845 - type: nauc_mrr_at_20_max value: 56.69220588401435 - type: nauc_mrr_at_20_std value: -4.843700730832108 - type: nauc_mrr_at_3_diff1 value: 74.98411648336175 - type: nauc_mrr_at_3_max value: 56.766537573537114 - type: nauc_mrr_at_3_std value: -4.909712671649337 - type: nauc_mrr_at_5_diff1 value: 75.20599020991028 - type: nauc_mrr_at_5_max value: 56.64236207782237 - type: nauc_mrr_at_5_std value: -5.208907367513977 - type: nauc_ndcg_at_1000_diff1 value: 11.48307079099774 - type: nauc_ndcg_at_1000_max value: 20.893326881675176 - type: nauc_ndcg_at_1000_std value: 10.43489838692119 - type: nauc_ndcg_at_100_diff1 value: 10.395588735754927 - type: nauc_ndcg_at_100_max value: 20.529573302516912 - type: nauc_ndcg_at_100_std value: 11.252973083654268 - type: nauc_ndcg_at_10_diff1 value: 8.596739352741972 - type: nauc_ndcg_at_10_max value: 18.475863682540673 - type: nauc_ndcg_at_10_std value: 9.175831033463352 - type: nauc_ndcg_at_1_diff1 value: 75.35066482050009 - type: nauc_ndcg_at_1_max value: 53.573503488571475 - type: nauc_ndcg_at_1_std value: -6.542030594426993 - type: nauc_ndcg_at_20_diff1 value: 8.998033972471749 - type: nauc_ndcg_at_20_max value: 18.892085875404522 - type: nauc_ndcg_at_20_std value: 10.3241608901084 - type: nauc_ndcg_at_3_diff1 value: 8.796384949533579 - type: nauc_ndcg_at_3_max value: 16.515261419885274 - type: nauc_ndcg_at_3_std value: 4.081902976576701 - type: nauc_ndcg_at_5_diff1 value: 8.277259464605025 - type: nauc_ndcg_at_5_max value: 17.163053202909527 - type: nauc_ndcg_at_5_std value: 6.652669449704474 - type: nauc_precision_at_1000_diff1 value: -3.490556596304827 - type: nauc_precision_at_1000_max value: 31.0473259001597 - type: nauc_precision_at_1000_std value: 52.36921397692622 - type: nauc_precision_at_100_diff1 value: -6.420747959222489 - type: nauc_precision_at_100_max value: 20.555887056005936 - type: 
nauc_precision_at_100_std value: 36.119132870798495 - type: nauc_precision_at_10_diff1 value: -6.461726057290426 - type: nauc_precision_at_10_max value: 12.161081825341915 - type: nauc_precision_at_10_std value: 17.961318451839993 - type: nauc_precision_at_1_diff1 value: 75.35066482050009 - type: nauc_precision_at_1_max value: 53.573503488571475 - type: nauc_precision_at_1_std value: -6.542030594426993 - type: nauc_precision_at_20_diff1 value: -7.361461296416161 - type: nauc_precision_at_20_max value: 12.663621261696733 - type: nauc_precision_at_20_std value: 23.312476851670286 - type: nauc_precision_at_3_diff1 value: -3.299056912774522 - type: nauc_precision_at_3_max value: 9.85602375812038 - type: nauc_precision_at_3_std value: 6.4962782003155475 - type: nauc_precision_at_5_diff1 value: -5.3155827772027795 - type: nauc_precision_at_5_max value: 10.32907751171833 - type: nauc_precision_at_5_std value: 11.384098087196932 - type: nauc_recall_at_1000_diff1 value: -3.4905565963043332 - type: nauc_recall_at_1000_max value: 31.04732590016041 - type: nauc_recall_at_1000_std value: 52.36921397692641 - type: nauc_recall_at_100_diff1 value: -6.420747959222586 - type: nauc_recall_at_100_max value: 20.55588705600596 - type: nauc_recall_at_100_std value: 36.11913287079825 - type: nauc_recall_at_10_diff1 value: -6.461726057290347 - type: nauc_recall_at_10_max value: 12.161081825342022 - type: nauc_recall_at_10_std value: 17.96131845184002 - type: nauc_recall_at_1_diff1 value: 75.35066482050009 - type: nauc_recall_at_1_max value: 53.573503488571475 - type: nauc_recall_at_1_std value: -6.542030594426993 - type: nauc_recall_at_20_diff1 value: -7.361461296416054 - type: nauc_recall_at_20_max value: 12.66362126169679 - type: nauc_recall_at_20_std value: 23.312476851670382 - type: nauc_recall_at_3_diff1 value: -3.2990569127745886 - type: nauc_recall_at_3_max value: 9.856023758120296 - type: nauc_recall_at_3_std value: 6.496278200315444 - type: nauc_recall_at_5_diff1 value: -5.315582777202729 - type: nauc_recall_at_5_max value: 10.329077511718229 - type: nauc_recall_at_5_std value: 11.384098087196932 - type: ndcg_at_1 value: 87.643 - type: ndcg_at_10 value: 76.67399999999999 - type: ndcg_at_100 value: 79.462 - type: ndcg_at_1000 value: 80.43599999999999 - type: ndcg_at_20 value: 77.83 - type: ndcg_at_3 value: 72.256 - type: ndcg_at_5 value: 74.789 - type: precision_at_1 value: 87.643 - type: precision_at_10 value: 15.726999999999999 - type: precision_at_100 value: 1.791 - type: precision_at_1000 value: 0.192 - type: precision_at_20 value: 8.236 - type: precision_at_3 value: 45.919 - type: precision_at_5 value: 29.558 - type: recall_at_1 value: 43.822 - type: recall_at_10 value: 78.636 - type: recall_at_100 value: 89.527 - type: recall_at_1000 value: 95.868 - type: recall_at_20 value: 82.363 - type: recall_at_3 value: 68.879 - type: recall_at_5 value: 73.896 - task: type: Classification dataset: name: MTEB ImdbClassification type: mteb/imdb config: default split: test revision: 3d86128a09e091d6018b6d26cad27f2739fc2db7 metrics: - type: accuracy value: 96.6608 - type: ap value: 95.14657820401189 - type: ap_weighted value: 95.14657820401189 - type: f1 value: 96.66029695623422 - type: f1_weighted value: 96.66029695623423 - type: main_score value: 96.6608 - task: type: Retrieval dataset: name: MTEB MSMARCO type: mteb/msmarco config: default split: dev revision: c5a29a104738b98a9e76336939199e264163d4a0 metrics: - type: main_score value: 45.217 - type: map_at_1 value: 24.728 - type: map_at_10 value: 37.933 - type: 
map_at_100 value: 39.074999999999996 - type: map_at_1000 value: 39.115 - type: map_at_20 value: 38.663 - type: map_at_3 value: 33.904 - type: map_at_5 value: 36.217 - type: mrr_at_1 value: 25.44412607449857 - type: mrr_at_10 value: 38.52640196479737 - type: mrr_at_100 value: 39.60462889736067 - type: mrr_at_1000 value: 39.638904296248526 - type: mrr_at_20 value: 39.2234365827559 - type: mrr_at_3 value: 34.59646609360076 - type: mrr_at_5 value: 36.8801337153773 - type: nauc_map_at_1000_diff1 value: 37.645652178132174 - type: nauc_map_at_1000_max value: 9.953357023361367 - type: nauc_map_at_1000_std value: -20.800238036721503 - type: nauc_map_at_100_diff1 value: 37.643073495974555 - type: nauc_map_at_100_max value: 9.95921239641703 - type: nauc_map_at_100_std value: -20.76517765535793 - type: nauc_map_at_10_diff1 value: 37.44380763335014 - type: nauc_map_at_10_max value: 9.917273043055342 - type: nauc_map_at_10_std value: -21.467951225710898 - type: nauc_map_at_1_diff1 value: 41.02118887981969 - type: nauc_map_at_1_max value: 8.301113449711778 - type: nauc_map_at_1_std value: -19.436814224415027 - type: nauc_map_at_20_diff1 value: 37.58156586490493 - type: nauc_map_at_20_max value: 9.972927967610659 - type: nauc_map_at_20_std value: -20.951374218839387 - type: nauc_map_at_3_diff1 value: 37.67246795684178 - type: nauc_map_at_3_max value: 9.307031378909478 - type: nauc_map_at_3_std value: -21.77026217965021 - type: nauc_map_at_5_diff1 value: 37.39086482095963 - type: nauc_map_at_5_max value: 9.732739107368566 - type: nauc_map_at_5_std value: -21.8424296893692 - type: nauc_mrr_at_1000_diff1 value: 37.36666719603192 - type: nauc_mrr_at_1000_max value: 9.79040465289953 - type: nauc_mrr_at_1000_std value: -20.590147245965568 - type: nauc_mrr_at_100_diff1 value: 37.36560296629318 - type: nauc_mrr_at_100_max value: 9.798113710672162 - type: nauc_mrr_at_100_std value: -20.556791838504292 - type: nauc_mrr_at_10_diff1 value: 37.19257605840734 - type: nauc_mrr_at_10_max value: 9.749429811638063 - type: nauc_mrr_at_10_std value: -21.206407664327276 - type: nauc_mrr_at_1_diff1 value: 40.98478651095172 - type: nauc_mrr_at_1_max value: 8.173841799119707 - type: nauc_mrr_at_1_std value: -19.530027987868017 - type: nauc_mrr_at_20_diff1 value: 37.29973172861245 - type: nauc_mrr_at_20_max value: 9.815127660001345 - type: nauc_mrr_at_20_std value: -20.700860112175928 - type: nauc_mrr_at_3_diff1 value: 37.282848009425734 - type: nauc_mrr_at_3_max value: 9.172741713108193 - type: nauc_mrr_at_3_std value: -21.563630513502996 - type: nauc_mrr_at_5_diff1 value: 37.08609827303586 - type: nauc_mrr_at_5_max value: 9.604643424273284 - type: nauc_mrr_at_5_std value: -21.580110806494094 - type: nauc_ndcg_at_1000_diff1 value: 37.086587020218545 - type: nauc_ndcg_at_1000_max value: 10.696860688467472 - type: nauc_ndcg_at_1000_std value: -19.50989939916873 - type: nauc_ndcg_at_100_diff1 value: 37.03794531268128 - type: nauc_ndcg_at_100_max value: 10.940820719182339 - type: nauc_ndcg_at_100_std value: -18.28651832370893 - type: nauc_ndcg_at_10_diff1 value: 36.21062857920633 - type: nauc_ndcg_at_10_max value: 10.845172882571733 - type: nauc_ndcg_at_10_std value: -21.454301679510106 - type: nauc_ndcg_at_1_diff1 value: 40.98478651095172 - type: nauc_ndcg_at_1_max value: 8.173841799119707 - type: nauc_ndcg_at_1_std value: -19.530027987868017 - type: nauc_ndcg_at_20_diff1 value: 36.583262733100526 - type: nauc_ndcg_at_20_max value: 11.10492720898974 - type: nauc_ndcg_at_20_std value: -19.41753284137609 - type: nauc_ndcg_at_3_diff1 
value: 36.57271365035382 - type: nauc_ndcg_at_3_max value: 9.56073433062999 - type: nauc_ndcg_at_3_std value: -22.324263670932915 - type: nauc_ndcg_at_5_diff1 value: 36.09419372820154 - type: nauc_ndcg_at_5_max value: 10.357384992631271 - type: nauc_ndcg_at_5_std value: -22.389578276324894 - type: nauc_precision_at_1000_diff1 value: -2.7435338714030597 - type: nauc_precision_at_1000_max value: 4.302274933383809 - type: nauc_precision_at_1000_std value: 8.456846348638948 - type: nauc_precision_at_100_diff1 value: 15.149466332615983 - type: nauc_precision_at_100_max value: 12.501013731673163 - type: nauc_precision_at_100_std value: 15.909667509021785 - type: nauc_precision_at_10_diff1 value: 28.699788688314214 - type: nauc_precision_at_10_max value: 13.024586051842347 - type: nauc_precision_at_10_std value: -19.197658937078703 - type: nauc_precision_at_1_diff1 value: 40.98478651095172 - type: nauc_precision_at_1_max value: 8.173841799119707 - type: nauc_precision_at_1_std value: -19.530027987868017 - type: nauc_precision_at_20_diff1 value: 26.519292942353395 - type: nauc_precision_at_20_max value: 14.389979272056438 - type: nauc_precision_at_20_std value: -7.030956994938155 - type: nauc_precision_at_3_diff1 value: 32.87913492278213 - type: nauc_precision_at_3_max value: 9.673660161387776 - type: nauc_precision_at_3_std value: -23.905612656592172 - type: nauc_precision_at_5_diff1 value: 30.903850113238597 - type: nauc_precision_at_5_max value: 11.482375434154898 - type: nauc_precision_at_5_std value: -23.828657095254247 - type: nauc_recall_at_1000_diff1 value: 35.80765639589219 - type: nauc_recall_at_1000_max value: 50.94532805969448 - type: nauc_recall_at_1000_std value: 66.79910877083275 - type: nauc_recall_at_100_diff1 value: 34.96182828311028 - type: nauc_recall_at_100_max value: 21.729699631790556 - type: nauc_recall_at_100_std value: 23.509439011686474 - type: nauc_recall_at_10_diff1 value: 31.88371369567137 - type: nauc_recall_at_10_max value: 14.425389702697073 - type: nauc_recall_at_10_std value: -20.95578001880924 - type: nauc_recall_at_1_diff1 value: 41.02118887981969 - type: nauc_recall_at_1_max value: 8.301113449711778 - type: nauc_recall_at_1_std value: -19.436814224415027 - type: nauc_recall_at_20_diff1 value: 32.42718780622455 - type: nauc_recall_at_20_max value: 16.90686126329399 - type: nauc_recall_at_20_std value: -9.38158227016737 - type: nauc_recall_at_3_diff1 value: 33.68966646043966 - type: nauc_recall_at_3_max value: 10.336277419708532 - type: nauc_recall_at_3_std value: -23.80165869168538 - type: nauc_recall_at_5_diff1 value: 32.26258807452426 - type: nauc_recall_at_5_max value: 12.303713005399935 - type: nauc_recall_at_5_std value: -23.87721891164968 - type: ndcg_at_1 value: 25.444 - type: ndcg_at_10 value: 45.217 - type: ndcg_at_100 value: 50.575 - type: ndcg_at_1000 value: 51.519999999999996 - type: ndcg_at_20 value: 47.786 - type: ndcg_at_3 value: 37.067 - type: ndcg_at_5 value: 41.184 - type: precision_at_1 value: 25.444 - type: precision_at_10 value: 7.07 - type: precision_at_100 value: 0.9730000000000001 - type: precision_at_1000 value: 0.106 - type: precision_at_20 value: 4.072 - type: precision_at_3 value: 15.754999999999999 - type: precision_at_5 value: 11.544 - type: recall_at_1 value: 24.728 - type: recall_at_10 value: 67.607 - type: recall_at_100 value: 92.094 - type: recall_at_1000 value: 99.165 - type: recall_at_20 value: 77.529 - type: recall_at_3 value: 45.535 - type: recall_at_5 value: 55.394 - task: type: Classification dataset: name: MTEB 
MTOPDomainClassification (en) type: mteb/mtop_domain config: en split: test revision: d80d48c1eb48d3562165c59d59d0034df9fff0bf metrics: - type: accuracy value: 99.01276789785682 - type: f1 value: 98.9288649250924 - type: f1_weighted value: 99.01406884928141 - type: main_score value: 99.01276789785682 - task: type: Classification dataset: name: MTEB MTOPIntentClassification (en) type: mteb/mtop_intent config: en split: test revision: ae001d0e6b1228650b7bd1c2c65fb50ad11a8aba metrics: - type: accuracy value: 92.78385772913816 - type: f1 value: 79.78115704297824 - type: f1_weighted value: 93.90424147486428 - type: main_score value: 92.78385772913816 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (en) type: mteb/amazon_massive_intent config: en split: test revision: 4672e20407010da34463acc759c162ca9734bca6 metrics: - type: accuracy value: 85.83053127101546 - type: f1 value: 82.72036139888232 - type: f1_weighted value: 85.81759723866098 - type: main_score value: 85.83053127101546 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (en) type: mteb/amazon_massive_scenario config: en split: test revision: fad2c6e8459f9e1c45d9315f4953d921437d70f8 metrics: - type: accuracy value: 90.19838601210489 - type: f1 value: 89.55260197964978 - type: f1_weighted value: 90.11422965504119 - type: main_score value: 90.19838601210489 - task: type: Clustering dataset: name: MTEB MedrxivClusteringP2P type: mteb/medrxiv-clustering-p2p config: default split: test revision: e7a26af6f3ae46b30dde8737f02c07b1505bcc73 metrics: - type: main_score value: 46.866746897607094 - type: v_measure value: 46.866746897607094 - type: v_measure_std value: 1.0966477896919726 - task: type: Clustering dataset: name: MTEB MedrxivClusteringS2S type: mteb/medrxiv-clustering-s2s config: default split: test revision: 35191c8c0dca72d8ff3efcd72aa802307d469663 metrics: - type: main_score value: 44.6538827415503 - type: v_measure value: 44.6538827415503 - type: v_measure_std value: 1.1649569936599116 - task: type: Reranking dataset: name: MTEB MindSmallReranking type: mteb/mind_small config: default split: test revision: 59042f120c80e8afa9cdbb224f67076cec0fc9a7 metrics: - type: main_score value: 33.05449204940555 - type: map value: 33.05449204940555 - type: mrr value: 34.32562058439585 - type: nAUC_map_diff1 value: 11.465656013162807 - type: nAUC_map_max value: -20.400088169502308 - type: nAUC_map_std value: -2.638964886362445 - type: nAUC_mrr_diff1 value: 10.644290702481207 - type: nAUC_mrr_max value: -15.304687384645769 - type: nAUC_mrr_std value: -0.519919931348978 - task: type: Retrieval dataset: name: MTEB NFCorpus type: mteb/nfcorpus config: default split: test revision: ec0fa4fe99da2ff19ca1214b7966684033a58814 metrics: - type: main_score value: 41.998000000000005 - type: map_at_1 value: 6.907000000000001 - type: map_at_10 value: 16.397000000000002 - type: map_at_100 value: 21.69 - type: map_at_1000 value: 23.652 - type: map_at_20 value: 18.629 - type: map_at_3 value: 11.969000000000001 - type: map_at_5 value: 13.894 - type: mrr_at_1 value: 53.25077399380805 - type: mrr_at_10 value: 61.8561108653988 - type: mrr_at_100 value: 62.42447851935404 - type: mrr_at_1000 value: 62.459626424428095 - type: mrr_at_20 value: 62.287236389990696 - type: mrr_at_3 value: 60.42311661506711 - type: mrr_at_5 value: 61.36738906088753 - type: nauc_map_at_1000_diff1 value: 17.159461939643844 - type: nauc_map_at_1000_max value: 32.42764938789903 - type: nauc_map_at_1000_std value: 11.039427848422093 - type: 
nauc_map_at_100_diff1 value: 19.089532984187503 - type: nauc_map_at_100_max value: 31.96721085058713 - type: nauc_map_at_100_std value: 6.947468655726444 - type: nauc_map_at_10_diff1 value: 25.77255342629802 - type: nauc_map_at_10_max value: 26.163590320961543 - type: nauc_map_at_10_std value: -5.2588093720998375 - type: nauc_map_at_1_diff1 value: 46.31602607957798 - type: nauc_map_at_1_max value: 11.807757660801942 - type: nauc_map_at_1_std value: -13.984889089354317 - type: nauc_map_at_20_diff1 value: 22.308161130465365 - type: nauc_map_at_20_max value: 29.070587307827722 - type: nauc_map_at_20_std value: -1.0103056620851558 - type: nauc_map_at_3_diff1 value: 33.580827849617506 - type: nauc_map_at_3_max value: 17.661630885799042 - type: nauc_map_at_3_std value: -11.463282544041888 - type: nauc_map_at_5_diff1 value: 30.32603342696912 - type: nauc_map_at_5_max value: 20.938905485667245 - type: nauc_map_at_5_std value: -10.537086968155755 - type: nauc_mrr_at_1000_diff1 value: 24.45065397805829 - type: nauc_mrr_at_1000_max value: 48.17519860927417 - type: nauc_mrr_at_1000_std value: 30.350767549118903 - type: nauc_mrr_at_100_diff1 value: 24.444061606534486 - type: nauc_mrr_at_100_max value: 48.1922894212229 - type: nauc_mrr_at_100_std value: 30.379257816584094 - type: nauc_mrr_at_10_diff1 value: 24.25598717198779 - type: nauc_mrr_at_10_max value: 48.10437607774264 - type: nauc_mrr_at_10_std value: 30.090202482685996 - type: nauc_mrr_at_1_diff1 value: 26.907595285201264 - type: nauc_mrr_at_1_max value: 44.006974050369955 - type: nauc_mrr_at_1_std value: 26.921001962861062 - type: nauc_mrr_at_20_diff1 value: 24.462771570553738 - type: nauc_mrr_at_20_max value: 48.264688196799746 - type: nauc_mrr_at_20_std value: 30.498095141265914 - type: nauc_mrr_at_3_diff1 value: 24.76829388237229 - type: nauc_mrr_at_3_max value: 48.213758704739924 - type: nauc_mrr_at_3_std value: 30.1502853918892 - type: nauc_mrr_at_5_diff1 value: 24.476494932330247 - type: nauc_mrr_at_5_max value: 47.977250552198804 - type: nauc_mrr_at_5_std value: 29.65248143104835 - type: nauc_ndcg_at_1000_diff1 value: 13.055818920426246 - type: nauc_ndcg_at_1000_max value: 46.00986444256306 - type: nauc_ndcg_at_1000_std value: 29.622662054922085 - type: nauc_ndcg_at_100_diff1 value: 12.260551238228816 - type: nauc_ndcg_at_100_max value: 39.89783048267698 - type: nauc_ndcg_at_100_std value: 23.806961617956613 - type: nauc_ndcg_at_10_diff1 value: 11.002915931619567 - type: nauc_ndcg_at_10_max value: 39.79323759244374 - type: nauc_ndcg_at_10_std value: 23.053072152911046 - type: nauc_ndcg_at_1_diff1 value: 27.560910719974434 - type: nauc_ndcg_at_1_max value: 41.21084046258119 - type: nauc_ndcg_at_1_std value: 26.112891742912893 - type: nauc_ndcg_at_20_diff1 value: 10.085854089024496 - type: nauc_ndcg_at_20_max value: 37.88629173784684 - type: nauc_ndcg_at_20_std value: 23.17664322248358 - type: nauc_ndcg_at_3_diff1 value: 16.58969583405987 - type: nauc_ndcg_at_3_max value: 41.282222954101435 - type: nauc_ndcg_at_3_std value: 21.080670648392747 - type: nauc_ndcg_at_5_diff1 value: 13.893127947909885 - type: nauc_ndcg_at_5_max value: 40.21188015992804 - type: nauc_ndcg_at_5_std value: 21.417443978842652 - type: nauc_precision_at_1000_diff1 value: -17.227504530334564 - type: nauc_precision_at_1000_max value: 3.798554468439066 - type: nauc_precision_at_1000_std value: 35.73617809452683 - type: nauc_precision_at_100_diff1 value: -17.63388230218776 - type: nauc_precision_at_100_max value: 15.079399882407094 - type: nauc_precision_at_100_std 
value: 41.83698491321226 - type: nauc_precision_at_10_diff1 value: -11.850925959645156 - type: nauc_precision_at_10_max value: 35.93283968364352 - type: nauc_precision_at_10_std value: 34.391271855921296 - type: nauc_precision_at_1_diff1 value: 27.730860778824823 - type: nauc_precision_at_1_max value: 43.97462471516834 - type: nauc_precision_at_1_std value: 27.491068270978896 - type: nauc_precision_at_20_diff1 value: -14.281328840943347 - type: nauc_precision_at_20_max value: 29.469099781759006 - type: nauc_precision_at_20_std value: 38.54703022340941 - type: nauc_precision_at_3_diff1 value: 3.486986910413196 - type: nauc_precision_at_3_max value: 41.21107780473768 - type: nauc_precision_at_3_std value: 24.057479124531216 - type: nauc_precision_at_5_diff1 value: -3.0623787872866233 - type: nauc_precision_at_5_max value: 37.49266386466702 - type: nauc_precision_at_5_std value: 26.894454268004935 - type: nauc_recall_at_1000_diff1 value: -2.446891864334283 - type: nauc_recall_at_1000_max value: 23.867293584643377 - type: nauc_recall_at_1000_std value: 16.34707128224595 - type: nauc_recall_at_100_diff1 value: 4.891133690841179 - type: nauc_recall_at_100_max value: 24.56727964996522 - type: nauc_recall_at_100_std value: 9.847212953200797 - type: nauc_recall_at_10_diff1 value: 19.211912363585288 - type: nauc_recall_at_10_max value: 24.825344777920737 - type: nauc_recall_at_10_std value: -5.447989195041898 - type: nauc_recall_at_1_diff1 value: 46.31602607957798 - type: nauc_recall_at_1_max value: 11.807757660801942 - type: nauc_recall_at_1_std value: -13.984889089354317 - type: nauc_recall_at_20_diff1 value: 12.233372054304805 - type: nauc_recall_at_20_max value: 22.284108685207148 - type: nauc_recall_at_20_std value: -4.317138366746209 - type: nauc_recall_at_3_diff1 value: 28.394631527225815 - type: nauc_recall_at_3_max value: 15.593864852625462 - type: nauc_recall_at_3_std value: -12.383531804314593 - type: nauc_recall_at_5_diff1 value: 24.457441304950343 - type: nauc_recall_at_5_max value: 19.080049396281623 - type: nauc_recall_at_5_std value: -11.879747703626627 - type: ndcg_at_1 value: 51.548 - type: ndcg_at_10 value: 41.998000000000005 - type: ndcg_at_100 value: 39.626 - type: ndcg_at_1000 value: 48.707 - type: ndcg_at_20 value: 40.181 - type: ndcg_at_3 value: 48.06 - type: ndcg_at_5 value: 45.829 - type: precision_at_1 value: 52.941 - type: precision_at_10 value: 31.330999999999996 - type: precision_at_100 value: 10.421 - type: precision_at_1000 value: 2.428 - type: precision_at_20 value: 24.118000000000002 - type: precision_at_3 value: 45.408 - type: precision_at_5 value: 39.938 - type: recall_at_1 value: 6.907000000000001 - type: recall_at_10 value: 20.51 - type: recall_at_100 value: 40.857 - type: recall_at_1000 value: 73.616 - type: recall_at_20 value: 26.52 - type: recall_at_3 value: 13.267999999999999 - type: recall_at_5 value: 16.141 - task: type: Retrieval dataset: name: MTEB NQ type: mteb/nq config: default split: test revision: b774495ed302d8c44a3a7ea25c90dbce03968f31 metrics: - type: main_score value: 71.8 - type: map_at_1 value: 47.629 - type: map_at_10 value: 64.846 - type: map_at_100 value: 65.40899999999999 - type: map_at_1000 value: 65.416 - type: map_at_20 value: 65.239 - type: map_at_3 value: 61.185 - type: map_at_5 value: 63.583 - type: mrr_at_1 value: 53.15758980301275 - type: mrr_at_10 value: 67.12880961577366 - type: mrr_at_100 value: 67.44006405426018 - type: mrr_at_1000 value: 67.44519150402294 - type: mrr_at_20 value: 67.34317135515428 - type: mrr_at_3 value: 
64.5905755117805 - type: mrr_at_5 value: 66.24613750482806 - type: nauc_map_at_1000_diff1 value: 45.73812106517133 - type: nauc_map_at_1000_max value: 35.21262031755756 - type: nauc_map_at_1000_std value: -5.549443574026027 - type: nauc_map_at_100_diff1 value: 45.74254652176879 - type: nauc_map_at_100_max value: 35.22349167515518 - type: nauc_map_at_100_std value: -5.53697496044773 - type: nauc_map_at_10_diff1 value: 45.62837128377087 - type: nauc_map_at_10_max value: 35.3261562342222 - type: nauc_map_at_10_std value: -5.761924414031163 - type: nauc_map_at_1_diff1 value: 48.69187848570499 - type: nauc_map_at_1_max value: 28.687996096473476 - type: nauc_map_at_1_std value: -7.518605958272523 - type: nauc_map_at_20_diff1 value: 45.702303442220035 - type: nauc_map_at_20_max value: 35.30719944705456 - type: nauc_map_at_20_std value: -5.59505654742681 - type: nauc_map_at_3_diff1 value: 45.376813726832474 - type: nauc_map_at_3_max value: 34.68452149643597 - type: nauc_map_at_3_std value: -7.329014950379634 - type: nauc_map_at_5_diff1 value: 45.29528861989316 - type: nauc_map_at_5_max value: 35.35741440869229 - type: nauc_map_at_5_std value: -6.028788612259288 - type: nauc_mrr_at_1000_diff1 value: 46.11808147912517 - type: nauc_mrr_at_1000_max value: 35.59241850411947 - type: nauc_mrr_at_1000_std value: -3.4072428526109317 - type: nauc_mrr_at_100_diff1 value: 46.121345545514046 - type: nauc_mrr_at_100_max value: 35.60147795073431 - type: nauc_mrr_at_100_std value: -3.3965322447588826 - type: nauc_mrr_at_10_diff1 value: 46.0920068210502 - type: nauc_mrr_at_10_max value: 35.79649987854354 - type: nauc_mrr_at_10_std value: -3.339624589368137 - type: nauc_mrr_at_1_diff1 value: 49.101364605656194 - type: nauc_mrr_at_1_max value: 31.500796071482146 - type: nauc_mrr_at_1_std value: -4.183818500718156 - type: nauc_mrr_at_20_diff1 value: 46.088076630465594 - type: nauc_mrr_at_20_max value: 35.682131663053205 - type: nauc_mrr_at_20_std value: -3.35939023178519 - type: nauc_mrr_at_3_diff1 value: 45.47570812708642 - type: nauc_mrr_at_3_max value: 35.741892517632984 - type: nauc_mrr_at_3_std value: -4.135335963822013 - type: nauc_mrr_at_5_diff1 value: 45.78903474184014 - type: nauc_mrr_at_5_max value: 35.91273593700205 - type: nauc_mrr_at_5_std value: -3.467873421286869 - type: nauc_ndcg_at_1000_diff1 value: 45.5056583000012 - type: nauc_ndcg_at_1000_max value: 36.34328379251593 - type: nauc_ndcg_at_1000_std value: -4.0759698229323345 - type: nauc_ndcg_at_100_diff1 value: 45.61918946477166 - type: nauc_ndcg_at_100_max value: 36.675460335836235 - type: nauc_ndcg_at_100_std value: -3.6795334726235986 - type: nauc_ndcg_at_10_diff1 value: 45.15343994274541 - type: nauc_ndcg_at_10_max value: 37.48139242964657 - type: nauc_ndcg_at_10_std value: -4.287039084554882 - type: nauc_ndcg_at_1_diff1 value: 49.101364605656194 - type: nauc_ndcg_at_1_max value: 31.500796071482146 - type: nauc_ndcg_at_1_std value: -4.183818500718156 - type: nauc_ndcg_at_20_diff1 value: 45.310026313402375 - type: nauc_ndcg_at_20_max value: 37.32177497902133 - type: nauc_ndcg_at_20_std value: -3.8214360391282587 - type: nauc_ndcg_at_3_diff1 value: 44.27064370528994 - type: nauc_ndcg_at_3_max value: 36.380294033571396 - type: nauc_ndcg_at_3_std value: -6.844263370898355 - type: nauc_ndcg_at_5_diff1 value: 44.29933499225583 - type: nauc_ndcg_at_5_max value: 37.46477041822136 - type: nauc_ndcg_at_5_std value: -4.866548530467956 - type: nauc_precision_at_1000_diff1 value: -14.666553359142306 - type: nauc_precision_at_1000_max value: 
-0.5599759853201481 - type: nauc_precision_at_1000_std value: 16.8370925526591 - type: nauc_precision_at_100_diff1 value: -11.816251306246278 - type: nauc_precision_at_100_max value: 2.969819268208207 - type: nauc_precision_at_100_std value: 18.59422946634747 - type: nauc_precision_at_10_diff1 value: 1.2050200086029401 - type: nauc_precision_at_10_max value: 17.59930352911209 - type: nauc_precision_at_10_std value: 13.714495717588985 - type: nauc_precision_at_1_diff1 value: 49.101364605656194 - type: nauc_precision_at_1_max value: 31.500796071482146 - type: nauc_precision_at_1_std value: -4.183818500718156 - type: nauc_precision_at_20_diff1 value: -5.263476664822757 - type: nauc_precision_at_20_max value: 11.42004823600046 - type: nauc_precision_at_20_std value: 16.510514518664994 - type: nauc_precision_at_3_diff1 value: 20.116460379305828 - type: nauc_precision_at_3_max value: 31.32235038301311 - type: nauc_precision_at_3_std value: 2.7486717133871923 - type: nauc_precision_at_5_diff1 value: 9.57451645335723 - type: nauc_precision_at_5_max value: 25.28449126580587 - type: nauc_precision_at_5_std value: 9.955736162466767 - type: nauc_recall_at_1000_diff1 value: -21.632253065978794 - type: nauc_recall_at_1000_max value: 70.14409090958776 - type: nauc_recall_at_1000_std value: 65.61658090892989 - type: nauc_recall_at_100_diff1 value: 51.83161124806711 - type: nauc_recall_at_100_max value: 77.49921361841523 - type: nauc_recall_at_100_std value: 48.352508746719444 - type: nauc_recall_at_10_diff1 value: 39.86695231362791 - type: nauc_recall_at_10_max value: 50.12029094799474 - type: nauc_recall_at_10_std value: 0.1650940628131058 - type: nauc_recall_at_1_diff1 value: 48.69187848570499 - type: nauc_recall_at_1_max value: 28.687996096473476 - type: nauc_recall_at_1_std value: -7.518605958272523 - type: nauc_recall_at_20_diff1 value: 39.14155398061627 - type: nauc_recall_at_20_max value: 56.78559423716229 - type: nauc_recall_at_20_std value: 7.9728224572344075 - type: nauc_recall_at_3_diff1 value: 38.69589523432158 - type: nauc_recall_at_3_max value: 39.53271258375579 - type: nauc_recall_at_3_std value: -8.646925065787512 - type: nauc_recall_at_5_diff1 value: 37.45922652959002 - type: nauc_recall_at_5_max value: 44.4911958995867 - type: nauc_recall_at_5_std value: -3.5659842556375594 - type: ndcg_at_1 value: 53.15800000000001 - type: ndcg_at_10 value: 71.8 - type: ndcg_at_100 value: 73.85199999999999 - type: ndcg_at_1000 value: 74.017 - type: ndcg_at_20 value: 72.933 - type: ndcg_at_3 value: 65.479 - type: ndcg_at_5 value: 69.182 - type: precision_at_1 value: 53.15800000000001 - type: precision_at_10 value: 10.805 - type: precision_at_100 value: 1.2 - type: precision_at_1000 value: 0.122 - type: precision_at_20 value: 5.694 - type: precision_at_3 value: 28.939999999999998 - type: precision_at_5 value: 19.641000000000002 - type: recall_at_1 value: 47.629 - type: recall_at_10 value: 90.204 - type: recall_at_100 value: 98.66 - type: recall_at_1000 value: 99.874 - type: recall_at_20 value: 94.24 - type: recall_at_3 value: 74.394 - type: recall_at_5 value: 82.711 - task: type: Retrieval dataset: name: MTEB QuoraRetrieval type: mteb/quora config: default split: test revision: e4e08e0b7dbe3c8700f0daef558ff32256715259 metrics: - type: main_score value: 90.025 - type: map_at_1 value: 72.222 - type: map_at_10 value: 86.58500000000001 - type: map_at_100 value: 87.176 - type: map_at_1000 value: 87.188 - type: map_at_20 value: 86.97399999999999 - type: map_at_3 value: 83.736 - type: map_at_5 value: 85.554 - 
type: mrr_at_1 value: 83.04 - type: mrr_at_10 value: 89.05599603174585 - type: mrr_at_100 value: 89.12398891419457 - type: mrr_at_1000 value: 89.12434072241001 - type: mrr_at_20 value: 89.10416280692111 - type: mrr_at_3 value: 88.23833333333312 - type: mrr_at_5 value: 88.82233333333308 - type: nauc_map_at_1000_diff1 value: 78.29348113313218 - type: nauc_map_at_1000_max value: 32.31386754277228 - type: nauc_map_at_1000_std value: -50.47543661484052 - type: nauc_map_at_100_diff1 value: 78.29618548618575 - type: nauc_map_at_100_max value: 32.301475680947846 - type: nauc_map_at_100_std value: -50.50303428814228 - type: nauc_map_at_10_diff1 value: 78.47383776440803 - type: nauc_map_at_10_max value: 31.839339990133563 - type: nauc_map_at_10_std value: -52.832713555976 - type: nauc_map_at_1_diff1 value: 82.46330147467418 - type: nauc_map_at_1_max value: 23.497664918373538 - type: nauc_map_at_1_std value: -43.824657665520704 - type: nauc_map_at_20_diff1 value: 78.34772176474422 - type: nauc_map_at_20_max value: 32.16495182893947 - type: nauc_map_at_20_std value: -51.503292726558605 - type: nauc_map_at_3_diff1 value: 79.07823813069432 - type: nauc_map_at_3_max value: 29.395911687513976 - type: nauc_map_at_3_std value: -54.16377546873304 - type: nauc_map_at_5_diff1 value: 78.73076619520454 - type: nauc_map_at_5_max value: 30.700453118585237 - type: nauc_map_at_5_std value: -54.130514177664054 - type: nauc_mrr_at_1000_diff1 value: 79.04736184471865 - type: nauc_mrr_at_1000_max value: 34.43004593837643 - type: nauc_mrr_at_1000_std value: -46.137269068195316 - type: nauc_mrr_at_100_diff1 value: 79.04698704288086 - type: nauc_mrr_at_100_max value: 34.4305553741175 - type: nauc_mrr_at_100_std value: -46.13786687786434 - type: nauc_mrr_at_10_diff1 value: 79.04490677485934 - type: nauc_mrr_at_10_max value: 34.38170181522227 - type: nauc_mrr_at_10_std value: -46.38129875681807 - type: nauc_mrr_at_1_diff1 value: 79.87159215719124 - type: nauc_mrr_at_1_max value: 34.05882339253136 - type: nauc_mrr_at_1_std value: -43.56093395137571 - type: nauc_mrr_at_20_diff1 value: 79.04384174535653 - type: nauc_mrr_at_20_max value: 34.442136494675005 - type: nauc_mrr_at_20_std value: -46.205458519638654 - type: nauc_mrr_at_3_diff1 value: 78.78154519155487 - type: nauc_mrr_at_3_max value: 34.74995000500305 - type: nauc_mrr_at_3_std value: -46.36264203155416 - type: nauc_mrr_at_5_diff1 value: 79.02631187177 - type: nauc_mrr_at_5_max value: 34.538698249632205 - type: nauc_mrr_at_5_std value: -46.468881576157465 - type: nauc_ndcg_at_1000_diff1 value: 78.25260097014645 - type: nauc_ndcg_at_1000_max value: 33.68584498704271 - type: nauc_ndcg_at_1000_std value: -48.44716779494868 - type: nauc_ndcg_at_100_diff1 value: 78.25115412256716 - type: nauc_ndcg_at_100_max value: 33.63652663447088 - type: nauc_ndcg_at_100_std value: -48.489243909024715 - type: nauc_ndcg_at_10_diff1 value: 78.23875101557334 - type: nauc_ndcg_at_10_max value: 32.65217430043823 - type: nauc_ndcg_at_10_std value: -52.57770468845309 - type: nauc_ndcg_at_1_diff1 value: 79.87159215719124 - type: nauc_ndcg_at_1_max value: 34.05882339253136 - type: nauc_ndcg_at_1_std value: -43.56093395137571 - type: nauc_ndcg_at_20_diff1 value: 78.23478552311765 - type: nauc_ndcg_at_20_max value: 33.30691737901109 - type: nauc_ndcg_at_20_std value: -50.78412614854527 - type: nauc_ndcg_at_3_diff1 value: 77.66134485470224 - type: nauc_ndcg_at_3_max value: 32.19504710373125 - type: nauc_ndcg_at_3_std value: -52.01636728550155 - type: nauc_ndcg_at_5_diff1 value: 78.04734137324255 - 
type: nauc_ndcg_at_5_max value: 31.94593625591248 - type: nauc_ndcg_at_5_std value: -53.02169800690546 - type: nauc_precision_at_1000_diff1 value: -45.771948123542636 - type: nauc_precision_at_1000_max value: -5.182406190477681 - type: nauc_precision_at_1000_std value: 41.14460438707817 - type: nauc_precision_at_100_diff1 value: -45.64767154261461 - type: nauc_precision_at_100_max value: -5.046308286851713 - type: nauc_precision_at_100_std value: 41.07186716587844 - type: nauc_precision_at_10_diff1 value: -42.26779562305825 - type: nauc_precision_at_10_max value: -1.1264852893323076 - type: nauc_precision_at_10_std value: 27.62275729822392 - type: nauc_precision_at_1_diff1 value: 79.87159215719124 - type: nauc_precision_at_1_max value: 34.05882339253136 - type: nauc_precision_at_1_std value: -43.56093395137571 - type: nauc_precision_at_20_diff1 value: -44.24293221128388 - type: nauc_precision_at_20_max value: -3.1345628837361867 - type: nauc_precision_at_20_std value: 34.23625492740366 - type: nauc_precision_at_3_diff1 value: -24.925251389823348 - type: nauc_precision_at_3_max value: 6.622188833369412 - type: nauc_precision_at_3_std value: 6.424741786858512 - type: nauc_precision_at_5_diff1 value: -36.1407949990387 - type: nauc_precision_at_5_max value: 1.7533948968374462 - type: nauc_precision_at_5_std value: 17.914083278982634 - type: nauc_recall_at_1000_diff1 value: 52.26815466244496 - type: nauc_recall_at_1000_max value: 69.73611104239443 - type: nauc_recall_at_1000_std value: 73.18969965863008 - type: nauc_recall_at_100_diff1 value: 70.80557513785271 - type: nauc_recall_at_100_max value: 33.333440086544556 - type: nauc_recall_at_100_std value: -38.75992366905504 - type: nauc_recall_at_10_diff1 value: 74.45948457438163 - type: nauc_recall_at_10_max value: 26.64948512428989 - type: nauc_recall_at_10_std value: -82.90334292052363 - type: nauc_recall_at_1_diff1 value: 82.46330147467418 - type: nauc_recall_at_1_max value: 23.497664918373538 - type: nauc_recall_at_1_std value: -43.824657665520704 - type: nauc_recall_at_20_diff1 value: 73.80140280887753 - type: nauc_recall_at_20_max value: 30.361616426734965 - type: nauc_recall_at_20_std value: -81.1418804447414 - type: nauc_recall_at_3_diff1 value: 75.19854736087834 - type: nauc_recall_at_3_max value: 26.12298005045584 - type: nauc_recall_at_3_std value: -63.42583714745169 - type: nauc_recall_at_5_diff1 value: 74.16423451950358 - type: nauc_recall_at_5_max value: 25.552390331018987 - type: nauc_recall_at_5_std value: -71.15891947773912 - type: ndcg_at_1 value: 83.04 - type: ndcg_at_10 value: 90.025 - type: ndcg_at_100 value: 91.006 - type: ndcg_at_1000 value: 91.061 - type: ndcg_at_20 value: 90.556 - type: ndcg_at_3 value: 87.493 - type: ndcg_at_5 value: 88.955 - type: precision_at_1 value: 83.04 - type: precision_at_10 value: 13.667000000000002 - type: precision_at_100 value: 1.542 - type: precision_at_1000 value: 0.157 - type: precision_at_20 value: 7.221 - type: precision_at_3 value: 38.433 - type: precision_at_5 value: 25.228 - type: recall_at_1 value: 72.222 - type: recall_at_10 value: 96.604 - type: recall_at_100 value: 99.786 - type: recall_at_1000 value: 99.996 - type: recall_at_20 value: 98.253 - type: recall_at_3 value: 89.276 - type: recall_at_5 value: 93.46 - task: type: Clustering dataset: name: MTEB RedditClustering type: mteb/reddit-clustering config: default split: test revision: 24640382cdbf8abc73003fb0fa6d111a705499eb metrics: - type: main_score value: 72.86492101891123 - type: v_measure value: 72.86492101891123 - type: 
v_measure_std value: 2.778711445144635 - task: type: Clustering dataset: name: MTEB RedditClusteringP2P type: mteb/reddit-clustering-p2p config: default split: test revision: 385e3cb46b4cfa89021f56c4380204149d0efe33 metrics: - type: main_score value: 75.27316726548479 - type: v_measure value: 75.27316726548479 - type: v_measure_std value: 8.87871936725338 - task: type: Retrieval dataset: name: MTEB SCIDOCS type: mteb/scidocs config: default split: test revision: f8c2fcf00f625baaa80f62ec5bd9e1fff3b8ae88 metrics: - type: main_score value: 26.638 - type: map_at_1 value: 6.128 - type: map_at_10 value: 16.472 - type: map_at_100 value: 19.522000000000002 - type: map_at_1000 value: 19.898 - type: map_at_20 value: 18.098 - type: map_at_3 value: 11.283 - type: map_at_5 value: 13.771 - type: mrr_at_1 value: 30.2 - type: mrr_at_10 value: 42.621150793650735 - type: mrr_at_100 value: 43.740858712021954 - type: mrr_at_1000 value: 43.762699500220904 - type: mrr_at_20 value: 43.383639927753634 - type: mrr_at_3 value: 38.83333333333331 - type: mrr_at_5 value: 41.14833333333326 - type: nauc_map_at_1000_diff1 value: 13.13534664124808 - type: nauc_map_at_1000_max value: 29.346654566149795 - type: nauc_map_at_1000_std value: 18.08121186982413 - type: nauc_map_at_100_diff1 value: 13.098072728041538 - type: nauc_map_at_100_max value: 29.299084480697523 - type: nauc_map_at_100_std value: 17.961620202918464 - type: nauc_map_at_10_diff1 value: 14.001743720394682 - type: nauc_map_at_10_max value: 28.04128290996403 - type: nauc_map_at_10_std value: 13.744481555974716 - type: nauc_map_at_1_diff1 value: 22.1926640424872 - type: nauc_map_at_1_max value: 21.32609279586034 - type: nauc_map_at_1_std value: 6.566596302915438 - type: nauc_map_at_20_diff1 value: 13.57313142419664 - type: nauc_map_at_20_max value: 28.93840146319476 - type: nauc_map_at_20_std value: 16.50869367365676 - type: nauc_map_at_3_diff1 value: 17.707700541948462 - type: nauc_map_at_3_max value: 26.058174051376238 - type: nauc_map_at_3_std value: 9.943924560735267 - type: nauc_map_at_5_diff1 value: 17.11844492157723 - type: nauc_map_at_5_max value: 27.865247403049388 - type: nauc_map_at_5_std value: 11.372588172121546 - type: nauc_mrr_at_1000_diff1 value: 21.11248719936198 - type: nauc_mrr_at_1000_max value: 26.734172102201466 - type: nauc_mrr_at_1000_std value: 11.766121765437228 - type: nauc_mrr_at_100_diff1 value: 21.107109982277702 - type: nauc_mrr_at_100_max value: 26.741616065723267 - type: nauc_mrr_at_100_std value: 11.789802686224208 - type: nauc_mrr_at_10_diff1 value: 20.74108639793207 - type: nauc_mrr_at_10_max value: 26.920838463358333 - type: nauc_mrr_at_10_std value: 11.849217361926522 - type: nauc_mrr_at_1_diff1 value: 22.177437860573356 - type: nauc_mrr_at_1_max value: 21.88074521417754 - type: nauc_mrr_at_1_std value: 6.776011900101789 - type: nauc_mrr_at_20_diff1 value: 21.126633710175994 - type: nauc_mrr_at_20_max value: 26.860736480370974 - type: nauc_mrr_at_20_std value: 11.815411633726338 - type: nauc_mrr_at_3_diff1 value: 21.689245200066466 - type: nauc_mrr_at_3_max value: 26.187305092831625 - type: nauc_mrr_at_3_std value: 10.895380313134332 - type: nauc_mrr_at_5_diff1 value: 20.898811082479778 - type: nauc_mrr_at_5_max value: 26.939217247104036 - type: nauc_mrr_at_5_std value: 11.77832949822472 - type: nauc_ndcg_at_1000_diff1 value: 13.251184947898546 - type: nauc_ndcg_at_1000_max value: 30.879594164526146 - type: nauc_ndcg_at_1000_std value: 23.125206047366625 - type: nauc_ndcg_at_100_diff1 value: 12.549100649053676 - type: 
nauc_ndcg_at_100_max value: 30.634680845419123 - type: nauc_ndcg_at_100_std value: 23.296226055422984 - type: nauc_ndcg_at_10_diff1 value: 14.475144549294322 - type: nauc_ndcg_at_10_max value: 29.450349815417336 - type: nauc_ndcg_at_10_std value: 15.94068314781612 - type: nauc_ndcg_at_1_diff1 value: 22.177437860573356 - type: nauc_ndcg_at_1_max value: 21.88074521417754 - type: nauc_ndcg_at_1_std value: 6.776011900101789 - type: nauc_ndcg_at_20_diff1 value: 14.173669585802266 - type: nauc_ndcg_at_20_max value: 30.475890854725 - type: nauc_ndcg_at_20_std value: 19.863898148221704 - type: nauc_ndcg_at_3_diff1 value: 18.93971261196868 - type: nauc_ndcg_at_3_max value: 27.3707298720736 - type: nauc_ndcg_at_3_std value: 11.439810510051224 - type: nauc_ndcg_at_5_diff1 value: 17.89535958094687 - type: nauc_ndcg_at_5_max value: 29.272740466638425 - type: nauc_ndcg_at_5_std value: 13.402467626635909 - type: nauc_precision_at_1000_diff1 value: -3.811547048784123 - type: nauc_precision_at_1000_max value: 22.55165337197117 - type: nauc_precision_at_1000_std value: 35.98524999650108 - type: nauc_precision_at_100_diff1 value: 0.6474234774922896 - type: nauc_precision_at_100_max value: 25.06920726527032 - type: nauc_precision_at_100_std value: 32.31439698982313 - type: nauc_precision_at_10_diff1 value: 7.943127218139508 - type: nauc_precision_at_10_max value: 28.571937636787197 - type: nauc_precision_at_10_std value: 18.8472620918488 - type: nauc_precision_at_1_diff1 value: 22.177437860573356 - type: nauc_precision_at_1_max value: 21.88074521417754 - type: nauc_precision_at_1_std value: 6.776011900101789 - type: nauc_precision_at_20_diff1 value: 6.981574259607366 - type: nauc_precision_at_20_max value: 28.986094397038727 - type: nauc_precision_at_20_std value: 25.83129974001146 - type: nauc_precision_at_3_diff1 value: 17.197490724039355 - type: nauc_precision_at_3_max value: 29.17569320583099 - type: nauc_precision_at_3_std value: 13.430554945991846 - type: nauc_precision_at_5_diff1 value: 14.952364330739362 - type: nauc_precision_at_5_max value: 31.053243354846977 - type: nauc_precision_at_5_std value: 15.856312752807822 - type: nauc_recall_at_1000_diff1 value: -4.8224253128926975 - type: nauc_recall_at_1000_max value: 21.3989024429911 - type: nauc_recall_at_1000_std value: 39.152234275603604 - type: nauc_recall_at_100_diff1 value: 0.11936808422867201 - type: nauc_recall_at_100_max value: 24.261739241957823 - type: nauc_recall_at_100_std value: 32.62984573938928 - type: nauc_recall_at_10_diff1 value: 7.851256165018388 - type: nauc_recall_at_10_max value: 27.936406600938746 - type: nauc_recall_at_10_std value: 18.683634320636113 - type: nauc_recall_at_1_diff1 value: 22.1926640424872 - type: nauc_recall_at_1_max value: 21.32609279586034 - type: nauc_recall_at_1_std value: 6.566596302915438 - type: nauc_recall_at_20_diff1 value: 6.8107211705182165 - type: nauc_recall_at_20_max value: 28.286284094687787 - type: nauc_recall_at_20_std value: 25.932013268120862 - type: nauc_recall_at_3_diff1 value: 17.04156818427151 - type: nauc_recall_at_3_max value: 28.645439108719216 - type: nauc_recall_at_3_std value: 13.346047828494411 - type: nauc_recall_at_5_diff1 value: 14.906284329771822 - type: nauc_recall_at_5_max value: 30.58628602415921 - type: nauc_recall_at_5_std value: 15.755157478191755 - type: ndcg_at_1 value: 30.2 - type: ndcg_at_10 value: 26.638 - type: ndcg_at_100 value: 37.135 - type: ndcg_at_1000 value: 42.576 - type: ndcg_at_20 value: 30.75 - type: ndcg_at_3 value: 24.675 - type: ndcg_at_5 value: 21.836 
- type: precision_at_1 value: 30.2 - type: precision_at_10 value: 14.06 - type: precision_at_100 value: 2.904 - type: precision_at_1000 value: 0.42 - type: precision_at_20 value: 9.4 - type: precision_at_3 value: 23.233 - type: precision_at_5 value: 19.439999999999998 - type: recall_at_1 value: 6.128 - type: recall_at_10 value: 28.471999999999998 - type: recall_at_100 value: 58.952000000000005 - type: recall_at_1000 value: 85.137 - type: recall_at_20 value: 38.17 - type: recall_at_3 value: 14.127999999999998 - type: recall_at_5 value: 19.673 - task: type: STS dataset: name: MTEB SICK-R type: mteb/sickr-sts config: default split: test revision: 20a6d6f312dd54037fe07a32d58e5e168867909d metrics: - type: cosine_pearson value: 86.86608529160739 - type: cosine_spearman value: 82.88625166203383 - type: euclidean_pearson value: 84.15494418856142 - type: euclidean_spearman value: 82.88449294676421 - type: main_score value: 82.88625166203383 - type: manhattan_pearson value: 84.39068623474428 - type: manhattan_spearman value: 82.88065412169463 - type: pearson value: 86.86608529160739 - type: spearman value: 82.88625166203383 - task: type: STS dataset: name: MTEB STS12 type: mteb/sts12-sts config: default split: test revision: a0d554a64d88156834ff5ae9920b964011b16384 metrics: - type: cosine_pearson value: 87.0445014940449 - type: cosine_spearman value: 80.0880365116599 - type: euclidean_pearson value: 83.80250772928852 - type: euclidean_spearman value: 80.0892465260778 - type: main_score value: 80.0880365116599 - type: manhattan_pearson value: 83.96793981929336 - type: manhattan_spearman value: 80.24881789268238 - type: pearson value: 87.0445014940449 - type: spearman value: 80.0880365116599 - task: type: STS dataset: name: MTEB STS13 type: mteb/sts13-sts config: default split: test revision: 7e90230a92c190f1bf69ae9002b8cea547a64cca metrics: - type: cosine_pearson value: 89.33900828959968 - type: cosine_spearman value: 89.68256358526733 - type: euclidean_pearson value: 89.29188708262265 - type: euclidean_spearman value: 89.68204344658601 - type: main_score value: 89.68256358526733 - type: manhattan_pearson value: 89.13996588193149 - type: manhattan_spearman value: 89.61372804425623 - type: pearson value: 89.33900828959968 - type: spearman value: 89.68256358526733 - task: type: STS dataset: name: MTEB STS14 type: mteb/sts14-sts config: default split: test revision: 6031580fec1f6af667f0bd2da0a551cf4f0b2375 metrics: - type: cosine_pearson value: 86.42029843639123 - type: cosine_spearman value: 85.0707889220723 - type: euclidean_pearson value: 85.75114239552562 - type: euclidean_spearman value: 85.06858160270725 - type: main_score value: 85.0707889220723 - type: manhattan_pearson value: 85.86461900459038 - type: manhattan_spearman value: 85.28671103475605 - type: pearson value: 86.42029843639123 - type: spearman value: 85.0707889220723 - task: type: STS dataset: name: MTEB STS15 type: mteb/sts15-sts config: default split: test revision: ae752c7c21bf194d8b67fd573edf7ae58183cbe3 metrics: - type: cosine_pearson value: 88.3660081271444 - type: cosine_spearman value: 89.39375083609528 - type: euclidean_pearson value: 89.21818482894895 - type: euclidean_spearman value: 89.39361588875443 - type: main_score value: 89.39375083609528 - type: manhattan_pearson value: 89.53535068014057 - type: manhattan_spearman value: 89.81077130567752 - type: pearson value: 88.3660081271444 - type: spearman value: 89.39375083609528 - task: type: STS dataset: name: MTEB STS16 type: mteb/sts16-sts config: default split: test 
revision: 4d8694f8f0e0100860b497b999b3dbed754a0513 metrics: - type: cosine_pearson value: 85.60708247171874 - type: cosine_spearman value: 87.15234952832193 - type: euclidean_pearson value: 86.21743555548137 - type: euclidean_spearman value: 87.14450217418016 - type: main_score value: 87.15234952832193 - type: manhattan_pearson value: 86.2467748746084 - type: manhattan_spearman value: 87.2197479717654 - type: pearson value: 85.60708247171874 - type: spearman value: 87.15234952832193 - task: type: STS dataset: name: MTEB STS17 (en-en) type: mteb/sts17-crosslingual-sts config: en-en split: test revision: faeb762787bd10488a50c8b5be4a3b82e411949c metrics: - type: cosine_pearson value: 91.25898556808458 - type: cosine_spearman value: 91.35372390581641 - type: euclidean_pearson value: 91.319520321348 - type: euclidean_spearman value: 91.30821135416925 - type: main_score value: 91.35372390581641 - type: manhattan_pearson value: 91.14800959939069 - type: manhattan_spearman value: 91.09775424245629 - type: pearson value: 91.25898556808458 - type: spearman value: 91.35372390581641 - task: type: STS dataset: name: MTEB STS22 (en) type: mteb/sts22-crosslingual-sts config: en split: test revision: de9d86b3b84231dc21f76c7b7af1f28e2f57f6e3 metrics: - type: cosine_pearson value: 67.61637111515797 - type: cosine_spearman value: 68.10379096526697 - type: euclidean_pearson value: 69.2652309491375 - type: euclidean_spearman value: 68.18436357033228 - type: main_score value: 68.10379096526697 - type: manhattan_pearson value: 69.52531340510775 - type: manhattan_spearman value: 68.17874790391862 - type: pearson value: 67.61637111515797 - type: spearman value: 68.10379096526697 - task: type: STS dataset: name: MTEB STSBenchmark type: mteb/stsbenchmark-sts config: default split: test revision: b0fddb56ed78048fa8b90373c8a3cfc37b684831 metrics: - type: cosine_pearson value: 87.81592853782297 - type: cosine_spearman value: 88.2302550329183 - type: euclidean_pearson value: 88.01165144519526 - type: euclidean_spearman value: 88.23342148890097 - type: main_score value: 88.2302550329183 - type: manhattan_pearson value: 88.148592564938 - type: manhattan_spearman value: 88.49226317320988 - type: pearson value: 87.81592853782297 - type: spearman value: 88.2302550329183 - task: type: Reranking dataset: name: MTEB SciDocsRR type: mteb/scidocs-reranking config: default split: test revision: d3c5e1fc0b855ab6097bf1cda04dd73947d7caab metrics: - type: main_score value: 89.196009707431 - type: map value: 89.196009707431 - type: mrr value: 97.07198121413808 - type: nAUC_map_diff1 value: -14.066667940115352 - type: nAUC_map_max value: 49.73702475027407 - type: nAUC_map_std value: 64.0986775782592 - type: nAUC_mrr_diff1 value: 21.96846389417319 - type: nAUC_mrr_max value: 86.38341077184032 - type: nAUC_mrr_std value: 75.38945014727746 - task: type: Retrieval dataset: name: MTEB SciFact type: mteb/scifact config: default split: test revision: 0228b52cf27578f30900b9e5271d331663a030d7 metrics: - type: main_score value: 80.08999999999999 - type: map_at_1 value: 63.161 - type: map_at_10 value: 75.163 - type: map_at_100 value: 75.408 - type: map_at_1000 value: 75.409 - type: map_at_20 value: 75.332 - type: map_at_3 value: 71.839 - type: map_at_5 value: 74.32600000000001 - type: mrr_at_1 value: 66.33333333333333 - type: mrr_at_10 value: 75.95978835978836 - type: mrr_at_100 value: 76.15647881281473 - type: mrr_at_1000 value: 76.15736533763744 - type: mrr_at_20 value: 76.08557368557368 - type: mrr_at_3 value: 73.55555555555556 - type: mrr_at_5 
value: 75.4888888888889 - type: nauc_map_at_1000_diff1 value: 77.31229383811176 - type: nauc_map_at_1000_max value: 58.848319058605156 - type: nauc_map_at_1000_std value: -14.290090263454985 - type: nauc_map_at_100_diff1 value: 77.31325400213969 - type: nauc_map_at_100_max value: 58.848885054155275 - type: nauc_map_at_100_std value: -14.285806618869273 - type: nauc_map_at_10_diff1 value: 77.1806705504232 - type: nauc_map_at_10_max value: 59.02905805134415 - type: nauc_map_at_10_std value: -14.132954900037467 - type: nauc_map_at_1_diff1 value: 81.03932970557837 - type: nauc_map_at_1_max value: 49.02073230264529 - type: nauc_map_at_1_std value: -22.977452975845512 - type: nauc_map_at_20_diff1 value: 77.22581364818562 - type: nauc_map_at_20_max value: 58.90740400399768 - type: nauc_map_at_20_std value: -14.245079150986745 - type: nauc_map_at_3_diff1 value: 76.99793243255563 - type: nauc_map_at_3_max value: 54.9930733886623 - type: nauc_map_at_3_std value: -19.297708446082407 - type: nauc_map_at_5_diff1 value: 77.1671608360295 - type: nauc_map_at_5_max value: 57.27757489519526 - type: nauc_map_at_5_std value: -15.446338357667708 - type: nauc_mrr_at_1000_diff1 value: 77.4806080821202 - type: nauc_mrr_at_1000_max value: 60.9213776129792 - type: nauc_mrr_at_1000_std value: -12.139599632228343 - type: nauc_mrr_at_100_diff1 value: 77.48158073865281 - type: nauc_mrr_at_100_max value: 60.9218657185361 - type: nauc_mrr_at_100_std value: -12.13532070453677 - type: nauc_mrr_at_10_diff1 value: 77.32428546014407 - type: nauc_mrr_at_10_max value: 61.018407010343466 - type: nauc_mrr_at_10_std value: -12.143193773309347 - type: nauc_mrr_at_1_diff1 value: 80.99806778887115 - type: nauc_mrr_at_1_max value: 59.17855969530095 - type: nauc_mrr_at_1_std value: -12.30545640831458 - type: nauc_mrr_at_20_diff1 value: 77.3811067653992 - type: nauc_mrr_at_20_max value: 60.9648880366335 - type: nauc_mrr_at_20_std value: -12.124066076541853 - type: nauc_mrr_at_3_diff1 value: 77.31304316321959 - type: nauc_mrr_at_3_max value: 60.75536766404163 - type: nauc_mrr_at_3_std value: -12.997876030849623 - type: nauc_mrr_at_5_diff1 value: 77.12952864141742 - type: nauc_mrr_at_5_max value: 60.995943754968685 - type: nauc_mrr_at_5_std value: -11.353447465605694 - type: nauc_ndcg_at_1000_diff1 value: 76.81788665683746 - type: nauc_ndcg_at_1000_max value: 60.35947755262391 - type: nauc_ndcg_at_1000_std value: -12.884942372460362 - type: nauc_ndcg_at_100_diff1 value: 76.87388230365198 - type: nauc_ndcg_at_100_max value: 60.38813162962434 - type: nauc_ndcg_at_100_std value: -12.64384717800478 - type: nauc_ndcg_at_10_diff1 value: 75.87713506026317 - type: nauc_ndcg_at_10_max value: 61.39356554675667 - type: nauc_ndcg_at_10_std value: -12.144227584144218 - type: nauc_ndcg_at_1_diff1 value: 80.99806778887115 - type: nauc_ndcg_at_1_max value: 59.17855969530095 - type: nauc_ndcg_at_1_std value: -12.30545640831458 - type: nauc_ndcg_at_20_diff1 value: 76.09913944506627 - type: nauc_ndcg_at_20_max value: 61.01644448834147 - type: nauc_ndcg_at_20_std value: -12.456209267623857 - type: nauc_ndcg_at_3_diff1 value: 75.52717946614608 - type: nauc_ndcg_at_3_max value: 58.96433090721983 - type: nauc_ndcg_at_3_std value: -15.849280494339556 - type: nauc_ndcg_at_5_diff1 value: 75.69026981016921 - type: nauc_ndcg_at_5_max value: 58.924044405851326 - type: nauc_ndcg_at_5_std value: -13.182728827923107 - type: nauc_precision_at_1000_diff1 value: -31.634022001609914 - type: nauc_precision_at_1000_max value: 31.46271490784504 - type: 
nauc_precision_at_1000_std value: 60.44801276891442 - type: nauc_precision_at_100_diff1 value: -29.722363469948103 - type: nauc_precision_at_100_max value: 32.05464592020074 - type: nauc_precision_at_100_std value: 60.832570595613554 - type: nauc_precision_at_10_diff1 value: -11.91731376599939 - type: nauc_precision_at_10_max value: 45.43646553157129 - type: nauc_precision_at_10_std value: 52.962408871791276 - type: nauc_precision_at_1_diff1 value: 80.99806778887115 - type: nauc_precision_at_1_max value: 59.17855969530095 - type: nauc_precision_at_1_std value: -12.30545640831458 - type: nauc_precision_at_20_diff1 value: -18.43293701721667 - type: nauc_precision_at_20_max value: 39.53434874203934 - type: nauc_precision_at_20_std value: 53.6291982468461 - type: nauc_precision_at_3_diff1 value: 30.84789043003892 - type: nauc_precision_at_3_max value: 55.660727758110376 - type: nauc_precision_at_3_std value: 17.87243920840355 - type: nauc_precision_at_5_diff1 value: 4.099395181445625 - type: nauc_precision_at_5_max value: 50.346770968709386 - type: nauc_precision_at_5_std value: 44.66722483255029 - type: nauc_recall_at_1000_diff1 value: .nan - type: nauc_recall_at_1000_max value: .nan - type: nauc_recall_at_1000_std value: .nan - type: nauc_recall_at_100_diff1 value: 100.0 - type: nauc_recall_at_100_max value: 72.2222222222207 - type: nauc_recall_at_100_std value: 86.92810457516407 - type: nauc_recall_at_10_diff1 value: 62.18887555022005 - type: nauc_recall_at_10_max value: 75.14339068960916 - type: nauc_recall_at_10_std value: -1.4912631719357108 - type: nauc_recall_at_1_diff1 value: 81.03932970557837 - type: nauc_recall_at_1_max value: 49.02073230264529 - type: nauc_recall_at_1_std value: -22.977452975845512 - type: nauc_recall_at_20_diff1 value: 59.27414444038499 - type: nauc_recall_at_20_max value: 76.32241302318047 - type: nauc_recall_at_20_std value: -0.8322169447488666 - type: nauc_recall_at_3_diff1 value: 69.58783002593157 - type: nauc_recall_at_3_max value: 55.89660919896563 - type: nauc_recall_at_3_std value: -21.183005510917862 - type: nauc_recall_at_5_diff1 value: 65.53660499878802 - type: nauc_recall_at_5_max value: 58.218018535135805 - type: nauc_recall_at_5_std value: -8.328952210032455 - type: ndcg_at_1 value: 66.333 - type: ndcg_at_10 value: 80.08999999999999 - type: ndcg_at_100 value: 81.24900000000001 - type: ndcg_at_1000 value: 81.28800000000001 - type: ndcg_at_20 value: 80.625 - type: ndcg_at_3 value: 74.98700000000001 - type: ndcg_at_5 value: 78.553 - type: precision_at_1 value: 66.333 - type: precision_at_10 value: 10.667 - type: precision_at_100 value: 1.127 - type: precision_at_1000 value: 0.11299999999999999 - type: precision_at_20 value: 5.45 - type: precision_at_3 value: 29.555999999999997 - type: precision_at_5 value: 20.133000000000003 - type: recall_at_1 value: 63.161 - type: recall_at_10 value: 94.167 - type: recall_at_100 value: 99.667 - type: recall_at_1000 value: 100.0 - type: recall_at_20 value: 96.167 - type: recall_at_3 value: 80.972 - type: recall_at_5 value: 89.90599999999999 - task: type: PairClassification dataset: name: MTEB SprintDuplicateQuestions type: mteb/sprintduplicatequestions-pairclassification config: default split: test revision: d66bd1f72af766a5cc4b0ca5e00c162f89e8cc46 metrics: - type: cosine_accuracy value: 99.81881188118813 - type: cosine_accuracy_threshold value: 85.55081486701965 - type: cosine_ap value: 96.0359661816236 - type: cosine_f1 value: 90.6584992343032 - type: cosine_f1_threshold value: 84.82859134674072 - type: 
cosine_precision value: 92.59645464025026 - type: cosine_recall value: 88.8 - type: dot_accuracy value: 99.81881188118813 - type: dot_accuracy_threshold value: 84.91908311843872 - type: dot_ap value: 96.05740121094365 - type: dot_f1 value: 90.81885856079404 - type: dot_f1_threshold value: 83.84919166564941 - type: dot_precision value: 90.14778325123153 - type: dot_recall value: 91.5 - type: euclidean_accuracy value: 99.82079207920792 - type: euclidean_accuracy_threshold value: 54.49706315994263 - type: euclidean_ap value: 96.03223527068818 - type: euclidean_f1 value: 90.72270630445925 - type: euclidean_f1_threshold value: 54.49706315994263 - type: euclidean_precision value: 93.05993690851734 - type: euclidean_recall value: 88.5 - type: main_score value: 96.32671902439806 - type: manhattan_accuracy value: 99.83267326732673 - type: manhattan_accuracy_threshold value: 3818.192672729492 - type: manhattan_ap value: 96.32671902439806 - type: manhattan_f1 value: 91.52032112393378 - type: manhattan_f1_threshold value: 3818.192672729492 - type: manhattan_precision value: 91.8429003021148 - type: manhattan_recall value: 91.2 - type: max_ap value: 96.32671902439806 - type: max_f1 value: 91.52032112393378 - type: max_precision value: 93.05993690851734 - type: max_recall value: 91.5 - type: similarity_accuracy value: 99.81881188118813 - type: similarity_accuracy_threshold value: 85.55081486701965 - type: similarity_ap value: 96.0359661816236 - type: similarity_f1 value: 90.6584992343032 - type: similarity_f1_threshold value: 84.82859134674072 - type: similarity_precision value: 92.59645464025026 - type: similarity_recall value: 88.8 - task: type: Clustering dataset: name: MTEB StackExchangeClustering type: mteb/stackexchange-clustering config: default split: test revision: 6cbc1f7b2bc0622f2e39d2c77fa502909748c259 metrics: - type: main_score value: 80.28558559137414 - type: v_measure value: 80.28558559137414 - type: v_measure_std value: 2.795276520287584 - task: type: Clustering dataset: name: MTEB StackExchangeClusteringP2P type: mteb/stackexchange-clustering-p2p config: default split: test revision: 815ca46b2622cec33ccafc3735d572c266efdb44 metrics: - type: main_score value: 49.57135582416209 - type: v_measure value: 49.57135582416209 - type: v_measure_std value: 1.6414135468423754 - task: type: Reranking dataset: name: MTEB StackOverflowDupQuestions type: mteb/stackoverflowdupquestions-reranking config: default split: test revision: e185fbe320c72810689fc5848eb6114e1ef5ec69 metrics: - type: main_score value: 55.253002583598644 - type: map value: 55.253002583598644 - type: mrr value: 56.24172396231219 - type: nAUC_map_diff1 value: 40.00053248203427 - type: nAUC_map_max value: 10.05441740585869 - type: nAUC_map_std value: 8.227169286387552 - type: nAUC_mrr_diff1 value: 40.250446264233744 - type: nAUC_mrr_max value: 10.586310195339053 - type: nAUC_mrr_std value: 8.47326494370076 - task: type: Summarization dataset: name: MTEB SummEval type: mteb/summeval config: default split: test revision: cda12ad7615edc362dbf25a00fdd61d3b1eaf93c metrics: - type: cosine_pearson value: 31.19874648747059 - type: cosine_spearman value: 31.493550648844863 - type: dot_pearson value: 31.157847680289407 - type: dot_spearman value: 31.575299712180538 - type: main_score value: 31.493550648844863 - type: pearson value: 31.19874648747059 - type: spearman value: 31.493550648844863 - task: type: Retrieval dataset: name: MTEB TRECCOVID type: mteb/trec-covid config: default split: test revision: 
bb9466bac8153a0349341eb1b22e06409e78ef4e metrics: - type: main_score value: 85.983 - type: map_at_1 value: 0.247 - type: map_at_10 value: 2.177 - type: map_at_100 value: 14.804 - type: map_at_1000 value: 37.045 - type: map_at_20 value: 4.12 - type: map_at_3 value: 0.7000000000000001 - type: map_at_5 value: 1.1320000000000001 - type: mrr_at_1 value: 96.0 - type: mrr_at_10 value: 98.0 - type: mrr_at_100 value: 98.0 - type: mrr_at_1000 value: 98.0 - type: mrr_at_20 value: 98.0 - type: mrr_at_3 value: 98.0 - type: mrr_at_5 value: 98.0 - type: nauc_map_at_1000_diff1 value: -0.9165125200337213 - type: nauc_map_at_1000_max value: 40.260117798042764 - type: nauc_map_at_1000_std value: 71.72789335831554 - type: nauc_map_at_100_diff1 value: 20.493827311583953 - type: nauc_map_at_100_max value: 21.005742079276462 - type: nauc_map_at_100_std value: 62.53815607831659 - type: nauc_map_at_10_diff1 value: 31.289297684528215 - type: nauc_map_at_10_max value: 7.86554294370268 - type: nauc_map_at_10_std value: 37.26191657133897 - type: nauc_map_at_1_diff1 value: 25.57568148849456 - type: nauc_map_at_1_max value: -5.9767435623941445 - type: nauc_map_at_1_std value: 30.849871717506755 - type: nauc_map_at_20_diff1 value: 30.896018204532087 - type: nauc_map_at_20_max value: 8.667077299744314 - type: nauc_map_at_20_std value: 41.512687168412924 - type: nauc_map_at_3_diff1 value: 29.44724521006598 - type: nauc_map_at_3_max value: 1.597496889532064 - type: nauc_map_at_3_std value: 32.25013773854697 - type: nauc_map_at_5_diff1 value: 27.387036605618825 - type: nauc_map_at_5_max value: 5.402983746211454 - type: nauc_map_at_5_std value: 33.940523962472184 - type: nauc_mrr_at_1000_diff1 value: -14.122315592903503 - type: nauc_mrr_at_1000_max value: 33.84687208216605 - type: nauc_mrr_at_1000_std value: 86.11111111111092 - type: nauc_mrr_at_100_diff1 value: -14.122315592903503 - type: nauc_mrr_at_100_max value: 33.84687208216605 - type: nauc_mrr_at_100_std value: 86.11111111111092 - type: nauc_mrr_at_10_diff1 value: -14.122315592903503 - type: nauc_mrr_at_10_max value: 33.84687208216605 - type: nauc_mrr_at_10_std value: 86.11111111111092 - type: nauc_mrr_at_1_diff1 value: -14.122315592903831 - type: nauc_mrr_at_1_max value: 33.84687208216637 - type: nauc_mrr_at_1_std value: 86.11111111111124 - type: nauc_mrr_at_20_diff1 value: -14.122315592903503 - type: nauc_mrr_at_20_max value: 33.84687208216605 - type: nauc_mrr_at_20_std value: 86.11111111111092 - type: nauc_mrr_at_3_diff1 value: -14.122315592903503 - type: nauc_mrr_at_3_max value: 33.84687208216605 - type: nauc_mrr_at_3_std value: 86.11111111111092 - type: nauc_mrr_at_5_diff1 value: -14.122315592903503 - type: nauc_mrr_at_5_max value: 33.84687208216605 - type: nauc_mrr_at_5_std value: 86.11111111111092 - type: nauc_ndcg_at_1000_diff1 value: 8.745907669561928 - type: nauc_ndcg_at_1000_max value: 45.43307237994533 - type: nauc_ndcg_at_1000_std value: 74.93357447176336 - type: nauc_ndcg_at_100_diff1 value: -3.9719350773353765 - type: nauc_ndcg_at_100_max value: 44.43705332397461 - type: nauc_ndcg_at_100_std value: 61.59493812371758 - type: nauc_ndcg_at_10_diff1 value: 15.230915878367348 - type: nauc_ndcg_at_10_max value: 48.332840970836635 - type: nauc_ndcg_at_10_std value: 46.888785065125774 - type: nauc_ndcg_at_1_diff1 value: 13.219732337379442 - type: nauc_ndcg_at_1_max value: 45.19919078742603 - type: nauc_ndcg_at_1_std value: 64.68253968253977 - type: nauc_ndcg_at_20_diff1 value: 12.479648691964865 - type: nauc_ndcg_at_20_max value: 48.76688248450331 - type: 
nauc_ndcg_at_20_std value: 51.450399755887545 - type: nauc_ndcg_at_3_diff1 value: 6.165414201871464 - type: nauc_ndcg_at_3_max value: 45.089689347691035 - type: nauc_ndcg_at_3_std value: 41.08249161845213 - type: nauc_ndcg_at_5_diff1 value: 7.411245806844721 - type: nauc_ndcg_at_5_max value: 47.818748093538076 - type: nauc_ndcg_at_5_std value: 45.907685763676575 - type: nauc_precision_at_1000_diff1 value: -30.574290219847345 - type: nauc_precision_at_1000_max value: 32.56926126118719 - type: nauc_precision_at_1000_std value: 14.584504392628874 - type: nauc_precision_at_100_diff1 value: -10.199740234718847 - type: nauc_precision_at_100_max value: 41.0213226769777 - type: nauc_precision_at_100_std value: 56.975760776771324 - type: nauc_precision_at_10_diff1 value: 7.865792689701161 - type: nauc_precision_at_10_max value: 52.00432275201737 - type: nauc_precision_at_10_std value: 43.89512276413724 - type: nauc_precision_at_1_diff1 value: -14.122315592903831 - type: nauc_precision_at_1_max value: 33.84687208216637 - type: nauc_precision_at_1_std value: 86.11111111111124 - type: nauc_precision_at_20_diff1 value: 5.481424191880084 - type: nauc_precision_at_20_max value: 46.86629331792725 - type: nauc_precision_at_20_std value: 49.245692667517496 - type: nauc_precision_at_3_diff1 value: -5.870408807869163 - type: nauc_precision_at_3_max value: 48.73657612128875 - type: nauc_precision_at_3_std value: 41.15152062088262 - type: nauc_precision_at_5_diff1 value: -4.550610529125413 - type: nauc_precision_at_5_max value: 60.390115878205386 - type: nauc_precision_at_5_std value: 44.16494295055696 - type: nauc_recall_at_1000_diff1 value: 8.047794367079034 - type: nauc_recall_at_1000_max value: 37.07551482870489 - type: nauc_recall_at_1000_std value: 66.20862163364201 - type: nauc_recall_at_100_diff1 value: 25.08104923597475 - type: nauc_recall_at_100_max value: 9.971294642165734 - type: nauc_recall_at_100_std value: 51.737814074891254 - type: nauc_recall_at_10_diff1 value: 32.33148478369628 - type: nauc_recall_at_10_max value: 1.3767192150014917 - type: nauc_recall_at_10_std value: 30.801926742876308 - type: nauc_recall_at_1_diff1 value: 25.57568148849456 - type: nauc_recall_at_1_max value: -5.9767435623941445 - type: nauc_recall_at_1_std value: 30.849871717506755 - type: nauc_recall_at_20_diff1 value: 31.716580022934654 - type: nauc_recall_at_20_max value: -0.1281270579464631 - type: nauc_recall_at_20_std value: 33.76185294993676 - type: nauc_recall_at_3_diff1 value: 29.758810004388348 - type: nauc_recall_at_3_max value: -1.9442985017191816 - type: nauc_recall_at_3_std value: 27.45550076962206 - type: nauc_recall_at_5_diff1 value: 27.047710181576672 - type: nauc_recall_at_5_max value: 1.5237000700880248 - type: nauc_recall_at_5_std value: 28.235297950159698 - type: ndcg_at_1 value: 94.0 - type: ndcg_at_10 value: 85.983 - type: ndcg_at_100 value: 69.195 - type: ndcg_at_1000 value: 62.541000000000004 - type: ndcg_at_20 value: 83.405 - type: ndcg_at_3 value: 89.98899999999999 - type: ndcg_at_5 value: 87.905 - type: precision_at_1 value: 96.0 - type: precision_at_10 value: 89.4 - type: precision_at_100 value: 71.54 - type: precision_at_1000 value: 27.594 - type: precision_at_20 value: 87.2 - type: precision_at_3 value: 92.667 - type: precision_at_5 value: 90.8 - type: recall_at_1 value: 0.247 - type: recall_at_10 value: 2.315 - type: recall_at_100 value: 17.574 - type: recall_at_1000 value: 59.336999999999996 - type: recall_at_20 value: 4.491 - type: recall_at_3 value: 0.7250000000000001 - type: recall_at_5 
value: 1.1820000000000002 - task: type: Retrieval dataset: name: MTEB Touche2020 type: mteb/touche2020 config: default split: test revision: a34f9a33db75fa0cbb21bb5cfc3dae8dc8bec93f metrics: - type: main_score value: 29.944 - type: map_at_1 value: 3.064 - type: map_at_10 value: 11.501999999999999 - type: map_at_100 value: 18.736 - type: map_at_1000 value: 20.333000000000002 - type: map_at_20 value: 14.057 - type: map_at_3 value: 6.300999999999999 - type: map_at_5 value: 8.463 - type: mrr_at_1 value: 44.89795918367347 - type: mrr_at_10 value: 58.41188856494979 - type: mrr_at_100 value: 58.93964266413245 - type: mrr_at_1000 value: 58.93964266413245 - type: mrr_at_20 value: 58.767485349118 - type: mrr_at_3 value: 54.42176870748299 - type: mrr_at_5 value: 56.666666666666664 - type: nauc_map_at_1000_diff1 value: 11.478593385608479 - type: nauc_map_at_1000_max value: 10.309889845044324 - type: nauc_map_at_1000_std value: 21.16721939940238 - type: nauc_map_at_100_diff1 value: 11.570438543562418 - type: nauc_map_at_100_max value: 8.426183648064834 - type: nauc_map_at_100_std value: 18.56231985033613 - type: nauc_map_at_10_diff1 value: 22.37735506247481 - type: nauc_map_at_10_max value: 5.455946239060806 - type: nauc_map_at_10_std value: -4.2848826518388154 - type: nauc_map_at_1_diff1 value: 27.853645380676824 - type: nauc_map_at_1_max value: 7.30739948053113 - type: nauc_map_at_1_std value: -0.2773663157814586 - type: nauc_map_at_20_diff1 value: 14.724669779924648 - type: nauc_map_at_20_max value: 10.12882779173533 - type: nauc_map_at_20_std value: 4.4803777672120875 - type: nauc_map_at_3_diff1 value: 31.891173385921263 - type: nauc_map_at_3_max value: 4.889652271827218 - type: nauc_map_at_3_std value: -9.477460238651643 - type: nauc_map_at_5_diff1 value: 31.489012040465003 - type: nauc_map_at_5_max value: 1.7330092417337482 - type: nauc_map_at_5_std value: -8.137018608469637 - type: nauc_mrr_at_1000_diff1 value: 24.411522237082416 - type: nauc_mrr_at_1000_max value: 11.286971076556688 - type: nauc_mrr_at_1000_std value: 23.443174210894043 - type: nauc_mrr_at_100_diff1 value: 24.411522237082416 - type: nauc_mrr_at_100_max value: 11.286971076556688 - type: nauc_mrr_at_100_std value: 23.443174210894043 - type: nauc_mrr_at_10_diff1 value: 23.948152308265186 - type: nauc_mrr_at_10_max value: 12.22420979621155 - type: nauc_mrr_at_10_std value: 23.557939024705544 - type: nauc_mrr_at_1_diff1 value: 17.902334894536107 - type: nauc_mrr_at_1_max value: 17.36969662861018 - type: nauc_mrr_at_1_std value: 19.425714969048734 - type: nauc_mrr_at_20_diff1 value: 24.635893795899797 - type: nauc_mrr_at_20_max value: 11.330541067194913 - type: nauc_mrr_at_20_std value: 23.74518583400233 - type: nauc_mrr_at_3_diff1 value: 25.045536328282587 - type: nauc_mrr_at_3_max value: 7.497967004732733 - type: nauc_mrr_at_3_std value: 24.167153007320078 - type: nauc_mrr_at_5_diff1 value: 24.328479930592454 - type: nauc_mrr_at_5_max value: 10.037126854938336 - type: nauc_mrr_at_5_std value: 25.236208055346136 - type: nauc_ndcg_at_1000_diff1 value: 15.555347444667389 - type: nauc_ndcg_at_1000_max value: 13.356591700655718 - type: nauc_ndcg_at_1000_std value: 42.42395845935052 - type: nauc_ndcg_at_100_diff1 value: 13.110526060413708 - type: nauc_ndcg_at_100_max value: 3.140006440162515 - type: nauc_ndcg_at_100_std value: 39.02733288398033 - type: nauc_ndcg_at_10_diff1 value: 20.68853369009725 - type: nauc_ndcg_at_10_max value: 2.435389817058852 - type: nauc_ndcg_at_10_std value: 10.038202768784316 - type: nauc_ndcg_at_1_diff1 
value: 20.17287594582385 - type: nauc_ndcg_at_1_max value: 12.487205168273196 - type: nauc_ndcg_at_1_std value: 20.639827614373075 - type: nauc_ndcg_at_20_diff1 value: 16.987577348502985 - type: nauc_ndcg_at_20_max value: 2.9978717644469266 - type: nauc_ndcg_at_20_std value: 13.015690866750354 - type: nauc_ndcg_at_3_diff1 value: 32.392223079245575 - type: nauc_ndcg_at_3_max value: 1.587587110582544 - type: nauc_ndcg_at_3_std value: 12.850592473446609 - type: nauc_ndcg_at_5_diff1 value: 32.80244517369626 - type: nauc_ndcg_at_5_max value: 5.8939933777508084 - type: nauc_ndcg_at_5_std value: 15.779687411463414 - type: nauc_precision_at_1000_diff1 value: -14.314031720452537 - type: nauc_precision_at_1000_max value: 32.87886666567266 - type: nauc_precision_at_1000_std value: 21.49347046886851 - type: nauc_precision_at_100_diff1 value: -9.4034008613839 - type: nauc_precision_at_100_max value: 16.784075123309645 - type: nauc_precision_at_100_std value: 73.14688535393604 - type: nauc_precision_at_10_diff1 value: 6.855101404043058 - type: nauc_precision_at_10_max value: 6.52491228645612 - type: nauc_precision_at_10_std value: 16.104602266016744 - type: nauc_precision_at_1_diff1 value: 17.902334894536107 - type: nauc_precision_at_1_max value: 17.36969662861018 - type: nauc_precision_at_1_std value: 19.425714969048734 - type: nauc_precision_at_20_diff1 value: -5.337534613602212 - type: nauc_precision_at_20_max value: 17.722925454767218 - type: nauc_precision_at_20_std value: 34.26680462132849 - type: nauc_precision_at_3_diff1 value: 31.054623397809255 - type: nauc_precision_at_3_max value: -0.92038600946826 - type: nauc_precision_at_3_std value: 8.326997076862916 - type: nauc_precision_at_5_diff1 value: 29.784942296920462 - type: nauc_precision_at_5_max value: 6.337469263434779 - type: nauc_precision_at_5_std value: 12.789597196020974 - type: nauc_recall_at_1000_diff1 value: -3.8177981862041364 - type: nauc_recall_at_1000_max value: 14.206064332229163 - type: nauc_recall_at_1000_std value: 74.18853420771269 - type: nauc_recall_at_100_diff1 value: 0.7677996771461106 - type: nauc_recall_at_100_max value: -4.139924106878441 - type: nauc_recall_at_100_std value: 48.319930706362896 - type: nauc_recall_at_10_diff1 value: 12.038835537494322 - type: nauc_recall_at_10_max value: -2.0498983557854418 - type: nauc_recall_at_10_std value: -2.0339180690854493 - type: nauc_recall_at_1_diff1 value: 27.853645380676824 - type: nauc_recall_at_1_max value: 7.30739948053113 - type: nauc_recall_at_1_std value: -0.2773663157814586 - type: nauc_recall_at_20_diff1 value: 0.7907893667756708 - type: nauc_recall_at_20_max value: 0.8795499810558195 - type: nauc_recall_at_20_std value: 11.512483291688282 - type: nauc_recall_at_3_diff1 value: 33.19440392639576 - type: nauc_recall_at_3_max value: -1.5494237697432613 - type: nauc_recall_at_3_std value: -8.560408808376984 - type: nauc_recall_at_5_diff1 value: 27.42193873870941 - type: nauc_recall_at_5_max value: -4.74350293281128 - type: nauc_recall_at_5_std value: -7.618060131179654 - type: ndcg_at_1 value: 42.857 - type: ndcg_at_10 value: 29.944 - type: ndcg_at_100 value: 42.624 - type: ndcg_at_1000 value: 53.384 - type: ndcg_at_20 value: 30.135 - type: ndcg_at_3 value: 34.847 - type: ndcg_at_5 value: 32.573 - type: precision_at_1 value: 44.897999999999996 - type: precision_at_10 value: 25.306 - type: precision_at_100 value: 8.694 - type: precision_at_1000 value: 1.616 - type: precision_at_20 value: 19.082 - type: precision_at_3 value: 34.014 - type: precision_at_5 value: 
31.019999999999996 - type: recall_at_1 value: 3.064 - type: recall_at_10 value: 17.849999999999998 - type: recall_at_100 value: 53.217999999999996 - type: recall_at_1000 value: 87.095 - type: recall_at_20 value: 26.111 - type: recall_at_3 value: 7.383000000000001 - type: recall_at_5 value: 11.434 - task: type: Classification dataset: name: MTEB ToxicConversationsClassification type: mteb/toxic_conversations_50k config: default split: test revision: edfaf9da55d3dd50d43143d90c1ac476895ae6de metrics: - type: accuracy value: 88.759765625 - type: ap value: 36.49152357863017 - type: ap_weighted value: 36.49152357863017 - type: f1 value: 74.4692714448641 - type: f1_weighted value: 90.54372649306606 - type: main_score value: 88.759765625 - task: type: Classification dataset: name: MTEB TweetSentimentExtractionClassification type: mteb/tweet_sentiment_extraction config: default split: test revision: d604517c81ca91fe16a244d1248fc021f9ecee7a metrics: - type: accuracy value: 74.8443689869836 - type: f1 value: 75.1139662898148 - type: f1_weighted value: 74.7369003946243 - type: main_score value: 74.8443689869836 - task: type: Clustering dataset: name: MTEB TwentyNewsgroupsClustering type: mteb/twentynewsgroups-clustering config: default split: test revision: 6125ec4e24fa026cec8a478383ee943acfbd5449 metrics: - type: main_score value: 61.42918790942448 - type: v_measure value: 61.42918790942448 - type: v_measure_std value: 1.0156550098843082 - task: type: PairClassification dataset: name: MTEB TwitterSemEval2015 type: mteb/twittersemeval2015-pairclassification config: default split: test revision: 70970daeab8776df92f5ea462b6173c0b46fd2d1 metrics: - type: cosine_accuracy value: 88.22197055492639 - type: cosine_accuracy_threshold value: 83.30042362213135 - type: cosine_ap value: 80.57754959194938 - type: cosine_f1 value: 73.70579190158894 - type: cosine_f1_threshold value: 81.04978799819946 - type: cosine_precision value: 71.64922770303936 - type: cosine_recall value: 75.8839050131926 - type: dot_accuracy value: 88.23985217857782 - type: dot_accuracy_threshold value: 83.31039547920227 - type: dot_ap value: 80.57533213448181 - type: dot_f1 value: 73.61309601143302 - type: dot_f1_threshold value: 81.33968114852905 - type: dot_precision value: 72.51087791144101 - type: dot_recall value: 74.74934036939314 - type: euclidean_accuracy value: 88.22197055492639 - type: euclidean_accuracy_threshold value: 58.290231227874756 - type: euclidean_ap value: 80.57982723880139 - type: euclidean_f1 value: 73.63426519620417 - type: euclidean_f1_threshold value: 61.55576705932617 - type: euclidean_precision value: 71.63173652694611 - type: euclidean_recall value: 75.75197889182058 - type: main_score value: 80.57982723880139 - type: manhattan_accuracy value: 88.14448351910353 - type: manhattan_accuracy_threshold value: 3907.2471618652344 - type: manhattan_ap value: 80.3538079655539 - type: manhattan_f1 value: 73.40466675261054 - type: manhattan_f1_threshold value: 4103.794097900391 - type: manhattan_precision value: 71.76707839677337 - type: manhattan_recall value: 75.11873350923483 - type: max_ap value: 80.57982723880139 - type: max_f1 value: 73.70579190158894 - type: max_precision value: 72.51087791144101 - type: max_recall value: 75.8839050131926 - type: similarity_accuracy value: 88.22197055492639 - type: similarity_accuracy_threshold value: 83.30042362213135 - type: similarity_ap value: 80.57754959194938 - type: similarity_f1 value: 73.70579190158894 - type: similarity_f1_threshold value: 81.04978799819946 - type: 
similarity_precision value: 71.64922770303936 - type: similarity_recall value: 75.8839050131926 - task: type: PairClassification dataset: name: MTEB TwitterURLCorpus type: mteb/twitterurlcorpus-pairclassification config: default split: test revision: 8b6510b0b1fa4e4c4f879467980e9be563ec1cdf metrics: - type: cosine_accuracy value: 89.88628866379477 - type: cosine_accuracy_threshold value: 80.8050274848938 - type: cosine_ap value: 87.57594591596816 - type: cosine_f1 value: 80.0812257707218 - type: cosine_f1_threshold value: 77.990061044693 - type: cosine_precision value: 76.93126197063205 - type: cosine_recall value: 83.50015398829689 - type: dot_accuracy value: 89.87852679784221 - type: dot_accuracy_threshold value: 80.84419965744019 - type: dot_ap value: 87.56136742222151 - type: dot_f1 value: 80.05898617511521 - type: dot_f1_threshold value: 77.92385816574097 - type: dot_precision value: 76.80554573106035 - type: dot_recall value: 83.60024638127503 - type: euclidean_accuracy value: 89.86882446540149 - type: euclidean_accuracy_threshold value: 62.08193898200989 - type: euclidean_ap value: 87.57517549192228 - type: euclidean_f1 value: 80.05286925872892 - type: euclidean_f1_threshold value: 66.65036082267761 - type: euclidean_precision value: 76.51063232507545 - type: euclidean_recall value: 83.93902063443178 - type: main_score value: 87.64162614197194 - type: manhattan_accuracy value: 89.8959909962355 - type: manhattan_accuracy_threshold value: 4176.108169555664 - type: manhattan_ap value: 87.64162614197194 - type: manhattan_f1 value: 80.17116279069768 - type: manhattan_f1_threshold value: 4433.153533935547 - type: manhattan_precision value: 77.57615035644848 - type: manhattan_recall value: 82.94579611949491 - type: max_ap value: 87.64162614197194 - type: max_f1 value: 80.17116279069768 - type: max_precision value: 77.57615035644848 - type: max_recall value: 83.93902063443178 - type: similarity_accuracy value: 89.88628866379477 - type: similarity_accuracy_threshold value: 80.8050274848938 - type: similarity_ap value: 87.57594591596816 - type: similarity_f1 value: 80.0812257707218 - type: similarity_f1_threshold value: 77.990061044693 - type: similarity_precision value: 76.93126197063205 - type: similarity_recall value: 83.50015398829689 --- # Updates We released a Jasper and Stella model technology report and code.(2025.1) **Report:** https://arxiv.org/abs/2412.19048 **Codes:** https://github.com/NLPJCL/RAG-Retrieval # Introduction The models are trained based on `Alibaba-NLP/gte-large-en-v1.5` and `Alibaba-NLP/gte-Qwen2-1.5B-instruct`. Thanks for their contributions! **We simplify usage of prompts, providing two prompts for most general tasks, one is for s2p, another one is for s2s.** Prompt of s2p task(e.g. retrieve task): ```text Instruct: Given a web search query, retrieve relevant passages that answer the query.\nQuery: {query} ``` Prompt of s2s task(e.g. semantic textual similarity task): ```text Instruct: Retrieve semantically similar text.\nQuery: {query} ``` The models are finally trained by [MRL]((https://arxiv.org/abs/2205.13147)), so they have multiple dimensions: 512, 768, 1024, 2048, 4096, 6144 and 8192. The higher the dimension, the better the performance. **Generally speaking, 1024d is good enough.** The MTEB score of 1024d is only 0.001 lower than 8192d. # Model directory structure The model directory structure is very simple, it is a standard SentenceTransformer directory **with a series of `2_Dense_{dims}` folders**, where `dims` represents the final vector dimension. 
For example, the `2_Dense_256` folder stores Linear weights that convert vector dimensions to 256 dimensions. Please refer to the following chapters for specific instructions on how to use them. # Usage You can use `SentenceTransformers` or `transformers` library to encode text. ## Sentence Transformers ```python from sentence_transformers import SentenceTransformer # This model supports two prompts: "s2p_query" and "s2s_query" for sentence-to-passage and sentence-to-sentence tasks, respectively. # They are defined in `config_sentence_transformers.json` query_prompt_name = "s2p_query" queries = [ "What are some ways to reduce stress?", "What are the benefits of drinking green tea?", ] # docs do not need any prompts docs = [ "There are many effective ways to reduce stress. Some common techniques include deep breathing, meditation, and physical activity. Engaging in hobbies, spending time in nature, and connecting with loved ones can also help alleviate stress. Additionally, setting boundaries, practicing self-care, and learning to say no can prevent stress from building up.", "Green tea has been consumed for centuries and is known for its potential health benefits. It contains antioxidants that may help protect the body against damage caused by free radicals. Regular consumption of green tea has been associated with improved heart health, enhanced cognitive function, and a reduced risk of certain types of cancer. The polyphenols in green tea may also have anti-inflammatory and weight loss properties.", ] # !The default dimension is 1024, if you need other dimensions, please clone the model and modify `modules.json` to replace `2_Dense_1024` with another dimension, e.g. `2_Dense_256` or `2_Dense_8192` ! model = SentenceTransformer("dunzhang/stella_en_1.5B_v5", trust_remote_code=True).cuda() query_embeddings = model.encode(queries, prompt_name=query_prompt_name) doc_embeddings = model.encode(docs) print(query_embeddings.shape, doc_embeddings.shape) # (2, 1024) (2, 1024) similarities = model.similarity(query_embeddings, doc_embeddings) print(similarities) # tensor([[0.8179, 0.2958], # [0.3194, 0.7854]]) ``` ## Transformers ```python import os import torch from transformers import AutoModel, AutoTokenizer from sklearn.preprocessing import normalize query_prompt = "Instruct: Given a web search query, retrieve relevant passages that answer the query.\nQuery: " queries = [ "What are some ways to reduce stress?", "What are the benefits of drinking green tea?", ] queries = [query_prompt + query for query in queries] # docs do not need any prompts docs = [ "There are many effective ways to reduce stress. Some common techniques include deep breathing, meditation, and physical activity. Engaging in hobbies, spending time in nature, and connecting with loved ones can also help alleviate stress. Additionally, setting boundaries, practicing self-care, and learning to say no can prevent stress from building up.", "Green tea has been consumed for centuries and is known for its potential health benefits. It contains antioxidants that may help protect the body against damage caused by free radicals. Regular consumption of green tea has been associated with improved heart health, enhanced cognitive function, and a reduced risk of certain types of cancer. 
The polyphenols in green tea may also have anti-inflammatory and weight loss properties.", ] # The path of your model after cloning it model_dir = "{Your MODEL_PATH}" vector_dim = 1024 vector_linear_directory = f"2_Dense_{vector_dim}" model = AutoModel.from_pretrained(model_dir, trust_remote_code=True).cuda().eval() tokenizer = AutoTokenizer.from_pretrained(model_dir, trust_remote_code=True) vector_linear = torch.nn.Linear(in_features=model.config.hidden_size, out_features=vector_dim) vector_linear_dict = { k.replace("linear.", ""): v for k, v in torch.load(os.path.join(model_dir, f"{vector_linear_directory}/pytorch_model.bin")).items() } vector_linear.load_state_dict(vector_linear_dict) vector_linear.cuda() # Embed the queries with torch.no_grad(): input_data = tokenizer(queries, padding="longest", truncation=True, max_length=512, return_tensors="pt") input_data = {k: v.cuda() for k, v in input_data.items()} attention_mask = input_data["attention_mask"] last_hidden_state = model(**input_data)[0] last_hidden = last_hidden_state.masked_fill(~attention_mask[..., None].bool(), 0.0) query_vectors = last_hidden.sum(dim=1) / attention_mask.sum(dim=1)[..., None] query_vectors = normalize(vector_linear(query_vectors).cpu().numpy()) # Embed the documents with torch.no_grad(): input_data = tokenizer(docs, padding="longest", truncation=True, max_length=512, return_tensors="pt") input_data = {k: v.cuda() for k, v in input_data.items()} attention_mask = input_data["attention_mask"] last_hidden_state = model(**input_data)[0] last_hidden = last_hidden_state.masked_fill(~attention_mask[..., None].bool(), 0.0) docs_vectors = last_hidden.sum(dim=1) / attention_mask.sum(dim=1)[..., None] docs_vectors = normalize(vector_linear(docs_vectors).cpu().numpy()) print(query_vectors.shape, docs_vectors.shape) # (2, 1024) (2, 1024) similarities = query_vectors @ docs_vectors.T print(similarities) # [[0.8178789 0.2958377 ] # [0.31938642 0.7853526 ]] ``` ## Infinity Usage with [Infinity, MIT Licensed Inference Server](https://github.com/michaelfeil/infinity) and Docker. ```bash docker run --gpus all -v $PWD/data:/app/.cache \ michaelf34/infinity:0.0.69-trt-onnx \ v2 --model-id dunzhang/stella_en_1.5B_v5 --batch-size 16 --device cuda --engine torch --port 7997 ``` # Citation ``` @misc{zhang2025jasperstelladistillationsota, title={Jasper and Stella: distillation of SOTA embedding models}, author={Dun Zhang and Jiacheng Li and Ziyang Zeng and Fulong Wang}, year={2025}, eprint={2412.19048}, archivePrefix={arXiv}, primaryClass={cs.IR}, url={https://arxiv.org/abs/2412.19048}, } ``` # FAQ Q: The details of training? A: The training method and datasets will be released in the future. (specific time unknown, may be provided in a paper) Q: How to choose a suitable prompt for my own task? A: In most cases, please use the s2p and s2s prompts. These two prompts account for the vast majority of the training data. Q: How to reproduce MTEB results? A: Please use evaluation scripts in `Alibaba-NLP/gte-Qwen2-1.5B-instruct` or `intfloat/e5-mistral-7b-instruct` Q: Why each dimension has a linear weight? A: MRL has multiple training methods, we choose this method which has the best performance. Q: What is the sequence length of models? A: 512 is recommended, in our experiments, almost all models perform poorly on specialized long text retrieval datasets. Besides, the model is trained on datasets of 512 length. This may be an optimization term. If you have any questions, please start a discussion on community.
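For completeness, here is a minimal client-side sketch for the Infinity deployment shown above. It assumes Infinity's OpenAI-compatible REST interface is reachable at `http://localhost:7997/embeddings` with a `{"model": ..., "input": [...]}` payload and a `{"data": [{"embedding": [...]}]}` response; the exact route and payload may differ across Infinity versions, so treat this as illustrative rather than authoritative.

```python
import requests

# Assumes the docker command above is running and exposes Infinity's
# OpenAI-compatible embeddings endpoint on port 7997 (illustrative only).
resp = requests.post(
    "http://localhost:7997/embeddings",
    json={
        "model": "dunzhang/stella_en_1.5B_v5",
        # s2p prompt prepended to the query, as described in the prompt section above
        "input": [
            "Instruct: Given a web search query, retrieve relevant passages that answer the query.\nQuery: What are some ways to reduce stress?"
        ],
    },
)
resp.raise_for_status()
embedding = resp.json()["data"][0]["embedding"]
print(len(embedding))
```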
[ "BIOSSES", "SCIFACT" ]
microsoft/BiomedParse
microsoft
null
[ "dataset:microsoft/BiomedParseData", "license:cc-by-nc-sa-4.0", "region:us" ]
"2024-11-04T21:32:56Z"
2024-12-20T20:15:52+00:00
178,879
68
---
datasets:
- microsoft/BiomedParseData
license: cc-by-nc-sa-4.0
---

This is the official model checkpoint repo for "A foundation model for joint segmentation, detection and recognition of biomedical objects across nine modalities".

[[`Code`](https://github.com/microsoft/BiomedParse)] [[`Paper`](https://aka.ms/biomedparse-paper)] [[`Demo`](https://microsoft.github.io/BiomedParse/)] [[`Data`](https://huggingface.co/datasets/microsoft/BiomedParseData)]

Biomedical image analysis is fundamental for biomedical discovery in cell biology, pathology, radiology, and many other biomedical domains. BiomedParse is a biomedical foundation model for imaging parsing that can jointly conduct segmentation, detection, and recognition across 9 imaging modalities. Through joint learning, we can improve accuracy for individual tasks and enable novel applications such as segmenting all relevant objects in an image through a text prompt, rather than requiring users to laboriously specify the bounding box for each object. BiomedParse is broadly applicable, performing image segmentation across 9 imaging modalities.

### Installation
```sh
git clone https://github.com/microsoft/BiomedParse.git
cd BiomedParse
conda create -n biomedparse python=3.9.19
conda activate biomedparse
```

Install PyTorch
```sh
conda install pytorch torchvision torchaudio pytorch-cuda=12.4 -c pytorch -c nvidia
```
In case there is an issue with the detectron2 installation, make sure your PyTorch version is compatible with the CUDA version on your machine at https://pytorch.org/.

Install dependencies
```sh
pip install -r assets/requirements/requirements.txt
```

### Model Setup
```python
from PIL import Image
import torch
from modeling.BaseModel import BaseModel
from modeling import build_model
from utilities.distributed import init_distributed
from utilities.arguments import load_opt_from_config_files
from utilities.constants import BIOMED_CLASSES
from inference_utils.inference import interactive_infer_image
from inference_utils.output_processing import check_mask_stats
import numpy as np

# Build model config
opt = load_opt_from_config_files(["configs/biomedparse_inference.yaml"])
opt = init_distributed(opt)

# Load model from pretrained weights
pretrained_pth = 'hf_hub:microsoft/BiomedParse'
model = BaseModel(opt, build_model(opt)).from_pretrained(pretrained_pth).eval().cuda()
with torch.no_grad():
    model.model.sem_seg_head.predictor.lang_encoder.get_text_embeddings(BIOMED_CLASSES + ["background"], is_eval=True)
```

### Segmentation On Example Images
```python
# RGB image input of shape (H, W, 3). Currently only batch size 1 is supported.
image = Image.open('examples/Part_1_516_pathology_breast.png', formats=['png'])
image = image.convert('RGB')
# text prompts querying objects in the image. Multiple ones can be provided.
prompts = ['neoplastic cells', 'inflammatory cells']

# load ground truth masks
gt_masks = []
for prompt in prompts:
    gt_mask = Image.open(f"examples/Part_1_516_pathology_breast_{prompt.replace(' ', '+')}.png", formats=['png'])
    gt_mask = 1*(np.array(gt_mask.convert('RGB'))[:,:,0] > 0)
    gt_masks.append(gt_mask)

pred_mask = interactive_infer_image(model, image, prompts)

# compare predictions with the ground truth masks
for i, pred in enumerate(pred_mask):
    gt = gt_masks[i]
    dice = (1*(pred>0.5) & gt).sum() * 2.0 / (1*(pred>0.5).sum() + gt.sum())
    print(f'Dice score for {prompts[i]}: {dice:.4f}')
    p_value = check_mask_stats(image, pred_mask[i]*255, 'X-Ray-Chest', prompts[i])
    print(f'p-value for {prompts[i]}: {p_value:.4f}')
```

### Usage and License Notices
The model described in this repository is provided for research and development use only. The model is not intended for use in clinical decision-making or for any other clinical use, and the performance of the model for clinical use has not been established. You bear sole responsibility for any use of this model, including incorporation into any product intended for clinical use.

### Citation
Please cite our paper if you use the code, model, or data.

Zhao, T., Gu, Y., Yang, J. et al. A foundation model for joint segmentation, detection and recognition of biomedical objects across nine modalities. Nat Methods (2024). https://doi.org/10.1038/s41592-024-02499-w
```
@article{zhao2024biomedparse,
  title = {A foundation model for joint segmentation, detection, and recognition of biomedical objects across nine modalities},
  author = {Zhao, Theodore and Gu, Yu and Yang, Jianwei and Usuyama, Naoto and Lee, Ho Hin and Kiblawi, Sid and Naumann, Tristan and Gao, Jianfeng and Crabtree, Angela and Abel, Jacob and Moung-Wen, Christine and Piening, Brian and Bifulco, Carlo and Wei, Mu and Poon, Hoifung and Wang, Sheng},
  journal = {Nature Methods},
  year = {2024},
  publisher = {Nature Publishing Group UK London},
  url = {https://www.nature.com/articles/s41592-024-02499-w},
  doi = {10.1038/s41592-024-02499-w}
}
```

### Model Architecture
BiomedParse is built upon a transformer-based architecture, optimized for processing large biomedical corpora. Leveraging multi-head attention mechanisms, it excels at identifying and understanding biomedical terminology, as well as extracting contextually relevant information from dense scientific texts. The model is pre-trained on vast biomedical datasets, allowing it to generalize across various biomedical domains with high accuracy.

### Evaluation Results
Please see the paper for detailed information about methods and results. https://microsoft.github.io/BiomedParse/assets/BiomedParse_arxiv.pdf

### Fairness evaluation
We conducted fairness evaluation for different sex and age groups. A two-sided independent t-test shows non-significant differences between female and male groups and between different age groups, with p-value > 5% for all imaging modalities and segmentation targets evaluated.
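To illustrate the statistical test used here, the following is a minimal sketch with made-up per-case Dice scores (not the study's data). It runs a two-sided independent t-test with SciPy; a p-value above 0.05 would indicate no significant difference between the two subgroups.

```python
import numpy as np
from scipy import stats

# Hypothetical per-case Dice scores for two subgroups (illustrative only)
dice_group_a = np.array([0.91, 0.88, 0.93, 0.90, 0.87, 0.92])
dice_group_b = np.array([0.90, 0.89, 0.91, 0.88, 0.92, 0.90])

# Two-sided independent t-test (SciPy's default alternative is two-sided)
t_stat, p_value = stats.ttest_ind(dice_group_a, dice_group_b)
print(f"t = {t_stat:.3f}, p = {p_value:.3f}")
```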
While testing the model with images and/or text, ensure that the data is PHI-free and that it contains no patient information or information that can be traced back to a patient identity.

The model is not designed for the following use cases:

- Use by clinicians to inform clinical decision-making, as a diagnostic tool, or as a medical device
  - Although MedImageParse is highly accurate in parsing biomedical data, it is not designed or intended to be deployed in clinical settings as-is, nor is it for use in the diagnosis, cure, mitigation, treatment, or prevention of disease or other conditions (including to support clinical decision-making), or as a substitute for the professional medical advice, diagnosis, treatment, or clinical judgment of a healthcare professional.
- Scenarios without consent for data
  - Any scenario that uses health data for a purpose for which consent was not obtained.
- Use outside of health scenarios
  - Any scenario that uses non-medical-related images and/or serves purposes outside of the healthcare domain.

Please see Microsoft's Responsible AI Principles and approach, available at https://www.microsoft.com/en-us/ai/principles-and-approach/

### Data Specification for Deployment

- The model expects 2D 8-bit RGB or grayscale images by default, with pixel values ranging from 0 to 255 and a resolution of 1024×1024.
- The model outputs pixel probabilities in the same shape as the input image. We convert the floating-point probabilities to 8-bit grayscale outputs. The probability threshold for the segmentation mask is 0.5, which corresponds to 127.5 in the 8-bit grayscale output (see the sketch after the task list below).
- The model takes in text prompts for segmentation and doesn't have a fixed number of targets to handle. To ensure quality performance, we recommend the following tasks based on evaluation results. Note, however, that we only evaluated the model on the test split of BiomedParseData, so there is no guarantee of the same performance on external datasets, even for the same task, due to variation in device, preprocessing, resolution, and other distribution shifts. For best performance, we recommend finetuning on your specific tasks.
- CT:
  - abdomen: adrenal gland, aorta, bladder, duodenum, esophagus, gallbladder, kidney, kidney cyst, kidney tumor, left adrenal gland, left kidney, liver, pancreas, postcava, right adrenal gland, right kidney, spleen, stomach, tumor
  - colon: tumor
  - liver: liver, tumor
  - lung: COVID-19 infection, nodule
  - pelvis: uterus
- MRI-FLAIR:
  - brain: edema, lower-grade glioma, tumor, tumor core, whole tumor
- MRI-T1-Gd:
  - brain: enhancing tumor, tumor core
- MRI-T2:
  - prostate: prostate peripheral zone, prostate transitional zone
- MRI:
  - abdomen: aorta, esophagus, gallbladder, kidney, left kidney, liver, pancreas, postcava, right kidney, spleen, stomach
  - brain: anterior hippocampus, posterior hippocampus
  - heart: left heart atrium, left heart ventricle, myocardium, right heart ventricle
  - prostate: prostate
- OCT:
  - retinal: edema
- X-Ray:
  - chest: COVID-19 infection, left lung, lung, lung opacity, right lung, viral pneumonia
- Dermoscopy:
  - skin: lesion, melanoma
- Endoscope:
  - colon: neoplastic polyp, non-neoplastic polyp, polyp
- Fundus:
  - retinal: optic cup, optic disc
- Pathology:
  - bladder: neoplastic cells
  - breast: epithelial cells, neoplastic cells
  - cervix: neoplastic cells
  - colon: glandular structure, neoplastic cells
  - esophagus: neoplastic cells
  - kidney: neoplastic cells
  - liver: epithelial cells, neoplastic cells
  - ovarian: epithelial cells, neoplastic cells
  - prostate: neoplastic cells
  - skin: neoplastic cells
  - stomach: neoplastic cells
  - testis: epithelial cells
  - thyroid: epithelial cells, neoplastic cells
  - uterus: neoplastic cells
- Ultrasound:
  - breast: benign tumor, malignant tumor, tumor
  - heart: left heart atrium, left heart ventricle
  - transperineal: fetal head, pubic symphysis
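To make the input/output specification above concrete, here is a minimal pre- and post-processing sketch. The helper names are ours and not part of the BiomedParse API, and the bilinear resize is an assumption; adapt both to your own pipeline.

```python
import numpy as np
from PIL import Image

def preprocess(image: Image.Image) -> Image.Image:
    # Expected input: 8-bit RGB, pixel values 0-255, resolution 1024x1024.
    # Bilinear resampling is an assumption; use whatever resizing suits your data.
    return image.convert('RGB').resize((1024, 1024), Image.BILINEAR)

def probabilities_to_uint8(prob: np.ndarray) -> np.ndarray:
    # Scale [0, 1] pixel probabilities to 8-bit grayscale, so the 0.5 threshold maps to 127.5.
    return np.clip(prob * 255.0, 0, 255).astype(np.uint8)

def binarize(prob: np.ndarray) -> np.ndarray:
    # Binary segmentation mask at the 0.5 probability threshold.
    return (prob > 0.5).astype(np.uint8)
```

For example, `binarize(pred_mask[i])` turns the probability map returned by `interactive_infer_image` into a 0/1 mask, and `Image.fromarray(probabilities_to_uint8(pred_mask[i]))` gives the 8-bit grayscale output described above.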
[ "BEAR" ]
LongSafari/evo-1-8k-crispr
LongSafari
text-generation
[ "transformers", "safetensors", "stripedhyena", "text-generation", "long context", "deep signal processing", "hybrid", "biology", "genomics", "custom_code", "arxiv:2302.10866", "arxiv:2203.14343", "arxiv:2310.18780", "arxiv:2206.11893", "arxiv:2303.06349", "arxiv:2102.02611", "arxiv:2210.09298", "license:apache-2.0", "autotrain_compatible", "region:us" ]
"2024-06-20T04:13:38Z"
2024-06-20T06:12:51+00:00
176,498
2
---
license: apache-2.0
tags:
- stripedhyena
- long context
- deep signal processing
- hybrid
- biology
- genomics
---

## Evo-1 (CRISPR-Cas)

<p align="center">
  <img src="https://cdn-uploads.huggingface.co/production/uploads/62a1306bbe7fa896d2c8de44/JoEHcvLTUlHoMcgh3mmAz.png" width="70%" />
</p>

### News

We identified and fixed an issue related to a wrong permutation of some projections, which affects generation quality. To use the new model revision, please load as follows:

```python
from transformers import AutoConfig, AutoModelForCausalLM

model_name = "LongSafari/evo-1-8k-crispr"  # this repository

config = AutoConfig.from_pretrained(model_name, trust_remote_code=True, revision="1.1_fix")

model = AutoModelForCausalLM.from_pretrained(
    model_name,
    config=config,
    trust_remote_code=True,
    revision="1.1_fix"
)
```

### About

Evo is a biological foundation model capable of long-context modeling and design.

Evo uses the [StripedHyena architecture](https://github.com/togethercomputer/stripedhyena) to enable modeling of sequences at a single-nucleotide, byte-level resolution with near-linear scaling of compute and memory relative to context length. Evo has 7 billion parameters and is trained on OpenGenome, a prokaryotic whole-genome dataset containing ~300 billion tokens.

Technical details about Evo can be found in our preprint and our accompanying blog posts. Evo was collaboratively developed by the [Arc Institute](https://arcinstitute.org/) and TogetherAI.

As part of our commitment to open science, we release **weights of 15 intermediate pretraining checkpoints** for phase 1 and phase 2 of pretraining. The checkpoints are available as branches of the corresponding HuggingFace repository.

**Evo-1 (CRISPR-Cas)** is our fine-tuned model used to generate CRISPR-Cas systems, trained at a context length of 8k.

| Checkpoint Name | Description |
|----------------------------------------|-------------|
| `evo-1-8k-base` | A model pretrained with 8,192 context. We use this model as the base model for molecular-scale finetuning tasks. |
| `evo-1-131k-base` | A model pretrained with 131,072 context using `evo-1-8k-base` as the initialization. We use this model to reason about and generate sequences at the genome scale. |
| `evo-1-8k-crispr` | A model fine-tuned on `evo-1-8k-base` specifically on CRISPR-Cas systems. We use this model to generate Cas9/12/13 systems. |
| `evo-1-8k-transposon` | A model fine-tuned on `evo-1-8k-base` specifically on transposons. We use this to generate IS200/IS605. |

### Model Architecture

StripedHyena is a deep signal processing, hybrid architecture composed of multi-head attention and gated convolutions arranged in [Hyena](https://arxiv.org/abs/2302.10866) blocks, improving over decoder-only Transformers.

StripedHyena is designed to leverage the specialization of each of its layer classes, with Hyena layers implementing the bulk of the computation required for sequence processing and attention layers supplementing the ability to perform targeted pattern recall.

Some highlights of the architecture:

- **Efficient autoregressive generation** via a recurrent mode (>500k generation with a single 80GB GPU)
- **Significantly faster training and finetuning** at long context (>3x at 131k)
- **Improved scaling laws over state-of-the-art architectures** (e.g., Transformer++) on both natural language and biological sequences.
- **Robust to training beyond the compute-optimal frontier**, e.g., training well beyond Chinchilla-optimal token amounts (see preprint for details -- more details to come)

### How to use Evo

Example usage is provided in the [standalone repo](https://github.com/evo-design/evo).
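For a quick, self-contained smoke test through the Hugging Face interface, a minimal sketch is below. It makes several assumptions: that the remote StripedHyena code exposes a standard causal-LM style forward pass, that DNA prompts can be byte-encoded directly into token ids (Evo operates at single-nucleotide, byte-level resolution), that the `poles`/`residues` parameter names from the StripedHyena reference implementation apply here, and that a large-memory GPU is available. For the reference scoring and generation utilities, use the standalone repo above.

```python
import torch
from transformers import AutoConfig, AutoModelForCausalLM

model_name = "LongSafari/evo-1-8k-crispr"

config = AutoConfig.from_pretrained(model_name, trust_remote_code=True, revision="1.1_fix")
model = AutoModelForCausalLM.from_pretrained(
    model_name,
    config=config,
    torch_dtype=torch.float16,  # assumption: half precision to fit the 7B weights on one GPU
    trust_remote_code=True,
    revision="1.1_fix",
).eval().cuda()

# Keep poles and residues in float32, as recommended in the parametrization notes below.
for name, param in model.named_parameters():
    if "poles" in name or "residues" in name:
        param.data = param.data.float()

# Byte-level encoding of a DNA prompt (assumption: UTF-8 byte values double as token ids).
prompt = "ACGTACGTACGT"
input_ids = torch.tensor([list(prompt.encode("utf-8"))], dtype=torch.long).cuda()

with torch.no_grad():
    outputs = model(input_ids)
    # Remote-code models may return a ModelOutput or a plain tuple; handle both.
    logits = outputs.logits if hasattr(outputs, "logits") else outputs[0]

# Greedy next-byte prediction as a sanity check.
next_byte = int(logits[0, -1].argmax())
print(f"Predicted next byte: {next_byte} ({chr(next_byte)!r})")
```

This only checks that the checkpoint loads and produces next-token logits; sampling full CRISPR-Cas systems is best done with the generation utilities in the standalone repo.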
#### Parametrization for Inference and Finetuning One of the advantages of deep signal processing models is their flexibility. Different parametrizations of convolutions can be used depending on the memory, expressivity and causality requirements of pretraining, finetuning or inference workloads. The main classes are: - Modal canonical: unconstrained poles ([reference](https://arxiv.org/pdf/2203.14343.pdf), [reference](https://arxiv.org/abs/2310.18780)), or constrained poles ([reference](https://arxiv.org/abs/2206.11893), [reference](https://arxiv.org/pdf/2303.06349.pdf)). - Companion canonical / rational: TBA. - Hypernetworks: hypernetwork ([reference](https://arxiv.org/abs/2102.02611)), modulated hypernetwork ([reference](https://arxiv.org/abs/2302.10866)). - Explicit: modulated explicit ([reference](https://arxiv.org/pdf/2210.09298.pdf)). StripedHyena is a mixed precision model. Make sure to keep your `poles` and `residues` in `float32` precision, especially for longer prompts or training. ### Disclaimer To use StripedHyena outside of the playground, you will need to install custom kernels. Please follow the instructions from the [standalone repository](https://github.com/togethercomputer/stripedhyena). ## Cite ``` @article{nguyen2024sequence, author = {Eric Nguyen and Michael Poli and Matthew G. Durrant and Armin W. Thomas and Brian Kang and Jeremy Sullivan and Madelena Y. Ng and Ashley Lewis and Aman Patel and Aaron Lou and Stefano Ermon and Stephen A. Baccus and Tina Hernandez-Boussard and Christopher Ré and Patrick D. Hsu and Brian L. Hie}, journal = {Arc Institute manuscripts}, title = {Sequence modeling and design from molecular to genome scale with Evo}, url = {https://arcinstitute.org/manuscripts/Evo}, year = {2024}, } ```
[ "CAS" ]
BAAI/bge-multilingual-gemma2
BAAI
feature-extraction
[ "sentence-transformers", "safetensors", "gemma2", "feature-extraction", "sentence-similarity", "transformers", "mteb", "arxiv:2402.03216", "arxiv:2309.07597", "license:gemma", "model-index", "autotrain_compatible", "text-generation-inference", "endpoints_compatible", "region:us" ]
"2024-07-25T16:55:46Z"
2024-07-31T08:07:09+00:00
167,139
174
--- license: gemma tags: - feature-extraction - sentence-similarity - sentence-transformers - transformers - mteb model-index: - name: bge-multilingual-gemma2 results: - task: type: Retrieval dataset: name: MTEB NFCorpus type: mteb/nfcorpus config: default split: test revision: ec0fa4fe99da2ff19ca1214b7966684033a58814 metrics: - type: main_score value: 38.11433513284057 - type: ndcg_at_1 value: 48.45201238390093 - type: ndcg_at_3 value: 44.451438575534574 - type: ndcg_at_5 value: 41.13929990797894 - type: ndcg_at_10 value: 38.11433513284057 - type: ndcg_at_100 value: 35.36065387898559 - type: ndcg_at_1000 value: 44.01125752781003 - type: map_at_1 value: 5.638004398054564 - type: map_at_3 value: 10.375632572339333 - type: map_at_5 value: 11.820531148202422 - type: map_at_10 value: 14.087436978063389 - type: map_at_100 value: 18.25397463114958 - type: map_at_1000 value: 19.868440221606203 - type: precision_at_1 value: 49.84520123839009 - type: precision_at_3 value: 41.89886480908153 - type: precision_at_5 value: 35.356037151702814 - type: precision_at_10 value: 28.513931888544857 - type: precision_at_100 value: 9.337461300309604 - type: precision_at_1000 value: 2.210216718266251 - type: recall_at_1 value: 5.638004398054564 - type: recall_at_3 value: 11.938154656310312 - type: recall_at_5 value: 14.06183119422843 - type: recall_at_10 value: 18.506397834147705 - type: recall_at_100 value: 35.96995569451433 - type: recall_at_1000 value: 68.31771509404795 - task: type: Retrieval dataset: name: MTEB MSMARCO type: mteb/msmarco config: default split: dev revision: c5a29a104738b98a9e76336939199e264163d4a0 metrics: - type: main_score value: 45.70688915742828 - type: ndcg_at_1 value: 26.002865329512893 - type: ndcg_at_3 value: 37.49665652114275 - type: ndcg_at_5 value: 41.684045067615834 - type: ndcg_at_10 value: 45.70688915742828 - type: ndcg_at_100 value: 51.08932609519671 - type: ndcg_at_1000 value: 51.98806137292924 - type: map_at_1 value: 25.35219675262655 - type: map_at_3 value: 34.39549506526583 - type: map_at_5 value: 36.74936326010824 - type: map_at_10 value: 38.44429852488596 - type: map_at_100 value: 39.60260286311527 - type: map_at_1000 value: 39.64076154054021 - type: precision_at_1 value: 26.002865329512893 - type: precision_at_3 value: 15.840496657115954 - type: precision_at_5 value: 11.647564469914684 - type: precision_at_10 value: 7.1275071633243705 - type: precision_at_100 value: 0.9782234957019871 - type: precision_at_1000 value: 0.10565902578797497 - type: recall_at_1 value: 25.35219675262655 - type: recall_at_3 value: 45.78438395415474 - type: recall_at_5 value: 55.83213944603631 - type: recall_at_10 value: 68.08500477554918 - type: recall_at_100 value: 92.55133715377269 - type: recall_at_1000 value: 99.29083094555875 - task: type: Retrieval dataset: name: MTEB FiQA2018 type: mteb/fiqa config: default split: test revision: 27a168819829fe9bcd655c2df245fb19452e8e06 metrics: - type: main_score value: 60.04205769404706 - type: ndcg_at_1 value: 59.25925925925925 - type: ndcg_at_3 value: 55.96637679199298 - type: ndcg_at_5 value: 56.937223390223956 - type: ndcg_at_10 value: 60.04205769404706 - type: ndcg_at_100 value: 66.01619664462949 - type: ndcg_at_1000 value: 67.59651529720728 - type: map_at_1 value: 31.5081163692275 - type: map_at_3 value: 45.7486689836227 - type: map_at_5 value: 48.944906602314 - type: map_at_10 value: 51.85427043799874 - type: map_at_100 value: 53.92920237379484 - type: map_at_1000 value: 54.04694438963671 - type: precision_at_1 value: 59.25925925925925 - type: 
precision_at_3 value: 37.44855967078195 - type: precision_at_5 value: 26.913580246913547 - type: precision_at_10 value: 16.52777777777774 - type: precision_at_100 value: 2.2962962962962754 - type: precision_at_1000 value: 0.2566358024691334 - type: recall_at_1 value: 31.5081163692275 - type: recall_at_3 value: 50.71759045138676 - type: recall_at_5 value: 57.49321152098932 - type: recall_at_10 value: 67.36356750245642 - type: recall_at_100 value: 88.67335767798735 - type: recall_at_1000 value: 97.83069725199356 - task: type: Retrieval dataset: name: MTEB SCIDOCS type: mteb/scidocs config: default split: test revision: f8c2fcf00f625baaa80f62ec5bd9e1fff3b8ae88 metrics: - type: main_score value: 26.93150756480961 - type: ndcg_at_1 value: 30.8 - type: ndcg_at_3 value: 25.048085553386628 - type: ndcg_at_5 value: 22.351207380852305 - type: ndcg_at_10 value: 26.93150756480961 - type: ndcg_at_100 value: 37.965486832874014 - type: ndcg_at_1000 value: 43.346046425140244 - type: map_at_1 value: 6.238333333333366 - type: map_at_3 value: 11.479166666666679 - type: map_at_5 value: 14.215999999999983 - type: map_at_10 value: 16.774632936507945 - type: map_at_100 value: 20.148869158557293 - type: map_at_1000 value: 20.528644104490823 - type: precision_at_1 value: 30.8 - type: precision_at_3 value: 23.466666666666736 - type: precision_at_5 value: 19.899999999999967 - type: precision_at_10 value: 14.069999999999938 - type: precision_at_100 value: 2.9770000000000065 - type: precision_at_1000 value: 0.42569999999999486 - type: recall_at_1 value: 6.238333333333366 - type: recall_at_3 value: 14.29333333333338 - type: recall_at_5 value: 20.206666666666628 - type: recall_at_10 value: 28.573333333333224 - type: recall_at_100 value: 60.43666666666675 - type: recall_at_1000 value: 86.3649999999997 - task: type: Retrieval dataset: name: MTEB FEVER type: mteb/fever config: default split: test revision: bea83ef9e8fb933d90a2f1d5515737465d613e12 metrics: - type: main_score value: 90.38165339181239 - type: ndcg_at_1 value: 84.86348634863486 - type: ndcg_at_3 value: 88.98667069230609 - type: ndcg_at_5 value: 89.86028996734895 - type: ndcg_at_10 value: 90.38165339181239 - type: ndcg_at_100 value: 90.99655378684439 - type: ndcg_at_1000 value: 91.15536362599602 - type: map_at_1 value: 78.8556296105801 - type: map_at_3 value: 86.24061810942983 - type: map_at_5 value: 86.94776680048933 - type: map_at_10 value: 87.26956235873007 - type: map_at_100 value: 87.47986397174834 - type: map_at_1000 value: 87.4897076664281 - type: precision_at_1 value: 84.86348634863486 - type: precision_at_3 value: 34.02340234023296 - type: precision_at_5 value: 21.10411041104359 - type: precision_at_10 value: 10.828082808282083 - type: precision_at_100 value: 1.1381638163816703 - type: precision_at_1000 value: 0.11662166216622569 - type: recall_at_1 value: 78.8556296105801 - type: recall_at_3 value: 92.34465708475605 - type: recall_at_5 value: 94.58010682020583 - type: recall_at_10 value: 96.10713452297611 - type: recall_at_100 value: 98.31672452959585 - type: recall_at_1000 value: 99.25967001462051 - task: type: Retrieval dataset: name: MTEB ArguAna type: mteb/arguana config: default split: test revision: c22ab2a51041ffd869aaddef7af8d8215647e41a metrics: - type: main_score value: 77.36555747844541 - type: ndcg_at_1 value: 57.681365576102415 - type: ndcg_at_3 value: 72.01664798084765 - type: ndcg_at_5 value: 75.26345973082836 - type: ndcg_at_10 value: 77.36555747844541 - type: ndcg_at_100 value: 78.15567833673768 - type: ndcg_at_1000 value: 
78.16528851292641 - type: map_at_1 value: 57.681365576102415 - type: map_at_3 value: 68.59886201991475 - type: map_at_5 value: 70.38051209103858 - type: map_at_10 value: 71.26684955632336 - type: map_at_100 value: 71.4637216600468 - type: map_at_1000 value: 71.46414501573332 - type: precision_at_1 value: 57.681365576102415 - type: precision_at_3 value: 27.287814129919084 - type: precision_at_5 value: 17.965860597439132 - type: precision_at_10 value: 9.623044096728066 - type: precision_at_100 value: 0.995732574679925 - type: precision_at_1000 value: 0.09964438122332549 - type: recall_at_1 value: 57.681365576102415 - type: recall_at_3 value: 81.86344238975818 - type: recall_at_5 value: 89.82930298719772 - type: recall_at_10 value: 96.23044096728307 - type: recall_at_100 value: 99.57325746799431 - type: recall_at_1000 value: 99.6443812233286 - task: type: Retrieval dataset: name: MTEB SciFact type: mteb/scifact config: default split: test revision: 0228b52cf27578f30900b9e5271d331663a030d7 metrics: - type: main_score value: 72.0465439956427 - type: ndcg_at_1 value: 58.666666666666664 - type: ndcg_at_3 value: 66.84566274610046 - type: ndcg_at_5 value: 69.46578881873717 - type: ndcg_at_10 value: 72.0465439956427 - type: ndcg_at_100 value: 74.25705461923272 - type: ndcg_at_1000 value: 74.63689058493014 - type: map_at_1 value: 55.59444444444445 - type: map_at_3 value: 63.71851851851852 - type: map_at_5 value: 65.5362962962963 - type: map_at_10 value: 66.84112433862435 - type: map_at_100 value: 67.36269426417417 - type: map_at_1000 value: 67.37568665562833 - type: precision_at_1 value: 58.666666666666664 - type: precision_at_3 value: 26.444444444444425 - type: precision_at_5 value: 17.66666666666672 - type: precision_at_10 value: 9.866666666666706 - type: precision_at_100 value: 1.0966666666666596 - type: precision_at_1000 value: 0.11266666666666675 - type: recall_at_1 value: 55.59444444444445 - type: recall_at_3 value: 72.72777777777777 - type: recall_at_5 value: 79.31666666666666 - type: recall_at_10 value: 86.75 - type: recall_at_100 value: 96.66666666666667 - type: recall_at_1000 value: 99.66666666666667 - task: type: Retrieval dataset: name: MTEB TRECCOVID type: mteb/trec-covid config: default split: test revision: bb9466bac8153a0349341eb1b22e06409e78ef4e metrics: - type: main_score value: 64.26928884606035 - type: ndcg_at_1 value: 63.0 - type: ndcg_at_3 value: 64.18432764386345 - type: ndcg_at_5 value: 64.73235515799435 - type: ndcg_at_10 value: 64.26928884606035 - type: ndcg_at_100 value: 52.39807133285409 - type: ndcg_at_1000 value: 52.19937563361241 - type: map_at_1 value: 0.18483494997310454 - type: map_at_3 value: 0.5139705769331114 - type: map_at_5 value: 0.8245601222717243 - type: map_at_10 value: 1.5832530269558573 - type: map_at_100 value: 9.664760850102393 - type: map_at_1000 value: 25.568347406468334 - type: precision_at_1 value: 70.0 - type: precision_at_3 value: 71.33333333333333 - type: precision_at_5 value: 71.60000000000001 - type: precision_at_10 value: 70.99999999999996 - type: precision_at_100 value: 55.140000000000015 - type: precision_at_1000 value: 23.857999999999997 - type: recall_at_1 value: 0.18483494997310454 - type: recall_at_3 value: 0.5584287301859913 - type: recall_at_5 value: 0.9489025953807098 - type: recall_at_10 value: 1.9023711039425688 - type: recall_at_100 value: 13.596810701594226 - type: recall_at_1000 value: 50.92058432920189 - task: type: Retrieval dataset: name: MTEB ClimateFEVER type: mteb/climate-fever config: default split: test revision: 
47f2ac6acb640fc46020b02a5b59fdda04d39380 metrics: - type: main_score value: 39.37204193531481 - type: ndcg_at_1 value: 35.11400651465798 - type: ndcg_at_3 value: 32.36672790229743 - type: ndcg_at_5 value: 34.79369234162357 - type: ndcg_at_10 value: 39.37204193531481 - type: ndcg_at_100 value: 47.544500439419124 - type: ndcg_at_1000 value: 50.305733346049855 - type: map_at_1 value: 15.516829533116216 - type: map_at_3 value: 23.73669923995656 - type: map_at_5 value: 26.43208469055373 - type: map_at_10 value: 28.912036175309773 - type: map_at_100 value: 31.413762299240894 - type: map_at_1000 value: 31.596796093997014 - type: precision_at_1 value: 35.11400651465798 - type: precision_at_3 value: 24.994571118349487 - type: precision_at_5 value: 19.231270358305956 - type: precision_at_10 value: 12.690553745928165 - type: precision_at_100 value: 2.1576547231270466 - type: precision_at_1000 value: 0.2676221498371306 - type: recall_at_1 value: 15.516829533116216 - type: recall_at_3 value: 29.994571118349512 - type: recall_at_5 value: 37.14223669923993 - type: recall_at_10 value: 47.29207383279043 - type: recall_at_100 value: 74.37133550488598 - type: recall_at_1000 value: 89.41585233441913 - task: type: Retrieval dataset: name: MTEB HotpotQA type: mteb/hotpotqa config: default split: test revision: ab518f4d6fcca38d87c25209f94beba119d02014 metrics: - type: main_score value: 83.26282954330777 - type: ndcg_at_1 value: 87.5489534098582 - type: ndcg_at_3 value: 78.7646435855166 - type: ndcg_at_5 value: 81.41629077444277 - type: ndcg_at_10 value: 83.26282954330777 - type: ndcg_at_100 value: 85.2771369900158 - type: ndcg_at_1000 value: 85.77519303747493 - type: map_at_1 value: 43.7744767049291 - type: map_at_3 value: 73.4661264911093 - type: map_at_5 value: 75.7169705154168 - type: map_at_10 value: 76.89183627536043 - type: map_at_100 value: 77.53680315727078 - type: map_at_1000 value: 77.5649311522075 - type: precision_at_1 value: 87.5489534098582 - type: precision_at_3 value: 51.74881836596788 - type: precision_at_5 value: 33.13977042539127 - type: precision_at_10 value: 17.492234976369023 - type: precision_at_100 value: 1.9030384875084312 - type: precision_at_1000 value: 0.19679945982446267 - type: recall_at_1 value: 43.7744767049291 - type: recall_at_3 value: 77.62322754895341 - type: recall_at_5 value: 82.84942606347063 - type: recall_at_10 value: 87.4611748818366 - type: recall_at_100 value: 95.15192437542201 - type: recall_at_1000 value: 98.39972991222147 - task: type: Retrieval dataset: name: MTEB NQ type: mteb/nq config: default split: test revision: b774495ed302d8c44a3a7ea25c90dbce03968f31 metrics: - type: main_score value: 71.44670934705796 - type: ndcg_at_1 value: 54.026651216685984 - type: ndcg_at_3 value: 65.1267452491225 - type: ndcg_at_5 value: 68.6696802020747 - type: ndcg_at_10 value: 71.44670934705796 - type: ndcg_at_100 value: 73.74642927386503 - type: ndcg_at_1000 value: 73.90908268307331 - type: map_at_1 value: 48.50086906141366 - type: map_at_3 value: 61.07691193510995 - type: map_at_5 value: 63.36580243337187 - type: map_at_10 value: 64.74485498782997 - type: map_at_100 value: 65.34329174534082 - type: map_at_1000 value: 65.35107870745652 - type: precision_at_1 value: 54.026651216685984 - type: precision_at_3 value: 28.437620702974996 - type: precision_at_5 value: 19.20625724217861 - type: precision_at_10 value: 10.67207415990753 - type: precision_at_100 value: 1.1987253765932955 - type: precision_at_1000 value: 0.12143684820393259 - type: recall_at_1 value: 48.50086906141366 - 
type: recall_at_3 value: 73.19428350714561 - type: recall_at_5 value: 81.19689069138664 - type: recall_at_10 value: 89.04741212823485 - type: recall_at_100 value: 98.58053302433372 - type: recall_at_1000 value: 99.75376593279258 - task: type: Retrieval dataset: name: MTEB QuoraRetrieval type: mteb/quora config: default split: test revision: e4e08e0b7dbe3c8700f0daef558ff32256715259 metrics: - type: main_score value: 90.03760323006117 - type: ndcg_at_1 value: 83.53 - type: ndcg_at_3 value: 87.53800795646302 - type: ndcg_at_5 value: 88.92909168525203 - type: ndcg_at_10 value: 90.03760323006117 - type: ndcg_at_100 value: 91.08558507332712 - type: ndcg_at_1000 value: 91.1430039358834 - type: map_at_1 value: 72.61760432018744 - type: map_at_3 value: 83.8457060028347 - type: map_at_5 value: 85.6228412692169 - type: map_at_10 value: 86.67700531365115 - type: map_at_100 value: 87.29851728827602 - type: map_at_1000 value: 87.31014621733333 - type: precision_at_1 value: 83.53 - type: precision_at_3 value: 38.33666666667159 - type: precision_at_5 value: 25.12599999999881 - type: precision_at_10 value: 13.629999999998683 - type: precision_at_100 value: 1.5431999999999773 - type: precision_at_1000 value: 0.15671999999997974 - type: recall_at_1 value: 72.61760432018744 - type: recall_at_3 value: 89.06736052932686 - type: recall_at_5 value: 93.09634203522849 - type: recall_at_10 value: 96.35128012894234 - type: recall_at_100 value: 99.7740237858541 - type: recall_at_1000 value: 99.99690476190477 - task: type: Retrieval dataset: name: MTEB Touche2020 type: mteb/webis-touche2020 config: default split: test revision: a34f9a33db75fa0cbb21bb5cfc3dae8dc8bec93f metrics: - type: main_score value: 30.2563523019649 - type: ndcg_at_1 value: 37.755102040816325 - type: ndcg_at_3 value: 34.45349994459905 - type: ndcg_at_5 value: 32.508805919063086 - type: ndcg_at_10 value: 30.2563523019649 - type: ndcg_at_100 value: 40.538336664503746 - type: ndcg_at_1000 value: 52.2066951614923 - type: map_at_1 value: 2.75537988273998 - type: map_at_3 value: 6.011397290504469 - type: map_at_5 value: 8.666495836494098 - type: map_at_10 value: 12.17701515007822 - type: map_at_100 value: 18.789086471205852 - type: map_at_1000 value: 20.42972375502502 - type: precision_at_1 value: 40.816326530612244 - type: precision_at_3 value: 35.37414965986394 - type: precision_at_5 value: 32.244897959183675 - type: precision_at_10 value: 26.93877551020408 - type: precision_at_100 value: 8.163265306122451 - type: precision_at_1000 value: 1.5979591836734703 - type: recall_at_1 value: 2.75537988273998 - type: recall_at_3 value: 7.254270324385098 - type: recall_at_5 value: 11.580137100328589 - type: recall_at_10 value: 18.745232816450553 - type: recall_at_100 value: 50.196809658622755 - type: recall_at_1000 value: 85.87317364148332 - task: type: Retrieval dataset: name: MTEB DBPedia type: mteb/dbpedia config: default split: test revision: c0f706b76e590d620bd6618b3ca8efdd34e2d659 metrics: - type: main_score value: 51.36940792375597 - type: ndcg_at_1 value: 65.125 - type: ndcg_at_3 value: 55.3967569049025 - type: ndcg_at_5 value: 53.09668587926677 - type: ndcg_at_10 value: 51.36940792375597 - type: ndcg_at_100 value: 56.69623269243084 - type: ndcg_at_1000 value: 63.481061270842 - type: map_at_1 value: 10.265595545755545 - type: map_at_3 value: 16.776544233350698 - type: map_at_5 value: 20.184523605272798 - type: map_at_10 value: 24.772797659849264 - type: map_at_100 value: 36.72689012514183 - type: map_at_1000 value: 38.73869985105569 - type: precision_at_1 
value: 77.5 - type: precision_at_3 value: 59.75000000000003 - type: precision_at_5 value: 52.849999999999994 - type: precision_at_10 value: 42.47499999999995 - type: precision_at_100 value: 13.614999999999993 - type: precision_at_1000 value: 2.500749999999998 - type: recall_at_1 value: 10.265595545755545 - type: recall_at_3 value: 17.819804963534246 - type: recall_at_5 value: 22.46124219601634 - type: recall_at_10 value: 30.44583516613163 - type: recall_at_100 value: 63.84118006287797 - type: recall_at_1000 value: 85.06450356093833 - task: type: Retrieval dataset: name: MTEB CQADupstackRetrieval type: BeIR/cqadupstack config: default split: test revision: 4ffe81d471b1924886b33c7567bfb200e9eec5c4 metrics: - type: main_score value: 47.93921415959017 - type: ndcg_at_1 value: 36.526219490536015 - type: ndcg_at_3 value: 42.35099043224295 - type: ndcg_at_5 value: 44.989685312964156 - type: ndcg_at_10 value: 47.93921415959017 - type: ndcg_at_100 value: 53.05390282389675 - type: ndcg_at_1000 value: 54.776052731794266 - type: map_at_1 value: 30.818605279548184 - type: map_at_3 value: 38.363350019087974 - type: map_at_5 value: 40.295203936887226 - type: map_at_10 value: 41.81978941662592 - type: map_at_100 value: 43.13300727554278 - type: map_at_1000 value: 43.2351061120207 - type: precision_at_1 value: 36.526219490536015 - type: precision_at_3 value: 19.550515857206346 - type: precision_at_5 value: 13.958783060831967 - type: precision_at_10 value: 8.498592395773393 - type: precision_at_100 value: 1.3024888941713948 - type: precision_at_1000 value: 0.1630253057414617 - type: recall_at_1 value: 30.818605279548184 - type: recall_at_3 value: 45.9132085981904 - type: recall_at_5 value: 52.6851323959227 - type: recall_at_10 value: 61.39718618970463 - type: recall_at_100 value: 83.30757187969981 - type: recall_at_1000 value: 94.9192024147964 - task: type: Classification dataset: name: MTEB AmazonCounterfactualClassification (en) type: mteb/amazon_counterfactual config: en split: test revision: e8379541af4e31359cca9fbcf4b00f2671dba205 metrics: - type: accuracy value: 89.47761194029852 - type: accuracy_stderr value: 1.6502495811564162 - type: ap value: 62.20813715457866 - type: ap_stderr value: 3.7902166647587854 - type: f1 value: 84.91493292274734 - type: f1_stderr value: 1.9572239640276208 - type: main_score value: 89.47761194029852 - task: type: Classification dataset: name: MTEB AmazonPolarityClassification type: mteb/amazon_polarity config: default split: test revision: e2d317d38cd51312af73b3d32a06d1a08b442046 metrics: - type: accuracy value: 96.89569999999999 - type: accuracy_stderr value: 0.6886368582206464 - type: ap value: 95.38531339207739 - type: ap_stderr value: 0.9009257949898158 - type: f1 value: 96.8941935264779 - type: f1_stderr value: 0.6908609132985931 - type: main_score value: 96.89569999999999 - task: type: Classification dataset: name: MTEB AmazonReviewsClassification (en) type: mteb/amazon_reviews_multi config: en split: test revision: 1399c76144fd37290681b995c656ef9b2e06e26d metrics: - type: accuracy value: 61.602000000000004 - type: accuracy_stderr value: 1.4532019818318436 - type: f1 value: 60.96100449021481 - type: f1_stderr value: 1.8031398419765765 - type: main_score value: 61.602000000000004 - task: type: Clustering dataset: name: MTEB ArxivClusteringP2P type: mteb/arxiv-clustering-p2p config: default split: test revision: a122ad7f3f0291bf49cc6f4d32aa80929df69d5d metrics: - type: main_score value: 54.906319409992 - type: v_measure value: 54.906319409992 - type: v_measure_std 
value: 14.382682652951683 - task: type: Clustering dataset: name: MTEB ArxivClusteringS2S type: mteb/arxiv-clustering-s2s config: default split: test revision: f910caf1a6075f7329cdf8c1a6135696f37dbd53 metrics: - type: main_score value: 50.27779516565727 - type: v_measure value: 50.27779516565727 - type: v_measure_std value: 14.463711418590636 - task: type: Reranking dataset: name: MTEB AskUbuntuDupQuestions type: mteb/askubuntudupquestions-reranking config: default split: test revision: 2000358ca161889fa9c082cb41daa8dcfb161a54 metrics: - type: map value: 64.59457317979604 - type: mrr value: 78.05214791364376 - type: main_score value: 64.59457317979604 - task: type: STS dataset: name: MTEB BIOSSES type: mteb/biosses-sts config: default split: test revision: d3fb88f8f02e40887cd149695127462bbcf29b4a metrics: - type: cosine_pearson value: 86.5833945335644 - type: cosine_spearman value: 85.74472483606 - type: manhattan_pearson value: 85.07748703871708 - type: manhattan_spearman value: 85.1459160110718 - type: euclidean_pearson value: 85.14704290043478 - type: euclidean_spearman value: 85.10073425868336 - type: main_score value: 85.74472483606 - task: type: Classification dataset: name: MTEB Banking77Classification type: mteb/banking77 config: default split: test revision: 0fd18e25b25c072e09e0d92ab615fda904d66300 metrics: - type: accuracy value: 92.53246753246755 - type: accuracy_stderr value: 0.5488837781559508 - type: f1 value: 92.5143182074032 - type: f1_stderr value: 0.5657577980223147 - type: main_score value: 92.53246753246755 - task: type: Clustering dataset: name: MTEB BiorxivClusteringP2P type: mteb/biorxiv-clustering-p2p config: default split: test revision: 65b79d1d13f80053f67aca9498d9402c2d9f1f40 metrics: - type: main_score value: 52.64099497480452 - type: v_measure value: 52.64099497480452 - type: v_measure_std value: 1.081892399559334 - task: type: Clustering dataset: name: MTEB BiorxivClusteringS2S type: mteb/biorxiv-clustering-s2s config: default split: test revision: 258694dd0231531bc1fd9de6ceb52a0853c6d908 metrics: - type: main_score value: 49.1972734308178 - type: v_measure value: 49.1972734308178 - type: v_measure_std value: 0.9081245477708283 - task: type: Classification dataset: name: MTEB EmotionClassification type: mteb/emotion config: default split: test revision: 4f58c6b202a23cf9a4da393831edf4f9183cad37 metrics: - type: accuracy value: 92.975 - type: accuracy_stderr value: 0.5287958017987677 - type: f1 value: 89.29755895896542 - type: f1_stderr value: 0.6485027046025079 - type: main_score value: 92.975 - task: type: Classification dataset: name: MTEB ImdbClassification type: mteb/imdb config: default split: test revision: 3d86128a09e091d6018b6d26cad27f2739fc2db7 metrics: - type: accuracy value: 96.66480000000001 - type: accuracy_stderr value: 0.45673204398202666 - type: ap value: 95.33843919456118 - type: ap_stderr value: 0.6449846039754393 - type: f1 value: 96.6637668164617 - type: f1_stderr value: 0.45793673051468287 - type: main_score value: 96.66480000000001 - task: type: Classification dataset: name: MTEB MTOPDomainClassification (en) type: mteb/mtop_domain config: en split: test revision: d80d48c1eb48d3562165c59d59d0034df9fff0bf metrics: - type: accuracy value: 98.61149110807114 - type: accuracy_stderr value: 0.469748178253266 - type: f1 value: 98.4685511007568 - type: f1_stderr value: 0.51636776728259 - type: main_score value: 98.61149110807114 - task: type: Classification dataset: name: MTEB MTOPIntentClassification (en) type: mteb/mtop_intent config: en split: 
test revision: ae001d0e6b1228650b7bd1c2c65fb50ad11a8aba metrics: - type: accuracy value: 95.51299589603283 - type: accuracy_stderr value: 0.3591676911539482 - type: f1 value: 85.2464691439773 - type: f1_stderr value: 0.9234502856695337 - type: main_score value: 95.51299589603283 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (en) type: mteb/amazon_massive_intent config: en split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 82.04774714189644 - type: accuracy_stderr value: 0.7288818520309376 - type: f1 value: 79.28060657840692 - type: f1_stderr value: 0.6872008571781982 - type: main_score value: 82.04774714189644 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (en) type: mteb/amazon_massive_scenario config: en split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 84.40147948890383 - type: accuracy_stderr value: 1.2939587629143627 - type: f1 value: 83.97779287582267 - type: f1_stderr value: 0.9970599222060901 - type: main_score value: 84.40147948890383 - task: type: Clustering dataset: name: MTEB MedrxivClusteringP2P type: mteb/medrxiv-clustering-p2p config: default split: test revision: e7a26af6f3ae46b30dde8737f02c07b1505bcc73 metrics: - type: main_score value: 45.80879120838561 - type: v_measure value: 45.80879120838561 - type: v_measure_std value: 1.257800489264564 - task: type: Clustering dataset: name: MTEB MedrxivClusteringS2S type: mteb/medrxiv-clustering-s2s config: default split: test revision: 35191c8c0dca72d8ff3efcd72aa802307d469663 metrics: - type: main_score value: 44.106849261042505 - type: v_measure value: 44.106849261042505 - type: v_measure_std value: 1.4347344477874981 - task: type: Reranking dataset: name: MTEB MindSmallReranking type: mteb/mind_small config: default split: test revision: 3bdac13927fdc888b903db93b2ffdbd90b295a69 metrics: - type: map value: 31.794062752995345 - type: mrr value: 32.98581714772614 - type: main_score value: 31.794062752995345 - task: type: Clustering dataset: name: MTEB RedditClustering type: mteb/reddit-clustering config: default split: test revision: 24640382cdbf8abc73003fb0fa6d111a705499eb metrics: - type: main_score value: 56.03342473834434 - type: v_measure value: 56.03342473834434 - type: v_measure_std value: 5.972192613803461 - task: type: Clustering dataset: name: MTEB RedditClusteringP2P type: mteb/reddit-clustering-p2p config: default split: test revision: 282350215ef01743dc01b456c7f5241fa8937f16 metrics: - type: main_score value: 65.83156688381274 - type: v_measure value: 65.83156688381274 - type: v_measure_std value: 14.180225112120162 - task: type: STS dataset: name: MTEB SICK-R type: mteb/sickr-sts config: default split: test revision: a6ea5a8cab320b040a23452cc28066d9beae2cee metrics: - type: cosine_pearson value: 84.15759544348467 - type: cosine_spearman value: 82.66085892322664 - type: manhattan_pearson value: 82.27257241990692 - type: manhattan_spearman value: 82.57752467555896 - type: euclidean_pearson value: 82.20795646456065 - type: euclidean_spearman value: 82.51008729416401 - type: main_score value: 82.66085892322664 - task: type: STS dataset: name: MTEB STS12 type: mteb/sts12-sts config: default split: test revision: a0d554a64d88156834ff5ae9920b964011b16384 metrics: - type: cosine_pearson value: 84.3406321391237 - type: cosine_spearman value: 77.71091257651071 - type: manhattan_pearson value: 81.25784268400994 - type: manhattan_spearman value: 77.98426383345507 - type: 
euclidean_pearson value: 81.25641851462917 - type: euclidean_spearman value: 77.93254971878063 - type: main_score value: 77.71091257651071 - task: type: STS dataset: name: MTEB STS13 type: mteb/sts13-sts config: default split: test revision: 7e90230a92c190f1bf69ae9002b8cea547a64cca metrics: - type: cosine_pearson value: 86.1528398894769 - type: cosine_spearman value: 87.44662352358895 - type: manhattan_pearson value: 86.92164570802663 - type: manhattan_spearman value: 86.9132692625668 - type: euclidean_pearson value: 87.00156426580821 - type: euclidean_spearman value: 86.98750068631274 - type: main_score value: 87.44662352358895 - task: type: STS dataset: name: MTEB STS14 type: mteb/sts14-sts config: default split: test revision: 6031580fec1f6af667f0bd2da0a551cf4f0b2375 metrics: - type: cosine_pearson value: 83.32782491176253 - type: cosine_spearman value: 83.48313793311584 - type: manhattan_pearson value: 82.60528063429948 - type: manhattan_spearman value: 83.10434862310481 - type: euclidean_pearson value: 82.68016090104034 - type: euclidean_spearman value: 83.14418662406631 - type: main_score value: 83.48313793311584 - task: type: STS dataset: name: MTEB STS15 type: mteb/sts15-sts config: default split: test revision: ae752c7c21bf194d8b67fd573edf7ae58183cbe3 metrics: - type: cosine_pearson value: 86.31535441436343 - type: cosine_spearman value: 87.63145141246594 - type: manhattan_pearson value: 86.95972711389149 - type: manhattan_spearman value: 86.9849824463052 - type: euclidean_pearson value: 86.95391575487379 - type: euclidean_spearman value: 86.97613682266213 - type: main_score value: 87.63145141246594 - task: type: STS dataset: name: MTEB STS16 type: mteb/sts16-sts config: default split: test revision: 4d8694f8f0e0100860b497b999b3dbed754a0513 metrics: - type: cosine_pearson value: 83.43854397443079 - type: cosine_spearman value: 86.70176531845136 - type: manhattan_pearson value: 85.82302317064868 - type: manhattan_spearman value: 86.36561734213241 - type: euclidean_pearson value: 85.80127366135169 - type: euclidean_spearman value: 86.34803859754834 - type: main_score value: 86.70176531845136 - task: type: STS dataset: name: MTEB STS17 (en-en) type: mteb/sts17-crosslingual-sts config: en-en split: test revision: af5e6fb845001ecf41f4c1e033ce921939a2a68d metrics: - type: cosine_pearson value: 90.38940955877999 - type: cosine_spearman value: 91.18282119920893 - type: manhattan_pearson value: 91.31823663739615 - type: manhattan_spearman value: 90.67257321731341 - type: euclidean_pearson value: 91.30318753138528 - type: euclidean_spearman value: 90.69044765693836 - type: main_score value: 91.18282119920893 - task: type: STS dataset: name: MTEB STS22 (en) type: mteb/sts22-crosslingual-sts config: en split: test revision: eea2b4fe26a775864c896887d910b76a8098ad3f metrics: - type: cosine_pearson value: 69.33936467780947 - type: cosine_spearman value: 69.02345807358802 - type: manhattan_pearson value: 70.11799452953082 - type: manhattan_spearman value: 68.55450923481405 - type: euclidean_pearson value: 70.10857680491809 - type: euclidean_spearman value: 68.44610245708984 - type: main_score value: 69.02345807358802 - task: type: STS dataset: name: MTEB STSBenchmark type: mteb/stsbenchmark-sts config: default split: test revision: b0fddb56ed78048fa8b90373c8a3cfc37b684831 metrics: - type: cosine_pearson value: 85.97288135509513 - type: cosine_spearman value: 87.25208310840168 - type: manhattan_pearson value: 86.3786471501451 - type: manhattan_spearman value: 86.71177136523868 - type: 
euclidean_pearson value: 86.40522339296625 - type: euclidean_spearman value: 86.73930576508816 - type: main_score value: 87.25208310840168 - task: type: Reranking dataset: name: MTEB SciDocsRR type: mteb/scidocs-reranking config: default split: test revision: d3c5e1fc0b855ab6097bf1cda04dd73947d7caab metrics: - type: map value: 87.60324164489178 - type: mrr value: 96.30331904841708 - type: main_score value: 87.60324164489178 - task: type: PairClassification dataset: name: MTEB SprintDuplicateQuestions type: mteb/sprintduplicatequestions-pairclassification config: default split: test revision: d66bd1f72af766a5cc4b0ca5e00c162f89e8cc46 metrics: - type: cos_sim_accuracy value: 99.6920792079208 - type: cos_sim_accuracy_threshold value: 90.36337347155474 - type: cos_sim_ap value: 90.93952679056765 - type: cos_sim_f1 value: 83.10700706137968 - type: cos_sim_f1_threshold value: 90.36337347155474 - type: cos_sim_precision value: 90.96313912009512 - type: cos_sim_recall value: 76.5 - type: dot_accuracy value: 99.54554455445545 - type: dot_accuracy_threshold value: 2876800.0 - type: dot_ap value: 84.01112287735286 - type: dot_f1 value: 75.7622739018088 - type: dot_f1_threshold value: 2820800.0 - type: dot_precision value: 78.39572192513369 - type: dot_recall value: 73.3 - type: euclidean_accuracy value: 99.6930693069307 - type: euclidean_accuracy_threshold value: 7718.054017089397 - type: euclidean_ap value: 91.1257568881301 - type: euclidean_f1 value: 83.09022150189087 - type: euclidean_f1_threshold value: 7817.08324628535 - type: euclidean_precision value: 90.36427732079906 - type: euclidean_recall value: 76.9 - type: manhattan_accuracy value: 99.6920792079208 - type: manhattan_accuracy_threshold value: 364735.19654273987 - type: manhattan_ap value: 91.2326885940691 - type: manhattan_f1 value: 83.36008560727663 - type: manhattan_f1_threshold value: 375395.8945572376 - type: manhattan_precision value: 89.64326812428078 - type: manhattan_recall value: 77.9 - type: max_accuracy value: 99.6930693069307 - type: max_ap value: 91.2326885940691 - type: max_f1 value: 83.36008560727663 - task: type: Clustering dataset: name: MTEB StackExchangeClustering type: mteb/stackexchange-clustering config: default split: test revision: 6cbc1f7b2bc0622f2e39d2c77fa502909748c259 metrics: - type: main_score value: 66.2095300942637 - type: v_measure value: 66.2095300942637 - type: v_measure_std value: 3.214369679617631 - task: type: Clustering dataset: name: MTEB StackExchangeClusteringP2P type: mteb/stackexchange-clustering-p2p config: default split: test revision: 815ca46b2622cec33ccafc3735d572c266efdb44 metrics: - type: main_score value: 45.74307000935057 - type: v_measure value: 45.74307000935057 - type: v_measure_std value: 1.5352466748569888 - task: type: Reranking dataset: name: MTEB StackOverflowDupQuestions type: mteb/stackoverflowdupquestions-reranking config: default split: test revision: e185fbe320c72810689fc5848eb6114e1ef5ec69 metrics: - type: map value: 54.90337951829123 - type: mrr value: 56.12889663441134 - type: main_score value: 54.90337951829123 - task: type: Summarization dataset: name: MTEB SummEval type: mteb/summeval config: default split: test revision: cda12ad7615edc362dbf25a00fdd61d3b1eaf93c metrics: - type: cosine_pearson value: 31.0669308484832 - type: cosine_spearman value: 31.19637421540861 - type: dot_pearson value: 30.62326176666765 - type: dot_spearman value: 30.42135737502967 - type: main_score value: 31.19637421540861 - task: type: Classification dataset: name: MTEB 
ToxicConversationsClassification type: mteb/toxic_conversations_50k config: default split: test revision: d7c0de2777da35d6aae2200a62c6e0e5af397c4c metrics: - type: accuracy value: 87.34339999999999 - type: accuracy_stderr value: 1.838245696309393 - type: ap value: 33.536584790435406 - type: ap_stderr value: 2.276373512492581 - type: f1 value: 72.47307082324448 - type: f1_stderr value: 1.9964640292072542 - type: main_score value: 87.34339999999999 - task: type: Classification dataset: name: MTEB TweetSentimentExtractionClassification type: mteb/tweet_sentiment_extraction config: default split: test revision: d604517c81ca91fe16a244d1248fc021f9ecee7a metrics: - type: accuracy value: 78.86247877758915 - type: accuracy_stderr value: 1.1273253738982443 - type: f1 value: 79.14666244848874 - type: f1_stderr value: 1.1532640958036497 - type: main_score value: 78.86247877758915 - task: type: Clustering dataset: name: MTEB TwentyNewsgroupsClustering type: mteb/twentynewsgroups-clustering config: default split: test revision: 6125ec4e24fa026cec8a478383ee943acfbd5449 metrics: - type: main_score value: 70.44270836680788 - type: v_measure value: 70.44270836680788 - type: v_measure_std value: 1.5185423698266132 - task: type: PairClassification dataset: name: MTEB TwitterSemEval2015 type: mteb/twittersemeval2015-pairclassification config: default split: test revision: 70970daeab8776df92f5ea462b6173c0b46fd2d1 metrics: - type: cos_sim_accuracy value: 87.74512725755498 - type: cos_sim_accuracy_threshold value: 82.34941560483547 - type: cos_sim_ap value: 79.6389274210382 - type: cos_sim_f1 value: 71.76319176319176 - type: cos_sim_f1_threshold value: 80.1523829249257 - type: cos_sim_precision value: 70.0502512562814 - type: cos_sim_recall value: 73.56200527704485 - type: dot_accuracy value: 85.13441020444657 - type: dot_accuracy_threshold value: 2220800.0 - type: dot_ap value: 71.67080150823449 - type: dot_f1 value: 66.18984119287187 - type: dot_f1_threshold value: 2086400.0 - type: dot_precision value: 61.224489795918366 - type: dot_recall value: 72.0316622691293 - type: euclidean_accuracy value: 87.69148238660071 - type: euclidean_accuracy_threshold value: 9221.50036619459 - type: euclidean_ap value: 79.65326151280289 - type: euclidean_f1 value: 71.7903489983621 - type: euclidean_f1_threshold value: 10313.528386219872 - type: euclidean_precision value: 68.70026525198939 - type: euclidean_recall value: 75.17150395778364 - type: manhattan_accuracy value: 87.74512725755498 - type: manhattan_accuracy_threshold value: 444289.1119837761 - type: manhattan_ap value: 79.67744645365104 - type: manhattan_f1 value: 71.94423699278066 - type: manhattan_f1_threshold value: 491676.24004781246 - type: manhattan_precision value: 68.0961357210179 - type: manhattan_recall value: 76.2532981530343 - type: max_accuracy value: 87.74512725755498 - type: max_ap value: 79.67744645365104 - type: max_f1 value: 71.94423699278066 - task: type: PairClassification dataset: name: MTEB TwitterURLCorpus type: mteb/twitterurlcorpus-pairclassification config: default split: test revision: 8b6510b0b1fa4e4c4f879467980e9be563ec1cdf metrics: - type: cos_sim_accuracy value: 89.5544688943222 - type: cos_sim_accuracy_threshold value: 81.58909533293946 - type: cos_sim_ap value: 86.95174990178396 - type: cos_sim_f1 value: 79.1543756145526 - type: cos_sim_f1_threshold value: 80.08573448087095 - type: cos_sim_precision value: 77.78355879292404 - type: cos_sim_recall value: 80.5743763473976 - type: dot_accuracy value: 88.60752124810804 - type: 
dot_accuracy_threshold value: 2136000.0 - type: dot_ap value: 84.26724775947629 - type: dot_f1 value: 77.67666146985243 - type: dot_f1_threshold value: 2064000.0 - type: dot_precision value: 73.40505721921468 - type: dot_recall value: 82.47613181398214 - type: euclidean_accuracy value: 89.5370046959289 - type: euclidean_accuracy_threshold value: 9750.113991666478 - type: euclidean_ap value: 86.99393092403776 - type: euclidean_f1 value: 79.07167337207571 - type: euclidean_f1_threshold value: 10338.095928500366 - type: euclidean_precision value: 76.59497690531177 - type: euclidean_recall value: 81.71388974437943 - type: manhattan_accuracy value: 89.57581402569178 - type: manhattan_accuracy_threshold value: 463812.92815208435 - type: manhattan_ap value: 87.00849868076658 - type: manhattan_f1 value: 79.08583576933297 - type: manhattan_f1_threshold value: 482453.35128605366 - type: manhattan_precision value: 78.00494270950348 - type: manhattan_recall value: 80.19710502001848 - type: max_accuracy value: 89.57581402569178 - type: max_ap value: 87.00849868076658 - type: max_f1 value: 79.1543756145526 - task: type: STS dataset: name: MTEB AFQMC type: C-MTEB/AFQMC config: default split: validation revision: b44c3b011063adb25877c13823db83bb193913c4 metrics: - type: cosine_pearson value: 45.108559635369325 - type: cosine_spearman value: 47.172833128216176 - type: manhattan_pearson value: 45.75443077564791 - type: manhattan_spearman value: 47.13974146235398 - type: euclidean_pearson value: 45.78921257223492 - type: euclidean_spearman value: 47.177095238278625 - type: main_score value: 47.172833128216176 - task: type: STS dataset: name: MTEB ATEC type: C-MTEB/ATEC config: default split: test revision: 0f319b1142f28d00e055a6770f3f726ae9b7d865 metrics: - type: cosine_pearson value: 48.304409578388466 - type: cosine_spearman value: 50.75006977697012 - type: manhattan_pearson value: 52.688818756177035 - type: manhattan_spearman value: 50.739214155741095 - type: euclidean_pearson value: 52.71788557204978 - type: euclidean_spearman value: 50.77895730336448 - type: main_score value: 50.75006977697012 - task: type: Classification dataset: name: MTEB AmazonReviewsClassification (zh) type: mteb/amazon_reviews_multi config: zh split: test revision: 1399c76144fd37290681b995c656ef9b2e06e26d metrics: - type: accuracy value: 54.339999999999996 - type: accuracy_stderr value: 1.6518837731511269 - type: f1 value: 53.37316538790502 - type: f1_stderr value: 1.6112926272861336 - type: main_score value: 54.339999999999996 - task: type: STS dataset: name: MTEB BQ type: C-MTEB/BQ config: default split: test revision: e3dda5e115e487b39ec7e618c0c6a29137052a55 metrics: - type: cosine_pearson value: 59.62831218167518 - type: cosine_spearman value: 62.02213472473759 - type: manhattan_pearson value: 61.122261197018176 - type: manhattan_spearman value: 62.208780520694454 - type: euclidean_pearson value: 61.17827629627213 - type: euclidean_spearman value: 62.266859648664244 - type: main_score value: 62.02213472473759 - task: type: Clustering dataset: name: MTEB CLSClusteringP2P type: C-MTEB/CLSClusteringP2P config: default split: test revision: 4b6227591c6c1a73bc76b1055f3b7f3588e72476 metrics: - type: main_score value: 54.64518394835408 - type: v_measure value: 54.64518394835408 - type: v_measure_std value: 1.2745946640208072 - task: type: Clustering dataset: name: MTEB CLSClusteringS2S type: C-MTEB/CLSClusteringS2S config: default split: test revision: e458b3f5414b62b7f9f83499ac1f5497ae2e869f metrics: - type: main_score value: 
63.68323477729556 - type: v_measure value: 63.68323477729556 - type: v_measure_std value: 1.740918833098302 - task: type: Reranking dataset: name: MTEB CMedQAv1 type: C-MTEB/CMedQAv1-reranking config: default split: test revision: 8d7f1e942507dac42dc58017c1a001c3717da7df metrics: - type: map value: 84.61500884703916 - type: mrr value: 87.01424603174604 - type: main_score value: 84.61500884703916 - task: type: Reranking dataset: name: MTEB CMedQAv2 type: C-MTEB/CMedQAv2-reranking config: default split: test revision: 23d186750531a14a0357ca22cd92d712fd512ea0 metrics: - type: map value: 85.60137988993483 - type: mrr value: 87.96857142857142 - type: main_score value: 85.60137988993483 - task: type: Retrieval dataset: name: MTEB CmedqaRetrieval type: C-MTEB/CmedqaRetrieval config: default split: dev revision: cd540c506dae1cf9e9a59c3e06f42030d54e7301 metrics: - type: map_at_1 value: 24.191 - type: map_at_10 value: 35.819 - type: map_at_100 value: 37.639 - type: map_at_1000 value: 37.775 - type: map_at_3 value: 32.045 - type: map_at_5 value: 34.008 - type: mrr_at_1 value: 36.684 - type: mrr_at_10 value: 44.769 - type: mrr_at_100 value: 45.754 - type: mrr_at_1000 value: 45.809 - type: mrr_at_3 value: 42.465 - type: mrr_at_5 value: 43.696 - type: ndcg_at_1 value: 36.834 - type: ndcg_at_10 value: 42.208 - type: ndcg_at_100 value: 49.507 - type: ndcg_at_1000 value: 51.834 - type: ndcg_at_3 value: 37.416 - type: ndcg_at_5 value: 39.152 - type: precision_at_1 value: 36.834 - type: precision_at_10 value: 9.357 - type: precision_at_100 value: 1.5310000000000001 - type: precision_at_1000 value: 0.183 - type: precision_at_3 value: 21.08 - type: precision_at_5 value: 15.068999999999999 - type: recall_at_1 value: 24.191 - type: recall_at_10 value: 52.078 - type: recall_at_100 value: 82.548 - type: recall_at_1000 value: 98.017 - type: recall_at_3 value: 37.484 - type: recall_at_5 value: 43.187 - type: main_score value: 42.208 - task: type: PairClassification dataset: name: MTEB Cmnli type: C-MTEB/CMNLI config: default split: validation revision: 41bc36f332156f7adc9e38f53777c959b2ae9766 metrics: - type: cos_sim_accuracy value: 81.98436560432953 - type: cos_sim_accuracy_threshold value: 67.33228049687503 - type: cos_sim_ap value: 90.13312662430796 - type: cos_sim_f1 value: 83.2163938077737 - type: cos_sim_f1_threshold value: 64.44945196171463 - type: cos_sim_precision value: 79.45555082943429 - type: cos_sim_recall value: 87.350946925415 - type: dot_accuracy value: 80.50511124473843 - type: dot_accuracy_threshold value: 1736000.0 - type: dot_ap value: 88.76136186445322 - type: dot_f1 value: 81.75838631878973 - type: dot_f1_threshold value: 1681600.0 - type: dot_precision value: 76.96594427244582 - type: dot_recall value: 87.18728080430208 - type: euclidean_accuracy value: 82.21286831028262 - type: euclidean_accuracy_threshold value: 13240.938473272565 - type: euclidean_ap value: 90.14863232280865 - type: euclidean_f1 value: 83.277292086976 - type: euclidean_f1_threshold value: 13667.852165734186 - type: euclidean_precision value: 79.97847147470398 - type: euclidean_recall value: 86.85994856207621 - type: manhattan_accuracy value: 82.21286831028262 - type: manhattan_accuracy_threshold value: 629412.1389746666 - type: manhattan_ap value: 90.03868533208357 - type: manhattan_f1 value: 83.15683870248579 - type: manhattan_f1_threshold value: 649621.3114321232 - type: manhattan_precision value: 79.46314443971026 - type: manhattan_recall value: 87.21066167874679 - type: max_accuracy value: 82.21286831028262 - type: 
max_ap value: 90.14863232280865 - type: max_f1 value: 83.277292086976 - task: type: Retrieval dataset: name: MTEB CovidRetrieval type: C-MTEB/CovidRetrieval config: default split: dev revision: 1271c7809071a13532e05f25fb53511ffce77117 metrics: - type: map_at_1 value: 65.595 - type: map_at_10 value: 73.717 - type: map_at_100 value: 74.134 - type: map_at_1000 value: 74.143 - type: map_at_3 value: 71.97 - type: map_at_5 value: 73.11800000000001 - type: mrr_at_1 value: 65.648 - type: mrr_at_10 value: 73.618 - type: mrr_at_100 value: 74.02499999999999 - type: mrr_at_1000 value: 74.033 - type: mrr_at_3 value: 71.865 - type: mrr_at_5 value: 73.04 - type: ndcg_at_1 value: 65.753 - type: ndcg_at_10 value: 77.458 - type: ndcg_at_100 value: 79.46 - type: ndcg_at_1000 value: 79.666 - type: ndcg_at_3 value: 73.988 - type: ndcg_at_5 value: 76.038 - type: precision_at_1 value: 65.753 - type: precision_at_10 value: 8.999 - type: precision_at_100 value: 0.9939999999999999 - type: precision_at_1000 value: 0.101 - type: precision_at_3 value: 26.765 - type: precision_at_5 value: 17.092 - type: recall_at_1 value: 65.595 - type: recall_at_10 value: 89.041 - type: recall_at_100 value: 98.31400000000001 - type: recall_at_1000 value: 99.895 - type: recall_at_3 value: 79.768 - type: recall_at_5 value: 84.66799999999999 - type: main_score value: 77.458 - task: type: Retrieval dataset: name: MTEB DuRetrieval type: C-MTEB/DuRetrieval config: default split: dev revision: a1a333e290fe30b10f3f56498e3a0d911a693ced metrics: - type: map_at_1 value: 27.248 - type: map_at_10 value: 84.303 - type: map_at_100 value: 86.866 - type: map_at_1000 value: 86.888 - type: map_at_3 value: 58.658 - type: map_at_5 value: 74.265 - type: mrr_at_1 value: 92.2 - type: mrr_at_10 value: 94.733 - type: mrr_at_100 value: 94.767 - type: mrr_at_1000 value: 94.768 - type: mrr_at_3 value: 94.492 - type: mrr_at_5 value: 94.627 - type: ndcg_at_1 value: 92.2 - type: ndcg_at_10 value: 90.462 - type: ndcg_at_100 value: 92.562 - type: ndcg_at_1000 value: 92.757 - type: ndcg_at_3 value: 89.44800000000001 - type: ndcg_at_5 value: 88.683 - type: precision_at_1 value: 92.2 - type: precision_at_10 value: 42.980000000000004 - type: precision_at_100 value: 4.851 - type: precision_at_1000 value: 0.49 - type: precision_at_3 value: 80.233 - type: precision_at_5 value: 67.95 - type: recall_at_1 value: 27.248 - type: recall_at_10 value: 91.46600000000001 - type: recall_at_100 value: 98.566 - type: recall_at_1000 value: 99.557 - type: recall_at_3 value: 60.671 - type: recall_at_5 value: 78.363 - type: main_score value: 90.462 - task: type: Retrieval dataset: name: MTEB EcomRetrieval type: C-MTEB/EcomRetrieval config: default split: dev revision: 687de13dc7294d6fd9be10c6945f9e8fec8166b9 metrics: - type: map_at_1 value: 54.7 - type: map_at_10 value: 64.574 - type: map_at_100 value: 65.144 - type: map_at_1000 value: 65.156 - type: map_at_3 value: 62.333000000000006 - type: map_at_5 value: 63.63799999999999 - type: mrr_at_1 value: 54.7 - type: mrr_at_10 value: 64.603 - type: mrr_at_100 value: 65.172 - type: mrr_at_1000 value: 65.184 - type: mrr_at_3 value: 62.383 - type: mrr_at_5 value: 63.683 - type: ndcg_at_1 value: 54.7 - type: ndcg_at_10 value: 69.298 - type: ndcg_at_100 value: 71.81 - type: ndcg_at_1000 value: 72.117 - type: ndcg_at_3 value: 64.72099999999999 - type: ndcg_at_5 value: 67.071 - type: precision_at_1 value: 54.7 - type: precision_at_10 value: 8.41 - type: precision_at_100 value: 0.9530000000000001 - type: precision_at_1000 value: 0.098 - type: 
precision_at_3 value: 23.867 - type: precision_at_5 value: 15.459999999999999 - type: recall_at_1 value: 54.7 - type: recall_at_10 value: 84.1 - type: recall_at_100 value: 95.3 - type: recall_at_1000 value: 97.7 - type: recall_at_3 value: 71.6 - type: recall_at_5 value: 77.3 - type: main_score value: 69.298 - task: type: Classification dataset: name: MTEB IFlyTek type: C-MTEB/IFlyTek-classification config: default split: validation revision: 421605374b29664c5fc098418fe20ada9bd55f8a metrics: - type: accuracy value: 49.942285494420936 - type: accuracy_stderr value: 0.9218275144833329 - type: f1 value: 41.32381790374152 - type: f1_stderr value: 0.8291507105327707 - type: main_score value: 49.942285494420936 - task: type: Classification dataset: name: MTEB JDReview type: C-MTEB/JDReview-classification config: default split: test revision: b7c64bd89eb87f8ded463478346f76731f07bf8b metrics: - type: accuracy value: 88.91181988742964 - type: accuracy_stderr value: 1.952391767940518 - type: ap value: 60.18509628974178 - type: ap_stderr value: 4.273060966573582 - type: f1 value: 84.02722221827027 - type: f1_stderr value: 2.238197243395083 - type: main_score value: 88.91181988742964 - task: type: STS dataset: name: MTEB LCQMC type: C-MTEB/LCQMC config: default split: test revision: 17f9b096f80380fce5ed12a9be8be7784b337daf metrics: - type: cosine_pearson value: 68.32691294171383 - type: cosine_spearman value: 75.95458618586729 - type: manhattan_pearson value: 74.37198807732018 - type: manhattan_spearman value: 75.99352157963375 - type: euclidean_pearson value: 74.36294627886716 - type: euclidean_spearman value: 75.98632511635132 - type: main_score value: 75.95458618586729 - task: type: Reranking dataset: name: MTEB MMarcoReranking type: C-MTEB/Mmarco-reranking config: default split: dev revision: 8e0c766dbe9e16e1d221116a3f36795fbade07f6 metrics: - type: map value: 35.4327533126161 - type: mrr value: 34.61507936507937 - type: main_score value: 35.4327533126161 - task: type: Retrieval dataset: name: MTEB MMarcoRetrieval type: C-MTEB/MMarcoRetrieval config: default split: dev revision: 539bbde593d947e2a124ba72651aafc09eb33fc2 metrics: - type: map_at_1 value: 72.652 - type: map_at_10 value: 81.396 - type: map_at_100 value: 81.597 - type: map_at_1000 value: 81.60300000000001 - type: map_at_3 value: 79.757 - type: map_at_5 value: 80.798 - type: mrr_at_1 value: 75.01400000000001 - type: mrr_at_10 value: 81.842 - type: mrr_at_100 value: 82.025 - type: mrr_at_1000 value: 82.03099999999999 - type: mrr_at_3 value: 80.45400000000001 - type: mrr_at_5 value: 81.345 - type: ndcg_at_1 value: 74.98599999999999 - type: ndcg_at_10 value: 84.70100000000001 - type: ndcg_at_100 value: 85.568 - type: ndcg_at_1000 value: 85.721 - type: ndcg_at_3 value: 81.64099999999999 - type: ndcg_at_5 value: 83.375 - type: precision_at_1 value: 74.98599999999999 - type: precision_at_10 value: 10.049 - type: precision_at_100 value: 1.047 - type: precision_at_1000 value: 0.106 - type: precision_at_3 value: 30.458000000000002 - type: precision_at_5 value: 19.206 - type: recall_at_1 value: 72.652 - type: recall_at_10 value: 94.40899999999999 - type: recall_at_100 value: 98.241 - type: recall_at_1000 value: 99.42 - type: recall_at_3 value: 86.354 - type: recall_at_5 value: 90.472 - type: main_score value: 84.70100000000001 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (zh-CN) type: mteb/amazon_massive_intent config: zh-CN split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy 
value: 78.19098856758575 - type: accuracy_stderr value: 0.6325028678427684 - type: f1 value: 74.80611425574001 - type: f1_stderr value: 0.9021806207904779 - type: main_score value: 78.19098856758575 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (zh-CN) type: mteb/amazon_massive_scenario config: zh-CN split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 82.58238063214526 - type: accuracy_stderr value: 1.0999970213165273 - type: f1 value: 81.94734854057064 - type: f1_stderr value: 1.248633855872851 - type: main_score value: 82.58238063214526 - task: type: Retrieval dataset: name: MTEB MedicalRetrieval type: C-MTEB/MedicalRetrieval config: default split: dev revision: 2039188fb5800a9803ba5048df7b76e6fb151fc6 metrics: - type: map_at_1 value: 53.7 - type: map_at_10 value: 59.184000000000005 - type: map_at_100 value: 59.754 - type: map_at_1000 value: 59.8 - type: map_at_3 value: 57.833 - type: map_at_5 value: 58.548 - type: mrr_at_1 value: 54.0 - type: mrr_at_10 value: 59.352000000000004 - type: mrr_at_100 value: 59.926 - type: mrr_at_1000 value: 59.971 - type: mrr_at_3 value: 57.99999999999999 - type: mrr_at_5 value: 58.714999999999996 - type: ndcg_at_1 value: 53.7 - type: ndcg_at_10 value: 62.022 - type: ndcg_at_100 value: 65.038 - type: ndcg_at_1000 value: 66.366 - type: ndcg_at_3 value: 59.209 - type: ndcg_at_5 value: 60.51299999999999 - type: precision_at_1 value: 53.7 - type: precision_at_10 value: 7.1 - type: precision_at_100 value: 0.856 - type: precision_at_1000 value: 0.096 - type: precision_at_3 value: 21.067 - type: precision_at_5 value: 13.28 - type: recall_at_1 value: 53.7 - type: recall_at_10 value: 71.0 - type: recall_at_100 value: 85.6 - type: recall_at_1000 value: 96.3 - type: recall_at_3 value: 63.2 - type: recall_at_5 value: 66.4 - type: main_score value: 62.022 - task: type: Classification dataset: name: MTEB MultilingualSentiment type: C-MTEB/MultilingualSentiment-classification config: default split: validation revision: 46958b007a63fdbf239b7672c25d0bea67b5ea1a metrics: - type: accuracy value: 78.91333333333334 - type: accuracy_stderr value: 1.0834307648494321 - type: f1 value: 78.881433228092 - type: f1_stderr value: 1.122457277013712 - type: main_score value: 78.91333333333334 - task: type: PairClassification dataset: name: MTEB Ocnli type: C-MTEB/OCNLI config: default split: validation revision: 66e76a618a34d6d565d5538088562851e6daa7ec metrics: - type: cos_sim_accuracy value: 76.39415268002165 - type: cos_sim_accuracy_threshold value: 68.98242139321592 - type: cos_sim_ap value: 83.20687440058073 - type: cos_sim_f1 value: 78.4351145038168 - type: cos_sim_f1_threshold value: 65.47409929698304 - type: cos_sim_precision value: 71.54046997389034 - type: cos_sim_recall value: 86.80042238648363 - type: dot_accuracy value: 74.60747157552788 - type: dot_accuracy_threshold value: 1737600.0 - type: dot_ap value: 79.78938545919723 - type: dot_f1 value: 76.92307692307692 - type: dot_f1_threshold value: 1652800.0 - type: dot_precision value: 67.90622473726758 - type: dot_recall value: 88.70116156283 - type: euclidean_accuracy value: 76.34001082837032 - type: euclidean_accuracy_threshold value: 12597.299662420446 - type: euclidean_ap value: 83.60222701792158 - type: euclidean_f1 value: 78.77947295423024 - type: euclidean_f1_threshold value: 13639.653702639469 - type: euclidean_precision value: 70.06578947368422 - type: euclidean_recall value: 89.96832101372756 - type: manhattan_accuracy value: 
76.23172712506768 - type: manhattan_accuracy_threshold value: 587601.2824743986 - type: manhattan_ap value: 83.51813426548178 - type: manhattan_f1 value: 78.6654135338346 - type: manhattan_f1_threshold value: 639711.1931562424 - type: manhattan_precision value: 70.87214225232854 - type: manhattan_recall value: 88.3843717001056 - type: max_accuracy value: 76.39415268002165 - type: max_ap value: 83.60222701792158 - type: max_f1 value: 78.77947295423024 - task: type: Classification dataset: name: MTEB OnlineShopping type: C-MTEB/OnlineShopping-classification config: default split: test revision: e610f2ebd179a8fda30ae534c3878750a96db120 metrics: - type: accuracy value: 94.59 - type: accuracy_stderr value: 0.8971621926942733 - type: ap value: 93.01229797205905 - type: ap_stderr value: 1.0519542956523058 - type: f1 value: 94.58077736915268 - type: f1_stderr value: 0.8954928292768671 - type: main_score value: 94.59 - task: type: STS dataset: name: MTEB PAWSX type: C-MTEB/PAWSX config: default split: test revision: 9c6a90e430ac22b5779fb019a23e820b11a8b5e1 metrics: - type: cosine_pearson value: 24.341872875292857 - type: cosine_spearman value: 30.570037022875436 - type: manhattan_pearson value: 31.41015320258418 - type: manhattan_spearman value: 30.604526098895114 - type: euclidean_pearson value: 31.400038084432175 - type: euclidean_spearman value: 30.61062265273698 - type: main_score value: 30.570037022875436 - task: type: STS dataset: name: MTEB QBQTC type: C-MTEB/QBQTC config: default split: test revision: 790b0510dc52b1553e8c49f3d2afb48c0e5c48b7 metrics: - type: cosine_pearson value: 36.61757468091905 - type: cosine_spearman value: 38.981417359835504 - type: manhattan_pearson value: 37.971127169578764 - type: manhattan_spearman value: 39.55028286687854 - type: euclidean_pearson value: 37.96983777648438 - type: euclidean_spearman value: 39.542856511171784 - type: main_score value: 38.981417359835504 - task: type: STS dataset: name: MTEB STS22 (zh) type: mteb/sts22-crosslingual-sts config: zh split: test revision: eea2b4fe26a775864c896887d910b76a8098ad3f metrics: - type: cosine_pearson value: 68.29834902017382 - type: cosine_spearman value: 68.6823378297782 - type: manhattan_pearson value: 68.47336169904406 - type: manhattan_spearman value: 69.08033223619941 - type: euclidean_pearson value: 68.38785956191622 - type: euclidean_spearman value: 68.97973814449657 - type: main_score value: 68.6823378297782 - task: type: STS dataset: name: MTEB STSB type: C-MTEB/STSB config: default split: test revision: 0cde68302b3541bb8b3c340dc0644b0b745b3dc0 metrics: - type: cosine_pearson value: 80.60572958563593 - type: cosine_spearman value: 80.87063761195603 - type: manhattan_pearson value: 79.30174059269083 - type: manhattan_spearman value: 80.02203618135883 - type: euclidean_pearson value: 79.3314553444783 - type: euclidean_spearman value: 80.04556415585255 - type: main_score value: 80.87063761195603 - task: type: Reranking dataset: name: MTEB T2Reranking type: C-MTEB/T2Reranking config: default split: dev revision: 76631901a18387f85eaa53e5450019b87ad58ef9 metrics: - type: map value: 67.47921173708028 - type: mrr value: 77.9396513739777 - type: main_score value: 67.47921173708028 - task: type: Retrieval dataset: name: MTEB T2Retrieval type: C-MTEB/T2Retrieval config: default split: dev revision: 8731a845f1bf500a4f111cf1070785c793d10e64 metrics: - type: map_at_1 value: 28.021 - type: map_at_10 value: 79.149 - type: map_at_100 value: 82.613 - type: map_at_1000 value: 82.67099999999999 - type: map_at_3 value: 
55.665 - type: map_at_5 value: 68.46900000000001 - type: mrr_at_1 value: 91.106 - type: mrr_at_10 value: 93.372 - type: mrr_at_100 value: 93.44200000000001 - type: mrr_at_1000 value: 93.445 - type: mrr_at_3 value: 92.99300000000001 - type: mrr_at_5 value: 93.24900000000001 - type: ndcg_at_1 value: 91.106 - type: ndcg_at_10 value: 86.259 - type: ndcg_at_100 value: 89.46600000000001 - type: ndcg_at_1000 value: 90.012 - type: ndcg_at_3 value: 87.574 - type: ndcg_at_5 value: 86.283 - type: precision_at_1 value: 91.106 - type: precision_at_10 value: 42.742999999999995 - type: precision_at_100 value: 5.029999999999999 - type: precision_at_1000 value: 0.516 - type: precision_at_3 value: 76.593 - type: precision_at_5 value: 64.243 - type: recall_at_1 value: 28.021 - type: recall_at_10 value: 85.184 - type: recall_at_100 value: 95.79299999999999 - type: recall_at_1000 value: 98.547 - type: recall_at_3 value: 57.233000000000004 - type: recall_at_5 value: 71.628 - type: main_score value: 86.259 - task: type: Classification dataset: name: MTEB TNews type: C-MTEB/TNews-classification config: default split: validation revision: 317f262bf1e6126357bbe89e875451e4b0938fe4 metrics: - type: accuracy value: 50.255 - type: accuracy_stderr value: 0.9341868121526873 - type: f1 value: 48.65080322457893 - type: f1_stderr value: 0.9391547591179161 - type: main_score value: 50.255 - task: type: Clustering dataset: name: MTEB ThuNewsClusteringP2P type: C-MTEB/ThuNewsClusteringP2P config: default split: test revision: 5798586b105c0434e4f0fe5e767abe619442cf93 metrics: - type: main_score value: 64.32076022871308 - type: v_measure value: 64.32076022871308 - type: v_measure_std value: 0.7190996709617924 - task: type: Clustering dataset: name: MTEB ThuNewsClusteringS2S type: C-MTEB/ThuNewsClusteringS2S config: default split: test revision: 8a8b2caeda43f39e13c4bc5bea0f8a667896e10d metrics: - type: main_score value: 54.57080911705562 - type: v_measure value: 54.57080911705562 - type: v_measure_std value: 1.5185826402845883 - task: type: Retrieval dataset: name: MTEB VideoRetrieval type: C-MTEB/VideoRetrieval config: default split: dev revision: 58c2597a5943a2ba48f4668c3b90d796283c5639 metrics: - type: map_at_1 value: 63.1 - type: map_at_10 value: 73.137 - type: map_at_100 value: 73.539 - type: map_at_1000 value: 73.546 - type: map_at_3 value: 71.467 - type: map_at_5 value: 72.552 - type: mrr_at_1 value: 63.3 - type: mrr_at_10 value: 73.238 - type: mrr_at_100 value: 73.64 - type: mrr_at_1000 value: 73.64699999999999 - type: mrr_at_3 value: 71.56700000000001 - type: mrr_at_5 value: 72.652 - type: ndcg_at_1 value: 63.1 - type: ndcg_at_10 value: 77.397 - type: ndcg_at_100 value: 79.11399999999999 - type: ndcg_at_1000 value: 79.305 - type: ndcg_at_3 value: 74.031 - type: ndcg_at_5 value: 75.976 - type: precision_at_1 value: 63.1 - type: precision_at_10 value: 9.049999999999999 - type: precision_at_100 value: 0.98 - type: precision_at_1000 value: 0.1 - type: precision_at_3 value: 27.133000000000003 - type: precision_at_5 value: 17.22 - type: recall_at_1 value: 63.1 - type: recall_at_10 value: 90.5 - type: recall_at_100 value: 98.0 - type: recall_at_1000 value: 99.5 - type: recall_at_3 value: 81.39999999999999 - type: recall_at_5 value: 86.1 - type: main_score value: 77.397 - task: type: Classification dataset: name: MTEB Waimai type: C-MTEB/waimai-classification config: default split: test revision: 339287def212450dcaa9df8c22bf93e9980c7023 metrics: - type: accuracy value: 89.26 - type: accuracy_stderr value: 1.44651304867948 - 
type: ap value: 75.17154345788362 - type: ap_stderr value: 2.7356371110082565 - type: f1 value: 87.94016849813178 - type: f1_stderr value: 1.3897605039980534 - type: main_score value: 89.26 - task: type: Clustering dataset: name: MTEB AlloProfClusteringP2P type: lyon-nlp/alloprof config: default split: test revision: 392ba3f5bcc8c51f578786c1fc3dae648662cb9b metrics: - type: main_score value: 71.20310003742769 - type: v_measure value: 71.20310003742769 - type: v_measure_std value: 2.3682783706448687 - type: main_score value: 59.64232194434788 - type: v_measure value: 59.64232194434788 - type: v_measure_std value: 2.4292956011867557 - task: type: Reranking dataset: name: MTEB AlloprofReranking type: lyon-nlp/mteb-fr-reranking-alloprof-s2p config: default split: test revision: 65393d0d7a08a10b4e348135e824f385d420b0fd metrics: - type: main_score value: 78.62041803111894 - type: map value: 78.62041803111894 - type: mrr value: 79.82309057762426 - type: nAUC_map_diff1 value: 58.23586953459263 - type: nAUC_map_max value: 16.162821346484357 - type: nAUC_map_std value: 20.727030444422525 - type: nAUC_mrr_diff1 value: 57.89675675999501 - type: nAUC_mrr_max value: 17.188359535738417 - type: nAUC_mrr_std value: 20.121404571879598 - task: type: Retrieval dataset: name: MTEB AlloprofRetrieval type: lyon-nlp/alloprof config: default split: test revision: fcf295ea64c750f41fadbaa37b9b861558e1bfbd metrics: - type: main_score value: 58.499 - type: map_at_1 value: 40.371 - type: map_at_10 value: 52.337 - type: map_at_100 value: 53.04 - type: map_at_1000 value: 53.065 - type: map_at_20 value: 52.772 - type: map_at_3 value: 49.201 - type: map_at_5 value: 51.025 - type: mrr_at_1 value: 40.3713298791019 - type: mrr_at_10 value: 52.322165337061755 - type: mrr_at_100 value: 53.02092832847133 - type: mrr_at_1000 value: 53.04594680215603 - type: mrr_at_20 value: 52.750849914358135 - type: mrr_at_3 value: 49.150834772596475 - type: mrr_at_5 value: 50.998848589522275 - type: nauc_map_at_1000_diff1 value: 44.71946249374932 - type: nauc_map_at_1000_max value: 28.074204125714193 - type: nauc_map_at_1000_std value: -5.1319087890196275 - type: nauc_map_at_100_diff1 value: 44.71140286780233 - type: nauc_map_at_100_max value: 28.09677884622645 - type: nauc_map_at_100_std value: -5.116353867480612 - type: nauc_map_at_10_diff1 value: 44.737968596047736 - type: nauc_map_at_10_max value: 28.103186472557184 - type: nauc_map_at_10_std value: -5.258817287329683 - type: nauc_map_at_1_diff1 value: 47.48389890056789 - type: nauc_map_at_1_max value: 24.803734709402654 - type: nauc_map_at_1_std value: -6.504759899363267 - type: nauc_map_at_20_diff1 value: 44.67268454863271 - type: nauc_map_at_20_max value: 28.068912295976933 - type: nauc_map_at_20_std value: -5.1971060419801836 - type: nauc_map_at_3_diff1 value: 44.59399231542881 - type: nauc_map_at_3_max value: 27.097806786915502 - type: nauc_map_at_3_std value: -5.957120508111229 - type: nauc_map_at_5_diff1 value: 44.549807218619236 - type: nauc_map_at_5_max value: 28.03902312965202 - type: nauc_map_at_5_std value: -5.279585300980128 - type: nauc_mrr_at_1000_diff1 value: 44.70183532803094 - type: nauc_mrr_at_1000_max value: 28.08833759937601 - type: nauc_mrr_at_1000_std value: -5.097929115475795 - type: nauc_mrr_at_100_diff1 value: 44.693824401340684 - type: nauc_mrr_at_100_max value: 28.110898009292296 - type: nauc_mrr_at_100_std value: -5.082401300601749 - type: nauc_mrr_at_10_diff1 value: 44.74052791862188 - type: nauc_mrr_at_10_max value: 28.125378341430725 - type: 
nauc_mrr_at_10_std value: -5.209767905428716 - type: nauc_mrr_at_1_diff1 value: 47.48389890056789 - type: nauc_mrr_at_1_max value: 24.803734709402654 - type: nauc_mrr_at_1_std value: -6.504759899363267 - type: nauc_mrr_at_20_diff1 value: 44.65204014980107 - type: nauc_mrr_at_20_max value: 28.071523791101487 - type: nauc_mrr_at_20_std value: -5.176680495032765 - type: nauc_mrr_at_3_diff1 value: 44.566371489967835 - type: nauc_mrr_at_3_max value: 27.138418179089243 - type: nauc_mrr_at_3_std value: -5.8860676927947715 - type: nauc_mrr_at_5_diff1 value: 44.513022796226025 - type: nauc_mrr_at_5_max value: 28.037968016529184 - type: nauc_mrr_at_5_std value: -5.286851060853457 - type: nauc_ndcg_at_1000_diff1 value: 44.31019947897497 - type: nauc_ndcg_at_1000_max value: 29.332844099450185 - type: nauc_ndcg_at_1000_std value: -4.185675731246788 - type: nauc_ndcg_at_100_diff1 value: 44.15415366286996 - type: nauc_ndcg_at_100_max value: 30.098413084162345 - type: nauc_ndcg_at_100_std value: -3.557438303045246 - type: nauc_ndcg_at_10_diff1 value: 44.117356815361376 - type: nauc_ndcg_at_10_max value: 30.090057186506147 - type: nauc_ndcg_at_10_std value: -4.294561567142078 - type: nauc_ndcg_at_1_diff1 value: 47.48389890056789 - type: nauc_ndcg_at_1_max value: 24.803734709402654 - type: nauc_ndcg_at_1_std value: -6.504759899363267 - type: nauc_ndcg_at_20_diff1 value: 43.868556983413285 - type: nauc_ndcg_at_20_max value: 30.06455269775592 - type: nauc_ndcg_at_20_std value: -3.9645560243946623 - type: nauc_ndcg_at_3_diff1 value: 43.71970793339256 - type: nauc_ndcg_at_3_max value: 28.057786581438034 - type: nauc_ndcg_at_3_std value: -5.597352364190012 - type: nauc_ndcg_at_5_diff1 value: 43.57692922989753 - type: nauc_ndcg_at_5_max value: 29.811975056854994 - type: nauc_ndcg_at_5_std value: -4.362865924703688 - type: nauc_precision_at_1000_diff1 value: 37.65255144893002 - type: nauc_precision_at_1000_max value: 88.70768683938714 - type: nauc_precision_at_1000_std value: 69.77642765639528 - type: nauc_precision_at_100_diff1 value: 38.99412121382678 - type: nauc_precision_at_100_max value: 61.57652450016459 - type: nauc_precision_at_100_std value: 24.826035139656348 - type: nauc_precision_at_10_diff1 value: 41.78189732924517 - type: nauc_precision_at_10_max value: 39.83536802453079 - type: nauc_precision_at_10_std value: 0.431964006091015 - type: nauc_precision_at_1_diff1 value: 47.48389890056789 - type: nauc_precision_at_1_max value: 24.803734709402654 - type: nauc_precision_at_1_std value: -6.504759899363267 - type: nauc_precision_at_20_diff1 value: 39.33781305274886 - type: nauc_precision_at_20_max value: 43.00448814568695 - type: nauc_precision_at_20_std value: 4.5633424143661365 - type: nauc_precision_at_3_diff1 value: 40.99977742505519 - type: nauc_precision_at_3_max value: 31.14585236181214 - type: nauc_precision_at_3_std value: -4.404002104899136 - type: nauc_precision_at_5_diff1 value: 40.12130730401297 - type: nauc_precision_at_5_max value: 36.45000981581976 - type: nauc_precision_at_5_std value: -0.8603896798394983 - type: nauc_recall_at_1000_diff1 value: 37.652551448927504 - type: nauc_recall_at_1000_max value: 88.70768683938547 - type: nauc_recall_at_1000_std value: 69.77642765638893 - type: nauc_recall_at_100_diff1 value: 38.9941212138267 - type: nauc_recall_at_100_max value: 61.57652450016457 - type: nauc_recall_at_100_std value: 24.82603513965631 - type: nauc_recall_at_10_diff1 value: 41.781897329245105 - type: nauc_recall_at_10_max value: 39.83536802453082 - type: nauc_recall_at_10_std value: 
0.4319640060909985 - type: nauc_recall_at_1_diff1 value: 47.48389890056789 - type: nauc_recall_at_1_max value: 24.803734709402654 - type: nauc_recall_at_1_std value: -6.504759899363267 - type: nauc_recall_at_20_diff1 value: 39.337813052748835 - type: nauc_recall_at_20_max value: 43.00448814568676 - type: nauc_recall_at_20_std value: 4.56334241436601 - type: nauc_recall_at_3_diff1 value: 40.99977742505522 - type: nauc_recall_at_3_max value: 31.14585236181218 - type: nauc_recall_at_3_std value: -4.404002104899084 - type: nauc_recall_at_5_diff1 value: 40.121307304013 - type: nauc_recall_at_5_max value: 36.450009815819726 - type: nauc_recall_at_5_std value: -0.8603896798395225 - type: ndcg_at_1 value: 40.371 - type: ndcg_at_10 value: 58.499 - type: ndcg_at_100 value: 61.958 - type: ndcg_at_1000 value: 62.638000000000005 - type: ndcg_at_20 value: 60.068 - type: ndcg_at_3 value: 52.079 - type: ndcg_at_5 value: 55.359 - type: precision_at_1 value: 40.371 - type: precision_at_10 value: 7.797999999999999 - type: precision_at_100 value: 0.943 - type: precision_at_1000 value: 0.1 - type: precision_at_20 value: 4.208 - type: precision_at_3 value: 20.135 - type: precision_at_5 value: 13.669999999999998 - type: recall_at_1 value: 40.371 - type: recall_at_10 value: 77.979 - type: recall_at_100 value: 94.257 - type: recall_at_1000 value: 99.655 - type: recall_at_20 value: 84.154 - type: recall_at_3 value: 60.406000000000006 - type: recall_at_5 value: 68.351 - task: type: Classification dataset: name: MTEB AmazonReviewsClassification (fr) type: mteb/amazon_reviews_multi config: fr split: test revision: 1399c76144fd37290681b995c656ef9b2e06e26d metrics: - type: accuracy value: 55.186 - type: f1 value: 54.46705535013317 - type: f1_weighted value: 54.46705535013317 - type: main_score value: 55.186 - task: type: Retrieval dataset: name: MTEB BSARDRetrieval type: maastrichtlawtech/bsard config: default split: test revision: 5effa1b9b5fa3b0f9e12523e6e43e5f86a6e6d59 metrics: - type: main_score value: 65.766 - type: map_at_1 value: 17.116999999999997 - type: map_at_10 value: 24.2 - type: map_at_100 value: 25.196 - type: map_at_1000 value: 25.285999999999998 - type: map_at_20 value: 24.84 - type: map_at_3 value: 21.246000000000002 - type: map_at_5 value: 23.386000000000003 - type: mrr_at_1 value: 17.117117117117118 - type: mrr_at_10 value: 24.19955669955671 - type: mrr_at_100 value: 25.195531920335007 - type: mrr_at_1000 value: 25.284600511909495 - type: mrr_at_20 value: 24.840254977638896 - type: mrr_at_3 value: 21.246246246246244 - type: mrr_at_5 value: 23.38588588588589 - type: nauc_map_at_1000_diff1 value: 10.81116818873305 - type: nauc_map_at_1000_max value: 18.081485212587296 - type: nauc_map_at_1000_std value: 15.55247182359811 - type: nauc_map_at_100_diff1 value: 10.769025561727476 - type: nauc_map_at_100_max value: 18.05422658310923 - type: nauc_map_at_100_std value: 15.5467718904851 - type: nauc_map_at_10_diff1 value: 10.683272018434048 - type: nauc_map_at_10_max value: 18.142476171157714 - type: nauc_map_at_10_std value: 15.160871943210017 - type: nauc_map_at_1_diff1 value: 15.136874216646229 - type: nauc_map_at_1_max value: 19.68585969419655 - type: nauc_map_at_1_std value: 15.169957564848444 - type: nauc_map_at_20_diff1 value: 11.04316522915875 - type: nauc_map_at_20_max value: 17.817024791267443 - type: nauc_map_at_20_std value: 15.071246935999893 - type: nauc_map_at_3_diff1 value: 8.893328353778843 - type: nauc_map_at_3_max value: 16.402408590507946 - type: nauc_map_at_3_std value: 14.631998787185735 
- type: nauc_map_at_5_diff1 value: 9.802455874823172 - type: nauc_map_at_5_max value: 17.939476196078495 - type: nauc_map_at_5_std value: 14.130589132632698 - type: nauc_mrr_at_1000_diff1 value: 10.813072323683013 - type: nauc_mrr_at_1000_max value: 18.08332318614462 - type: nauc_mrr_at_1000_std value: 15.553043223942819 - type: nauc_mrr_at_100_diff1 value: 10.77091057430458 - type: nauc_mrr_at_100_max value: 18.055798185778123 - type: nauc_mrr_at_100_std value: 15.547068262312003 - type: nauc_mrr_at_10_diff1 value: 10.683272018434048 - type: nauc_mrr_at_10_max value: 18.142476171157714 - type: nauc_mrr_at_10_std value: 15.160871943210017 - type: nauc_mrr_at_1_diff1 value: 15.136874216646229 - type: nauc_mrr_at_1_max value: 19.68585969419655 - type: nauc_mrr_at_1_std value: 15.169957564848444 - type: nauc_mrr_at_20_diff1 value: 11.04316522915875 - type: nauc_mrr_at_20_max value: 17.817024791267443 - type: nauc_mrr_at_20_std value: 15.071246935999893 - type: nauc_mrr_at_3_diff1 value: 8.893328353778843 - type: nauc_mrr_at_3_max value: 16.402408590507946 - type: nauc_mrr_at_3_std value: 14.631998787185735 - type: nauc_mrr_at_5_diff1 value: 9.802455874823172 - type: nauc_mrr_at_5_max value: 17.939476196078495 - type: nauc_mrr_at_5_std value: 14.130589132632698 - type: nauc_ndcg_at_1000_diff1 value: 11.202853727201774 - type: nauc_ndcg_at_1000_max value: 19.0293189527563 - type: nauc_ndcg_at_1000_std value: 18.390388750658357 - type: nauc_ndcg_at_100_diff1 value: 10.087335018055228 - type: nauc_ndcg_at_100_max value: 18.78516003607274 - type: nauc_ndcg_at_100_std value: 18.780357674944415 - type: nauc_ndcg_at_10_diff1 value: 10.574953671198443 - type: nauc_ndcg_at_10_max value: 18.572291623672044 - type: nauc_ndcg_at_10_std value: 15.808055075116057 - type: nauc_ndcg_at_1_diff1 value: 15.136874216646229 - type: nauc_ndcg_at_1_max value: 19.68585969419655 - type: nauc_ndcg_at_1_std value: 15.169957564848444 - type: nauc_ndcg_at_20_diff1 value: 11.86104023461335 - type: nauc_ndcg_at_20_max value: 17.436985589044458 - type: nauc_ndcg_at_20_std value: 15.588720372098383 - type: nauc_ndcg_at_3_diff1 value: 7.212552449189805 - type: nauc_ndcg_at_3_max value: 15.573909877641508 - type: nauc_ndcg_at_3_std value: 14.53705493856145 - type: nauc_ndcg_at_5_diff1 value: 8.778923731622235 - type: nauc_ndcg_at_5_max value: 18.140995131168534 - type: nauc_ndcg_at_5_std value: 13.608313703781533 - type: nauc_precision_at_1000_diff1 value: 21.242679241621413 - type: nauc_precision_at_1000_max value: 28.358433127289924 - type: nauc_precision_at_1000_std value: 43.82822797432329 - type: nauc_precision_at_100_diff1 value: 6.627014646720404 - type: nauc_precision_at_100_max value: 22.40433487802035 - type: nauc_precision_at_100_std value: 34.933889742457595 - type: nauc_precision_at_10_diff1 value: 10.885683410075934 - type: nauc_precision_at_10_max value: 19.96889041019717 - type: nauc_precision_at_10_std value: 17.798863824564464 - type: nauc_precision_at_1_diff1 value: 15.136874216646229 - type: nauc_precision_at_1_max value: 19.68585969419655 - type: nauc_precision_at_1_std value: 15.169957564848444 - type: nauc_precision_at_20_diff1 value: 15.496066928172066 - type: nauc_precision_at_20_max value: 16.03026652303162 - type: nauc_precision_at_20_std value: 17.26605341902364 - type: nauc_precision_at_3_diff1 value: 2.968469300914268 - type: nauc_precision_at_3_max value: 13.49791571660617 - type: nauc_precision_at_3_std value: 14.311739399090806 - type: nauc_precision_at_5_diff1 value: 6.502154730668018 - type: 
nauc_precision_at_5_max value: 18.889080152631124 - type: nauc_precision_at_5_std value: 12.221319698087786 - type: nauc_recall_at_1000_diff1 value: 21.242679241621435 - type: nauc_recall_at_1000_max value: 28.358433127289974 - type: nauc_recall_at_1000_std value: 43.82822797432328 - type: nauc_recall_at_100_diff1 value: 6.62701464672039 - type: nauc_recall_at_100_max value: 22.404334878020286 - type: nauc_recall_at_100_std value: 34.93388974245755 - type: nauc_recall_at_10_diff1 value: 10.885683410075906 - type: nauc_recall_at_10_max value: 19.968890410197133 - type: nauc_recall_at_10_std value: 17.7988638245644 - type: nauc_recall_at_1_diff1 value: 15.136874216646229 - type: nauc_recall_at_1_max value: 19.68585969419655 - type: nauc_recall_at_1_std value: 15.169957564848444 - type: nauc_recall_at_20_diff1 value: 15.49606692817206 - type: nauc_recall_at_20_max value: 16.030266523031628 - type: nauc_recall_at_20_std value: 17.26605341902362 - type: nauc_recall_at_3_diff1 value: 2.968469300914263 - type: nauc_recall_at_3_max value: 13.497915716606142 - type: nauc_recall_at_3_std value: 14.31173939909079 - type: nauc_recall_at_5_diff1 value: 6.50215473066801 - type: nauc_recall_at_5_max value: 18.889080152631095 - type: nauc_recall_at_5_std value: 12.221319698087767 - type: ndcg_at_1 value: 17.116999999999997 - type: ndcg_at_10 value: 28.524 - type: ndcg_at_100 value: 33.476 - type: ndcg_at_1000 value: 36.012 - type: ndcg_at_20 value: 30.820999999999998 - type: ndcg_at_3 value: 22.721 - type: ndcg_at_5 value: 26.596999999999998 - type: precision_at_1 value: 17.116999999999997 - type: precision_at_10 value: 4.234 - type: precision_at_100 value: 0.658 - type: precision_at_1000 value: 0.086 - type: precision_at_20 value: 2.568 - type: precision_at_3 value: 9.009 - type: precision_at_5 value: 7.297 - type: recall_at_1 value: 17.116999999999997 - type: recall_at_10 value: 42.342 - type: recall_at_100 value: 65.766 - type: recall_at_1000 value: 86.036 - type: recall_at_20 value: 51.351 - type: recall_at_3 value: 27.027 - type: recall_at_5 value: 36.486000000000004 - task: type: Clustering dataset: name: MTEB HALClusteringS2S type: lyon-nlp/clustering-hal-s2s config: default split: test revision: e06ebbbb123f8144bef1a5d18796f3dec9ae2915 metrics: - type: main_score value: 28.18744772954557 - type: v_measure value: 28.18744772954557 - type: v_measure_std value: 3.239838057506439 - task: type: Clustering dataset: name: MTEB MLSUMClusteringP2P (fr) type: reciTAL/mlsum config: fr split: test revision: b5d54f8f3b61ae17845046286940f03c6bc79bc7 metrics: - type: main_score value: 47.75009059283003 - type: v_measure value: 47.75009059283003 - type: v_measure_std value: 2.009277732690298 - type: main_score value: 47.46091989113078 - type: v_measure value: 47.46091989113078 - type: v_measure_std value: 2.604802270948194 - task: type: Classification dataset: name: MTEB MTOPDomainClassification (fr) type: mteb/mtop_domain config: fr split: test revision: d80d48c1eb48d3562165c59d59d0034df9fff0bf metrics: - type: accuracy value: 97.20325712496086 - type: f1 value: 97.05991090368462 - type: f1_weighted value: 97.20748006323807 - type: main_score value: 97.20325712496086 - task: type: Classification dataset: name: MTEB MTOPIntentClassification (fr) type: mteb/mtop_intent config: fr split: test revision: ae001d0e6b1228650b7bd1c2c65fb50ad11a8aba metrics: - type: accuracy value: 93.07234575634199 - type: f1 value: 76.54521288506878 - type: f1_weighted value: 93.6903586431893 - type: main_score value: 93.07234575634199 
- task: type: Classification dataset: name: MTEB MasakhaNEWSClassification (fra) type: mteb/masakhanews config: fra split: test revision: 18193f187b92da67168c655c9973a165ed9593dd metrics: - type: accuracy value: 82.48815165876778 - type: f1 value: 78.71164464238117 - type: f1_weighted value: 82.38927389376973 - type: main_score value: 82.48815165876778 - task: type: Clustering dataset: name: MTEB MasakhaNEWSClusteringP2P (fra) type: masakhane/masakhanews config: fra split: test revision: 8ccc72e69e65f40c70e117d8b3c08306bb788b60 metrics: - type: main_score value: 73.85712952800003 - type: v_measure value: 73.85712952800003 - type: v_measure_std value: 22.471668299794416 - type: main_score value: 67.23960512566751 - type: v_measure value: 67.23960512566751 - type: v_measure_std value: 24.65079601360142 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (fr) type: mteb/amazon_massive_intent config: fr split: test revision: 4672e20407010da34463acc759c162ca9734bca6 metrics: - type: accuracy value: 79.59986550100874 - type: f1 value: 76.0439154517916 - type: f1_weighted value: 79.48538292013761 - type: main_score value: 79.59986550100874 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (fr) type: mteb/amazon_massive_scenario config: fr split: test revision: fad2c6e8459f9e1c45d9315f4953d921437d70f8 metrics: - type: accuracy value: 82.182246133154 - type: f1 value: 81.68006668655397 - type: f1_weighted value: 81.94775072858566 - type: main_score value: 82.182246133154 - task: type: Retrieval dataset: name: MTEB MintakaRetrieval (fr) type: jinaai/mintakaqa config: fr split: test revision: efa78cc2f74bbcd21eff2261f9e13aebe40b814e metrics: - type: main_score value: 62.532 - type: map_at_1 value: 45.823 - type: map_at_10 value: 57.174 - type: map_at_100 value: 57.735 - type: map_at_1000 value: 57.767 - type: map_at_20 value: 57.53 - type: map_at_3 value: 54.716 - type: map_at_5 value: 56.227000000000004 - type: mrr_at_1 value: 45.82309582309582 - type: mrr_at_10 value: 57.17958217958217 - type: mrr_at_100 value: 57.744059413627866 - type: mrr_at_1000 value: 57.776651992832605 - type: mrr_at_20 value: 57.53890924556554 - type: mrr_at_3 value: 54.716079716079676 - type: mrr_at_5 value: 56.227136227136256 - type: nauc_map_at_1000_diff1 value: 39.48401851944296 - type: nauc_map_at_1000_max value: 36.55276875160682 - type: nauc_map_at_1000_std value: 3.9173787361040913 - type: nauc_map_at_100_diff1 value: 39.45696514871956 - type: nauc_map_at_100_max value: 36.55786982498759 - type: nauc_map_at_100_std value: 3.9506714061766557 - type: nauc_map_at_10_diff1 value: 39.31548009319837 - type: nauc_map_at_10_max value: 36.75711871602276 - type: nauc_map_at_10_std value: 3.782911249250981 - type: nauc_map_at_1_diff1 value: 44.190649439568766 - type: nauc_map_at_1_max value: 31.017419446234317 - type: nauc_map_at_1_std value: 0.5544388561183956 - type: nauc_map_at_20_diff1 value: 39.443640617310585 - type: nauc_map_at_20_max value: 36.63799366674228 - type: nauc_map_at_20_std value: 3.934276303386171 - type: nauc_map_at_3_diff1 value: 40.30871768246873 - type: nauc_map_at_3_max value: 36.944169455458656 - type: nauc_map_at_3_std value: 2.9847330185694556 - type: nauc_map_at_5_diff1 value: 39.590461060438095 - type: nauc_map_at_5_max value: 36.998781454405574 - type: nauc_map_at_5_std value: 3.532693606637119 - type: nauc_mrr_at_1000_diff1 value: 39.46102363098429 - type: nauc_mrr_at_1000_max value: 36.56900606103558 - type: nauc_mrr_at_1000_std value: 
3.972436075561705 - type: nauc_mrr_at_100_diff1 value: 39.43269261665982 - type: nauc_mrr_at_100_max value: 36.574081599242014 - type: nauc_mrr_at_100_std value: 4.006374171904806 - type: nauc_mrr_at_10_diff1 value: 39.29970560564493 - type: nauc_mrr_at_10_max value: 36.778388879484716 - type: nauc_mrr_at_10_std value: 3.8335456201567206 - type: nauc_mrr_at_1_diff1 value: 44.190649439568766 - type: nauc_mrr_at_1_max value: 31.017419446234317 - type: nauc_mrr_at_1_std value: 0.5544388561183956 - type: nauc_mrr_at_20_diff1 value: 39.42091158484574 - type: nauc_mrr_at_20_max value: 36.65421566061936 - type: nauc_mrr_at_20_std value: 3.988695948848555 - type: nauc_mrr_at_3_diff1 value: 40.313976315898195 - type: nauc_mrr_at_3_max value: 36.960483501441985 - type: nauc_mrr_at_3_std value: 3.0112756156560394 - type: nauc_mrr_at_5_diff1 value: 39.56386294620379 - type: nauc_mrr_at_5_max value: 37.02119815939672 - type: nauc_mrr_at_5_std value: 3.6118004205573184 - type: nauc_ndcg_at_1000_diff1 value: 38.05281585863137 - type: nauc_ndcg_at_1000_max value: 37.41178875860201 - type: nauc_ndcg_at_1000_std value: 5.525420555163393 - type: nauc_ndcg_at_100_diff1 value: 37.18408005856676 - type: nauc_ndcg_at_100_max value: 37.617851212997685 - type: nauc_ndcg_at_100_std value: 6.871461890669446 - type: nauc_ndcg_at_10_diff1 value: 36.624444841382484 - type: nauc_ndcg_at_10_max value: 38.62100324849529 - type: nauc_ndcg_at_10_std value: 6.027810657475449 - type: nauc_ndcg_at_1_diff1 value: 44.190649439568766 - type: nauc_ndcg_at_1_max value: 31.017419446234317 - type: nauc_ndcg_at_1_std value: 0.5544388561183956 - type: nauc_ndcg_at_20_diff1 value: 37.057047514121564 - type: nauc_ndcg_at_20_max value: 38.19839331454421 - type: nauc_ndcg_at_20_std value: 6.770369938343684 - type: nauc_ndcg_at_3_diff1 value: 38.95821428563954 - type: nauc_ndcg_at_3_max value: 38.87440219376017 - type: nauc_ndcg_at_3_std value: 4.097498274708613 - type: nauc_ndcg_at_5_diff1 value: 37.515589837182034 - type: nauc_ndcg_at_5_max value: 39.165561493023276 - type: nauc_ndcg_at_5_std value: 5.291512124344874 - type: nauc_precision_at_1000_diff1 value: -13.365474882749279 - type: nauc_precision_at_1000_max value: 50.68568417959442 - type: nauc_precision_at_1000_std value: 37.847145129019054 - type: nauc_precision_at_100_diff1 value: 12.081443207482383 - type: nauc_precision_at_100_max value: 43.67561356191485 - type: nauc_precision_at_100_std value: 44.64523987759538 - type: nauc_precision_at_10_diff1 value: 23.20358204183261 - type: nauc_precision_at_10_max value: 46.93706139285088 - type: nauc_precision_at_10_std value: 17.36243956517301 - type: nauc_precision_at_1_diff1 value: 44.190649439568766 - type: nauc_precision_at_1_max value: 31.017419446234317 - type: nauc_precision_at_1_std value: 0.5544388561183956 - type: nauc_precision_at_20_diff1 value: 22.42836999246196 - type: nauc_precision_at_20_max value: 46.29381413041759 - type: nauc_precision_at_20_std value: 26.126609401922696 - type: nauc_precision_at_3_diff1 value: 34.503018704702484 - type: nauc_precision_at_3_max value: 45.194775358016095 - type: nauc_precision_at_3_std value: 7.864444241838433 - type: nauc_precision_at_5_diff1 value: 29.494641243672138 - type: nauc_precision_at_5_max value: 47.326071718857484 - type: nauc_precision_at_5_std value: 12.273738036245172 - type: nauc_recall_at_1000_diff1 value: -13.365474882756335 - type: nauc_recall_at_1000_max value: 50.68568417959348 - type: nauc_recall_at_1000_std value: 37.8471451290128 - type: 
nauc_recall_at_100_diff1 value: 12.08144320748251 - type: nauc_recall_at_100_max value: 43.675613561914986 - type: nauc_recall_at_100_std value: 44.645239877595564 - type: nauc_recall_at_10_diff1 value: 23.203582041832526 - type: nauc_recall_at_10_max value: 46.9370613928509 - type: nauc_recall_at_10_std value: 17.36243956517297 - type: nauc_recall_at_1_diff1 value: 44.190649439568766 - type: nauc_recall_at_1_max value: 31.017419446234317 - type: nauc_recall_at_1_std value: 0.5544388561183956 - type: nauc_recall_at_20_diff1 value: 22.42836999246212 - type: nauc_recall_at_20_max value: 46.29381413041773 - type: nauc_recall_at_20_std value: 26.12660940192268 - type: nauc_recall_at_3_diff1 value: 34.50301870470248 - type: nauc_recall_at_3_max value: 45.19477535801611 - type: nauc_recall_at_3_std value: 7.8644442418384335 - type: nauc_recall_at_5_diff1 value: 29.494641243672216 - type: nauc_recall_at_5_max value: 47.32607171885759 - type: nauc_recall_at_5_std value: 12.273738036245142 - type: ndcg_at_1 value: 45.823 - type: ndcg_at_10 value: 62.532 - type: ndcg_at_100 value: 65.298 - type: ndcg_at_1000 value: 66.214 - type: ndcg_at_20 value: 63.82600000000001 - type: ndcg_at_3 value: 57.528999999999996 - type: ndcg_at_5 value: 60.24 - type: precision_at_1 value: 45.823 - type: precision_at_10 value: 7.928 - type: precision_at_100 value: 0.923 - type: precision_at_1000 value: 0.1 - type: precision_at_20 value: 4.22 - type: precision_at_3 value: 21.881 - type: precision_at_5 value: 14.438999999999998 - type: recall_at_1 value: 45.823 - type: recall_at_10 value: 79.279 - type: recall_at_100 value: 92.301 - type: recall_at_1000 value: 99.631 - type: recall_at_20 value: 84.398 - type: recall_at_3 value: 65.643 - type: recall_at_5 value: 72.195 - task: type: PairClassification dataset: name: MTEB OpusparcusPC (fr) type: GEM/opusparcus config: fr split: test revision: 9e9b1f8ef51616073f47f306f7f47dd91663f86a metrics: - type: cosine_accuracy value: 99.90069513406156 - type: cosine_accuracy_threshold value: 54.45001207375879 - type: cosine_ap value: 100.0 - type: cosine_f1 value: 99.95032290114257 - type: cosine_f1_threshold value: 54.45001207375879 - type: cosine_precision value: 100.0 - type: cosine_recall value: 99.90069513406156 - type: dot_accuracy value: 99.90069513406156 - type: dot_accuracy_threshold value: 1312800.0 - type: dot_ap value: 100.0 - type: dot_f1 value: 99.95032290114257 - type: dot_f1_threshold value: 1312800.0 - type: dot_precision value: 100.0 - type: dot_recall value: 99.90069513406156 - type: euclidean_accuracy value: 99.90069513406156 - type: euclidean_accuracy_threshold value: 15150.791732002876 - type: euclidean_ap value: 100.0 - type: euclidean_f1 value: 99.95032290114257 - type: euclidean_f1_threshold value: 15150.791732002876 - type: euclidean_precision value: 100.0 - type: euclidean_recall value: 99.90069513406156 - type: main_score value: 100.0 - type: manhattan_accuracy value: 99.90069513406156 - type: manhattan_accuracy_threshold value: 717903.2791554928 - type: manhattan_ap value: 100.0 - type: manhattan_f1 value: 99.95032290114257 - type: manhattan_f1_threshold value: 717903.2791554928 - type: manhattan_precision value: 100.0 - type: manhattan_recall value: 99.90069513406156 - type: max_ap value: 100.0 - type: max_f1 value: 99.95032290114257 - type: max_precision value: 100.0 - type: max_recall value: 99.90069513406156 - type: similarity_accuracy value: 99.90069513406156 - type: similarity_accuracy_threshold value: 54.45001207375879 - type: similarity_ap value: 
100.0 - type: similarity_f1 value: 99.95032290114257 - type: similarity_f1_threshold value: 54.45001207375879 - type: similarity_precision value: 100.0 - type: similarity_recall value: 99.90069513406156 - task: type: PairClassification dataset: name: MTEB PawsXPairClassification (fr) type: google-research-datasets/paws-x config: fr split: test revision: 8a04d940a42cd40658986fdd8e3da561533a3646 metrics: - type: cosine_accuracy value: 67.95 - type: cosine_accuracy_threshold value: 97.36901285947026 - type: cosine_ap value: 70.14158727060726 - type: cosine_f1 value: 65.38108356290174 - type: cosine_f1_threshold value: 94.90683744884689 - type: cosine_precision value: 55.84313725490196 - type: cosine_recall value: 78.8482834994463 - type: dot_accuracy value: 60.5 - type: dot_accuracy_threshold value: 2606400.0 - type: dot_ap value: 57.0114505567262 - type: dot_f1 value: 63.29394387001477 - type: dot_f1_threshold value: 2345600.0 - type: dot_precision value: 47.4792243767313 - type: dot_recall value: 94.90586932447398 - type: euclidean_accuracy value: 68.05 - type: euclidean_accuracy_threshold value: 3824.99743197985 - type: euclidean_ap value: 70.01158306654237 - type: euclidean_f1 value: 65.21939953810623 - type: euclidean_f1_threshold value: 5187.47968966464 - type: euclidean_precision value: 55.942947702060216 - type: euclidean_recall value: 78.18383167220377 - type: main_score value: 70.14158727060726 - type: manhattan_accuracy value: 68.05 - type: manhattan_accuracy_threshold value: 191852.34832763672 - type: manhattan_ap value: 70.01670033904287 - type: manhattan_f1 value: 65.2854511970534 - type: manhattan_f1_threshold value: 246807.1710705757 - type: manhattan_precision value: 55.87076438140268 - type: manhattan_recall value: 78.51605758582502 - type: max_ap value: 70.14158727060726 - type: max_f1 value: 65.38108356290174 - type: max_precision value: 55.942947702060216 - type: max_recall value: 94.90586932447398 - type: similarity_accuracy value: 67.95 - type: similarity_accuracy_threshold value: 97.36901285947026 - type: similarity_ap value: 70.14158727060726 - type: similarity_f1 value: 65.38108356290174 - type: similarity_f1_threshold value: 94.90683744884689 - type: similarity_precision value: 55.84313725490196 - type: similarity_recall value: 78.8482834994463 - task: type: STS dataset: name: MTEB SICKFr type: Lajavaness/SICK-fr config: default split: test revision: e077ab4cf4774a1e36d86d593b150422fafd8e8a metrics: - type: cosine_pearson value: 79.79861486027 - type: cosine_spearman value: 79.3918786992987 - type: euclidean_pearson value: 77.73226212475764 - type: euclidean_spearman value: 79.08856888397014 - type: main_score value: 79.3918786992987 - type: manhattan_pearson value: 77.8002206650809 - type: manhattan_spearman value: 79.15284532531264 - type: pearson value: 79.79861486027 - type: spearman value: 79.3918786992987 - task: type: STS dataset: name: MTEB STS22 (fr) type: mteb/sts22-crosslingual-sts config: fr split: test revision: de9d86b3b84231dc21f76c7b7af1f28e2f57f6e3 metrics: - type: cosine_pearson value: 83.32314025534286 - type: cosine_spearman value: 83.2806004701507 - type: euclidean_pearson value: 81.88040500817269 - type: euclidean_spearman value: 82.73179823676206 - type: main_score value: 83.2806004701507 - type: manhattan_pearson value: 82.0438174605579 - type: manhattan_spearman value: 83.0253049811576 - type: pearson value: 83.32314025534286 - type: spearman value: 83.2806004701507 - task: type: STS dataset: name: MTEB STSBenchmarkMultilingualSTS (fr) type: 
mteb/stsb_multi_mt config: fr split: test revision: 29afa2569dcedaaa2fe6a3dcfebab33d28b82e8c metrics: - type: cosine_pearson value: 84.56723075054445 - type: cosine_spearman value: 85.08759191551403 - type: euclidean_pearson value: 83.186096744725 - type: euclidean_spearman value: 84.36958569816491 - type: main_score value: 85.08759191551403 - type: manhattan_pearson value: 83.1405072165467 - type: manhattan_spearman value: 84.34227830781155 - type: pearson value: 84.56723075054445 - type: spearman value: 85.08759191551403 - task: type: Summarization dataset: name: MTEB SummEvalFr type: lyon-nlp/summarization-summeval-fr-p2p config: default split: test revision: b385812de6a9577b6f4d0f88c6a6e35395a94054 metrics: - type: cosine_pearson value: 31.921764332449115 - type: cosine_spearman value: 31.260442997631806 - type: dot_pearson value: 31.585578707631406 - type: dot_spearman value: 31.479238746310028 - type: main_score value: 31.260442997631806 - type: pearson value: 31.921764332449115 - type: spearman value: 31.260442997631806 - task: type: Reranking dataset: name: MTEB SyntecReranking type: lyon-nlp/mteb-fr-reranking-syntec-s2p config: default split: test revision: daf0863838cd9e3ba50544cdce3ac2b338a1b0ad metrics: - type: main_score value: 91.83333333333333 - type: map value: 91.83333333333333 - type: mrr value: 92.0 - type: nAUC_map_diff1 value: 53.97793263646914 - type: nAUC_map_max value: 44.264158743282195 - type: nAUC_map_std value: 14.692218350754885 - type: nAUC_mrr_diff1 value: 54.36926882239366 - type: nAUC_mrr_max value: 46.43108510296003 - type: nAUC_mrr_std value: 17.48914092664096 - task: type: Retrieval dataset: name: MTEB SyntecRetrieval type: lyon-nlp/mteb-fr-retrieval-syntec-s2p config: default split: test revision: 19661ccdca4dfc2d15122d776b61685f48c68ca9 metrics: - type: main_score value: 90.36699999999999 - type: map_at_1 value: 79.0 - type: map_at_10 value: 87.18599999999999 - type: map_at_100 value: 87.18599999999999 - type: map_at_1000 value: 87.18599999999999 - type: map_at_20 value: 87.18599999999999 - type: map_at_3 value: 86.0 - type: map_at_5 value: 86.95 - type: mrr_at_1 value: 79.0 - type: mrr_at_10 value: 87.18611111111112 - type: mrr_at_100 value: 87.18611111111112 - type: mrr_at_1000 value: 87.18611111111112 - type: mrr_at_20 value: 87.18611111111112 - type: mrr_at_3 value: 86.0 - type: mrr_at_5 value: 86.95 - type: nauc_map_at_1000_diff1 value: 63.05539428169271 - type: nauc_map_at_1000_max value: 45.428107132447124 - type: nauc_map_at_1000_std value: 13.94507583970834 - type: nauc_map_at_100_diff1 value: 63.05539428169271 - type: nauc_map_at_100_max value: 45.428107132447124 - type: nauc_map_at_100_std value: 13.94507583970834 - type: nauc_map_at_10_diff1 value: 63.05539428169271 - type: nauc_map_at_10_max value: 45.428107132447124 - type: nauc_map_at_10_std value: 13.94507583970834 - type: nauc_map_at_1_diff1 value: 64.24122923028831 - type: nauc_map_at_1_max value: 44.34077957053877 - type: nauc_map_at_1_std value: 9.594344386466878 - type: nauc_map_at_20_diff1 value: 63.05539428169271 - type: nauc_map_at_20_max value: 45.428107132447124 - type: nauc_map_at_20_std value: 13.94507583970834 - type: nauc_map_at_3_diff1 value: 62.30831315577075 - type: nauc_map_at_3_max value: 47.33980193586779 - type: nauc_map_at_3_std value: 16.132624025733 - type: nauc_map_at_5_diff1 value: 63.079622378971834 - type: nauc_map_at_5_max value: 45.13424437707254 - type: nauc_map_at_5_std value: 13.730785051570013 - type: nauc_mrr_at_1000_diff1 value: 63.05539428169271 - 
type: nauc_mrr_at_1000_max value: 45.428107132447124 - type: nauc_mrr_at_1000_std value: 13.94507583970834 - type: nauc_mrr_at_100_diff1 value: 63.05539428169271 - type: nauc_mrr_at_100_max value: 45.428107132447124 - type: nauc_mrr_at_100_std value: 13.94507583970834 - type: nauc_mrr_at_10_diff1 value: 63.05539428169271 - type: nauc_mrr_at_10_max value: 45.428107132447124 - type: nauc_mrr_at_10_std value: 13.94507583970834 - type: nauc_mrr_at_1_diff1 value: 64.24122923028831 - type: nauc_mrr_at_1_max value: 44.34077957053877 - type: nauc_mrr_at_1_std value: 9.594344386466878 - type: nauc_mrr_at_20_diff1 value: 63.05539428169271 - type: nauc_mrr_at_20_max value: 45.428107132447124 - type: nauc_mrr_at_20_std value: 13.94507583970834 - type: nauc_mrr_at_3_diff1 value: 62.30831315577075 - type: nauc_mrr_at_3_max value: 47.33980193586779 - type: nauc_mrr_at_3_std value: 16.132624025733 - type: nauc_mrr_at_5_diff1 value: 63.079622378971834 - type: nauc_mrr_at_5_max value: 45.13424437707254 - type: nauc_mrr_at_5_std value: 13.730785051570013 - type: nauc_ndcg_at_1000_diff1 value: 62.97376441474187 - type: nauc_ndcg_at_1000_max value: 45.457846840130586 - type: nauc_ndcg_at_1000_std value: 14.17695491254452 - type: nauc_ndcg_at_100_diff1 value: 62.97376441474187 - type: nauc_ndcg_at_100_max value: 45.457846840130586 - type: nauc_ndcg_at_100_std value: 14.17695491254452 - type: nauc_ndcg_at_10_diff1 value: 62.97376441474187 - type: nauc_ndcg_at_10_max value: 45.457846840130586 - type: nauc_ndcg_at_10_std value: 14.17695491254452 - type: nauc_ndcg_at_1_diff1 value: 64.24122923028831 - type: nauc_ndcg_at_1_max value: 44.34077957053877 - type: nauc_ndcg_at_1_std value: 9.594344386466878 - type: nauc_ndcg_at_20_diff1 value: 62.97376441474187 - type: nauc_ndcg_at_20_max value: 45.457846840130586 - type: nauc_ndcg_at_20_std value: 14.17695491254452 - type: nauc_ndcg_at_3_diff1 value: 61.47043349797183 - type: nauc_ndcg_at_3_max value: 49.12165820225059 - type: nauc_ndcg_at_3_std value: 18.525396343409568 - type: nauc_ndcg_at_5_diff1 value: 63.04022063936115 - type: nauc_ndcg_at_5_max value: 44.381937619091765 - type: nauc_ndcg_at_5_std value: 13.3263412698325 - type: nauc_precision_at_1000_diff1 value: .nan - type: nauc_precision_at_1000_max value: .nan - type: nauc_precision_at_1000_std value: .nan - type: nauc_precision_at_100_diff1 value: .nan - type: nauc_precision_at_100_max value: .nan - type: nauc_precision_at_100_std value: .nan - type: nauc_precision_at_10_diff1 value: 100.0 - type: nauc_precision_at_10_max value: 100.0 - type: nauc_precision_at_10_std value: 100.0 - type: nauc_precision_at_1_diff1 value: 64.24122923028831 - type: nauc_precision_at_1_max value: 44.34077957053877 - type: nauc_precision_at_1_std value: 9.594344386466878 - type: nauc_precision_at_20_diff1 value: 100.0 - type: nauc_precision_at_20_max value: 100.0 - type: nauc_precision_at_20_std value: 100.0 - type: nauc_precision_at_3_diff1 value: 56.27917833800158 - type: nauc_precision_at_3_max value: 60.51976346093969 - type: nauc_precision_at_3_std value: 33.02209772798002 - type: nauc_precision_at_5_diff1 value: 63.81886087768404 - type: nauc_precision_at_5_max value: 27.544351073763345 - type: nauc_precision_at_5_std value: -0.4668534080301362 - type: nauc_recall_at_1000_diff1 value: .nan - type: nauc_recall_at_1000_max value: .nan - type: nauc_recall_at_1000_std value: .nan - type: nauc_recall_at_100_diff1 value: .nan - type: nauc_recall_at_100_max value: .nan - type: nauc_recall_at_100_std value: .nan - type: 
nauc_recall_at_10_diff1 value: .nan - type: nauc_recall_at_10_max value: .nan - type: nauc_recall_at_10_std value: .nan - type: nauc_recall_at_1_diff1 value: 64.24122923028831 - type: nauc_recall_at_1_max value: 44.34077957053877 - type: nauc_recall_at_1_std value: 9.594344386466878 - type: nauc_recall_at_20_diff1 value: .nan - type: nauc_recall_at_20_max value: .nan - type: nauc_recall_at_20_std value: .nan - type: nauc_recall_at_3_diff1 value: 56.27917833800187 - type: nauc_recall_at_3_max value: 60.51976346094 - type: nauc_recall_at_3_std value: 33.022097727980125 - type: nauc_recall_at_5_diff1 value: 63.81886087768457 - type: nauc_recall_at_5_max value: 27.544351073763107 - type: nauc_recall_at_5_std value: -0.46685340803013775 - type: ndcg_at_1 value: 79.0 - type: ndcg_at_10 value: 90.36699999999999 - type: ndcg_at_100 value: 90.36699999999999 - type: ndcg_at_1000 value: 90.36699999999999 - type: ndcg_at_20 value: 90.36699999999999 - type: ndcg_at_3 value: 88.071 - type: ndcg_at_5 value: 89.75 - type: precision_at_1 value: 79.0 - type: precision_at_10 value: 10.0 - type: precision_at_100 value: 1.0 - type: precision_at_1000 value: 0.1 - type: precision_at_20 value: 5.0 - type: precision_at_3 value: 31.333 - type: precision_at_5 value: 19.6 - type: recall_at_1 value: 79.0 - type: recall_at_10 value: 100.0 - type: recall_at_100 value: 100.0 - type: recall_at_1000 value: 100.0 - type: recall_at_20 value: 100.0 - type: recall_at_3 value: 94.0 - type: recall_at_5 value: 98.0 - task: type: Retrieval dataset: name: MTEB XPQARetrieval (fr) type: jinaai/xpqa config: fra-fra split: test revision: c99d599f0a6ab9b85b065da6f9d94f9cf731679f metrics: - type: main_score value: 77.425 - type: map_at_1 value: 46.749 - type: map_at_10 value: 72.108 - type: map_at_100 value: 73.32499999999999 - type: map_at_1000 value: 73.341 - type: map_at_20 value: 72.991 - type: map_at_3 value: 65.09 - type: map_at_5 value: 70.137 - type: mrr_at_1 value: 71.82910547396529 - type: mrr_at_10 value: 78.63357492529722 - type: mrr_at_100 value: 78.97374961354801 - type: mrr_at_1000 value: 78.97840549855806 - type: mrr_at_20 value: 78.86005025292395 - type: mrr_at_3 value: 77.28081886960389 - type: mrr_at_5 value: 78.0551846906987 - type: nauc_map_at_1000_diff1 value: 57.508397030020156 - type: nauc_map_at_1000_max value: 43.80251983780665 - type: nauc_map_at_1000_std value: -16.231491160419434 - type: nauc_map_at_100_diff1 value: 57.48614844875469 - type: nauc_map_at_100_max value: 43.797011627763055 - type: nauc_map_at_100_std value: -16.239303348969592 - type: nauc_map_at_10_diff1 value: 57.254064849553934 - type: nauc_map_at_10_max value: 42.765535577219026 - type: nauc_map_at_10_std value: -17.255606315997156 - type: nauc_map_at_1_diff1 value: 65.04324659040175 - type: nauc_map_at_1_max value: 17.852220653388855 - type: nauc_map_at_1_std value: -14.257753661018779 - type: nauc_map_at_20_diff1 value: 57.48367588324867 - type: nauc_map_at_20_max value: 43.680084254814425 - type: nauc_map_at_20_std value: -16.59381108810359 - type: nauc_map_at_3_diff1 value: 58.328817274958276 - type: nauc_map_at_3_max value: 34.603370607250675 - type: nauc_map_at_3_std value: -15.326569334165047 - type: nauc_map_at_5_diff1 value: 57.544271139796365 - type: nauc_map_at_5_max value: 41.58159814532708 - type: nauc_map_at_5_std value: -17.035562345654515 - type: nauc_mrr_at_1000_diff1 value: 67.23053035385993 - type: nauc_mrr_at_1000_max value: 53.982556981667095 - type: nauc_mrr_at_1000_std value: -12.015571062417035 - type: 
nauc_mrr_at_100_diff1 value: 67.23047293440347 - type: nauc_mrr_at_100_max value: 53.97931489747768 - type: nauc_mrr_at_100_std value: -12.026957248146365 - type: nauc_mrr_at_10_diff1 value: 67.25927907237941 - type: nauc_mrr_at_10_max value: 53.99647347811833 - type: nauc_mrr_at_10_std value: -12.356365137919108 - type: nauc_mrr_at_1_diff1 value: 67.80552098159194 - type: nauc_mrr_at_1_max value: 52.34740974885752 - type: nauc_mrr_at_1_std value: -9.009347371853096 - type: nauc_mrr_at_20_diff1 value: 67.22472566769486 - type: nauc_mrr_at_20_max value: 54.03480374123263 - type: nauc_mrr_at_20_std value: -12.129416933895373 - type: nauc_mrr_at_3_diff1 value: 66.86636026044627 - type: nauc_mrr_at_3_max value: 53.84675762408544 - type: nauc_mrr_at_3_std value: -12.318414220208327 - type: nauc_mrr_at_5_diff1 value: 67.16713697443882 - type: nauc_mrr_at_5_max value: 54.174275682276765 - type: nauc_mrr_at_5_std value: -12.382704200660772 - type: nauc_ndcg_at_1000_diff1 value: 60.076768803793875 - type: nauc_ndcg_at_1000_max value: 48.06880976583911 - type: nauc_ndcg_at_1000_std value: -14.8002468401513 - type: nauc_ndcg_at_100_diff1 value: 59.84195440900073 - type: nauc_ndcg_at_100_max value: 48.031759882567265 - type: nauc_ndcg_at_100_std value: -14.93671795434138 - type: nauc_ndcg_at_10_diff1 value: 59.091362656630984 - type: nauc_ndcg_at_10_max value: 45.902216798175296 - type: nauc_ndcg_at_10_std value: -18.225812204918686 - type: nauc_ndcg_at_1_diff1 value: 67.80552098159194 - type: nauc_ndcg_at_1_max value: 52.34740974885752 - type: nauc_ndcg_at_1_std value: -9.009347371853096 - type: nauc_ndcg_at_20_diff1 value: 59.80472569029982 - type: nauc_ndcg_at_20_max value: 47.92221974783734 - type: nauc_ndcg_at_20_std value: -16.589965314279805 - type: nauc_ndcg_at_3_diff1 value: 56.9195769675713 - type: nauc_ndcg_at_3_max value: 44.992740041222575 - type: nauc_ndcg_at_3_std value: -16.329730380555382 - type: nauc_ndcg_at_5_diff1 value: 59.31912266230594 - type: nauc_ndcg_at_5_max value: 44.75423089733974 - type: nauc_ndcg_at_5_std value: -17.744216780645583 - type: nauc_precision_at_1000_diff1 value: -30.976050318575094 - type: nauc_precision_at_1000_max value: 16.55619583017722 - type: nauc_precision_at_1000_std value: 10.549164466552044 - type: nauc_precision_at_100_diff1 value: -30.217028356940872 - type: nauc_precision_at_100_max value: 17.709049202840184 - type: nauc_precision_at_100_std value: 10.04190905252673 - type: nauc_precision_at_10_diff1 value: -19.588612396735584 - type: nauc_precision_at_10_max value: 23.97095583735318 - type: nauc_precision_at_10_std value: 1.3308819095790259 - type: nauc_precision_at_1_diff1 value: 67.80552098159194 - type: nauc_precision_at_1_max value: 52.34740974885752 - type: nauc_precision_at_1_std value: -9.009347371853096 - type: nauc_precision_at_20_diff1 value: -24.56372903999468 - type: nauc_precision_at_20_max value: 21.970766470092478 - type: nauc_precision_at_20_std value: 5.690019568793079 - type: nauc_precision_at_3_diff1 value: -5.293993834675436 - type: nauc_precision_at_3_max value: 33.48037221970611 - type: nauc_precision_at_3_std value: -0.9905029996040207 - type: nauc_precision_at_5_diff1 value: -12.477204961113433 - type: nauc_precision_at_5_max value: 28.41320824321574 - type: nauc_precision_at_5_std value: -0.25510168506666026 - type: nauc_recall_at_1000_diff1 value: 63.80720019823024 - type: nauc_recall_at_1000_max value: 100.0 - type: nauc_recall_at_1000_std value: 100.0 - type: nauc_recall_at_100_diff1 value: 45.99503772001805 - type: 
nauc_recall_at_100_max value: 53.62256247578381 - type: nauc_recall_at_100_std value: -2.1521605315502126 - type: nauc_recall_at_10_diff1 value: 51.49183566173087 - type: nauc_recall_at_10_max value: 39.94460610694432 - type: nauc_recall_at_10_std value: -27.417226994058534 - type: nauc_recall_at_1_diff1 value: 65.04324659040175 - type: nauc_recall_at_1_max value: 17.852220653388855 - type: nauc_recall_at_1_std value: -14.257753661018779 - type: nauc_recall_at_20_diff1 value: 53.65987970751146 - type: nauc_recall_at_20_max value: 48.20536243702891 - type: nauc_recall_at_20_std value: -24.77784527777353 - type: nauc_recall_at_3_diff1 value: 53.27794448209969 - type: nauc_recall_at_3_max value: 30.304767840963283 - type: nauc_recall_at_3_std value: -19.099603261339936 - type: nauc_recall_at_5_diff1 value: 53.77383683020561 - type: nauc_recall_at_5_max value: 39.58616026474047 - type: nauc_recall_at_5_std value: -23.255086482736036 - type: ndcg_at_1 value: 71.829 - type: ndcg_at_10 value: 77.425 - type: ndcg_at_100 value: 80.88 - type: ndcg_at_1000 value: 81.128 - type: ndcg_at_20 value: 79.403 - type: ndcg_at_3 value: 72.89 - type: ndcg_at_5 value: 74.521 - type: precision_at_1 value: 71.829 - type: precision_at_10 value: 17.596999999999998 - type: precision_at_100 value: 2.033 - type: precision_at_1000 value: 0.207 - type: precision_at_20 value: 9.513 - type: precision_at_3 value: 44.192 - type: precision_at_5 value: 31.776 - type: recall_at_1 value: 46.749 - type: recall_at_10 value: 85.49799999999999 - type: recall_at_100 value: 98.17099999999999 - type: recall_at_1000 value: 99.733 - type: recall_at_20 value: 91.70700000000001 - type: recall_at_3 value: 70.309 - type: recall_at_5 value: 78.507 - task: type: Classification dataset: name: MTEB AllegroReviews type: PL-MTEB/allegro-reviews config: default split: test revision: b89853e6de927b0e3bfa8ecc0e56fe4e02ceafc6 metrics: - type: accuracy value: 65.0 - type: f1 value: 58.85888258599016 - type: f1_weighted value: 65.99554726292321 - type: main_score value: 65.0 - task: type: Retrieval dataset: name: MTEB ArguAna-PL type: clarin-knext/arguana-pl config: default split: test revision: 63fc86750af76253e8c760fc9e534bbf24d260a2 metrics: - type: main_score value: 59.71300000000001 - type: map_at_1 value: 35.135 - type: map_at_10 value: 51.092000000000006 - type: map_at_100 value: 51.773 - type: map_at_1000 value: 51.776999999999994 - type: map_at_20 value: 51.665000000000006 - type: map_at_3 value: 46.574 - type: map_at_5 value: 49.032 - type: mrr_at_1 value: 36.201991465149355 - type: mrr_at_10 value: 51.546405427984475 - type: mrr_at_100 value: 52.202374673015285 - type: mrr_at_1000 value: 52.20610086068531 - type: mrr_at_20 value: 52.096805353180756 - type: mrr_at_3 value: 47.01280227596022 - type: mrr_at_5 value: 49.49146514935999 - type: nauc_map_at_1000_diff1 value: 19.758403663654388 - type: nauc_map_at_1000_max value: 1.9211716901459552 - type: nauc_map_at_1000_std value: -12.391775130617594 - type: nauc_map_at_100_diff1 value: 19.75801012476506 - type: nauc_map_at_100_max value: 1.927233271789035 - type: nauc_map_at_100_std value: -12.390686358565384 - type: nauc_map_at_10_diff1 value: 19.618023487744257 - type: nauc_map_at_10_max value: 1.948823709088292 - type: nauc_map_at_10_std value: -12.590649627823774 - type: nauc_map_at_1_diff1 value: 22.704520355653777 - type: nauc_map_at_1_max value: -0.7340073588952427 - type: nauc_map_at_1_std value: -11.685082615631233 - type: nauc_map_at_20_diff1 value: 19.710150386755245 - type: 
nauc_map_at_20_max value: 1.9579689185617946 - type: nauc_map_at_20_std value: -12.454848473878485 - type: nauc_map_at_3_diff1 value: 19.88571571635227 - type: nauc_map_at_3_max value: 2.2089391275055754 - type: nauc_map_at_3_std value: -12.152625563551476 - type: nauc_map_at_5_diff1 value: 19.345423817148774 - type: nauc_map_at_5_max value: 2.4471831202433783 - type: nauc_map_at_5_std value: -11.60532301686549 - type: nauc_mrr_at_1000_diff1 value: 16.90786453167799 - type: nauc_mrr_at_1000_max value: 0.65578323377857 - type: nauc_mrr_at_1000_std value: -12.395929715413015 - type: nauc_mrr_at_100_diff1 value: 16.90781127619206 - type: nauc_mrr_at_100_max value: 0.6619900297824423 - type: nauc_mrr_at_100_std value: -12.394826789608906 - type: nauc_mrr_at_10_diff1 value: 16.785894192163838 - type: nauc_mrr_at_10_max value: 0.7096666849274212 - type: nauc_mrr_at_10_std value: -12.592883550594735 - type: nauc_mrr_at_1_diff1 value: 19.59282927806732 - type: nauc_mrr_at_1_max value: -1.1271716729359413 - type: nauc_mrr_at_1_std value: -11.710668880297517 - type: nauc_mrr_at_20_diff1 value: 16.86673477981559 - type: nauc_mrr_at_20_max value: 0.6897167399764257 - type: nauc_mrr_at_20_std value: -12.464631471378414 - type: nauc_mrr_at_3_diff1 value: 17.0481261621288 - type: nauc_mrr_at_3_max value: 0.7183007174016199 - type: nauc_mrr_at_3_std value: -12.329335728574527 - type: nauc_mrr_at_5_diff1 value: 16.698916629443854 - type: nauc_mrr_at_5_max value: 1.2515514207224299 - type: nauc_mrr_at_5_std value: -11.662599392805308 - type: nauc_ndcg_at_1000_diff1 value: 19.30605856078901 - type: nauc_ndcg_at_1000_max value: 2.3402231520806835 - type: nauc_ndcg_at_1000_std value: -12.370409989770332 - type: nauc_ndcg_at_100_diff1 value: 19.31155460872256 - type: nauc_ndcg_at_100_max value: 2.510633162779702 - type: nauc_ndcg_at_100_std value: -12.313796276064673 - type: nauc_ndcg_at_10_diff1 value: 18.511651466450843 - type: nauc_ndcg_at_10_max value: 2.6756675185155263 - type: nauc_ndcg_at_10_std value: -13.573610085360095 - type: nauc_ndcg_at_1_diff1 value: 22.704520355653777 - type: nauc_ndcg_at_1_max value: -0.7340073588952427 - type: nauc_ndcg_at_1_std value: -11.685082615631233 - type: nauc_ndcg_at_20_diff1 value: 19.01305812933961 - type: nauc_ndcg_at_20_max value: 2.777977280012548 - type: nauc_ndcg_at_20_std value: -12.959515013552128 - type: nauc_ndcg_at_3_diff1 value: 19.15053976740578 - type: nauc_ndcg_at_3_max value: 3.2587972262385496 - type: nauc_ndcg_at_3_std value: -12.105808757691328 - type: nauc_ndcg_at_5_diff1 value: 18.010082675090597 - type: nauc_ndcg_at_5_max value: 3.753876824229378 - type: nauc_ndcg_at_5_std value: -11.044202434548701 - type: nauc_precision_at_1000_diff1 value: -11.75783343822487 - type: nauc_precision_at_1000_max value: 5.7856460776313465 - type: nauc_precision_at_1000_std value: 62.79171280927037 - type: nauc_precision_at_100_diff1 value: 9.08527555500537 - type: nauc_precision_at_100_max value: 36.16754653078746 - type: nauc_precision_at_100_std value: 28.37969482833522 - type: nauc_precision_at_10_diff1 value: 10.685081888632977 - type: nauc_precision_at_10_max value: 7.185779514361452 - type: nauc_precision_at_10_std value: -22.209758078034394 - type: nauc_precision_at_1_diff1 value: 22.704520355653777 - type: nauc_precision_at_1_max value: -0.7340073588952427 - type: nauc_precision_at_1_std value: -11.685082615631233 - type: nauc_precision_at_20_diff1 value: 10.0745772945806 - type: nauc_precision_at_20_max value: 16.81469938479116 - type: 
nauc_precision_at_20_std value: -22.804277740935298 - type: nauc_precision_at_3_diff1 value: 16.900587067301714 - type: nauc_precision_at_3_max value: 6.595958907337978 - type: nauc_precision_at_3_std value: -11.888316132805594 - type: nauc_precision_at_5_diff1 value: 12.771428972972895 - type: nauc_precision_at_5_max value: 8.79201485711544 - type: nauc_precision_at_5_std value: -8.609881800940762 - type: nauc_recall_at_1000_diff1 value: -11.757833438225305 - type: nauc_recall_at_1000_max value: 5.785646077628613 - type: nauc_recall_at_1000_std value: 62.791712809264176 - type: nauc_recall_at_100_diff1 value: 9.085275555005722 - type: nauc_recall_at_100_max value: 36.167546530787995 - type: nauc_recall_at_100_std value: 28.37969482833511 - type: nauc_recall_at_10_diff1 value: 10.68508188863288 - type: nauc_recall_at_10_max value: 7.185779514361484 - type: nauc_recall_at_10_std value: -22.209758078034465 - type: nauc_recall_at_1_diff1 value: 22.704520355653777 - type: nauc_recall_at_1_max value: -0.7340073588952427 - type: nauc_recall_at_1_std value: -11.685082615631233 - type: nauc_recall_at_20_diff1 value: 10.074577294581067 - type: nauc_recall_at_20_max value: 16.814699384791545 - type: nauc_recall_at_20_std value: -22.80427774093497 - type: nauc_recall_at_3_diff1 value: 16.900587067301768 - type: nauc_recall_at_3_max value: 6.595958907337955 - type: nauc_recall_at_3_std value: -11.888316132805613 - type: nauc_recall_at_5_diff1 value: 12.77142897297289 - type: nauc_recall_at_5_max value: 8.792014857115413 - type: nauc_recall_at_5_std value: -8.609881800940697 - type: ndcg_at_1 value: 35.135 - type: ndcg_at_10 value: 59.71300000000001 - type: ndcg_at_100 value: 62.5 - type: ndcg_at_1000 value: 62.578 - type: ndcg_at_20 value: 61.775000000000006 - type: ndcg_at_3 value: 50.336999999999996 - type: ndcg_at_5 value: 54.748 - type: precision_at_1 value: 35.135 - type: precision_at_10 value: 8.72 - type: precision_at_100 value: 0.991 - type: precision_at_1000 value: 0.1 - type: precision_at_20 value: 4.765 - type: precision_at_3 value: 20.413 - type: precision_at_5 value: 14.381 - type: recall_at_1 value: 35.135 - type: recall_at_10 value: 87.198 - type: recall_at_100 value: 99.075 - type: recall_at_1000 value: 99.644 - type: recall_at_20 value: 95.306 - type: recall_at_3 value: 61.23800000000001 - type: recall_at_5 value: 71.906 - task: type: Classification dataset: name: MTEB CBD type: PL-MTEB/cbd config: default split: test revision: 36ddb419bcffe6a5374c3891957912892916f28d metrics: - type: accuracy value: 84.13000000000001 - type: ap value: 38.21674564144456 - type: ap_weighted value: 38.21674564144456 - type: f1 value: 73.58128735002478 - type: f1_weighted value: 85.75596717538494 - type: main_score value: 84.13000000000001 - task: type: PairClassification dataset: name: MTEB CDSC-E type: PL-MTEB/cdsce-pairclassification config: default split: test revision: 0a3d4aa409b22f80eb22cbf59b492637637b536d metrics: - type: cosine_accuracy value: 89.0 - type: cosine_accuracy_threshold value: 95.30268088769837 - type: cosine_ap value: 78.23422403821777 - type: cosine_f1 value: 69.23076923076923 - type: cosine_f1_threshold value: 87.1877340095262 - type: cosine_precision value: 67.5 - type: cosine_recall value: 71.05263157894737 - type: dot_accuracy value: 88.3 - type: dot_accuracy_threshold value: 2472000.0 - type: dot_ap value: 74.26705897704197 - type: dot_f1 value: 66.49874055415617 - type: dot_f1_threshold value: 2316800.0 - type: dot_precision value: 63.76811594202898 - type: dot_recall value: 
69.47368421052632 - type: euclidean_accuracy value: 89.2 - type: euclidean_accuracy_threshold value: 6878.705188647788 - type: euclidean_ap value: 78.51718555534579 - type: euclidean_f1 value: 69.54314720812182 - type: euclidean_f1_threshold value: 8323.035838252725 - type: euclidean_precision value: 67.15686274509804 - type: euclidean_recall value: 72.10526315789474 - type: main_score value: 78.51718555534579 - type: manhattan_accuracy value: 89.2 - type: manhattan_accuracy_threshold value: 326812.48528957367 - type: manhattan_ap value: 78.50895632545628 - type: manhattan_f1 value: 69.84924623115577 - type: manhattan_f1_threshold value: 398102.616417408 - type: manhattan_precision value: 66.82692307692307 - type: manhattan_recall value: 73.15789473684211 - type: max_ap value: 78.51718555534579 - type: max_f1 value: 69.84924623115577 - type: max_precision value: 67.5 - type: max_recall value: 73.15789473684211 - type: similarity_accuracy value: 89.0 - type: similarity_accuracy_threshold value: 95.30268088769837 - type: similarity_ap value: 78.23422403821777 - type: similarity_f1 value: 69.23076923076923 - type: similarity_f1_threshold value: 87.1877340095262 - type: similarity_precision value: 67.5 - type: similarity_recall value: 71.05263157894737 - task: type: STS dataset: name: MTEB CDSC-R type: PL-MTEB/cdscr-sts config: default split: test revision: 1cd6abbb00df7d14be3dbd76a7dcc64b3a79a7cd metrics: - type: cosine_pearson value: 91.04238667979497 - type: cosine_spearman value: 90.96758456402505 - type: euclidean_pearson value: 88.88396869759062 - type: euclidean_spearman value: 90.80235709678217 - type: main_score value: 90.96758456402505 - type: manhattan_pearson value: 88.91331977492183 - type: manhattan_spearman value: 90.82823486754444 - type: pearson value: 91.04238667979497 - type: spearman value: 90.96758456402505 - task: type: Retrieval dataset: name: MTEB DBPedia-PL type: clarin-knext/dbpedia-pl config: default split: test revision: 76afe41d9af165cc40999fcaa92312b8b012064a metrics: - type: main_score value: 43.189 - type: map_at_1 value: 8.838 - type: map_at_10 value: 20.335 - type: map_at_100 value: 29.818 - type: map_at_1000 value: 31.672 - type: map_at_20 value: 24.037 - type: map_at_3 value: 14.144000000000002 - type: map_at_5 value: 16.674 - type: mrr_at_1 value: 66.25 - type: mrr_at_10 value: 74.51428571428573 - type: mrr_at_100 value: 74.85025528596333 - type: mrr_at_1000 value: 74.861579760375 - type: mrr_at_20 value: 74.75227906231197 - type: mrr_at_3 value: 73.25 - type: mrr_at_5 value: 73.825 - type: nauc_map_at_1000_diff1 value: 25.397956304548963 - type: nauc_map_at_1000_max value: 34.60045634629073 - type: nauc_map_at_1000_std value: 25.484338507029523 - type: nauc_map_at_100_diff1 value: 26.732402811074362 - type: nauc_map_at_100_max value: 33.16273154550298 - type: nauc_map_at_100_std value: 22.705558316419694 - type: nauc_map_at_10_diff1 value: 31.048350740517666 - type: nauc_map_at_10_max value: 20.58247280790142 - type: nauc_map_at_10_std value: -0.3057740988996755 - type: nauc_map_at_1_diff1 value: 37.44384898753489 - type: nauc_map_at_1_max value: 2.009066872007797 - type: nauc_map_at_1_std value: -18.38972044447374 - type: nauc_map_at_20_diff1 value: 29.145950023489974 - type: nauc_map_at_20_max value: 25.337239700245075 - type: nauc_map_at_20_std value: 7.680343084384305 - type: nauc_map_at_3_diff1 value: 32.41886776815376 - type: nauc_map_at_3_max value: 8.976460728750666 - type: nauc_map_at_3_std value: -14.206927116348458 - type: nauc_map_at_5_diff1 
value: 31.316919153957873 - type: nauc_map_at_5_max value: 14.015365438005226 - type: nauc_map_at_5_std value: -8.909007562143335 - type: nauc_mrr_at_1000_diff1 value: 42.77521158292109 - type: nauc_mrr_at_1000_max value: 58.03733674934908 - type: nauc_mrr_at_1000_std value: 42.65118460573791 - type: nauc_mrr_at_100_diff1 value: 42.76917109803571 - type: nauc_mrr_at_100_max value: 58.04747433083853 - type: nauc_mrr_at_100_std value: 42.65151388365855 - type: nauc_mrr_at_10_diff1 value: 42.4992726119988 - type: nauc_mrr_at_10_max value: 58.157080658302974 - type: nauc_mrr_at_10_std value: 42.98778606676595 - type: nauc_mrr_at_1_diff1 value: 46.67764597969527 - type: nauc_mrr_at_1_max value: 54.52896662427813 - type: nauc_mrr_at_1_std value: 35.71181387979735 - type: nauc_mrr_at_20_diff1 value: 42.79101300218034 - type: nauc_mrr_at_20_max value: 58.05679669975563 - type: nauc_mrr_at_20_std value: 42.72288886007032 - type: nauc_mrr_at_3_diff1 value: 41.85440967628899 - type: nauc_mrr_at_3_max value: 57.975577899726126 - type: nauc_mrr_at_3_std value: 43.523432037784985 - type: nauc_mrr_at_5_diff1 value: 42.3041465494315 - type: nauc_mrr_at_5_max value: 58.54530113479029 - type: nauc_mrr_at_5_std value: 43.2944834223015 - type: nauc_ndcg_at_1000_diff1 value: 32.16216922989725 - type: nauc_ndcg_at_1000_max value: 50.03467332768009 - type: nauc_ndcg_at_1000_std value: 42.87877265207483 - type: nauc_ndcg_at_100_diff1 value: 33.55193527551313 - type: nauc_ndcg_at_100_max value: 45.12048953873363 - type: nauc_ndcg_at_100_std value: 34.788021436199024 - type: nauc_ndcg_at_10_diff1 value: 31.14168233882658 - type: nauc_ndcg_at_10_max value: 45.31079148382448 - type: nauc_ndcg_at_10_std value: 28.555214349385466 - type: nauc_ndcg_at_1_diff1 value: 45.12481069889602 - type: nauc_ndcg_at_1_max value: 45.93377570654117 - type: nauc_ndcg_at_1_std value: 26.672617000885186 - type: nauc_ndcg_at_20_diff1 value: 31.81216979830056 - type: nauc_ndcg_at_20_max value: 41.93464767693644 - type: nauc_ndcg_at_20_std value: 26.08707327004535 - type: nauc_ndcg_at_3_diff1 value: 29.90627202771331 - type: nauc_ndcg_at_3_max value: 46.50414958925517 - type: nauc_ndcg_at_3_std value: 29.66009841753563 - type: nauc_ndcg_at_5_diff1 value: 29.08122779713697 - type: nauc_ndcg_at_5_max value: 46.81499760516951 - type: nauc_ndcg_at_5_std value: 29.935930977468267 - type: nauc_precision_at_1000_diff1 value: -18.71150014402453 - type: nauc_precision_at_1000_max value: -0.9220395765472844 - type: nauc_precision_at_1000_std value: 7.219897945975822 - type: nauc_precision_at_100_diff1 value: -8.609528664023014 - type: nauc_precision_at_100_max value: 29.147048677242864 - type: nauc_precision_at_100_std value: 44.958041507680036 - type: nauc_precision_at_10_diff1 value: 2.8689201908213477 - type: nauc_precision_at_10_max value: 44.40893361361308 - type: nauc_precision_at_10_std value: 47.18569807586499 - type: nauc_precision_at_1_diff1 value: 46.01228536231763 - type: nauc_precision_at_1_max value: 54.30280987857099 - type: nauc_precision_at_1_std value: 36.923128493492776 - type: nauc_precision_at_20_diff1 value: -1.9783515948740122 - type: nauc_precision_at_20_max value: 38.42066921295958 - type: nauc_precision_at_20_std value: 47.41935674153161 - type: nauc_precision_at_3_diff1 value: 9.877584475384026 - type: nauc_precision_at_3_max value: 44.77006526403546 - type: nauc_precision_at_3_std value: 39.51299545977156 - type: nauc_precision_at_5_diff1 value: 5.096217475317008 - type: nauc_precision_at_5_max value: 45.66716959157208 - 
type: nauc_precision_at_5_std value: 42.651208343259505 - type: nauc_recall_at_1000_diff1 value: 25.395292649442965 - type: nauc_recall_at_1000_max value: 44.94193476114992 - type: nauc_recall_at_1000_std value: 53.58345238223027 - type: nauc_recall_at_100_diff1 value: 23.962022146293293 - type: nauc_recall_at_100_max value: 32.15140842028602 - type: nauc_recall_at_100_std value: 30.57126984952762 - type: nauc_recall_at_10_diff1 value: 28.120539807446004 - type: nauc_recall_at_10_max value: 18.154834280193572 - type: nauc_recall_at_10_std value: -0.6032386653260938 - type: nauc_recall_at_1_diff1 value: 37.44384898753489 - type: nauc_recall_at_1_max value: 2.009066872007797 - type: nauc_recall_at_1_std value: -18.38972044447374 - type: nauc_recall_at_20_diff1 value: 23.438945970294554 - type: nauc_recall_at_20_max value: 17.201259624644326 - type: nauc_recall_at_20_std value: 3.75587033487961 - type: nauc_recall_at_3_diff1 value: 29.867460507200587 - type: nauc_recall_at_3_max value: 8.066960542463528 - type: nauc_recall_at_3_std value: -15.13440571172203 - type: nauc_recall_at_5_diff1 value: 28.657118879661887 - type: nauc_recall_at_5_max value: 12.942552735963842 - type: nauc_recall_at_5_std value: -9.57735672972808 - type: ndcg_at_1 value: 54.50000000000001 - type: ndcg_at_10 value: 43.189 - type: ndcg_at_100 value: 48.595 - type: ndcg_at_1000 value: 55.681000000000004 - type: ndcg_at_20 value: 43.09 - type: ndcg_at_3 value: 47.599000000000004 - type: ndcg_at_5 value: 44.907000000000004 - type: precision_at_1 value: 66.5 - type: precision_at_10 value: 35.725 - type: precision_at_100 value: 11.583 - type: precision_at_1000 value: 2.302 - type: precision_at_20 value: 27.375 - type: precision_at_3 value: 52.0 - type: precision_at_5 value: 44.7 - type: recall_at_1 value: 8.838 - type: recall_at_10 value: 25.424999999999997 - type: recall_at_100 value: 55.632000000000005 - type: recall_at_1000 value: 77.857 - type: recall_at_20 value: 34.458 - type: recall_at_3 value: 15.229999999999999 - type: recall_at_5 value: 18.872 - task: type: Clustering dataset: name: MTEB 8TagsClustering type: PL-MTEB/8tags-clustering config: default split: test revision: None metrics: - type: main_score value: 50.28804848851286 - type: v_measure value: 50.28804848851286 - type: v_measure_std value: 2.9879120747919505 - task: type: Retrieval dataset: name: MTEB FiQA-PL type: clarin-knext/fiqa-pl config: default split: test revision: 2e535829717f8bf9dc829b7f911cc5bbd4e6608e metrics: - type: main_score value: 46.121 - type: map_at_1 value: 24.027 - type: map_at_10 value: 38.14 - type: map_at_100 value: 40.092 - type: map_at_1000 value: 40.266000000000005 - type: map_at_20 value: 39.195 - type: map_at_3 value: 33.415 - type: map_at_5 value: 36.115 - type: mrr_at_1 value: 46.60493827160494 - type: mrr_at_10 value: 54.70305457573974 - type: mrr_at_100 value: 55.355642920233414 - type: mrr_at_1000 value: 55.3908291424442 - type: mrr_at_20 value: 55.00793641725012 - type: mrr_at_3 value: 52.3148148148148 - type: mrr_at_5 value: 53.54166666666664 - type: nauc_map_at_1000_diff1 value: 37.73510043188139 - type: nauc_map_at_1000_max value: 28.32920495001755 - type: nauc_map_at_1000_std value: 2.1388839190211293 - type: nauc_map_at_100_diff1 value: 37.670108404247685 - type: nauc_map_at_100_max value: 28.227406812543826 - type: nauc_map_at_100_std value: 2.120931632442644 - type: nauc_map_at_10_diff1 value: 37.465256098544174 - type: nauc_map_at_10_max value: 27.091226456549666 - type: nauc_map_at_10_std value: 
1.1173775566235409 - type: nauc_map_at_1_diff1 value: 41.23855326212752 - type: nauc_map_at_1_max value: 21.290748552864557 - type: nauc_map_at_1_std value: -0.8385928448565472 - type: nauc_map_at_20_diff1 value: 37.47054494805535 - type: nauc_map_at_20_max value: 27.729045702955386 - type: nauc_map_at_20_std value: 1.7216485460777051 - type: nauc_map_at_3_diff1 value: 37.262641031829105 - type: nauc_map_at_3_max value: 23.89124216989901 - type: nauc_map_at_3_std value: -0.14736489529369678 - type: nauc_map_at_5_diff1 value: 37.054030521972926 - type: nauc_map_at_5_max value: 25.37485175729055 - type: nauc_map_at_5_std value: 0.1603899014557275 - type: nauc_mrr_at_1000_diff1 value: 45.74249029214392 - type: nauc_mrr_at_1000_max value: 36.07619933100338 - type: nauc_mrr_at_1000_std value: 4.393752835100674 - type: nauc_mrr_at_100_diff1 value: 45.72338919745602 - type: nauc_mrr_at_100_max value: 36.07500193737586 - type: nauc_mrr_at_100_std value: 4.415904610787372 - type: nauc_mrr_at_10_diff1 value: 45.712821401955814 - type: nauc_mrr_at_10_max value: 36.077633940467855 - type: nauc_mrr_at_10_std value: 4.31515612100577 - type: nauc_mrr_at_1_diff1 value: 48.95197646135339 - type: nauc_mrr_at_1_max value: 37.627960253727124 - type: nauc_mrr_at_1_std value: 4.355410396712492 - type: nauc_mrr_at_20_diff1 value: 45.657031672968316 - type: nauc_mrr_at_20_max value: 36.02034080808377 - type: nauc_mrr_at_20_std value: 4.291569107759258 - type: nauc_mrr_at_3_diff1 value: 46.14016248486381 - type: nauc_mrr_at_3_max value: 35.096997959937816 - type: nauc_mrr_at_3_std value: 3.473234729162835 - type: nauc_mrr_at_5_diff1 value: 46.044456362138746 - type: nauc_mrr_at_5_max value: 35.54259698630834 - type: nauc_mrr_at_5_std value: 3.242035621890524 - type: nauc_ndcg_at_1000_diff1 value: 39.37342092420808 - type: nauc_ndcg_at_1000_max value: 32.34854163612446 - type: nauc_ndcg_at_1000_std value: 4.9764682793258865 - type: nauc_ndcg_at_100_diff1 value: 38.396532780365966 - type: nauc_ndcg_at_100_max value: 31.427345966345072 - type: nauc_ndcg_at_100_std value: 5.436384757156155 - type: nauc_ndcg_at_10_diff1 value: 38.33852883060773 - type: nauc_ndcg_at_10_max value: 29.405844267873825 - type: nauc_ndcg_at_10_std value: 2.9724473995284453 - type: nauc_ndcg_at_1_diff1 value: 49.360894087944914 - type: nauc_ndcg_at_1_max value: 37.10711812240423 - type: nauc_ndcg_at_1_std value: 3.8523559329866988 - type: nauc_ndcg_at_20_diff1 value: 38.050204646363945 - type: nauc_ndcg_at_20_max value: 29.935603389108866 - type: nauc_ndcg_at_20_std value: 3.779925764680313 - type: nauc_ndcg_at_3_diff1 value: 39.4668764835337 - type: nauc_ndcg_at_3_max value: 30.65976708125836 - type: nauc_ndcg_at_3_std value: 1.2337033504877237 - type: nauc_ndcg_at_5_diff1 value: 38.86503445443355 - type: nauc_ndcg_at_5_max value: 29.0023578220992 - type: nauc_ndcg_at_5_std value: 0.8206100069462643 - type: nauc_precision_at_1000_diff1 value: 5.84775168273073 - type: nauc_precision_at_1000_max value: 27.58660371315182 - type: nauc_precision_at_1000_std value: 9.028324162807364 - type: nauc_precision_at_100_diff1 value: 10.655637431827838 - type: nauc_precision_at_100_max value: 32.11889757111383 - type: nauc_precision_at_100_std value: 13.051376462007925 - type: nauc_precision_at_10_diff1 value: 20.55227291550576 - type: nauc_precision_at_10_max value: 34.48969436232284 - type: nauc_precision_at_10_std value: 7.57890876950882 - type: nauc_precision_at_1_diff1 value: 49.360894087944914 - type: nauc_precision_at_1_max value: 37.10711812240423 
- type: nauc_precision_at_1_std value: 3.8523559329866988 - type: nauc_precision_at_20_diff1 value: 16.62880025315897 - type: nauc_precision_at_20_max value: 34.15703662717139 - type: nauc_precision_at_20_std value: 10.909431920732883 - type: nauc_precision_at_3_diff1 value: 28.04332082306772 - type: nauc_precision_at_3_max value: 31.009374202971753 - type: nauc_precision_at_3_std value: 2.307756409916575 - type: nauc_precision_at_5_diff1 value: 24.824270715808705 - type: nauc_precision_at_5_max value: 31.644036540931886 - type: nauc_precision_at_5_std value: 2.958068954639614 - type: nauc_recall_at_1000_diff1 value: 23.79234063489045 - type: nauc_recall_at_1000_max value: 26.76365425679858 - type: nauc_recall_at_1000_std value: 23.815318997671913 - type: nauc_recall_at_100_diff1 value: 22.399781833514737 - type: nauc_recall_at_100_max value: 23.192360958839174 - type: nauc_recall_at_100_std value: 15.984687692762742 - type: nauc_recall_at_10_diff1 value: 28.512649044683837 - type: nauc_recall_at_10_max value: 22.77819651497193 - type: nauc_recall_at_10_std value: 4.646633382718951 - type: nauc_recall_at_1_diff1 value: 41.23855326212752 - type: nauc_recall_at_1_max value: 21.290748552864557 - type: nauc_recall_at_1_std value: -0.8385928448565472 - type: nauc_recall_at_20_diff1 value: 26.797853661700632 - type: nauc_recall_at_20_max value: 21.9956231017133 - type: nauc_recall_at_20_std value: 5.664775183514371 - type: nauc_recall_at_3_diff1 value: 31.42511076281081 - type: nauc_recall_at_3_max value: 19.459398184547652 - type: nauc_recall_at_3_std value: -0.8592886454260257 - type: nauc_recall_at_5_diff1 value: 29.62950699804912 - type: nauc_recall_at_5_max value: 19.941323519486684 - type: nauc_recall_at_5_std value: -0.45387351120880465 - type: ndcg_at_1 value: 46.451 - type: ndcg_at_10 value: 46.121 - type: ndcg_at_100 value: 52.830999999999996 - type: ndcg_at_1000 value: 55.557 - type: ndcg_at_20 value: 48.535000000000004 - type: ndcg_at_3 value: 42.178 - type: ndcg_at_5 value: 43.406 - type: precision_at_1 value: 46.451 - type: precision_at_10 value: 12.562000000000001 - type: precision_at_100 value: 1.963 - type: precision_at_1000 value: 0.244 - type: precision_at_20 value: 7.392 - type: precision_at_3 value: 27.572000000000003 - type: precision_at_5 value: 20.031 - type: recall_at_1 value: 24.027 - type: recall_at_10 value: 52.61900000000001 - type: recall_at_100 value: 77.491 - type: recall_at_1000 value: 93.55 - type: recall_at_20 value: 59.745000000000005 - type: recall_at_3 value: 37.765 - type: recall_at_5 value: 44.304 - task: type: Retrieval dataset: name: MTEB HotpotQA-PL type: clarin-knext/hotpotqa-pl config: default split: test revision: a0bd479ac97b4ccb5bd6ce320c415d0bb4beb907 metrics: - type: main_score value: 77.02799999999999 - type: map_at_1 value: 41.249 - type: map_at_10 value: 69.512 - type: map_at_100 value: 70.291 - type: map_at_1000 value: 70.334 - type: map_at_20 value: 69.992 - type: map_at_3 value: 65.751 - type: map_at_5 value: 68.161 - type: mrr_at_1 value: 82.4983119513842 - type: mrr_at_10 value: 87.71202426502866 - type: mrr_at_100 value: 87.84265780907221 - type: mrr_at_1000 value: 87.8455843626266 - type: mrr_at_20 value: 87.80640011547308 - type: mrr_at_3 value: 86.94575737114536 - type: mrr_at_5 value: 87.46770200315063 - type: nauc_map_at_1000_diff1 value: 17.17119899625707 - type: nauc_map_at_1000_max value: 29.981569339485393 - type: nauc_map_at_1000_std value: 8.93659568948167 - type: nauc_map_at_100_diff1 value: 17.156175947340035 - type: 
nauc_map_at_100_max value: 29.988121004348194 - type: nauc_map_at_100_std value: 8.967947232110745 - type: nauc_map_at_10_diff1 value: 16.854416108818132 - type: nauc_map_at_10_max value: 29.784211249360194 - type: nauc_map_at_10_std value: 8.535227936720936 - type: nauc_map_at_1_diff1 value: 68.01294545515707 - type: nauc_map_at_1_max value: 47.51019900345037 - type: nauc_map_at_1_std value: -1.7951406243808212 - type: nauc_map_at_20_diff1 value: 16.993955459776572 - type: nauc_map_at_20_max value: 29.920806300647463 - type: nauc_map_at_20_std value: 8.873597327714583 - type: nauc_map_at_3_diff1 value: 16.16514623575243 - type: nauc_map_at_3_max value: 27.62371849413713 - type: nauc_map_at_3_std value: 5.131406130565191 - type: nauc_map_at_5_diff1 value: 16.507863832657364 - type: nauc_map_at_5_max value: 28.9019090072195 - type: nauc_map_at_5_std value: 7.2380930617814645 - type: nauc_mrr_at_1000_diff1 value: 66.74502991743417 - type: nauc_mrr_at_1000_max value: 50.29274140603486 - type: nauc_mrr_at_1000_std value: 1.602388931386098 - type: nauc_mrr_at_100_diff1 value: 66.7413605208101 - type: nauc_mrr_at_100_max value: 50.29720043419606 - type: nauc_mrr_at_100_std value: 1.612142495535232 - type: nauc_mrr_at_10_diff1 value: 66.71814591414376 - type: nauc_mrr_at_10_max value: 50.39851050116519 - type: nauc_mrr_at_10_std value: 1.7339878916186384 - type: nauc_mrr_at_1_diff1 value: 68.01294545515707 - type: nauc_mrr_at_1_max value: 47.627701029006225 - type: nauc_mrr_at_1_std value: -1.442043059079073 - type: nauc_mrr_at_20_diff1 value: 66.72944815863312 - type: nauc_mrr_at_20_max value: 50.325719646409716 - type: nauc_mrr_at_20_std value: 1.6584317196476688 - type: nauc_mrr_at_3_diff1 value: 66.29662294615758 - type: nauc_mrr_at_3_max value: 50.29363488669571 - type: nauc_mrr_at_3_std value: 1.1373012069481296 - type: nauc_mrr_at_5_diff1 value: 66.70959181668684 - type: nauc_mrr_at_5_max value: 50.42831108375743 - type: nauc_mrr_at_5_std value: 1.5492429855609648 - type: nauc_ndcg_at_1000_diff1 value: 24.337157353044912 - type: nauc_ndcg_at_1000_max value: 35.021784629126984 - type: nauc_ndcg_at_1000_std value: 11.976738067383161 - type: nauc_ndcg_at_100_diff1 value: 23.584427352691776 - type: nauc_ndcg_at_100_max value: 35.12304754035805 - type: nauc_ndcg_at_100_std value: 12.921291623167921 - type: nauc_ndcg_at_10_diff1 value: 22.057127915032765 - type: nauc_ndcg_at_10_max value: 34.09397142140321 - type: nauc_ndcg_at_10_std value: 11.21339882108658 - type: nauc_ndcg_at_1_diff1 value: 68.01294545515707 - type: nauc_ndcg_at_1_max value: 47.51019900345037 - type: nauc_ndcg_at_1_std value: -1.7951406243808212 - type: nauc_ndcg_at_20_diff1 value: 22.404347553479102 - type: nauc_ndcg_at_20_max value: 34.50508324969608 - type: nauc_ndcg_at_20_std value: 12.281993331498175 - type: nauc_ndcg_at_3_diff1 value: 21.21895220595676 - type: nauc_ndcg_at_3_max value: 30.76465236403928 - type: nauc_ndcg_at_3_std value: 5.501903724385424 - type: nauc_ndcg_at_5_diff1 value: 21.489825424548258 - type: nauc_ndcg_at_5_max value: 32.43517409935615 - type: nauc_ndcg_at_5_std value: 8.59021290966302 - type: nauc_precision_at_1000_diff1 value: 9.056916578488696 - type: nauc_precision_at_1000_max value: 47.29861770129213 - type: nauc_precision_at_1000_std value: 60.06028316961357 - type: nauc_precision_at_100_diff1 value: 6.853208191063939 - type: nauc_precision_at_100_max value: 40.23686318254916 - type: nauc_precision_at_100_std value: 44.69884156134862 - type: nauc_precision_at_10_diff1 value: 
7.7572606953149315 - type: nauc_precision_at_10_max value: 33.24412509121427 - type: nauc_precision_at_10_std value: 22.894891705425753 - type: nauc_precision_at_1_diff1 value: 68.01294545515707 - type: nauc_precision_at_1_max value: 47.51019900345037 - type: nauc_precision_at_1_std value: -1.7951406243808212 - type: nauc_precision_at_20_diff1 value: 6.102789021481188 - type: nauc_precision_at_20_max value: 34.384739158981084 - type: nauc_precision_at_20_std value: 29.40165302735249 - type: nauc_precision_at_3_diff1 value: 10.004182813463276 - type: nauc_precision_at_3_max value: 27.07527926636925 - type: nauc_precision_at_3_std value: 8.034252288165805 - type: nauc_precision_at_5_diff1 value: 8.672082689816547 - type: nauc_precision_at_5_max value: 29.352582129843867 - type: nauc_precision_at_5_std value: 14.456464951944461 - type: nauc_recall_at_1000_diff1 value: 9.056916578488018 - type: nauc_recall_at_1000_max value: 47.29861770129215 - type: nauc_recall_at_1000_std value: 60.06028316961315 - type: nauc_recall_at_100_diff1 value: 6.853208191063934 - type: nauc_recall_at_100_max value: 40.23686318254888 - type: nauc_recall_at_100_std value: 44.698841561348615 - type: nauc_recall_at_10_diff1 value: 7.7572606953149394 - type: nauc_recall_at_10_max value: 33.244125091214286 - type: nauc_recall_at_10_std value: 22.894891705425863 - type: nauc_recall_at_1_diff1 value: 68.01294545515707 - type: nauc_recall_at_1_max value: 47.51019900345037 - type: nauc_recall_at_1_std value: -1.7951406243808212 - type: nauc_recall_at_20_diff1 value: 6.102789021481126 - type: nauc_recall_at_20_max value: 34.38473915898118 - type: nauc_recall_at_20_std value: 29.40165302735251 - type: nauc_recall_at_3_diff1 value: 10.004182813463203 - type: nauc_recall_at_3_max value: 27.07527926636916 - type: nauc_recall_at_3_std value: 8.034252288165728 - type: nauc_recall_at_5_diff1 value: 8.672082689816364 - type: nauc_recall_at_5_max value: 29.352582129843714 - type: nauc_recall_at_5_std value: 14.4564649519445 - type: ndcg_at_1 value: 82.498 - type: ndcg_at_10 value: 77.02799999999999 - type: ndcg_at_100 value: 79.593 - type: ndcg_at_1000 value: 80.372 - type: ndcg_at_20 value: 78.194 - type: ndcg_at_3 value: 71.932 - type: ndcg_at_5 value: 74.878 - type: precision_at_1 value: 82.498 - type: precision_at_10 value: 16.289 - type: precision_at_100 value: 1.8259999999999998 - type: precision_at_1000 value: 0.193 - type: precision_at_20 value: 8.519 - type: precision_at_3 value: 46.851 - type: precision_at_5 value: 30.436000000000003 - type: recall_at_1 value: 41.249 - type: recall_at_10 value: 81.44500000000001 - type: recall_at_100 value: 91.323 - type: recall_at_1000 value: 96.44200000000001 - type: recall_at_20 value: 85.18599999999999 - type: recall_at_3 value: 70.277 - type: recall_at_5 value: 76.09 - task: type: Retrieval dataset: name: MTEB MSMARCO-PL type: clarin-knext/msmarco-pl config: default split: test revision: 8634c07806d5cce3a6138e260e59b81760a0a640 metrics: - type: main_score value: 72.695 - type: map_at_1 value: 2.313 - type: map_at_10 value: 16.541 - type: map_at_100 value: 42.664 - type: map_at_1000 value: 51.048 - type: map_at_20 value: 25.691000000000003 - type: map_at_3 value: 6.8580000000000005 - type: map_at_5 value: 10.227 - type: mrr_at_1 value: 90.69767441860465 - type: mrr_at_10 value: 94.65116279069768 - type: mrr_at_100 value: 94.65116279069768 - type: mrr_at_1000 value: 94.65116279069768 - type: mrr_at_20 value: 94.65116279069768 - type: mrr_at_3 value: 94.18604651162791 - type: mrr_at_5 
value: 94.65116279069768 - type: nauc_map_at_1000_diff1 value: -19.394271777832838 - type: nauc_map_at_1000_max value: 35.63073356621754 - type: nauc_map_at_1000_std value: 56.92803671553409 - type: nauc_map_at_100_diff1 value: -7.023340458676494 - type: nauc_map_at_100_max value: 22.967662469404267 - type: nauc_map_at_100_std value: 28.64423344417142 - type: nauc_map_at_10_diff1 value: 18.22452762970126 - type: nauc_map_at_10_max value: 3.235969423980127 - type: nauc_map_at_10_std value: -11.528499499305529 - type: nauc_map_at_1_diff1 value: 17.90743559505749 - type: nauc_map_at_1_max value: -14.61627654448527 - type: nauc_map_at_1_std value: -24.262430292012667 - type: nauc_map_at_20_diff1 value: 14.96422992084746 - type: nauc_map_at_20_max value: 11.128128185086132 - type: nauc_map_at_20_std value: -0.4087236026844547 - type: nauc_map_at_3_diff1 value: 16.45733174189393 - type: nauc_map_at_3_max value: -14.88196784500194 - type: nauc_map_at_3_std value: -26.096323520383446 - type: nauc_map_at_5_diff1 value: 17.572159494245003 - type: nauc_map_at_5_max value: -11.206812710229503 - type: nauc_map_at_5_std value: -22.27070819579704 - type: nauc_mrr_at_1000_diff1 value: 33.66069097978205 - type: nauc_mrr_at_1000_max value: 43.87773602456895 - type: nauc_mrr_at_1000_std value: 52.33730714398662 - type: nauc_mrr_at_100_diff1 value: 33.66069097978205 - type: nauc_mrr_at_100_max value: 43.87773602456895 - type: nauc_mrr_at_100_std value: 52.33730714398662 - type: nauc_mrr_at_10_diff1 value: 33.66069097978205 - type: nauc_mrr_at_10_max value: 43.87773602456895 - type: nauc_mrr_at_10_std value: 52.33730714398662 - type: nauc_mrr_at_1_diff1 value: 23.709794626749783 - type: nauc_mrr_at_1_max value: 35.45939642825464 - type: nauc_mrr_at_1_std value: 45.18790321558505 - type: nauc_mrr_at_20_diff1 value: 33.66069097978205 - type: nauc_mrr_at_20_max value: 43.87773602456895 - type: nauc_mrr_at_20_std value: 52.33730714398662 - type: nauc_mrr_at_3_diff1 value: 38.96783570139972 - type: nauc_mrr_at_3_max value: 48.367517142603624 - type: nauc_mrr_at_3_std value: 56.15032257246786 - type: nauc_mrr_at_5_diff1 value: 33.66069097978205 - type: nauc_mrr_at_5_max value: 43.87773602456895 - type: nauc_mrr_at_5_std value: 52.33730714398662 - type: nauc_ndcg_at_1000_diff1 value: -8.409227649777549 - type: nauc_ndcg_at_1000_max value: 55.08579408014661 - type: nauc_ndcg_at_1000_std value: 64.71829411541155 - type: nauc_ndcg_at_100_diff1 value: -12.171382005828134 - type: nauc_ndcg_at_100_max value: 37.279599751187895 - type: nauc_ndcg_at_100_std value: 55.59571261330682 - type: nauc_ndcg_at_10_diff1 value: -4.2745893875224645 - type: nauc_ndcg_at_10_max value: 35.61094191299521 - type: nauc_ndcg_at_10_std value: 31.49122710738599 - type: nauc_ndcg_at_1_diff1 value: 34.77341575621081 - type: nauc_ndcg_at_1_max value: 18.418784098194983 - type: nauc_ndcg_at_1_std value: 3.6003144907881026 - type: nauc_ndcg_at_20_diff1 value: -16.937600290863816 - type: nauc_ndcg_at_20_max value: 28.731002593372718 - type: nauc_ndcg_at_20_std value: 40.140028262395546 - type: nauc_ndcg_at_3_diff1 value: 21.008563623057892 - type: nauc_ndcg_at_3_max value: 32.092932411602945 - type: nauc_ndcg_at_3_std value: 7.783159518591246 - type: nauc_ndcg_at_5_diff1 value: 13.35248395075747 - type: nauc_ndcg_at_5_max value: 33.48637127489678 - type: nauc_ndcg_at_5_std value: 19.883656903878986 - type: nauc_precision_at_1000_diff1 value: -34.613170483366815 - type: nauc_precision_at_1000_max value: 14.178980568050093 - type: 
nauc_precision_at_1000_std value: 53.45813399059421 - type: nauc_precision_at_100_diff1 value: -40.67552345859168 - type: nauc_precision_at_100_max value: 23.091965607829138 - type: nauc_precision_at_100_std value: 62.39644907525577 - type: nauc_precision_at_10_diff1 value: -29.61210257317124 - type: nauc_precision_at_10_max value: 43.992102732918255 - type: nauc_precision_at_10_std value: 67.25524849542518 - type: nauc_precision_at_1_diff1 value: 23.709794626749783 - type: nauc_precision_at_1_max value: 35.45939642825464 - type: nauc_precision_at_1_std value: 45.18790321558505 - type: nauc_precision_at_20_diff1 value: -38.29110052486433 - type: nauc_precision_at_20_max value: 28.73705296191401 - type: nauc_precision_at_20_std value: 62.12026159344505 - type: nauc_precision_at_3_diff1 value: -4.950069185044093 - type: nauc_precision_at_3_max value: 35.30311413187648 - type: nauc_precision_at_3_std value: 37.24789627772557 - type: nauc_precision_at_5_diff1 value: -8.259725731846123 - type: nauc_precision_at_5_max value: 33.985287538899314 - type: nauc_precision_at_5_std value: 53.59550306044433 - type: nauc_recall_at_1000_diff1 value: -5.996961409631926 - type: nauc_recall_at_1000_max value: 63.118266233402764 - type: nauc_recall_at_1000_std value: 69.5649709802058 - type: nauc_recall_at_100_diff1 value: 6.920650261229799 - type: nauc_recall_at_100_max value: 26.76777278523633 - type: nauc_recall_at_100_std value: 24.81349844560708 - type: nauc_recall_at_10_diff1 value: 18.636579796911292 - type: nauc_recall_at_10_max value: 2.214374250576099 - type: nauc_recall_at_10_std value: -12.939953791707651 - type: nauc_recall_at_1_diff1 value: 17.90743559505749 - type: nauc_recall_at_1_max value: -14.61627654448527 - type: nauc_recall_at_1_std value: -24.262430292012667 - type: nauc_recall_at_20_diff1 value: 17.612041689452855 - type: nauc_recall_at_20_max value: 11.182632726686007 - type: nauc_recall_at_20_std value: -2.4835954401161864 - type: nauc_recall_at_3_diff1 value: 16.773341381117 - type: nauc_recall_at_3_max value: -15.051242807277163 - type: nauc_recall_at_3_std value: -26.410274593618038 - type: nauc_recall_at_5_diff1 value: 17.091861029537423 - type: nauc_recall_at_5_max value: -13.243464985211395 - type: nauc_recall_at_5_std value: -23.92982354951768 - type: ndcg_at_1 value: 78.295 - type: ndcg_at_10 value: 72.695 - type: ndcg_at_100 value: 65.69500000000001 - type: ndcg_at_1000 value: 73.359 - type: ndcg_at_20 value: 69.16499999999999 - type: ndcg_at_3 value: 76.632 - type: ndcg_at_5 value: 74.024 - type: precision_at_1 value: 90.69800000000001 - type: precision_at_10 value: 81.628 - type: precision_at_100 value: 38.116 - type: precision_at_1000 value: 7.199999999999999 - type: precision_at_20 value: 72.209 - type: precision_at_3 value: 89.922 - type: precision_at_5 value: 86.047 - type: recall_at_1 value: 2.313 - type: recall_at_10 value: 17.48 - type: recall_at_100 value: 53.937000000000005 - type: recall_at_1000 value: 80.018 - type: recall_at_20 value: 28.081 - type: recall_at_3 value: 6.927 - type: recall_at_5 value: 10.575 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (pl) type: mteb/amazon_massive_intent config: pl split: test revision: 4672e20407010da34463acc759c162ca9734bca6 metrics: - type: accuracy value: 79.41492938802959 - type: f1 value: 75.75917683785259 - type: f1_weighted value: 79.4156392656699 - type: main_score value: 79.41492938802959 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (pl) type: 
mteb/amazon_massive_scenario config: pl split: test revision: fad2c6e8459f9e1c45d9315f4953d921437d70f8 metrics: - type: accuracy value: 81.9334229993275 - type: f1 value: 81.40628785444537 - type: f1_weighted value: 81.79807477693303 - type: main_score value: 81.9334229993275 - task: type: Retrieval dataset: name: MTEB NFCorpus-PL type: clarin-knext/nfcorpus-pl config: default split: test revision: 9a6f9567fda928260afed2de480d79c98bf0bec0 metrics: - type: main_score value: 36.723 - type: map_at_1 value: 5.8069999999999995 - type: map_at_10 value: 13.602 - type: map_at_100 value: 17.196 - type: map_at_1000 value: 18.609 - type: map_at_20 value: 15.146999999999998 - type: map_at_3 value: 9.594999999999999 - type: map_at_5 value: 11.453000000000001 - type: mrr_at_1 value: 47.368421052631575 - type: mrr_at_10 value: 55.60703228659884 - type: mrr_at_100 value: 56.1552975760445 - type: mrr_at_1000 value: 56.19164342988321 - type: mrr_at_20 value: 55.922507068281476 - type: mrr_at_3 value: 53.147574819401456 - type: mrr_at_5 value: 54.680082559339525 - type: nauc_map_at_1000_diff1 value: 34.05763404594125 - type: nauc_map_at_1000_max value: 29.5226776533209 - type: nauc_map_at_1000_std value: 15.427632324819914 - type: nauc_map_at_100_diff1 value: 34.80313586539057 - type: nauc_map_at_100_max value: 27.999543781245972 - type: nauc_map_at_100_std value: 11.502430185601197 - type: nauc_map_at_10_diff1 value: 39.10493763818235 - type: nauc_map_at_10_max value: 20.299110129894572 - type: nauc_map_at_10_std value: -1.8131312981171384 - type: nauc_map_at_1_diff1 value: 54.952292547558436 - type: nauc_map_at_1_max value: 13.172173380536137 - type: nauc_map_at_1_std value: -11.135859432447047 - type: nauc_map_at_20_diff1 value: 36.56338939350608 - type: nauc_map_at_20_max value: 24.057778180377355 - type: nauc_map_at_20_std value: 4.030543599731532 - type: nauc_map_at_3_diff1 value: 46.798195082350766 - type: nauc_map_at_3_max value: 14.899395608553915 - type: nauc_map_at_3_std value: -10.505614189182307 - type: nauc_map_at_5_diff1 value: 42.83953515294862 - type: nauc_map_at_5_max value: 17.04727497975375 - type: nauc_map_at_5_std value: -7.6517071380275885 - type: nauc_mrr_at_1000_diff1 value: 41.44193432540061 - type: nauc_mrr_at_1000_max value: 39.88086824180341 - type: nauc_mrr_at_1000_std value: 27.351885880283966 - type: nauc_mrr_at_100_diff1 value: 41.43357468563369 - type: nauc_mrr_at_100_max value: 39.91394628214467 - type: nauc_mrr_at_100_std value: 27.37166382203234 - type: nauc_mrr_at_10_diff1 value: 41.46082695650948 - type: nauc_mrr_at_10_max value: 39.858957188572944 - type: nauc_mrr_at_10_std value: 27.18216001182641 - type: nauc_mrr_at_1_diff1 value: 41.485448798176904 - type: nauc_mrr_at_1_max value: 33.6944538535235 - type: nauc_mrr_at_1_std value: 22.826701578387503 - type: nauc_mrr_at_20_diff1 value: 41.374365310091925 - type: nauc_mrr_at_20_max value: 39.923859616197035 - type: nauc_mrr_at_20_std value: 27.27268109687068 - type: nauc_mrr_at_3_diff1 value: 42.1244757279239 - type: nauc_mrr_at_3_max value: 38.380669877043864 - type: nauc_mrr_at_3_std value: 25.734391560690224 - type: nauc_mrr_at_5_diff1 value: 41.26497822292423 - type: nauc_mrr_at_5_max value: 39.17164048501762 - type: nauc_mrr_at_5_std value: 26.304110615701987 - type: nauc_ndcg_at_1000_diff1 value: 31.76845316166595 - type: nauc_ndcg_at_1000_max value: 44.0530198648453 - type: nauc_ndcg_at_1000_std value: 33.37050209530549 - type: nauc_ndcg_at_100_diff1 value: 31.70167104254346 - type: nauc_ndcg_at_100_max value: 
38.98577219865644 - type: nauc_ndcg_at_100_std value: 28.46948949404448 - type: nauc_ndcg_at_10_diff1 value: 31.41371490994258 - type: nauc_ndcg_at_10_max value: 36.46974014607837 - type: nauc_ndcg_at_10_std value: 28.214061102873274 - type: nauc_ndcg_at_1_diff1 value: 45.195218239572185 - type: nauc_ndcg_at_1_max value: 32.47174554115089 - type: nauc_ndcg_at_1_std value: 22.252970640869655 - type: nauc_ndcg_at_20_diff1 value: 30.22073304733139 - type: nauc_ndcg_at_20_max value: 36.85722580956459 - type: nauc_ndcg_at_20_std value: 28.82508960932221 - type: nauc_ndcg_at_3_diff1 value: 34.85087007597385 - type: nauc_ndcg_at_3_max value: 35.08880030166066 - type: nauc_ndcg_at_3_std value: 24.477164602350427 - type: nauc_ndcg_at_5_diff1 value: 32.15269255562139 - type: nauc_ndcg_at_5_max value: 36.26512978748847 - type: nauc_ndcg_at_5_std value: 26.121143638336193 - type: nauc_precision_at_1000_diff1 value: -5.016344866521763 - type: nauc_precision_at_1000_max value: 13.76155613533569 - type: nauc_precision_at_1000_std value: 42.87650310943072 - type: nauc_precision_at_100_diff1 value: -2.4765231121724867 - type: nauc_precision_at_100_max value: 26.413714147361173 - type: nauc_precision_at_100_std value: 52.07869389693284 - type: nauc_precision_at_10_diff1 value: 9.381859834804454 - type: nauc_precision_at_10_max value: 36.79686689654208 - type: nauc_precision_at_10_std value: 41.450385008923874 - type: nauc_precision_at_1_diff1 value: 43.14276503972391 - type: nauc_precision_at_1_max value: 33.23669937901841 - type: nauc_precision_at_1_std value: 23.574191783291614 - type: nauc_precision_at_20_diff1 value: 3.3554639781732143 - type: nauc_precision_at_20_max value: 35.07048369650734 - type: nauc_precision_at_20_std value: 46.90757933302204 - type: nauc_precision_at_3_diff1 value: 22.3364560733951 - type: nauc_precision_at_3_max value: 34.49198383469041 - type: nauc_precision_at_3_std value: 28.30886758592867 - type: nauc_precision_at_5_diff1 value: 14.242157915266043 - type: nauc_precision_at_5_max value: 36.78665790141447 - type: nauc_precision_at_5_std value: 34.22226904133568 - type: nauc_recall_at_1000_diff1 value: 6.177080203711223 - type: nauc_recall_at_1000_max value: 20.36718691855502 - type: nauc_recall_at_1000_std value: 21.44974953318914 - type: nauc_recall_at_100_diff1 value: 16.98521396327983 - type: nauc_recall_at_100_max value: 25.739641139625473 - type: nauc_recall_at_100_std value: 16.08045361596745 - type: nauc_recall_at_10_diff1 value: 28.066091446759465 - type: nauc_recall_at_10_max value: 15.875422037194987 - type: nauc_recall_at_10_std value: -2.7729209404094712 - type: nauc_recall_at_1_diff1 value: 54.952292547558436 - type: nauc_recall_at_1_max value: 13.172173380536137 - type: nauc_recall_at_1_std value: -11.135859432447047 - type: nauc_recall_at_20_diff1 value: 22.454203317605455 - type: nauc_recall_at_20_max value: 19.38991609441149 - type: nauc_recall_at_20_std value: 3.3669889925713683 - type: nauc_recall_at_3_diff1 value: 42.41050348142469 - type: nauc_recall_at_3_max value: 14.345477767632861 - type: nauc_recall_at_3_std value: -11.275161125178107 - type: nauc_recall_at_5_diff1 value: 34.851159133502286 - type: nauc_recall_at_5_max value: 15.03263812713638 - type: nauc_recall_at_5_std value: -9.042538295018138 - type: ndcg_at_1 value: 44.891999999999996 - type: ndcg_at_10 value: 36.723 - type: ndcg_at_100 value: 33.101 - type: ndcg_at_1000 value: 41.493 - type: ndcg_at_20 value: 34.14 - type: ndcg_at_3 value: 41.131 - type: ndcg_at_5 value: 39.446999999999996 - 
type: precision_at_1 value: 46.749 - type: precision_at_10 value: 27.616000000000003 - type: precision_at_100 value: 8.372 - type: precision_at_1000 value: 2.095 - type: precision_at_20 value: 20.294 - type: precision_at_3 value: 38.493 - type: precision_at_5 value: 34.427 - type: recall_at_1 value: 5.8069999999999995 - type: recall_at_10 value: 18.444 - type: recall_at_100 value: 33.655 - type: recall_at_1000 value: 63.839999999999996 - type: recall_at_20 value: 22.205 - type: recall_at_3 value: 10.61 - type: recall_at_5 value: 13.938999999999998 - task: type: Retrieval dataset: name: MTEB NQ-PL type: clarin-knext/nq-pl config: default split: test revision: f171245712cf85dd4700b06bef18001578d0ca8d metrics: - type: main_score value: 56.854000000000006 - type: map_at_1 value: 34.514 - type: map_at_10 value: 49.644 - type: map_at_100 value: 50.608 - type: map_at_1000 value: 50.635 - type: map_at_20 value: 50.305 - type: map_at_3 value: 45.672000000000004 - type: map_at_5 value: 48.089 - type: mrr_at_1 value: 38.78910776361529 - type: mrr_at_10 value: 52.148397984145234 - type: mrr_at_100 value: 52.852966946095215 - type: mrr_at_1000 value: 52.87105017860762 - type: mrr_at_20 value: 52.64188894631607 - type: mrr_at_3 value: 48.97643877945134 - type: mrr_at_5 value: 50.92168791039002 - type: nauc_map_at_1000_diff1 value: 37.02156712167867 - type: nauc_map_at_1000_max value: 30.9541229199217 - type: nauc_map_at_1000_std value: 7.320033004454671 - type: nauc_map_at_100_diff1 value: 37.02236703226826 - type: nauc_map_at_100_max value: 30.9697676745961 - type: nauc_map_at_100_std value: 7.33984133867723 - type: nauc_map_at_10_diff1 value: 36.90102700826612 - type: nauc_map_at_10_max value: 30.785723842405183 - type: nauc_map_at_10_std value: 6.779448226242215 - type: nauc_map_at_1_diff1 value: 39.909029450982274 - type: nauc_map_at_1_max value: 25.241631663639062 - type: nauc_map_at_1_std value: 3.9346798436914625 - type: nauc_map_at_20_diff1 value: 37.01885833177735 - type: nauc_map_at_20_max value: 30.93864719019393 - type: nauc_map_at_20_std value: 7.157784404582363 - type: nauc_map_at_3_diff1 value: 36.66395294442894 - type: nauc_map_at_3_max value: 28.73917625955397 - type: nauc_map_at_3_std value: 4.974442294121807 - type: nauc_map_at_5_diff1 value: 36.50200331851477 - type: nauc_map_at_5_max value: 30.19694653814823 - type: nauc_map_at_5_std value: 6.080701892676308 - type: nauc_mrr_at_1000_diff1 value: 37.13771503608112 - type: nauc_mrr_at_1000_max value: 31.751547147247507 - type: nauc_mrr_at_1000_std value: 9.508614158791604 - type: nauc_mrr_at_100_diff1 value: 37.13715249048103 - type: nauc_mrr_at_100_max value: 31.76453363846907 - type: nauc_mrr_at_100_std value: 9.527333431366577 - type: nauc_mrr_at_10_diff1 value: 37.04617391414406 - type: nauc_mrr_at_10_max value: 31.835558691659767 - type: nauc_mrr_at_10_std value: 9.403478249864207 - type: nauc_mrr_at_1_diff1 value: 40.24340603514061 - type: nauc_mrr_at_1_max value: 27.892025295592664 - type: nauc_mrr_at_1_std value: 6.948060152377137 - type: nauc_mrr_at_20_diff1 value: 37.13679664662962 - type: nauc_mrr_at_20_max value: 31.80571193908972 - type: nauc_mrr_at_20_std value: 9.463516427443066 - type: nauc_mrr_at_3_diff1 value: 36.59947958587673 - type: nauc_mrr_at_3_max value: 30.56905612034133 - type: nauc_mrr_at_3_std value: 8.213473085446296 - type: nauc_mrr_at_5_diff1 value: 36.66740305041658 - type: nauc_mrr_at_5_max value: 31.470226490982878 - type: nauc_mrr_at_5_std value: 9.02109643375307 - type: nauc_ndcg_at_1000_diff1 
value: 36.60296185088649 - type: nauc_ndcg_at_1000_max value: 33.40562074993109 - type: nauc_ndcg_at_1000_std value: 10.60845451213325 - type: nauc_ndcg_at_100_diff1 value: 36.59946610918652 - type: nauc_ndcg_at_100_max value: 33.9570260243297 - type: nauc_ndcg_at_100_std value: 11.340469448481196 - type: nauc_ndcg_at_10_diff1 value: 36.14418247401987 - type: nauc_ndcg_at_10_max value: 33.451039871075345 - type: nauc_ndcg_at_10_std value: 9.272972801419813 - type: nauc_ndcg_at_1_diff1 value: 40.07169143996099 - type: nauc_ndcg_at_1_max value: 27.943354680588055 - type: nauc_ndcg_at_1_std value: 7.036639009967827 - type: nauc_ndcg_at_20_diff1 value: 36.51152244027151 - type: nauc_ndcg_at_20_max value: 33.89378482325653 - type: nauc_ndcg_at_20_std value: 10.342721315866635 - type: nauc_ndcg_at_3_diff1 value: 35.4822845318483 - type: nauc_ndcg_at_3_max value: 29.912345910181415 - type: nauc_ndcg_at_3_std value: 5.9694134283330715 - type: nauc_ndcg_at_5_diff1 value: 35.221776161219466 - type: nauc_ndcg_at_5_max value: 32.1072171248216 - type: nauc_ndcg_at_5_std value: 7.670174771541694 - type: nauc_precision_at_1000_diff1 value: -4.285000172509594 - type: nauc_precision_at_1000_max value: 14.600633321561062 - type: nauc_precision_at_1000_std value: 21.991435704986305 - type: nauc_precision_at_100_diff1 value: 1.7266493932509126 - type: nauc_precision_at_100_max value: 22.9932202096611 - type: nauc_precision_at_100_std value: 27.464183639561075 - type: nauc_precision_at_10_diff1 value: 16.16723142044687 - type: nauc_precision_at_10_max value: 32.61177863055963 - type: nauc_precision_at_10_std value: 19.30609156634069 - type: nauc_precision_at_1_diff1 value: 40.07169143996099 - type: nauc_precision_at_1_max value: 27.943354680588055 - type: nauc_precision_at_1_std value: 7.036639009967827 - type: nauc_precision_at_20_diff1 value: 10.986359452355082 - type: nauc_precision_at_20_max value: 30.001608294285408 - type: nauc_precision_at_20_std value: 23.470161266132752 - type: nauc_precision_at_3_diff1 value: 25.021299827765368 - type: nauc_precision_at_3_max value: 31.112435175145354 - type: nauc_precision_at_3_std value: 9.97933575854508 - type: nauc_precision_at_5_diff1 value: 19.85258852538675 - type: nauc_precision_at_5_max value: 33.017057636553346 - type: nauc_precision_at_5_std value: 14.226398540277224 - type: nauc_recall_at_1000_diff1 value: 32.956809555733294 - type: nauc_recall_at_1000_max value: 81.17616645437344 - type: nauc_recall_at_1000_std value: 80.81894015338722 - type: nauc_recall_at_100_diff1 value: 34.21543518933059 - type: nauc_recall_at_100_max value: 64.60424388566007 - type: nauc_recall_at_100_std value: 55.36262550526809 - type: nauc_recall_at_10_diff1 value: 31.854572843060865 - type: nauc_recall_at_10_max value: 41.47697651985406 - type: nauc_recall_at_10_std value: 15.449819317346778 - type: nauc_recall_at_1_diff1 value: 39.909029450982274 - type: nauc_recall_at_1_max value: 25.241631663639062 - type: nauc_recall_at_1_std value: 3.9346798436914625 - type: nauc_recall_at_20_diff1 value: 33.155424988870266 - type: nauc_recall_at_20_max value: 47.41147314334969 - type: nauc_recall_at_20_std value: 24.122822585459915 - type: nauc_recall_at_3_diff1 value: 31.030069463711484 - type: nauc_recall_at_3_max value: 30.349471998175105 - type: nauc_recall_at_3_std value: 5.3792560913820635 - type: nauc_recall_at_5_diff1 value: 29.662449422215627 - type: nauc_recall_at_5_max value: 35.59583981361554 - type: nauc_recall_at_5_std value: 9.138475426366536 - type: ndcg_at_1 value: 38.847 
- type: ndcg_at_10 value: 56.854000000000006 - type: ndcg_at_100 value: 60.767 - type: ndcg_at_1000 value: 61.399 - type: ndcg_at_20 value: 58.941 - type: ndcg_at_3 value: 49.576 - type: ndcg_at_5 value: 53.502 - type: precision_at_1 value: 38.847 - type: precision_at_10 value: 9.064 - type: precision_at_100 value: 1.127 - type: precision_at_1000 value: 0.11900000000000001 - type: precision_at_20 value: 5.038 - type: precision_at_3 value: 22.335 - type: precision_at_5 value: 15.689 - type: recall_at_1 value: 34.514 - type: recall_at_10 value: 76.152 - type: recall_at_100 value: 92.837 - type: recall_at_1000 value: 97.596 - type: recall_at_20 value: 83.77799999999999 - type: recall_at_3 value: 57.484 - type: recall_at_5 value: 66.476 - task: type: Classification dataset: name: MTEB PAC type: laugustyniak/abusive-clauses-pl config: default split: test revision: None metrics: - type: accuracy value: 67.24297712134376 - type: accuracy_stderr value: 4.77558207347837 - type: ap value: 77.38171975466854 - type: ap_stderr value: 2.5801970175320394 - type: f1 value: 65.21823897814332 - type: f1_stderr value: 4.317111734308895 - type: main_score value: 67.24297712134376 - task: type: PairClassification dataset: name: MTEB PSC type: PL-MTEB/psc-pairclassification config: default split: test revision: d05a294af9e1d3ff2bfb6b714e08a24a6cabc669 metrics: - type: cosine_accuracy value: 97.95918367346938 - type: cosine_accuracy_threshold value: 59.87724328133361 - type: cosine_ap value: 99.24498625606927 - type: cosine_f1 value: 96.6867469879518 - type: cosine_f1_threshold value: 59.87724328133361 - type: cosine_precision value: 95.53571428571429 - type: cosine_recall value: 97.86585365853658 - type: dot_accuracy value: 98.51576994434137 - type: dot_accuracy_threshold value: 1574400.0 - type: dot_ap value: 99.28566232682996 - type: dot_f1 value: 97.57575757575758 - type: dot_f1_threshold value: 1564800.0 - type: dot_precision value: 96.98795180722891 - type: dot_recall value: 98.17073170731707 - type: euclidean_accuracy value: 97.6808905380334 - type: euclidean_accuracy_threshold value: 14418.957939643331 - type: euclidean_ap value: 99.0876340868033 - type: euclidean_f1 value: 96.24060150375941 - type: euclidean_f1_threshold value: 14442.183182634264 - type: euclidean_precision value: 94.95548961424333 - type: euclidean_recall value: 97.5609756097561 - type: main_score value: 99.28566232682996 - type: manhattan_accuracy value: 97.86641929499072 - type: manhattan_accuracy_threshold value: 681802.1857857704 - type: manhattan_ap value: 99.08465290287205 - type: manhattan_f1 value: 96.52042360060513 - type: manhattan_f1_threshold value: 681802.1857857704 - type: manhattan_precision value: 95.7957957957958 - type: manhattan_recall value: 97.2560975609756 - type: max_ap value: 99.28566232682996 - type: max_f1 value: 97.57575757575758 - type: max_precision value: 96.98795180722891 - type: max_recall value: 98.17073170731707 - type: similarity_accuracy value: 97.95918367346938 - type: similarity_accuracy_threshold value: 59.87724328133361 - type: similarity_ap value: 99.24498625606927 - type: similarity_f1 value: 96.6867469879518 - type: similarity_f1_threshold value: 59.87724328133361 - type: similarity_precision value: 95.53571428571429 - type: similarity_recall value: 97.86585365853658 - task: type: Classification dataset: name: MTEB PolEmo2.0-IN type: PL-MTEB/polemo2_in config: default split: test revision: d90724373c70959f17d2331ad51fb60c71176b03 metrics: - type: accuracy value: 90.41551246537396 - type: f1 
value: 89.15361039614409 - type: f1_weighted value: 90.69893050097603 - type: main_score value: 90.41551246537396 - task: type: Classification dataset: name: MTEB PolEmo2.0-OUT type: PL-MTEB/polemo2_out config: default split: test revision: 6a21ab8716e255ab1867265f8b396105e8aa63d4 metrics: - type: accuracy value: 77.77327935222672 - type: f1 value: 61.238079022455636 - type: f1_weighted value: 80.58753601509183 - type: main_score value: 77.77327935222672 - task: type: PairClassification dataset: name: MTEB PPC type: PL-MTEB/ppc-pairclassification config: default split: test revision: None metrics: - type: cos_sim_accuracy value: 87.2 - type: cos_sim_accuracy_threshold value: 83.69773167092553 - type: cos_sim_ap value: 95.43345251568122 - type: cos_sim_f1 value: 89.82785602503913 - type: cos_sim_f1_threshold value: 81.2116503074739 - type: cos_sim_precision value: 85.16320474777447 - type: cos_sim_recall value: 95.03311258278146 - type: dot_accuracy value: 85.9 - type: dot_accuracy_threshold value: 2177600.0 - type: dot_ap value: 92.4192102018206 - type: dot_f1 value: 88.9238020424195 - type: dot_f1_threshold value: 2163200.0 - type: dot_precision value: 84.60388639760838 - type: dot_recall value: 93.70860927152319 - type: euclidean_accuracy value: 87.5 - type: euclidean_accuracy_threshold value: 9325.450203438862 - type: euclidean_ap value: 95.42730698295347 - type: euclidean_f1 value: 89.92747784045125 - type: euclidean_f1_threshold value: 9325.450203438862 - type: euclidean_precision value: 87.59811616954474 - type: euclidean_recall value: 92.3841059602649 - type: manhattan_accuracy value: 87.5 - type: manhattan_accuracy_threshold value: 441412.88244724274 - type: manhattan_ap value: 95.4277447451651 - type: manhattan_f1 value: 89.92747784045125 - type: manhattan_f1_threshold value: 441412.88244724274 - type: manhattan_precision value: 87.59811616954474 - type: manhattan_recall value: 92.3841059602649 - type: max_accuracy value: 87.5 - type: max_ap value: 95.43345251568122 - type: max_f1 value: 89.92747784045125 - task: type: Retrieval dataset: name: MTEB Quora-PL type: clarin-knext/quora-pl config: default split: test revision: 0be27e93455051e531182b85e85e425aba12e9d4 metrics: - type: main_score value: 84.47099999999999 - type: map_at_1 value: 65.892 - type: map_at_10 value: 80.11500000000001 - type: map_at_100 value: 80.861 - type: map_at_1000 value: 80.879 - type: map_at_20 value: 80.604 - type: map_at_3 value: 76.97 - type: map_at_5 value: 78.926 - type: mrr_at_1 value: 75.83 - type: mrr_at_10 value: 83.2125238095233 - type: mrr_at_100 value: 83.38714262504709 - type: mrr_at_1000 value: 83.38942088013238 - type: mrr_at_20 value: 83.34284466299037 - type: mrr_at_3 value: 81.95333333333281 - type: mrr_at_5 value: 82.78533333333272 - type: nauc_map_at_1000_diff1 value: 73.95721764018812 - type: nauc_map_at_1000_max value: 9.653675847999432 - type: nauc_map_at_1000_std value: -42.35408133902171 - type: nauc_map_at_100_diff1 value: 73.96621756991526 - type: nauc_map_at_100_max value: 9.618124708373092 - type: nauc_map_at_100_std value: -42.41429680546156 - type: nauc_map_at_10_diff1 value: 74.20643666348498 - type: nauc_map_at_10_max value: 9.056688996919677 - type: nauc_map_at_10_std value: -44.13396437616006 - type: nauc_map_at_1_diff1 value: 77.18196114257519 - type: nauc_map_at_1_max value: 7.840648640771136 - type: nauc_map_at_1_std value: -39.84395715001256 - type: nauc_map_at_20_diff1 value: 74.03475632514551 - type: nauc_map_at_20_max value: 9.385795565805118 - type: 
nauc_map_at_20_std value: -43.160299598965466 - type: nauc_map_at_3_diff1 value: 74.43855921599284 - type: nauc_map_at_3_max value: 7.574218825911361 - type: nauc_map_at_3_std value: -46.1476276122436 - type: nauc_map_at_5_diff1 value: 74.38688915461512 - type: nauc_map_at_5_max value: 8.557764506539128 - type: nauc_map_at_5_std value: -45.53897898458085 - type: nauc_mrr_at_1000_diff1 value: 74.0311045258841 - type: nauc_mrr_at_1000_max value: 11.885448379701055 - type: nauc_mrr_at_1000_std value: -38.16008409213179 - type: nauc_mrr_at_100_diff1 value: 74.03074603058893 - type: nauc_mrr_at_100_max value: 11.886356221882725 - type: nauc_mrr_at_100_std value: -38.159139191997795 - type: nauc_mrr_at_10_diff1 value: 73.99521522874129 - type: nauc_mrr_at_10_max value: 11.77749620520773 - type: nauc_mrr_at_10_std value: -38.266295250166635 - type: nauc_mrr_at_1_diff1 value: 75.53192564838908 - type: nauc_mrr_at_1_max value: 12.979267595721275 - type: nauc_mrr_at_1_std value: -36.634066084632785 - type: nauc_mrr_at_20_diff1 value: 74.01273934757484 - type: nauc_mrr_at_20_max value: 11.887566738728225 - type: nauc_mrr_at_20_std value: -38.169250252410485 - type: nauc_mrr_at_3_diff1 value: 73.6073534511043 - type: nauc_mrr_at_3_max value: 11.450856365709727 - type: nauc_mrr_at_3_std value: -38.767141663073964 - type: nauc_mrr_at_5_diff1 value: 73.84950218235583 - type: nauc_mrr_at_5_max value: 11.787394554048813 - type: nauc_mrr_at_5_std value: -38.57240589862417 - type: nauc_ndcg_at_1000_diff1 value: 73.51677487598074 - type: nauc_ndcg_at_1000_max value: 10.72929244202152 - type: nauc_ndcg_at_1000_std value: -39.92813917654933 - type: nauc_ndcg_at_100_diff1 value: 73.53904136553481 - type: nauc_ndcg_at_100_max value: 10.569310211635521 - type: nauc_ndcg_at_100_std value: -40.12206261908318 - type: nauc_ndcg_at_10_diff1 value: 73.55958917204208 - type: nauc_ndcg_at_10_max value: 9.255791947077263 - type: nauc_ndcg_at_10_std value: -42.7856138240991 - type: nauc_ndcg_at_1_diff1 value: 75.34289960079188 - type: nauc_ndcg_at_1_max value: 13.499789436258705 - type: nauc_ndcg_at_1_std value: -35.91483904818284 - type: nauc_ndcg_at_20_diff1 value: 73.48070745481307 - type: nauc_ndcg_at_20_max value: 9.92427572953505 - type: nauc_ndcg_at_20_std value: -41.55653404596579 - type: nauc_ndcg_at_3_diff1 value: 72.72072901275445 - type: nauc_ndcg_at_3_max value: 8.303708237302729 - type: nauc_ndcg_at_3_std value: -43.618531107389344 - type: nauc_ndcg_at_5_diff1 value: 73.30060059269601 - type: nauc_ndcg_at_5_max value: 8.915386932153249 - type: nauc_ndcg_at_5_std value: -44.088053429661 - type: nauc_precision_at_1000_diff1 value: -41.540517884119524 - type: nauc_precision_at_1000_max value: 6.9361565712971265 - type: nauc_precision_at_1000_std value: 42.39482890919027 - type: nauc_precision_at_100_diff1 value: -40.609576663184896 - type: nauc_precision_at_100_max value: 6.302451339507686 - type: nauc_precision_at_100_std value: 41.30693233869549 - type: nauc_precision_at_10_diff1 value: -30.91653155031006 - type: nauc_precision_at_10_max value: 4.84981614338782 - type: nauc_precision_at_10_std value: 24.47022404030676 - type: nauc_precision_at_1_diff1 value: 75.34289960079188 - type: nauc_precision_at_1_max value: 13.499789436258705 - type: nauc_precision_at_1_std value: -35.91483904818284 - type: nauc_precision_at_20_diff1 value: -36.75164419452007 - type: nauc_precision_at_20_max value: 5.440757182282365 - type: nauc_precision_at_20_std value: 33.08928025809355 - type: nauc_precision_at_3_diff1 value: 
-5.3240699725635565 - type: nauc_precision_at_3_max value: 5.156636102003736 - type: nauc_precision_at_3_std value: -0.9779263105110453 - type: nauc_precision_at_5_diff1 value: -19.92133198420086 - type: nauc_precision_at_5_max value: 5.432766335564369 - type: nauc_precision_at_5_std value: 11.417736295996392 - type: nauc_recall_at_1000_diff1 value: 56.57663068186203 - type: nauc_recall_at_1000_max value: 25.80329039728696 - type: nauc_recall_at_1000_std value: 57.82937604195464 - type: nauc_recall_at_100_diff1 value: 67.25188672746224 - type: nauc_recall_at_100_max value: 6.879939694351325 - type: nauc_recall_at_100_std value: -30.098258041087096 - type: nauc_recall_at_10_diff1 value: 68.00694154421653 - type: nauc_recall_at_10_max value: 0.7226814903576098 - type: nauc_recall_at_10_std value: -52.980002751088215 - type: nauc_recall_at_1_diff1 value: 77.18196114257519 - type: nauc_recall_at_1_max value: 7.840648640771136 - type: nauc_recall_at_1_std value: -39.84395715001256 - type: nauc_recall_at_20_diff1 value: 66.56016564739411 - type: nauc_recall_at_20_max value: 1.919044428493598 - type: nauc_recall_at_20_std value: -49.5380686276396 - type: nauc_recall_at_3_diff1 value: 69.83247207081557 - type: nauc_recall_at_3_max value: 2.395588418833963 - type: nauc_recall_at_3_std value: -52.11119790224493 - type: nauc_recall_at_5_diff1 value: 69.25881483845956 - type: nauc_recall_at_5_max value: 2.9185552604991716 - type: nauc_recall_at_5_std value: -54.376346690212095 - type: ndcg_at_1 value: 75.92 - type: ndcg_at_10 value: 84.47099999999999 - type: ndcg_at_100 value: 86.11999999999999 - type: ndcg_at_1000 value: 86.276 - type: ndcg_at_20 value: 85.37599999999999 - type: ndcg_at_3 value: 81.0 - type: ndcg_at_5 value: 82.88799999999999 - type: precision_at_1 value: 75.92 - type: precision_at_10 value: 12.987000000000002 - type: precision_at_100 value: 1.5190000000000001 - type: precision_at_1000 value: 0.156 - type: precision_at_20 value: 6.977 - type: precision_at_3 value: 35.573 - type: precision_at_5 value: 23.566000000000003 - type: recall_at_1 value: 65.892 - type: recall_at_10 value: 93.318 - type: recall_at_100 value: 99.124 - type: recall_at_1000 value: 99.92699999999999 - type: recall_at_20 value: 96.256 - type: recall_at_3 value: 83.69 - type: recall_at_5 value: 88.783 - task: type: Retrieval dataset: name: MTEB SCIDOCS-PL type: clarin-knext/scidocs-pl config: default split: test revision: 45452b03f05560207ef19149545f168e596c9337 metrics: - type: main_score value: 19.528000000000002 - type: map_at_1 value: 4.5280000000000005 - type: map_at_10 value: 11.649 - type: map_at_100 value: 14.019 - type: map_at_1000 value: 14.35 - type: map_at_20 value: 12.866 - type: map_at_3 value: 8.35 - type: map_at_5 value: 9.84 - type: mrr_at_1 value: 22.3 - type: mrr_at_10 value: 32.690039682539656 - type: mrr_at_100 value: 33.91097016542133 - type: mrr_at_1000 value: 33.96940693754695 - type: mrr_at_20 value: 33.418312740750785 - type: mrr_at_3 value: 29.4 - type: mrr_at_5 value: 31.21999999999997 - type: nauc_map_at_1000_diff1 value: 20.52578935318615 - type: nauc_map_at_1000_max value: 28.28553814852898 - type: nauc_map_at_1000_std value: 18.74384140790138 - type: nauc_map_at_100_diff1 value: 20.508083204903077 - type: nauc_map_at_100_max value: 28.281447260273346 - type: nauc_map_at_100_std value: 18.51851601604162 - type: nauc_map_at_10_diff1 value: 21.028884157759624 - type: nauc_map_at_10_max value: 26.98935951161403 - type: nauc_map_at_10_std value: 14.434790357547536 - type: 
nauc_map_at_1_diff1 value: 23.406427416653127 - type: nauc_map_at_1_max value: 21.759624726647303 - type: nauc_map_at_1_std value: 8.335925909478444 - type: nauc_map_at_20_diff1 value: 20.370301978337785 - type: nauc_map_at_20_max value: 27.30787972231405 - type: nauc_map_at_20_std value: 16.166505401287353 - type: nauc_map_at_3_diff1 value: 23.920717676009453 - type: nauc_map_at_3_max value: 26.061264285994124 - type: nauc_map_at_3_std value: 10.707123907182902 - type: nauc_map_at_5_diff1 value: 22.180679453453557 - type: nauc_map_at_5_max value: 26.85332935641574 - type: nauc_map_at_5_std value: 12.316377808191762 - type: nauc_mrr_at_1000_diff1 value: 21.49186339320302 - type: nauc_mrr_at_1000_max value: 24.329921012356493 - type: nauc_mrr_at_1000_std value: 13.6080824939291 - type: nauc_mrr_at_100_diff1 value: 21.47653180378912 - type: nauc_mrr_at_100_max value: 24.34218235410752 - type: nauc_mrr_at_100_std value: 13.646711743513668 - type: nauc_mrr_at_10_diff1 value: 21.487198850706935 - type: nauc_mrr_at_10_max value: 24.32385099521571 - type: nauc_mrr_at_10_std value: 13.26596223383694 - type: nauc_mrr_at_1_diff1 value: 23.19221955587559 - type: nauc_mrr_at_1_max value: 21.963004569187575 - type: nauc_mrr_at_1_std value: 8.799819519408619 - type: nauc_mrr_at_20_diff1 value: 21.51014357510076 - type: nauc_mrr_at_20_max value: 24.376067405199347 - type: nauc_mrr_at_20_std value: 13.643597889716563 - type: nauc_mrr_at_3_diff1 value: 22.60437837853161 - type: nauc_mrr_at_3_max value: 23.58608363876532 - type: nauc_mrr_at_3_std value: 11.887163540535768 - type: nauc_mrr_at_5_diff1 value: 21.919324914716633 - type: nauc_mrr_at_5_max value: 23.71458680225389 - type: nauc_mrr_at_5_std value: 12.507643886191785 - type: nauc_ndcg_at_1000_diff1 value: 18.546848864440005 - type: nauc_ndcg_at_1000_max value: 30.031984469206325 - type: nauc_ndcg_at_1000_std value: 26.561149084437485 - type: nauc_ndcg_at_100_diff1 value: 18.76271748622068 - type: nauc_ndcg_at_100_max value: 30.180887663861306 - type: nauc_ndcg_at_100_std value: 25.50551358758007 - type: nauc_ndcg_at_10_diff1 value: 19.861367738304697 - type: nauc_ndcg_at_10_max value: 27.360442235691522 - type: nauc_ndcg_at_10_std value: 16.476546243351976 - type: nauc_ndcg_at_1_diff1 value: 23.56715803292495 - type: nauc_ndcg_at_1_max value: 22.29229945166374 - type: nauc_ndcg_at_1_std value: 8.43434671818737 - type: nauc_ndcg_at_20_diff1 value: 18.885059883708053 - type: nauc_ndcg_at_20_max value: 27.78854464221595 - type: nauc_ndcg_at_20_std value: 19.404353378015255 - type: nauc_ndcg_at_3_diff1 value: 23.34227259398943 - type: nauc_ndcg_at_3_max value: 25.75899010582446 - type: nauc_ndcg_at_3_std value: 12.097012181915954 - type: nauc_ndcg_at_5_diff1 value: 21.599246331396863 - type: nauc_ndcg_at_5_max value: 26.6575824351444 - type: nauc_ndcg_at_5_std value: 14.029006846982394 - type: nauc_precision_at_1000_diff1 value: 4.880571159099271 - type: nauc_precision_at_1000_max value: 24.693741787360725 - type: nauc_precision_at_1000_std value: 41.00756555344345 - type: nauc_precision_at_100_diff1 value: 10.440170876298648 - type: nauc_precision_at_100_max value: 28.942738351320408 - type: nauc_precision_at_100_std value: 36.921704945977446 - type: nauc_precision_at_10_diff1 value: 15.55680558043308 - type: nauc_precision_at_10_max value: 27.31414489241847 - type: nauc_precision_at_10_std value: 19.76275914256793 - type: nauc_precision_at_1_diff1 value: 23.56715803292495 - type: nauc_precision_at_1_max value: 22.29229945166374 - type: 
nauc_precision_at_1_std value: 8.43434671818737 - type: nauc_precision_at_20_diff1 value: 12.57247210423589 - type: nauc_precision_at_20_max value: 25.978951783180946 - type: nauc_precision_at_20_std value: 23.89998191646426 - type: nauc_precision_at_3_diff1 value: 22.61273732758558 - type: nauc_precision_at_3_max value: 26.51246898792034 - type: nauc_precision_at_3_std value: 13.618855663226162 - type: nauc_precision_at_5_diff1 value: 19.216237125486472 - type: nauc_precision_at_5_max value: 27.491221626577868 - type: nauc_precision_at_5_std value: 16.448119031617793 - type: nauc_recall_at_1000_diff1 value: 5.787043341957982 - type: nauc_recall_at_1000_max value: 25.922109246772763 - type: nauc_recall_at_1000_std value: 43.03768522656805 - type: nauc_recall_at_100_diff1 value: 10.696362559629796 - type: nauc_recall_at_100_max value: 29.335080453227146 - type: nauc_recall_at_100_std value: 37.271217586452124 - type: nauc_recall_at_10_diff1 value: 15.458092305569215 - type: nauc_recall_at_10_max value: 27.24445210740807 - type: nauc_recall_at_10_std value: 19.71157635644842 - type: nauc_recall_at_1_diff1 value: 23.406427416653127 - type: nauc_recall_at_1_max value: 21.759624726647303 - type: nauc_recall_at_1_std value: 8.335925909478444 - type: nauc_recall_at_20_diff1 value: 12.666354755313089 - type: nauc_recall_at_20_max value: 26.089770792562327 - type: nauc_recall_at_20_std value: 24.153776619741254 - type: nauc_recall_at_3_diff1 value: 22.545408113368953 - type: nauc_recall_at_3_max value: 26.18564049945919 - type: nauc_recall_at_3_std value: 13.308772571657293 - type: nauc_recall_at_5_diff1 value: 19.063078320434958 - type: nauc_recall_at_5_max value: 27.15038597116091 - type: nauc_recall_at_5_std value: 16.202694888143302 - type: ndcg_at_1 value: 22.2 - type: ndcg_at_10 value: 19.528000000000002 - type: ndcg_at_100 value: 28.444000000000003 - type: ndcg_at_1000 value: 33.826 - type: ndcg_at_20 value: 22.746 - type: ndcg_at_3 value: 18.413 - type: ndcg_at_5 value: 15.927 - type: precision_at_1 value: 22.2 - type: precision_at_10 value: 10.24 - type: precision_at_100 value: 2.3040000000000003 - type: precision_at_1000 value: 0.358 - type: precision_at_20 value: 6.97 - type: precision_at_3 value: 17.299999999999997 - type: precision_at_5 value: 13.919999999999998 - type: recall_at_1 value: 4.5280000000000005 - type: recall_at_10 value: 20.757 - type: recall_at_100 value: 46.75 - type: recall_at_1000 value: 72.738 - type: recall_at_20 value: 28.28 - type: recall_at_3 value: 10.558 - type: recall_at_5 value: 14.148 - task: type: PairClassification dataset: name: MTEB SICK-E-PL type: PL-MTEB/sicke-pl-pairclassification config: default split: test revision: 71bba34b0ece6c56dfcf46d9758a27f7a90f17e9 metrics: - type: cosine_accuracy value: 87.50509580105992 - type: cosine_accuracy_threshold value: 89.01510631979949 - type: cosine_ap value: 85.58291779193907 - type: cosine_f1 value: 77.58919293384136 - type: cosine_f1_threshold value: 87.10908804245841 - type: cosine_precision value: 75.52258934592044 - type: cosine_recall value: 79.77207977207978 - type: dot_accuracy value: 83.9380350591113 - type: dot_accuracy_threshold value: 2292800.0 - type: dot_ap value: 77.56937485120034 - type: dot_f1 value: 73.32065906210391 - type: dot_f1_threshold value: 2190400.0 - type: dot_precision value: 66.03881278538812 - type: dot_recall value: 82.4074074074074 - type: euclidean_accuracy value: 87.89237668161435 - type: euclidean_accuracy_threshold value: 7497.701400069587 - type: euclidean_ap value: 
85.97216152106346 - type: euclidean_f1 value: 77.97228300510578 - type: euclidean_f1_threshold value: 7799.027816670506 - type: euclidean_precision value: 79.89536621823618 - type: euclidean_recall value: 76.13960113960114 - type: main_score value: 85.97216152106346 - type: manhattan_accuracy value: 87.85161027313494 - type: manhattan_accuracy_threshold value: 357242.9743885994 - type: manhattan_ap value: 85.96709490495458 - type: manhattan_f1 value: 77.9874213836478 - type: manhattan_f1_threshold value: 383558.8531732559 - type: manhattan_precision value: 76.5432098765432 - type: manhattan_recall value: 79.48717948717949 - type: max_ap value: 85.97216152106346 - type: max_f1 value: 77.9874213836478 - type: max_precision value: 79.89536621823618 - type: max_recall value: 82.4074074074074 - type: similarity_accuracy value: 87.50509580105992 - type: similarity_accuracy_threshold value: 89.01510631979949 - type: similarity_ap value: 85.58291779193907 - type: similarity_f1 value: 77.58919293384136 - type: similarity_f1_threshold value: 87.10908804245841 - type: similarity_precision value: 75.52258934592044 - type: similarity_recall value: 79.77207977207978 - task: type: STS dataset: name: MTEB SICK-R-PL type: PL-MTEB/sickr-pl-sts config: default split: test revision: fd5c2441b7eeff8676768036142af4cfa42c1339 metrics: - type: cosine_pearson value: 79.68602301743276 - type: cosine_spearman value: 78.15913085997471 - type: euclidean_pearson value: 77.19541180768627 - type: euclidean_spearman value: 77.9122894221527 - type: main_score value: 78.15913085997471 - type: manhattan_pearson value: 77.24713453824641 - type: manhattan_spearman value: 77.95971728547582 - type: pearson value: 79.68602301743276 - type: spearman value: 78.15913085997471 - task: type: STS dataset: name: MTEB STS22 (pl) type: mteb/sts22-crosslingual-sts config: pl split: test revision: de9d86b3b84231dc21f76c7b7af1f28e2f57f6e3 metrics: - type: cosine_pearson value: 42.01062393061261 - type: cosine_spearman value: 42.79076406559122 - type: euclidean_pearson value: 28.57786522106708 - type: euclidean_spearman value: 42.51040813516686 - type: main_score value: 42.79076406559122 - type: manhattan_pearson value: 28.855884350706653 - type: manhattan_spearman value: 42.77481125184737 - type: pearson value: 42.01062393061261 - type: spearman value: 42.79076406559122 - task: type: Retrieval dataset: name: MTEB SciFact-PL type: clarin-knext/scifact-pl config: default split: test revision: 47932a35f045ef8ed01ba82bf9ff67f6e109207e metrics: - type: main_score value: 74.434 - type: map_at_1 value: 59.494 - type: map_at_10 value: 69.893 - type: map_at_100 value: 70.45 - type: map_at_1000 value: 70.466 - type: map_at_20 value: 70.259 - type: map_at_3 value: 67.037 - type: map_at_5 value: 68.777 - type: mrr_at_1 value: 62.66666666666667 - type: mrr_at_10 value: 71.04457671957671 - type: mrr_at_100 value: 71.52299909263925 - type: mrr_at_1000 value: 71.53881086964122 - type: mrr_at_20 value: 71.33636271136271 - type: mrr_at_3 value: 69.16666666666667 - type: mrr_at_5 value: 70.26666666666667 - type: nauc_map_at_1000_diff1 value: 68.97113084189034 - type: nauc_map_at_1000_max value: 51.00665747497857 - type: nauc_map_at_1000_std value: 8.970270487093412 - type: nauc_map_at_100_diff1 value: 68.97281660521169 - type: nauc_map_at_100_max value: 51.01659549614879 - type: nauc_map_at_100_std value: 8.986483862053491 - type: nauc_map_at_10_diff1 value: 69.07605123979184 - type: nauc_map_at_10_max value: 51.229841935772804 - type: nauc_map_at_10_std 
value: 9.050901052243548 - type: nauc_map_at_1_diff1 value: 71.46187295357046 - type: nauc_map_at_1_max value: 46.82038076857106 - type: nauc_map_at_1_std value: 6.931602615510153 - type: nauc_map_at_20_diff1 value: 68.93823362705625 - type: nauc_map_at_20_max value: 51.15218544845727 - type: nauc_map_at_20_std value: 8.993550237629675 - type: nauc_map_at_3_diff1 value: 69.19558420072627 - type: nauc_map_at_3_max value: 47.345905341053886 - type: nauc_map_at_3_std value: 4.833936436252541 - type: nauc_map_at_5_diff1 value: 69.05067049349557 - type: nauc_map_at_5_max value: 49.62866209452668 - type: nauc_map_at_5_std value: 7.455937282103214 - type: nauc_mrr_at_1000_diff1 value: 69.2896395759106 - type: nauc_mrr_at_1000_max value: 54.20478659857226 - type: nauc_mrr_at_1000_std value: 12.534151525016302 - type: nauc_mrr_at_100_diff1 value: 69.29115865311857 - type: nauc_mrr_at_100_max value: 54.212882919608475 - type: nauc_mrr_at_100_std value: 12.548435473868432 - type: nauc_mrr_at_10_diff1 value: 69.29596234146305 - type: nauc_mrr_at_10_max value: 54.391683731646935 - type: nauc_mrr_at_10_std value: 12.74312540729047 - type: nauc_mrr_at_1_diff1 value: 71.19661136604304 - type: nauc_mrr_at_1_max value: 53.50646788895577 - type: nauc_mrr_at_1_std value: 14.68408048005645 - type: nauc_mrr_at_20_diff1 value: 69.24714813412893 - type: nauc_mrr_at_20_max value: 54.32239828421196 - type: nauc_mrr_at_20_std value: 12.623980761665866 - type: nauc_mrr_at_3_diff1 value: 69.22708724496187 - type: nauc_mrr_at_3_max value: 53.18873450995116 - type: nauc_mrr_at_3_std value: 11.336687945925586 - type: nauc_mrr_at_5_diff1 value: 69.10748983236182 - type: nauc_mrr_at_5_max value: 53.878090193979034 - type: nauc_mrr_at_5_std value: 12.079036178698662 - type: nauc_ndcg_at_1000_diff1 value: 68.66705448374432 - type: nauc_ndcg_at_1000_max value: 52.74699991296371 - type: nauc_ndcg_at_1000_std value: 10.535824386304968 - type: nauc_ndcg_at_100_diff1 value: 68.66862462407086 - type: nauc_ndcg_at_100_max value: 52.979821543362874 - type: nauc_ndcg_at_100_std value: 10.856284103500371 - type: nauc_ndcg_at_10_diff1 value: 68.66965948376267 - type: nauc_ndcg_at_10_max value: 53.978681919984474 - type: nauc_ndcg_at_10_std value: 11.10472732803466 - type: nauc_ndcg_at_1_diff1 value: 71.19661136604304 - type: nauc_ndcg_at_1_max value: 53.50646788895577 - type: nauc_ndcg_at_1_std value: 14.68408048005645 - type: nauc_ndcg_at_20_diff1 value: 68.20754850499976 - type: nauc_ndcg_at_20_max value: 53.590485842045595 - type: nauc_ndcg_at_20_std value: 10.719753086433334 - type: nauc_ndcg_at_3_diff1 value: 68.23406959629385 - type: nauc_ndcg_at_3_max value: 48.8837450762613 - type: nauc_ndcg_at_3_std value: 6.287949648205997 - type: nauc_ndcg_at_5_diff1 value: 68.52532849588677 - type: nauc_ndcg_at_5_max value: 51.29845300513165 - type: nauc_ndcg_at_5_std value: 8.15488455762137 - type: nauc_precision_at_1000_diff1 value: -29.56388929021074 - type: nauc_precision_at_1000_max value: 18.61674681637121 - type: nauc_precision_at_1000_std value: 41.68541412973936 - type: nauc_precision_at_100_diff1 value: -17.020740767390375 - type: nauc_precision_at_100_max value: 24.321682766394957 - type: nauc_precision_at_100_std value: 39.36188711602 - type: nauc_precision_at_10_diff1 value: 7.735819461600302 - type: nauc_precision_at_10_max value: 39.59963139423176 - type: nauc_precision_at_10_std value: 33.923494696390385 - type: nauc_precision_at_1_diff1 value: 71.19661136604304 - type: nauc_precision_at_1_max value: 53.50646788895577 - 
type: nauc_precision_at_1_std value: 14.68408048005645 - type: nauc_precision_at_20_diff1 value: -3.587900694179661 - type: nauc_precision_at_20_max value: 33.36606615861144 - type: nauc_precision_at_20_std value: 34.51624192343654 - type: nauc_precision_at_3_diff1 value: 41.996620318298625 - type: nauc_precision_at_3_max value: 43.08007454860597 - type: nauc_precision_at_3_std value: 14.398965447916495 - type: nauc_precision_at_5_diff1 value: 25.054180107661132 - type: nauc_precision_at_5_max value: 40.94617942853718 - type: nauc_precision_at_5_std value: 23.69992709404865 - type: nauc_recall_at_1000_diff1 value: .nan - type: nauc_recall_at_1000_max value: .nan - type: nauc_recall_at_1000_std value: .nan - type: nauc_recall_at_100_diff1 value: 68.09523809523836 - type: nauc_recall_at_100_max value: 63.034547152194406 - type: nauc_recall_at_100_std value: 23.594771241830657 - type: nauc_recall_at_10_diff1 value: 66.43213426149696 - type: nauc_recall_at_10_max value: 63.07509853849101 - type: nauc_recall_at_10_std value: 15.44924084252273 - type: nauc_recall_at_1_diff1 value: 71.46187295357046 - type: nauc_recall_at_1_max value: 46.82038076857106 - type: nauc_recall_at_1_std value: 6.931602615510153 - type: nauc_recall_at_20_diff1 value: 61.64354198229226 - type: nauc_recall_at_20_max value: 63.09950698826864 - type: nauc_recall_at_20_std value: 12.823209698925014 - type: nauc_recall_at_3_diff1 value: 65.63352507252078 - type: nauc_recall_at_3_max value: 45.10210171735505 - type: nauc_recall_at_3_std value: -0.08017546941514365 - type: nauc_recall_at_5_diff1 value: 65.93453179242769 - type: nauc_recall_at_5_max value: 51.97740656606473 - type: nauc_recall_at_5_std value: 4.929967882548962 - type: ndcg_at_1 value: 62.666999999999994 - type: ndcg_at_10 value: 74.434 - type: ndcg_at_100 value: 76.655 - type: ndcg_at_1000 value: 77.08 - type: ndcg_at_20 value: 75.588 - type: ndcg_at_3 value: 69.75099999999999 - type: ndcg_at_5 value: 72.09100000000001 - type: precision_at_1 value: 62.666999999999994 - type: precision_at_10 value: 9.9 - type: precision_at_100 value: 1.097 - type: precision_at_1000 value: 0.11299999999999999 - type: precision_at_20 value: 5.2 - type: precision_at_3 value: 27.0 - type: precision_at_5 value: 17.933 - type: recall_at_1 value: 59.494 - type: recall_at_10 value: 87.13300000000001 - type: recall_at_100 value: 96.667 - type: recall_at_1000 value: 100.0 - type: recall_at_20 value: 91.43299999999999 - type: recall_at_3 value: 74.461 - type: recall_at_5 value: 80.34400000000001 - task: type: Retrieval dataset: name: MTEB TRECCOVID-PL type: clarin-knext/trec-covid-pl config: default split: test revision: 81bcb408f33366c2a20ac54adafad1ae7e877fdd metrics: - type: main_score value: 82.749 - type: map_at_1 value: 0.20400000000000001 - type: map_at_10 value: 2.099 - type: map_at_100 value: 12.948 - type: map_at_1000 value: 32.007000000000005 - type: map_at_20 value: 3.746 - type: map_at_3 value: 0.651 - type: map_at_5 value: 1.061 - type: mrr_at_1 value: 84.0 - type: mrr_at_10 value: 91.66666666666666 - type: mrr_at_100 value: 91.66666666666666 - type: mrr_at_1000 value: 91.66666666666666 - type: mrr_at_20 value: 91.66666666666666 - type: mrr_at_3 value: 91.66666666666666 - type: mrr_at_5 value: 91.66666666666666 - type: nauc_map_at_1000_diff1 value: 1.0291414165448085 - type: nauc_map_at_1000_max value: 57.33479540784058 - type: nauc_map_at_1000_std value: 76.70364036170582 - type: nauc_map_at_100_diff1 value: 6.949672309533349 - type: nauc_map_at_100_max value: 
43.99861611069154 - type: nauc_map_at_100_std value: 64.12473626966596 - type: nauc_map_at_10_diff1 value: 4.208568177173666 - type: nauc_map_at_10_max value: 18.875910045226423 - type: nauc_map_at_10_std value: 34.58171216714189 - type: nauc_map_at_1_diff1 value: 8.433450768728983 - type: nauc_map_at_1_max value: 24.08001091473891 - type: nauc_map_at_1_std value: 35.21473053133869 - type: nauc_map_at_20_diff1 value: 6.041054220619057 - type: nauc_map_at_20_max value: 22.57475437061051 - type: nauc_map_at_20_std value: 35.254808865756964 - type: nauc_map_at_3_diff1 value: 11.166815378728485 - type: nauc_map_at_3_max value: 18.995433996118248 - type: nauc_map_at_3_std value: 34.29696290521795 - type: nauc_map_at_5_diff1 value: 7.1134812647567855 - type: nauc_map_at_5_max value: 20.03877039266845 - type: nauc_map_at_5_std value: 36.21644151312843 - type: nauc_mrr_at_1000_diff1 value: -7.262394669801826 - type: nauc_mrr_at_1000_max value: 66.22378992749366 - type: nauc_mrr_at_1000_std value: 68.18146188516563 - type: nauc_mrr_at_100_diff1 value: -7.262394669801826 - type: nauc_mrr_at_100_max value: 66.22378992749366 - type: nauc_mrr_at_100_std value: 68.18146188516563 - type: nauc_mrr_at_10_diff1 value: -7.262394669801826 - type: nauc_mrr_at_10_max value: 66.22378992749366 - type: nauc_mrr_at_10_std value: 68.18146188516563 - type: nauc_mrr_at_1_diff1 value: -11.38929798723619 - type: nauc_mrr_at_1_max value: 68.58738340697101 - type: nauc_mrr_at_1_std value: 68.00441826215022 - type: nauc_mrr_at_20_diff1 value: -7.262394669801826 - type: nauc_mrr_at_20_max value: 66.22378992749366 - type: nauc_mrr_at_20_std value: 68.18146188516563 - type: nauc_mrr_at_3_diff1 value: -7.262394669801826 - type: nauc_mrr_at_3_max value: 66.22378992749366 - type: nauc_mrr_at_3_std value: 68.18146188516563 - type: nauc_mrr_at_5_diff1 value: -7.262394669801826 - type: nauc_mrr_at_5_max value: 66.22378992749366 - type: nauc_mrr_at_5_std value: 68.18146188516563 - type: nauc_ndcg_at_1000_diff1 value: 2.5628376286433334 - type: nauc_ndcg_at_1000_max value: 57.605148480655025 - type: nauc_ndcg_at_1000_std value: 76.62891677430625 - type: nauc_ndcg_at_100_diff1 value: -13.313083767893671 - type: nauc_ndcg_at_100_max value: 52.932453336031905 - type: nauc_ndcg_at_100_std value: 73.5050466104544 - type: nauc_ndcg_at_10_diff1 value: -6.837803344621873 - type: nauc_ndcg_at_10_max value: 59.29833159945462 - type: nauc_ndcg_at_10_std value: 63.719268128346705 - type: nauc_ndcg_at_1_diff1 value: 4.834338452523335 - type: nauc_ndcg_at_1_max value: 53.58546768562144 - type: nauc_ndcg_at_1_std value: 59.07659252386643 - type: nauc_ndcg_at_20_diff1 value: -9.617683189610558 - type: nauc_ndcg_at_20_max value: 54.57354685878183 - type: nauc_ndcg_at_20_std value: 63.15198506529425 - type: nauc_ndcg_at_3_diff1 value: 15.216236580270994 - type: nauc_ndcg_at_3_max value: 58.345749967766416 - type: nauc_ndcg_at_3_std value: 61.78177922399883 - type: nauc_ndcg_at_5_diff1 value: 1.3882436296634026 - type: nauc_ndcg_at_5_max value: 62.44013008368074 - type: nauc_ndcg_at_5_std value: 65.64455986653293 - type: nauc_precision_at_1000_diff1 value: -18.516822124710856 - type: nauc_precision_at_1000_max value: 33.10336267989325 - type: nauc_precision_at_1000_std value: 29.49816019882571 - type: nauc_precision_at_100_diff1 value: -14.113619184538592 - type: nauc_precision_at_100_max value: 55.55228172103563 - type: nauc_precision_at_100_std value: 69.64355056246397 - type: nauc_precision_at_10_diff1 value: -27.271286464111455 - type: 
nauc_precision_at_10_max value: 61.885272647604594 - type: nauc_precision_at_10_std value: 60.73389705676694 - type: nauc_precision_at_1_diff1 value: -11.38929798723619 - type: nauc_precision_at_1_max value: 68.58738340697101 - type: nauc_precision_at_1_std value: 68.00441826215022 - type: nauc_precision_at_20_diff1 value: -21.53639909310826 - type: nauc_precision_at_20_max value: 53.361537614358376 - type: nauc_precision_at_20_std value: 55.58737187496432 - type: nauc_precision_at_3_diff1 value: 3.785071466384217 - type: nauc_precision_at_3_max value: 61.66906148377818 - type: nauc_precision_at_3_std value: 62.81857369734561 - type: nauc_precision_at_5_diff1 value: -16.00339477131436 - type: nauc_precision_at_5_max value: 61.5246951163262 - type: nauc_precision_at_5_std value: 63.615062452722135 - type: nauc_recall_at_1000_diff1 value: 5.871263115826736 - type: nauc_recall_at_1000_max value: 50.48397949000848 - type: nauc_recall_at_1000_std value: 67.37950715297474 - type: nauc_recall_at_100_diff1 value: 8.310215006893952 - type: nauc_recall_at_100_max value: 28.687726825722386 - type: nauc_recall_at_100_std value: 50.34038560928654 - type: nauc_recall_at_10_diff1 value: 3.3408195168322075 - type: nauc_recall_at_10_max value: 6.89511828305496 - type: nauc_recall_at_10_std value: 22.929267555360028 - type: nauc_recall_at_1_diff1 value: 8.433450768728983 - type: nauc_recall_at_1_max value: 24.08001091473891 - type: nauc_recall_at_1_std value: 35.21473053133869 - type: nauc_recall_at_20_diff1 value: 5.307683260432045 - type: nauc_recall_at_20_max value: 10.025532087519974 - type: nauc_recall_at_20_std value: 24.110512570368947 - type: nauc_recall_at_3_diff1 value: 13.355136074654078 - type: nauc_recall_at_3_max value: 8.568079109800236 - type: nauc_recall_at_3_std value: 23.691593767005745 - type: nauc_recall_at_5_diff1 value: 6.535580157651383 - type: nauc_recall_at_5_max value: 9.1442468749571 - type: nauc_recall_at_5_std value: 27.00111567203191 - type: ndcg_at_1 value: 79.0 - type: ndcg_at_10 value: 82.749 - type: ndcg_at_100 value: 63.846000000000004 - type: ndcg_at_1000 value: 57.691 - type: ndcg_at_20 value: 77.076 - type: ndcg_at_3 value: 84.83800000000001 - type: ndcg_at_5 value: 83.016 - type: precision_at_1 value: 84.0 - type: precision_at_10 value: 87.8 - type: precision_at_100 value: 66.10000000000001 - type: precision_at_1000 value: 25.764 - type: precision_at_20 value: 81.10000000000001 - type: precision_at_3 value: 91.333 - type: precision_at_5 value: 88.8 - type: recall_at_1 value: 0.20400000000000001 - type: recall_at_10 value: 2.294 - type: recall_at_100 value: 16.134999999999998 - type: recall_at_1000 value: 54.981 - type: recall_at_20 value: 4.201 - type: recall_at_3 value: 0.699 - type: recall_at_5 value: 1.141 --- <h1 align="center">FlagEmbedding</h1> For more details please refer to our Github: [FlagEmbedding](https://github.com/FlagOpen/FlagEmbedding). **BGE-Multilingual-Gemma2** is a LLM-based multilingual embedding model. It is trained on a diverse range of languages and tasks based on [google/gemma-2-9b](https://huggingface.co/google/gemma-2-9b). BGE-Multilingual-Gemma2 primarily demonstrates the following advancements: - Diverse training data: The model's training data spans a broad range of languages, including English, Chinese, Japanese, Korean, French, and more.Additionally, the data covers a variety of task types, such as retrieval, classification, and clustering. 
- Outstanding performance: The model exhibits state-of-the-art (SOTA) results on multilingual benchmarks like MIRACL, MTEB-pl, and MTEB-fr. It also achieves excellent performance on other major evaluations, including MTEB, C-MTEB and AIR-Bench.

## 📑 Open-source Plan

- [x] Checkpoint
- [ ] Training Data

We will release the training data of **BGE-Multilingual-Gemma2** in the future.

## Usage

### Using FlagEmbedding
```
git clone https://github.com/FlagOpen/FlagEmbedding.git
cd FlagEmbedding
pip install -e .
```
```python
from FlagEmbedding import FlagLLMModel

queries = ["how much protein should a female eat", "summit define"]
documents = [
    "As a general guideline, the CDC's average requirement of protein for women ages 19 to 70 is 46 grams per day. But, as you can see from this chart, you'll need to increase that if you're expecting or training for a marathon. Check out the chart below to see how much protein you should be eating each day.",
    "Definition of summit for English Language Learners. : 1 the highest point of a mountain : the top of a mountain. : 2 the highest level. : 3 a meeting or series of meetings between the leaders of two or more governments."
]

model = FlagLLMModel('BAAI/bge-multilingual-gemma2',
                     query_instruction_for_retrieval="Given a web search query, retrieve relevant passages that answer the query.",
                     use_fp16=True)  # Setting use_fp16 to True speeds up computation with a slight performance degradation

embeddings_1 = model.encode_queries(queries)
embeddings_2 = model.encode_corpus(documents)
similarity = embeddings_1 @ embeddings_2.T
print(similarity)
# [[ 0.559     0.01654 ]
#  [-0.002575  0.4998  ]]
```

By default, FlagLLMModel will use all available GPUs when encoding. Please set `os.environ["CUDA_VISIBLE_DEVICES"]` to select specific GPUs. You can also set `os.environ["CUDA_VISIBLE_DEVICES"]=""` to make all GPUs unavailable.

### Using Sentence Transformers

```python
from sentence_transformers import SentenceTransformer
import torch

# Load the model, optionally in float16 precision for faster inference
model = SentenceTransformer("BAAI/bge-multilingual-gemma2", model_kwargs={"torch_dtype": torch.float16})

# Prepare a prompt given an instruction
instruction = 'Given a web search query, retrieve relevant passages that answer the query.'
prompt = f'<instruct>{instruction}\n<query>'

# Prepare queries and documents
queries = [
    'how much protein should a female eat',
    'summit define',
]
documents = [
    "As a general guideline, the CDC's average requirement of protein for women ages 19 to 70 is 46 grams per day. But, as you can see from this chart, you'll need to increase that if you're expecting or training for a marathon. Check out the chart below to see how much protein you should be eating each day.",
    "Definition of summit for English Language Learners. : 1 the highest point of a mountain : the top of a mountain. : 2 the highest level. : 3 a meeting or series of meetings between the leaders of two or more governments."
]

# Compute the query and document embeddings
query_embeddings = model.encode(queries, prompt=prompt)
document_embeddings = model.encode(documents)

# Compute the cosine similarity between the query and document embeddings
similarities = model.similarity(query_embeddings, document_embeddings)
print(similarities)
# tensor([[ 0.5591,  0.0164],
#         [-0.0026,  0.4993]], dtype=torch.float16)
```

### Using HuggingFace Transformers

```python
import torch
import torch.nn.functional as F

from torch import Tensor
from transformers import AutoTokenizer, AutoModel


def last_token_pool(last_hidden_states: Tensor,
                    attention_mask: Tensor) -> Tensor:
    left_padding = (attention_mask[:, -1].sum() == attention_mask.shape[0])
    if left_padding:
        return last_hidden_states[:, -1]
    else:
        sequence_lengths = attention_mask.sum(dim=1) - 1
        batch_size = last_hidden_states.shape[0]
        return last_hidden_states[torch.arange(batch_size, device=last_hidden_states.device), sequence_lengths]


def get_detailed_instruct(task_description: str, query: str) -> str:
    return f'<instruct>{task_description}\n<query>{query}'


task = 'Given a web search query, retrieve relevant passages that answer the query.'
queries = [
    get_detailed_instruct(task, 'how much protein should a female eat'),
    get_detailed_instruct(task, 'summit define')
]
# No need to add instructions for documents
documents = [
    "As a general guideline, the CDC's average requirement of protein for women ages 19 to 70 is 46 grams per day. But, as you can see from this chart, you'll need to increase that if you're expecting or training for a marathon. Check out the chart below to see how much protein you should be eating each day.",
    "Definition of summit for English Language Learners. : 1 the highest point of a mountain : the top of a mountain. : 2 the highest level. : 3 a meeting or series of meetings between the leaders of two or more governments."
]
input_texts = queries + documents

tokenizer = AutoTokenizer.from_pretrained('BAAI/bge-multilingual-gemma2')
model = AutoModel.from_pretrained('BAAI/bge-multilingual-gemma2')
model.eval()

max_length = 4096
# Tokenize the input texts
batch_dict = tokenizer(input_texts, max_length=max_length, padding=True, truncation=True, return_tensors='pt', pad_to_multiple_of=8)

with torch.no_grad():
    outputs = model(**batch_dict)
    embeddings = last_token_pool(outputs.last_hidden_state, batch_dict['attention_mask'])

# normalize embeddings
embeddings = F.normalize(embeddings, p=2, dim=1)
scores = (embeddings[:2] @ embeddings[2:].T) * 100
print(scores.tolist())
# [[55.92064666748047, 1.6549524068832397], [-0.2698777914047241, 49.95653533935547]]
```

## Evaluation

`bge-multilingual-gemma2` exhibits **state-of-the-art (SOTA) results on benchmarks like MIRACL, MTEB-pl, and MTEB-fr**. It also achieves excellent performance on other major evaluations, including MTEB, C-MTEB and AIR-Bench.
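The leaderboard figures below come from the public benchmark runs. As a rough illustration, here is a minimal sketch of running a single MTEB task locally with the `mteb` package; the task name and output folder are illustrative assumptions, the exact API may vary with the installed `mteb` version, and the official scores additionally use task-specific instructions, so locally reproduced numbers may differ slightly.

```python
# Minimal sketch (assumes `pip install mteb sentence-transformers`).
# Task choice and output folder are illustrative, not the official evaluation config.
from mteb import MTEB
from sentence_transformers import SentenceTransformer

model = SentenceTransformer("BAAI/bge-multilingual-gemma2")

# Run a single retrieval task; results are written as JSON under the output folder.
evaluation = MTEB(tasks=["SciFact"])
results = evaluation.run(model, output_folder="results/bge-multilingual-gemma2")
```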
- [**MIRACL**](https://github.com/project-miracl/miracl) nDCG@10: <img src="./imgs/[email protected]" alt="MIRACL-nDCG@10" style="zoom:200%;" /> Recall@100: <img src="./imgs/[email protected]" alt="MIRACL-Recall@100" style="zoom:200%;" /> - [**MTEB-fr/pl**](https://huggingface.co/spaces/mteb/leaderboard) <img src="./imgs/MTEB_FR_PL.png" alt="MTEB-fr/pl" style="zoom:200%;" /> - [**MTEB**](https://huggingface.co/spaces/mteb/leaderboard) <img src="./imgs/MTEB.png" alt="MTEB" style="zoom:200%;" /> - [**BEIR**](https://huggingface.co/spaces/mteb/leaderboard) <img src="./imgs/BEIR.png" alt="BEIR" style="zoom:200%;" /> - [**C-MTEB**](https://huggingface.co/spaces/mteb/leaderboard) <img src="./imgs/C-MTEB.png" alt="C-MTEB" style="zoom:200%;" /> - [**AIR-Bench**](https://huggingface.co/spaces/AIR-Bench/leaderboard) Long-Doc (en, Recall@10): <img src="./imgs/AIR-Bench_Long-Doc_en.png" alt="AIR-Bench_Long-Doc" style="zoom:200%;" /> QA (en&zh, nDCG@10): <img src="./imgs/AIR-Bench_QA_en_zh.png" alt="AIR-Bench_QA" style="zoom:200%;" /> ## Model List `bge` is short for `BAAI general embedding`. | Model | Language | | Description | query instruction for retrieval [1] | | :----------------------------------------------------------- | :-----------------: | :----------------------------------------------------------: | :----------------------------------------------------------: | :----------------------------------------------------------: | | [BAAI/bge-multilingual-gemma2](https://huggingface.co/BAAI/bge-multilingual-gemma2) | Multilingual | - | A LLM-based multilingual embedding model, trained on a diverse range of languages and tasks. | | [BAAI/bge-en-icl](https://huggingface.co/BAAI/bge-en-icl) | English | - | A LLM-based dense retriever with in-context learning capabilities can fully leverage the model's potential based on a few shot examples(4096 tokens) | Provide instructions and few-shot examples freely based on the given task. 
| | [BAAI/bge-m3](https://huggingface.co/BAAI/bge-m3) | Multilingual | [Inference](https://github.com/FlagOpen/FlagEmbedding/tree/master/FlagEmbedding/BGE_M3#usage) [Fine-tune](https://github.com/FlagOpen/FlagEmbedding/tree/master/FlagEmbedding/BGE_M3) | Multi-Functionality(dense retrieval, sparse retrieval, multi-vector(colbert)), Multi-Linguality, and Multi-Granularity(8192 tokens) | | | [BAAI/llm-embedder](https://huggingface.co/BAAI/llm-embedder) | English | [Inference](./FlagEmbedding/llm_embedder/README.md) [Fine-tune](./FlagEmbedding/llm_embedder/README.md) | a unified embedding model to support diverse retrieval augmentation needs for LLMs | See [README](./FlagEmbedding/llm_embedder/README.md) | | [BAAI/bge-reranker-large](https://huggingface.co/BAAI/bge-reranker-large) | Chinese and English | [Inference](#usage-for-reranker) [Fine-tune](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/reranker) | a cross-encoder model which is more accurate but less efficient [2] | | | [BAAI/bge-reranker-base](https://huggingface.co/BAAI/bge-reranker-base) | Chinese and English | [Inference](#usage-for-reranker) [Fine-tune](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/reranker) | a cross-encoder model which is more accurate but less efficient [2] | | | [BAAI/bge-large-en-v1.5](https://huggingface.co/BAAI/bge-large-en-v1.5) | English | [Inference](#usage-for-embedding-model) [Fine-tune](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/finetune) | version 1.5 with more reasonable similarity distribution | `Represent this sentence for searching relevant passages: ` | | [BAAI/bge-base-en-v1.5](https://huggingface.co/BAAI/bge-base-en-v1.5) | English | [Inference](#usage-for-embedding-model) [Fine-tune](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/finetune) | version 1.5 with more reasonable similarity distribution | `Represent this sentence for searching relevant passages: ` | | [BAAI/bge-small-en-v1.5](https://huggingface.co/BAAI/bge-small-en-v1.5) | English | [Inference](#usage-for-embedding-model) [Fine-tune](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/finetune) | version 1.5 with more reasonable similarity distribution | `Represent this sentence for searching relevant passages: ` | | [BAAI/bge-large-zh-v1.5](https://huggingface.co/BAAI/bge-large-zh-v1.5) | Chinese | [Inference](#usage-for-embedding-model) [Fine-tune](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/finetune) | version 1.5 with more reasonable similarity distribution | `为这个句子生成表示以用于检索相关文章:` | | [BAAI/bge-base-zh-v1.5](https://huggingface.co/BAAI/bge-base-zh-v1.5) | Chinese | [Inference](#usage-for-embedding-model) [Fine-tune](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/finetune) | version 1.5 with more reasonable similarity distribution | `为这个句子生成表示以用于检索相关文章:` | | [BAAI/bge-small-zh-v1.5](https://huggingface.co/BAAI/bge-small-zh-v1.5) | Chinese | [Inference](#usage-for-embedding-model) [Fine-tune](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/finetune) | version 1.5 with more reasonable similarity distribution | `为这个句子生成表示以用于检索相关文章:` | | [BAAI/bge-large-en](https://huggingface.co/BAAI/bge-large-en) | English | [Inference](#usage-for-embedding-model) [Fine-tune](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/finetune) | :trophy: rank **1st** in [MTEB](https://huggingface.co/spaces/mteb/leaderboard) leaderboard | `Represent this sentence for searching relevant passages: ` | | 
[BAAI/bge-base-en](https://huggingface.co/BAAI/bge-base-en) | English | [Inference](#usage-for-embedding-model) [Fine-tune](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/finetune) | a base-scale model but with similar ability to `bge-large-en` | `Represent this sentence for searching relevant passages: ` | | [BAAI/bge-small-en](https://huggingface.co/BAAI/bge-small-en) | English | [Inference](#usage-for-embedding-model) [Fine-tune](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/finetune) | a small-scale model but with competitive performance | `Represent this sentence for searching relevant passages: ` | | [BAAI/bge-large-zh](https://huggingface.co/BAAI/bge-large-zh) | Chinese | [Inference](#usage-for-embedding-model) [Fine-tune](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/finetune) | :trophy: rank **1st** in [C-MTEB](https://github.com/FlagOpen/FlagEmbedding/tree/master/C_MTEB) benchmark | `为这个句子生成表示以用于检索相关文章:` | | [BAAI/bge-base-zh](https://huggingface.co/BAAI/bge-base-zh) | Chinese | [Inference](#usage-for-embedding-model) [Fine-tune](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/finetune) | a base-scale model but with similar ability to `bge-large-zh` | `为这个句子生成表示以用于检索相关文章:` | | [BAAI/bge-small-zh](https://huggingface.co/BAAI/bge-small-zh) | Chinese | [Inference](#usage-for-embedding-model) [Fine-tune](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/finetune) | a small-scale model but with competitive performance | `为这个句子生成表示以用于检索相关文章:` | ## Citation If you find this repository useful, please consider giving a star :star: and citation ``` @misc{bge-m3, title={BGE M3-Embedding: Multi-Lingual, Multi-Functionality, Multi-Granularity Text Embeddings Through Self-Knowledge Distillation}, author={Jianlv Chen and Shitao Xiao and Peitian Zhang and Kun Luo and Defu Lian and Zheng Liu}, year={2024}, eprint={2402.03216}, archivePrefix={arXiv}, primaryClass={cs.CL} } @misc{bge_embedding, title={C-Pack: Packaged Resources To Advance General Chinese Embedding}, author={Shitao Xiao and Zheng Liu and Peitian Zhang and Niklas Muennighoff}, year={2023}, eprint={2309.07597}, archivePrefix={arXiv}, primaryClass={cs.CL} } ```
[ "BIOSSES", "SCIFACT" ]
avsolatorio/NoInstruct-small-Embedding-v0
avsolatorio
sentence-similarity
[ "sentence-transformers", "safetensors", "bert", "feature-extraction", "mteb", "sentence-similarity", "transformers", "en", "license:mit", "model-index", "autotrain_compatible", "text-embeddings-inference", "endpoints_compatible", "region:us" ]
"2024-05-01T16:21:05Z"
2024-05-04T02:11:03+00:00
163,145
22
--- language: - en library_name: sentence-transformers license: mit pipeline_tag: sentence-similarity tags: - feature-extraction - mteb - sentence-similarity - sentence-transformers - transformers model-index: - name: NoInstruct-small-Embedding-v0 results: - task: type: Classification dataset: name: MTEB AmazonCounterfactualClassification (en) type: mteb/amazon_counterfactual config: en split: test revision: e8379541af4e31359cca9fbcf4b00f2671dba205 metrics: - type: accuracy value: 75.76119402985074 - type: ap value: 39.03628777559392 - type: f1 value: 69.85860402259618 - task: type: Classification dataset: name: MTEB AmazonPolarityClassification type: mteb/amazon_polarity config: default split: test revision: e2d317d38cd51312af73b3d32a06d1a08b442046 metrics: - type: accuracy value: 93.29920000000001 - type: ap value: 90.03479490717608 - type: f1 value: 93.28554395248467 - task: type: Classification dataset: name: MTEB AmazonReviewsClassification (en) type: mteb/amazon_reviews_multi config: en split: test revision: 1399c76144fd37290681b995c656ef9b2e06e26d metrics: - type: accuracy value: 49.98799999999999 - type: f1 value: 49.46151232451642 - task: type: Retrieval dataset: name: MTEB ArguAna type: mteb/arguana config: default split: test revision: c22ab2a51041ffd869aaddef7af8d8215647e41a metrics: - type: map_at_1 value: 31.935000000000002 - type: map_at_10 value: 48.791000000000004 - type: map_at_100 value: 49.619 - type: map_at_1000 value: 49.623 - type: map_at_3 value: 44.334 - type: map_at_5 value: 46.908 - type: mrr_at_1 value: 32.93 - type: mrr_at_10 value: 49.158 - type: mrr_at_100 value: 50.00599999999999 - type: mrr_at_1000 value: 50.01 - type: mrr_at_3 value: 44.618 - type: mrr_at_5 value: 47.325 - type: ndcg_at_1 value: 31.935000000000002 - type: ndcg_at_10 value: 57.593 - type: ndcg_at_100 value: 60.841 - type: ndcg_at_1000 value: 60.924 - type: ndcg_at_3 value: 48.416 - type: ndcg_at_5 value: 53.05 - type: precision_at_1 value: 31.935000000000002 - type: precision_at_10 value: 8.549 - type: precision_at_100 value: 0.9900000000000001 - type: precision_at_1000 value: 0.1 - type: precision_at_3 value: 20.081 - type: precision_at_5 value: 14.296000000000001 - type: recall_at_1 value: 31.935000000000002 - type: recall_at_10 value: 85.491 - type: recall_at_100 value: 99.004 - type: recall_at_1000 value: 99.644 - type: recall_at_3 value: 60.242 - type: recall_at_5 value: 71.479 - task: type: Clustering dataset: name: MTEB ArxivClusteringP2P type: mteb/arxiv-clustering-p2p config: default split: test revision: a122ad7f3f0291bf49cc6f4d32aa80929df69d5d metrics: - type: v_measure value: 47.78438534940855 - task: type: Clustering dataset: name: MTEB ArxivClusteringS2S type: mteb/arxiv-clustering-s2s config: default split: test revision: f910caf1a6075f7329cdf8c1a6135696f37dbd53 metrics: - type: v_measure value: 40.12916178519471 - task: type: Reranking dataset: name: MTEB AskUbuntuDupQuestions type: mteb/askubuntudupquestions-reranking config: default split: test revision: 2000358ca161889fa9c082cb41daa8dcfb161a54 metrics: - type: map value: 62.125361608299855 - type: mrr value: 74.92525172580574 - task: type: STS dataset: name: MTEB BIOSSES type: mteb/biosses-sts config: default split: test revision: d3fb88f8f02e40887cd149695127462bbcf29b4a metrics: - type: cos_sim_pearson value: 88.64322910336641 - type: cos_sim_spearman value: 87.20138453306345 - type: euclidean_pearson value: 87.08547818178234 - type: euclidean_spearman value: 87.17066094143931 - type: manhattan_pearson value: 
87.30053110771618 - type: manhattan_spearman value: 86.86824441211934 - task: type: Classification dataset: name: MTEB Banking77Classification type: mteb/banking77 config: default split: test revision: 0fd18e25b25c072e09e0d92ab615fda904d66300 metrics: - type: accuracy value: 86.3961038961039 - type: f1 value: 86.3669961645295 - task: type: Clustering dataset: name: MTEB BiorxivClusteringP2P type: mteb/biorxiv-clustering-p2p config: default split: test revision: 65b79d1d13f80053f67aca9498d9402c2d9f1f40 metrics: - type: v_measure value: 39.40291404289857 - task: type: Clustering dataset: name: MTEB BiorxivClusteringS2S type: mteb/biorxiv-clustering-s2s config: default split: test revision: 258694dd0231531bc1fd9de6ceb52a0853c6d908 metrics: - type: v_measure value: 35.102356817746816 - task: type: Retrieval dataset: name: MTEB CQADupstackAndroidRetrieval type: mteb/cqadupstack-android config: default split: test revision: f46a197baaae43b4f621051089b82a364682dfeb metrics: - type: map_at_1 value: 31.013 - type: map_at_10 value: 42.681999999999995 - type: map_at_100 value: 44.24 - type: map_at_1000 value: 44.372 - type: map_at_3 value: 39.181 - type: map_at_5 value: 41.071999999999996 - type: mrr_at_1 value: 38.196999999999996 - type: mrr_at_10 value: 48.604 - type: mrr_at_100 value: 49.315 - type: mrr_at_1000 value: 49.363 - type: mrr_at_3 value: 45.756 - type: mrr_at_5 value: 47.43 - type: ndcg_at_1 value: 38.196999999999996 - type: ndcg_at_10 value: 49.344 - type: ndcg_at_100 value: 54.662 - type: ndcg_at_1000 value: 56.665 - type: ndcg_at_3 value: 44.146 - type: ndcg_at_5 value: 46.514 - type: precision_at_1 value: 38.196999999999996 - type: precision_at_10 value: 9.571 - type: precision_at_100 value: 1.542 - type: precision_at_1000 value: 0.202 - type: precision_at_3 value: 21.364 - type: precision_at_5 value: 15.336 - type: recall_at_1 value: 31.013 - type: recall_at_10 value: 61.934999999999995 - type: recall_at_100 value: 83.923 - type: recall_at_1000 value: 96.601 - type: recall_at_3 value: 46.86 - type: recall_at_5 value: 53.620000000000005 - task: type: Retrieval dataset: name: MTEB CQADupstackEnglishRetrieval type: mteb/cqadupstack-english config: default split: test revision: ad9991cb51e31e31e430383c75ffb2885547b5f0 metrics: - type: map_at_1 value: 29.84 - type: map_at_10 value: 39.335 - type: map_at_100 value: 40.647 - type: map_at_1000 value: 40.778 - type: map_at_3 value: 36.556 - type: map_at_5 value: 38.048 - type: mrr_at_1 value: 36.815 - type: mrr_at_10 value: 45.175 - type: mrr_at_100 value: 45.907 - type: mrr_at_1000 value: 45.946999999999996 - type: mrr_at_3 value: 42.909000000000006 - type: mrr_at_5 value: 44.227 - type: ndcg_at_1 value: 36.815 - type: ndcg_at_10 value: 44.783 - type: ndcg_at_100 value: 49.551 - type: ndcg_at_1000 value: 51.612 - type: ndcg_at_3 value: 40.697 - type: ndcg_at_5 value: 42.558 - type: precision_at_1 value: 36.815 - type: precision_at_10 value: 8.363 - type: precision_at_100 value: 1.385 - type: precision_at_1000 value: 0.186 - type: precision_at_3 value: 19.342000000000002 - type: precision_at_5 value: 13.706999999999999 - type: recall_at_1 value: 29.84 - type: recall_at_10 value: 54.164 - type: recall_at_100 value: 74.36 - type: recall_at_1000 value: 87.484 - type: recall_at_3 value: 42.306 - type: recall_at_5 value: 47.371 - task: type: Retrieval dataset: name: MTEB CQADupstackGamingRetrieval type: mteb/cqadupstack-gaming config: default split: test revision: 4885aa143210c98657558c04aaf3dc47cfb54340 metrics: - type: map_at_1 value: 39.231 - 
type: map_at_10 value: 51.44800000000001 - type: map_at_100 value: 52.574 - type: map_at_1000 value: 52.629999999999995 - type: map_at_3 value: 48.077 - type: map_at_5 value: 50.019000000000005 - type: mrr_at_1 value: 44.89 - type: mrr_at_10 value: 54.803000000000004 - type: mrr_at_100 value: 55.556000000000004 - type: mrr_at_1000 value: 55.584 - type: mrr_at_3 value: 52.32 - type: mrr_at_5 value: 53.846000000000004 - type: ndcg_at_1 value: 44.89 - type: ndcg_at_10 value: 57.228 - type: ndcg_at_100 value: 61.57 - type: ndcg_at_1000 value: 62.613 - type: ndcg_at_3 value: 51.727000000000004 - type: ndcg_at_5 value: 54.496 - type: precision_at_1 value: 44.89 - type: precision_at_10 value: 9.266 - type: precision_at_100 value: 1.2309999999999999 - type: precision_at_1000 value: 0.136 - type: precision_at_3 value: 23.051 - type: precision_at_5 value: 15.987000000000002 - type: recall_at_1 value: 39.231 - type: recall_at_10 value: 70.82000000000001 - type: recall_at_100 value: 89.446 - type: recall_at_1000 value: 96.665 - type: recall_at_3 value: 56.40500000000001 - type: recall_at_5 value: 62.993 - task: type: Retrieval dataset: name: MTEB CQADupstackGisRetrieval type: mteb/cqadupstack-gis config: default split: test revision: 5003b3064772da1887988e05400cf3806fe491f2 metrics: - type: map_at_1 value: 25.296000000000003 - type: map_at_10 value: 34.021 - type: map_at_100 value: 35.158 - type: map_at_1000 value: 35.233 - type: map_at_3 value: 31.424999999999997 - type: map_at_5 value: 33.046 - type: mrr_at_1 value: 27.232 - type: mrr_at_10 value: 36.103 - type: mrr_at_100 value: 37.076 - type: mrr_at_1000 value: 37.135 - type: mrr_at_3 value: 33.635 - type: mrr_at_5 value: 35.211 - type: ndcg_at_1 value: 27.232 - type: ndcg_at_10 value: 38.878 - type: ndcg_at_100 value: 44.284 - type: ndcg_at_1000 value: 46.268 - type: ndcg_at_3 value: 33.94 - type: ndcg_at_5 value: 36.687 - type: precision_at_1 value: 27.232 - type: precision_at_10 value: 5.921 - type: precision_at_100 value: 0.907 - type: precision_at_1000 value: 0.11199999999999999 - type: precision_at_3 value: 14.426 - type: precision_at_5 value: 10.215 - type: recall_at_1 value: 25.296000000000003 - type: recall_at_10 value: 51.708 - type: recall_at_100 value: 76.36699999999999 - type: recall_at_1000 value: 91.306 - type: recall_at_3 value: 38.651 - type: recall_at_5 value: 45.201 - task: type: Retrieval dataset: name: MTEB CQADupstackMathematicaRetrieval type: mteb/cqadupstack-mathematica config: default split: test revision: 90fceea13679c63fe563ded68f3b6f06e50061de metrics: - type: map_at_1 value: 16.24 - type: map_at_10 value: 24.696 - type: map_at_100 value: 25.945 - type: map_at_1000 value: 26.069 - type: map_at_3 value: 22.542 - type: map_at_5 value: 23.526 - type: mrr_at_1 value: 20.149 - type: mrr_at_10 value: 29.584 - type: mrr_at_100 value: 30.548 - type: mrr_at_1000 value: 30.618000000000002 - type: mrr_at_3 value: 27.301 - type: mrr_at_5 value: 28.563 - type: ndcg_at_1 value: 20.149 - type: ndcg_at_10 value: 30.029 - type: ndcg_at_100 value: 35.812 - type: ndcg_at_1000 value: 38.755 - type: ndcg_at_3 value: 26.008 - type: ndcg_at_5 value: 27.517000000000003 - type: precision_at_1 value: 20.149 - type: precision_at_10 value: 5.647 - type: precision_at_100 value: 0.968 - type: precision_at_1000 value: 0.136 - type: precision_at_3 value: 12.934999999999999 - type: precision_at_5 value: 8.955 - type: recall_at_1 value: 16.24 - type: recall_at_10 value: 41.464 - type: recall_at_100 value: 66.781 - type: recall_at_1000 value: 
87.85300000000001 - type: recall_at_3 value: 29.822 - type: recall_at_5 value: 34.096 - task: type: Retrieval dataset: name: MTEB CQADupstackPhysicsRetrieval type: mteb/cqadupstack-physics config: default split: test revision: 79531abbd1fb92d06c6d6315a0cbbbf5bb247ea4 metrics: - type: map_at_1 value: 29.044999999999998 - type: map_at_10 value: 39.568999999999996 - type: map_at_100 value: 40.831 - type: map_at_1000 value: 40.948 - type: map_at_3 value: 36.495 - type: map_at_5 value: 38.21 - type: mrr_at_1 value: 35.611 - type: mrr_at_10 value: 45.175 - type: mrr_at_100 value: 45.974 - type: mrr_at_1000 value: 46.025 - type: mrr_at_3 value: 42.765 - type: mrr_at_5 value: 44.151 - type: ndcg_at_1 value: 35.611 - type: ndcg_at_10 value: 45.556999999999995 - type: ndcg_at_100 value: 50.86000000000001 - type: ndcg_at_1000 value: 52.983000000000004 - type: ndcg_at_3 value: 40.881 - type: ndcg_at_5 value: 43.035000000000004 - type: precision_at_1 value: 35.611 - type: precision_at_10 value: 8.306 - type: precision_at_100 value: 1.276 - type: precision_at_1000 value: 0.165 - type: precision_at_3 value: 19.57 - type: precision_at_5 value: 13.725000000000001 - type: recall_at_1 value: 29.044999999999998 - type: recall_at_10 value: 57.513999999999996 - type: recall_at_100 value: 80.152 - type: recall_at_1000 value: 93.982 - type: recall_at_3 value: 44.121 - type: recall_at_5 value: 50.007000000000005 - task: type: Retrieval dataset: name: MTEB CQADupstackProgrammersRetrieval type: mteb/cqadupstack-programmers config: default split: test revision: 6184bc1440d2dbc7612be22b50686b8826d22b32 metrics: - type: map_at_1 value: 22.349 - type: map_at_10 value: 33.434000000000005 - type: map_at_100 value: 34.8 - type: map_at_1000 value: 34.919 - type: map_at_3 value: 30.348000000000003 - type: map_at_5 value: 31.917 - type: mrr_at_1 value: 28.195999999999998 - type: mrr_at_10 value: 38.557 - type: mrr_at_100 value: 39.550999999999995 - type: mrr_at_1000 value: 39.607 - type: mrr_at_3 value: 36.035000000000004 - type: mrr_at_5 value: 37.364999999999995 - type: ndcg_at_1 value: 28.195999999999998 - type: ndcg_at_10 value: 39.656000000000006 - type: ndcg_at_100 value: 45.507999999999996 - type: ndcg_at_1000 value: 47.848 - type: ndcg_at_3 value: 34.609 - type: ndcg_at_5 value: 36.65 - type: precision_at_1 value: 28.195999999999998 - type: precision_at_10 value: 7.534000000000001 - type: precision_at_100 value: 1.217 - type: precision_at_1000 value: 0.158 - type: precision_at_3 value: 17.085 - type: precision_at_5 value: 12.169 - type: recall_at_1 value: 22.349 - type: recall_at_10 value: 53.127 - type: recall_at_100 value: 77.884 - type: recall_at_1000 value: 93.705 - type: recall_at_3 value: 38.611000000000004 - type: recall_at_5 value: 44.182 - task: type: Retrieval dataset: name: MTEB CQADupstackRetrieval type: mteb/cqadupstack config: default split: test revision: 4ffe81d471b1924886b33c7567bfb200e9eec5c4 metrics: - type: map_at_1 value: 25.215749999999996 - type: map_at_10 value: 34.332750000000004 - type: map_at_100 value: 35.58683333333333 - type: map_at_1000 value: 35.70458333333333 - type: map_at_3 value: 31.55441666666667 - type: map_at_5 value: 33.100833333333334 - type: mrr_at_1 value: 29.697250000000004 - type: mrr_at_10 value: 38.372249999999994 - type: mrr_at_100 value: 39.26708333333334 - type: mrr_at_1000 value: 39.3265 - type: mrr_at_3 value: 35.946083333333334 - type: mrr_at_5 value: 37.336999999999996 - type: ndcg_at_1 value: 29.697250000000004 - type: ndcg_at_10 value: 39.64575 - type: 
ndcg_at_100 value: 44.996833333333335 - type: ndcg_at_1000 value: 47.314499999999995 - type: ndcg_at_3 value: 34.93383333333334 - type: ndcg_at_5 value: 37.15291666666667 - type: precision_at_1 value: 29.697250000000004 - type: precision_at_10 value: 6.98825 - type: precision_at_100 value: 1.138 - type: precision_at_1000 value: 0.15283333333333332 - type: precision_at_3 value: 16.115583333333333 - type: precision_at_5 value: 11.460916666666666 - type: recall_at_1 value: 25.215749999999996 - type: recall_at_10 value: 51.261250000000004 - type: recall_at_100 value: 74.67258333333334 - type: recall_at_1000 value: 90.72033333333334 - type: recall_at_3 value: 38.1795 - type: recall_at_5 value: 43.90658333333334 - task: type: Retrieval dataset: name: MTEB CQADupstackStatsRetrieval type: mteb/cqadupstack-stats config: default split: test revision: 65ac3a16b8e91f9cee4c9828cc7c335575432a2a metrics: - type: map_at_1 value: 24.352 - type: map_at_10 value: 30.576999999999998 - type: map_at_100 value: 31.545 - type: map_at_1000 value: 31.642 - type: map_at_3 value: 28.605000000000004 - type: map_at_5 value: 29.828 - type: mrr_at_1 value: 26.994 - type: mrr_at_10 value: 33.151 - type: mrr_at_100 value: 33.973 - type: mrr_at_1000 value: 34.044999999999995 - type: mrr_at_3 value: 31.135 - type: mrr_at_5 value: 32.262 - type: ndcg_at_1 value: 26.994 - type: ndcg_at_10 value: 34.307 - type: ndcg_at_100 value: 39.079 - type: ndcg_at_1000 value: 41.548 - type: ndcg_at_3 value: 30.581000000000003 - type: ndcg_at_5 value: 32.541 - type: precision_at_1 value: 26.994 - type: precision_at_10 value: 5.244999999999999 - type: precision_at_100 value: 0.831 - type: precision_at_1000 value: 0.11100000000000002 - type: precision_at_3 value: 12.781 - type: precision_at_5 value: 9.017999999999999 - type: recall_at_1 value: 24.352 - type: recall_at_10 value: 43.126999999999995 - type: recall_at_100 value: 64.845 - type: recall_at_1000 value: 83.244 - type: recall_at_3 value: 33.308 - type: recall_at_5 value: 37.984 - task: type: Retrieval dataset: name: MTEB CQADupstackTexRetrieval type: mteb/cqadupstack-tex config: default split: test revision: 46989137a86843e03a6195de44b09deda022eec7 metrics: - type: map_at_1 value: 16.592000000000002 - type: map_at_10 value: 23.29 - type: map_at_100 value: 24.423000000000002 - type: map_at_1000 value: 24.554000000000002 - type: map_at_3 value: 20.958 - type: map_at_5 value: 22.267 - type: mrr_at_1 value: 20.061999999999998 - type: mrr_at_10 value: 26.973999999999997 - type: mrr_at_100 value: 27.944999999999997 - type: mrr_at_1000 value: 28.023999999999997 - type: mrr_at_3 value: 24.839 - type: mrr_at_5 value: 26.033 - type: ndcg_at_1 value: 20.061999999999998 - type: ndcg_at_10 value: 27.682000000000002 - type: ndcg_at_100 value: 33.196 - type: ndcg_at_1000 value: 36.246 - type: ndcg_at_3 value: 23.559 - type: ndcg_at_5 value: 25.507 - type: precision_at_1 value: 20.061999999999998 - type: precision_at_10 value: 5.086 - type: precision_at_100 value: 0.9249999999999999 - type: precision_at_1000 value: 0.136 - type: precision_at_3 value: 11.046 - type: precision_at_5 value: 8.149000000000001 - type: recall_at_1 value: 16.592000000000002 - type: recall_at_10 value: 37.181999999999995 - type: recall_at_100 value: 62.224999999999994 - type: recall_at_1000 value: 84.072 - type: recall_at_3 value: 25.776 - type: recall_at_5 value: 30.680000000000003 - task: type: Retrieval dataset: name: MTEB CQADupstackUnixRetrieval type: mteb/cqadupstack-unix config: default split: test revision: 
6c6430d3a6d36f8d2a829195bc5dc94d7e063e53 metrics: - type: map_at_1 value: 26.035999999999998 - type: map_at_10 value: 34.447 - type: map_at_100 value: 35.697 - type: map_at_1000 value: 35.802 - type: map_at_3 value: 31.64 - type: map_at_5 value: 33.056999999999995 - type: mrr_at_1 value: 29.851 - type: mrr_at_10 value: 38.143 - type: mrr_at_100 value: 39.113 - type: mrr_at_1000 value: 39.175 - type: mrr_at_3 value: 35.665 - type: mrr_at_5 value: 36.901 - type: ndcg_at_1 value: 29.851 - type: ndcg_at_10 value: 39.554 - type: ndcg_at_100 value: 45.091 - type: ndcg_at_1000 value: 47.504000000000005 - type: ndcg_at_3 value: 34.414 - type: ndcg_at_5 value: 36.508 - type: precision_at_1 value: 29.851 - type: precision_at_10 value: 6.614000000000001 - type: precision_at_100 value: 1.051 - type: precision_at_1000 value: 0.13699999999999998 - type: precision_at_3 value: 15.329999999999998 - type: precision_at_5 value: 10.671999999999999 - type: recall_at_1 value: 26.035999999999998 - type: recall_at_10 value: 51.396 - type: recall_at_100 value: 75.09 - type: recall_at_1000 value: 91.904 - type: recall_at_3 value: 37.378 - type: recall_at_5 value: 42.69 - task: type: Retrieval dataset: name: MTEB CQADupstackWebmastersRetrieval type: mteb/cqadupstack-webmasters config: default split: test revision: 160c094312a0e1facb97e55eeddb698c0abe3571 metrics: - type: map_at_1 value: 23.211000000000002 - type: map_at_10 value: 32.231 - type: map_at_100 value: 33.772999999999996 - type: map_at_1000 value: 33.982 - type: map_at_3 value: 29.128 - type: map_at_5 value: 31.002999999999997 - type: mrr_at_1 value: 27.668 - type: mrr_at_10 value: 36.388 - type: mrr_at_100 value: 37.384 - type: mrr_at_1000 value: 37.44 - type: mrr_at_3 value: 33.762 - type: mrr_at_5 value: 35.234 - type: ndcg_at_1 value: 27.668 - type: ndcg_at_10 value: 38.043 - type: ndcg_at_100 value: 44.21 - type: ndcg_at_1000 value: 46.748 - type: ndcg_at_3 value: 32.981 - type: ndcg_at_5 value: 35.58 - type: precision_at_1 value: 27.668 - type: precision_at_10 value: 7.352 - type: precision_at_100 value: 1.5 - type: precision_at_1000 value: 0.23700000000000002 - type: precision_at_3 value: 15.613 - type: precision_at_5 value: 11.501999999999999 - type: recall_at_1 value: 23.211000000000002 - type: recall_at_10 value: 49.851 - type: recall_at_100 value: 77.596 - type: recall_at_1000 value: 93.683 - type: recall_at_3 value: 35.403 - type: recall_at_5 value: 42.485 - task: type: Retrieval dataset: name: MTEB CQADupstackWordpressRetrieval type: mteb/cqadupstack-wordpress config: default split: test revision: 4ffe81d471b1924886b33c7567bfb200e9eec5c4 metrics: - type: map_at_1 value: 19.384 - type: map_at_10 value: 26.262999999999998 - type: map_at_100 value: 27.409 - type: map_at_1000 value: 27.526 - type: map_at_3 value: 23.698 - type: map_at_5 value: 25.217 - type: mrr_at_1 value: 20.702 - type: mrr_at_10 value: 27.810000000000002 - type: mrr_at_100 value: 28.863 - type: mrr_at_1000 value: 28.955 - type: mrr_at_3 value: 25.230999999999998 - type: mrr_at_5 value: 26.821 - type: ndcg_at_1 value: 20.702 - type: ndcg_at_10 value: 30.688 - type: ndcg_at_100 value: 36.138999999999996 - type: ndcg_at_1000 value: 38.984 - type: ndcg_at_3 value: 25.663000000000004 - type: ndcg_at_5 value: 28.242 - type: precision_at_1 value: 20.702 - type: precision_at_10 value: 4.954 - type: precision_at_100 value: 0.823 - type: precision_at_1000 value: 0.11800000000000001 - type: precision_at_3 value: 10.844 - type: precision_at_5 value: 8.096 - type: recall_at_1 value: 19.384 
- type: recall_at_10 value: 42.847 - type: recall_at_100 value: 67.402 - type: recall_at_1000 value: 88.145 - type: recall_at_3 value: 29.513 - type: recall_at_5 value: 35.57 - task: type: Retrieval dataset: name: MTEB ClimateFEVER type: mteb/climate-fever config: default split: test revision: 47f2ac6acb640fc46020b02a5b59fdda04d39380 metrics: - type: map_at_1 value: 14.915000000000001 - type: map_at_10 value: 25.846999999999998 - type: map_at_100 value: 27.741 - type: map_at_1000 value: 27.921000000000003 - type: map_at_3 value: 21.718 - type: map_at_5 value: 23.948 - type: mrr_at_1 value: 33.941 - type: mrr_at_10 value: 46.897 - type: mrr_at_100 value: 47.63 - type: mrr_at_1000 value: 47.658 - type: mrr_at_3 value: 43.919999999999995 - type: mrr_at_5 value: 45.783 - type: ndcg_at_1 value: 33.941 - type: ndcg_at_10 value: 35.202 - type: ndcg_at_100 value: 42.132 - type: ndcg_at_1000 value: 45.190999999999995 - type: ndcg_at_3 value: 29.68 - type: ndcg_at_5 value: 31.631999999999998 - type: precision_at_1 value: 33.941 - type: precision_at_10 value: 10.906 - type: precision_at_100 value: 1.8339999999999999 - type: precision_at_1000 value: 0.241 - type: precision_at_3 value: 22.606 - type: precision_at_5 value: 17.081 - type: recall_at_1 value: 14.915000000000001 - type: recall_at_10 value: 40.737 - type: recall_at_100 value: 64.42 - type: recall_at_1000 value: 81.435 - type: recall_at_3 value: 26.767000000000003 - type: recall_at_5 value: 32.895 - task: type: Retrieval dataset: name: MTEB DBPedia type: mteb/dbpedia config: default split: test revision: c0f706b76e590d620bd6618b3ca8efdd34e2d659 metrics: - type: map_at_1 value: 8.665000000000001 - type: map_at_10 value: 19.087 - type: map_at_100 value: 26.555 - type: map_at_1000 value: 28.105999999999998 - type: map_at_3 value: 13.858999999999998 - type: map_at_5 value: 16.083 - type: mrr_at_1 value: 68.5 - type: mrr_at_10 value: 76.725 - type: mrr_at_100 value: 76.974 - type: mrr_at_1000 value: 76.981 - type: mrr_at_3 value: 75.583 - type: mrr_at_5 value: 76.208 - type: ndcg_at_1 value: 55.875 - type: ndcg_at_10 value: 41.018 - type: ndcg_at_100 value: 44.982 - type: ndcg_at_1000 value: 52.43 - type: ndcg_at_3 value: 46.534 - type: ndcg_at_5 value: 43.083 - type: precision_at_1 value: 68.5 - type: precision_at_10 value: 32.35 - type: precision_at_100 value: 10.078 - type: precision_at_1000 value: 1.957 - type: precision_at_3 value: 50.083 - type: precision_at_5 value: 41.3 - type: recall_at_1 value: 8.665000000000001 - type: recall_at_10 value: 24.596999999999998 - type: recall_at_100 value: 50.612 - type: recall_at_1000 value: 74.24 - type: recall_at_3 value: 15.337 - type: recall_at_5 value: 18.796 - task: type: Classification dataset: name: MTEB EmotionClassification type: mteb/emotion config: default split: test revision: 4f58c6b202a23cf9a4da393831edf4f9183cad37 metrics: - type: accuracy value: 55.06500000000001 - type: f1 value: 49.827367590822035 - task: type: Retrieval dataset: name: MTEB FEVER type: mteb/fever config: default split: test revision: bea83ef9e8fb933d90a2f1d5515737465d613e12 metrics: - type: map_at_1 value: 76.059 - type: map_at_10 value: 83.625 - type: map_at_100 value: 83.845 - type: map_at_1000 value: 83.858 - type: map_at_3 value: 82.67099999999999 - type: map_at_5 value: 83.223 - type: mrr_at_1 value: 82.013 - type: mrr_at_10 value: 88.44800000000001 - type: mrr_at_100 value: 88.535 - type: mrr_at_1000 value: 88.537 - type: mrr_at_3 value: 87.854 - type: mrr_at_5 value: 88.221 - type: ndcg_at_1 value: 82.013 - type: 
ndcg_at_10 value: 87.128 - type: ndcg_at_100 value: 87.922 - type: ndcg_at_1000 value: 88.166 - type: ndcg_at_3 value: 85.648 - type: ndcg_at_5 value: 86.366 - type: precision_at_1 value: 82.013 - type: precision_at_10 value: 10.32 - type: precision_at_100 value: 1.093 - type: precision_at_1000 value: 0.11299999999999999 - type: precision_at_3 value: 32.408 - type: precision_at_5 value: 19.973 - type: recall_at_1 value: 76.059 - type: recall_at_10 value: 93.229 - type: recall_at_100 value: 96.387 - type: recall_at_1000 value: 97.916 - type: recall_at_3 value: 89.025 - type: recall_at_5 value: 90.96300000000001 - task: type: Retrieval dataset: name: MTEB FiQA2018 type: mteb/fiqa config: default split: test revision: 27a168819829fe9bcd655c2df245fb19452e8e06 metrics: - type: map_at_1 value: 20.479 - type: map_at_10 value: 33.109 - type: map_at_100 value: 34.803 - type: map_at_1000 value: 35.003 - type: map_at_3 value: 28.967 - type: map_at_5 value: 31.385 - type: mrr_at_1 value: 40.278000000000006 - type: mrr_at_10 value: 48.929 - type: mrr_at_100 value: 49.655 - type: mrr_at_1000 value: 49.691 - type: mrr_at_3 value: 46.605000000000004 - type: mrr_at_5 value: 48.056 - type: ndcg_at_1 value: 40.278000000000006 - type: ndcg_at_10 value: 40.649 - type: ndcg_at_100 value: 47.027 - type: ndcg_at_1000 value: 50.249 - type: ndcg_at_3 value: 37.364000000000004 - type: ndcg_at_5 value: 38.494 - type: precision_at_1 value: 40.278000000000006 - type: precision_at_10 value: 11.327 - type: precision_at_100 value: 1.802 - type: precision_at_1000 value: 0.23700000000000002 - type: precision_at_3 value: 25.102999999999998 - type: precision_at_5 value: 18.457 - type: recall_at_1 value: 20.479 - type: recall_at_10 value: 46.594 - type: recall_at_100 value: 71.101 - type: recall_at_1000 value: 90.31099999999999 - type: recall_at_3 value: 33.378 - type: recall_at_5 value: 39.587 - task: type: Retrieval dataset: name: MTEB HotpotQA type: mteb/hotpotqa config: default split: test revision: ab518f4d6fcca38d87c25209f94beba119d02014 metrics: - type: map_at_1 value: 36.59 - type: map_at_10 value: 58.178 - type: map_at_100 value: 59.095 - type: map_at_1000 value: 59.16400000000001 - type: map_at_3 value: 54.907 - type: map_at_5 value: 56.89999999999999 - type: mrr_at_1 value: 73.18 - type: mrr_at_10 value: 79.935 - type: mrr_at_100 value: 80.16799999999999 - type: mrr_at_1000 value: 80.17800000000001 - type: mrr_at_3 value: 78.776 - type: mrr_at_5 value: 79.522 - type: ndcg_at_1 value: 73.18 - type: ndcg_at_10 value: 66.538 - type: ndcg_at_100 value: 69.78 - type: ndcg_at_1000 value: 71.102 - type: ndcg_at_3 value: 61.739 - type: ndcg_at_5 value: 64.35600000000001 - type: precision_at_1 value: 73.18 - type: precision_at_10 value: 14.035 - type: precision_at_100 value: 1.657 - type: precision_at_1000 value: 0.183 - type: precision_at_3 value: 39.684999999999995 - type: precision_at_5 value: 25.885 - type: recall_at_1 value: 36.59 - type: recall_at_10 value: 70.176 - type: recall_at_100 value: 82.836 - type: recall_at_1000 value: 91.526 - type: recall_at_3 value: 59.526999999999994 - type: recall_at_5 value: 64.713 - task: type: Classification dataset: name: MTEB ImdbClassification type: mteb/imdb config: default split: test revision: 3d86128a09e091d6018b6d26cad27f2739fc2db7 metrics: - type: accuracy value: 90.1472 - type: ap value: 85.73994227076815 - type: f1 value: 90.1271700788608 - task: type: Retrieval dataset: name: MTEB MSMARCO type: mteb/msmarco config: default split: dev revision: 
c5a29a104738b98a9e76336939199e264163d4a0 metrics: - type: map_at_1 value: 21.689 - type: map_at_10 value: 33.518 - type: map_at_100 value: 34.715 - type: map_at_1000 value: 34.766000000000005 - type: map_at_3 value: 29.781000000000002 - type: map_at_5 value: 31.838 - type: mrr_at_1 value: 22.249 - type: mrr_at_10 value: 34.085 - type: mrr_at_100 value: 35.223 - type: mrr_at_1000 value: 35.266999999999996 - type: mrr_at_3 value: 30.398999999999997 - type: mrr_at_5 value: 32.437 - type: ndcg_at_1 value: 22.249 - type: ndcg_at_10 value: 40.227000000000004 - type: ndcg_at_100 value: 45.961999999999996 - type: ndcg_at_1000 value: 47.248000000000005 - type: ndcg_at_3 value: 32.566 - type: ndcg_at_5 value: 36.229 - type: precision_at_1 value: 22.249 - type: precision_at_10 value: 6.358 - type: precision_at_100 value: 0.923 - type: precision_at_1000 value: 0.10300000000000001 - type: precision_at_3 value: 13.83 - type: precision_at_5 value: 10.145999999999999 - type: recall_at_1 value: 21.689 - type: recall_at_10 value: 60.92999999999999 - type: recall_at_100 value: 87.40599999999999 - type: recall_at_1000 value: 97.283 - type: recall_at_3 value: 40.01 - type: recall_at_5 value: 48.776 - task: type: Classification dataset: name: MTEB MTOPDomainClassification (en) type: mteb/mtop_domain config: en split: test revision: d80d48c1eb48d3562165c59d59d0034df9fff0bf metrics: - type: accuracy value: 95.28727770177838 - type: f1 value: 95.02577308660041 - task: type: Classification dataset: name: MTEB MTOPIntentClassification (en) type: mteb/mtop_intent config: en split: test revision: ae001d0e6b1228650b7bd1c2c65fb50ad11a8aba metrics: - type: accuracy value: 79.5736434108527 - type: f1 value: 61.2451202054398 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (en) type: mteb/amazon_massive_intent config: en split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 76.01210490921318 - type: f1 value: 73.70188053982473 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (en) type: mteb/amazon_massive_scenario config: en split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 79.33422999327504 - type: f1 value: 79.48369022509658 - task: type: Clustering dataset: name: MTEB MedrxivClusteringP2P type: mteb/medrxiv-clustering-p2p config: default split: test revision: e7a26af6f3ae46b30dde8737f02c07b1505bcc73 metrics: - type: v_measure value: 34.70891567267726 - task: type: Clustering dataset: name: MTEB MedrxivClusteringS2S type: mteb/medrxiv-clustering-s2s config: default split: test revision: 35191c8c0dca72d8ff3efcd72aa802307d469663 metrics: - type: v_measure value: 32.15203494451706 - task: type: Reranking dataset: name: MTEB MindSmallReranking type: mteb/mind_small config: default split: test revision: 3bdac13927fdc888b903db93b2ffdbd90b295a69 metrics: - type: map value: 31.919517862194173 - type: mrr value: 33.15466289140483 - task: type: Retrieval dataset: name: MTEB NFCorpus type: mteb/nfcorpus config: default split: test revision: ec0fa4fe99da2ff19ca1214b7966684033a58814 metrics: - type: map_at_1 value: 5.992 - type: map_at_10 value: 13.197000000000001 - type: map_at_100 value: 16.907 - type: map_at_1000 value: 18.44 - type: map_at_3 value: 9.631 - type: map_at_5 value: 11.243 - type: mrr_at_1 value: 44.272 - type: mrr_at_10 value: 53.321 - type: mrr_at_100 value: 53.903 - type: mrr_at_1000 value: 53.952999999999996 - type: mrr_at_3 value: 51.393 - type: mrr_at_5 value: 
52.708999999999996 - type: ndcg_at_1 value: 42.415000000000006 - type: ndcg_at_10 value: 34.921 - type: ndcg_at_100 value: 32.384 - type: ndcg_at_1000 value: 41.260000000000005 - type: ndcg_at_3 value: 40.186 - type: ndcg_at_5 value: 37.89 - type: precision_at_1 value: 44.272 - type: precision_at_10 value: 26.006 - type: precision_at_100 value: 8.44 - type: precision_at_1000 value: 2.136 - type: precision_at_3 value: 37.977 - type: precision_at_5 value: 32.755 - type: recall_at_1 value: 5.992 - type: recall_at_10 value: 17.01 - type: recall_at_100 value: 33.080999999999996 - type: recall_at_1000 value: 65.054 - type: recall_at_3 value: 10.528 - type: recall_at_5 value: 13.233 - task: type: Retrieval dataset: name: MTEB NQ type: mteb/nq config: default split: test revision: b774495ed302d8c44a3a7ea25c90dbce03968f31 metrics: - type: map_at_1 value: 28.871999999999996 - type: map_at_10 value: 43.286 - type: map_at_100 value: 44.432 - type: map_at_1000 value: 44.464999999999996 - type: map_at_3 value: 38.856 - type: map_at_5 value: 41.514 - type: mrr_at_1 value: 32.619 - type: mrr_at_10 value: 45.75 - type: mrr_at_100 value: 46.622 - type: mrr_at_1000 value: 46.646 - type: mrr_at_3 value: 41.985 - type: mrr_at_5 value: 44.277 - type: ndcg_at_1 value: 32.59 - type: ndcg_at_10 value: 50.895999999999994 - type: ndcg_at_100 value: 55.711999999999996 - type: ndcg_at_1000 value: 56.48800000000001 - type: ndcg_at_3 value: 42.504999999999995 - type: ndcg_at_5 value: 46.969 - type: precision_at_1 value: 32.59 - type: precision_at_10 value: 8.543000000000001 - type: precision_at_100 value: 1.123 - type: precision_at_1000 value: 0.12 - type: precision_at_3 value: 19.448 - type: precision_at_5 value: 14.218 - type: recall_at_1 value: 28.871999999999996 - type: recall_at_10 value: 71.748 - type: recall_at_100 value: 92.55499999999999 - type: recall_at_1000 value: 98.327 - type: recall_at_3 value: 49.944 - type: recall_at_5 value: 60.291 - task: type: Retrieval dataset: name: MTEB QuoraRetrieval type: mteb/quora config: default split: test revision: e4e08e0b7dbe3c8700f0daef558ff32256715259 metrics: - type: map_at_1 value: 70.664 - type: map_at_10 value: 84.681 - type: map_at_100 value: 85.289 - type: map_at_1000 value: 85.306 - type: map_at_3 value: 81.719 - type: map_at_5 value: 83.601 - type: mrr_at_1 value: 81.35 - type: mrr_at_10 value: 87.591 - type: mrr_at_100 value: 87.691 - type: mrr_at_1000 value: 87.693 - type: mrr_at_3 value: 86.675 - type: mrr_at_5 value: 87.29299999999999 - type: ndcg_at_1 value: 81.33 - type: ndcg_at_10 value: 88.411 - type: ndcg_at_100 value: 89.579 - type: ndcg_at_1000 value: 89.687 - type: ndcg_at_3 value: 85.613 - type: ndcg_at_5 value: 87.17 - type: precision_at_1 value: 81.33 - type: precision_at_10 value: 13.422 - type: precision_at_100 value: 1.5270000000000001 - type: precision_at_1000 value: 0.157 - type: precision_at_3 value: 37.463 - type: precision_at_5 value: 24.646 - type: recall_at_1 value: 70.664 - type: recall_at_10 value: 95.54 - type: recall_at_100 value: 99.496 - type: recall_at_1000 value: 99.978 - type: recall_at_3 value: 87.481 - type: recall_at_5 value: 91.88499999999999 - task: type: Clustering dataset: name: MTEB RedditClustering type: mteb/reddit-clustering config: default split: test revision: 24640382cdbf8abc73003fb0fa6d111a705499eb metrics: - type: v_measure value: 55.40341814991112 - task: type: Clustering dataset: name: MTEB RedditClusteringP2P type: mteb/reddit-clustering-p2p config: default split: test revision: 
385e3cb46b4cfa89021f56c4380204149d0efe33 metrics: - type: v_measure value: 61.231318481346655 - task: type: Retrieval dataset: name: MTEB SCIDOCS type: mteb/scidocs config: default split: test revision: f8c2fcf00f625baaa80f62ec5bd9e1fff3b8ae88 metrics: - type: map_at_1 value: 4.833 - type: map_at_10 value: 13.149 - type: map_at_100 value: 15.578 - type: map_at_1000 value: 15.963 - type: map_at_3 value: 9.269 - type: map_at_5 value: 11.182 - type: mrr_at_1 value: 23.9 - type: mrr_at_10 value: 35.978 - type: mrr_at_100 value: 37.076 - type: mrr_at_1000 value: 37.126 - type: mrr_at_3 value: 32.333 - type: mrr_at_5 value: 34.413 - type: ndcg_at_1 value: 23.9 - type: ndcg_at_10 value: 21.823 - type: ndcg_at_100 value: 30.833 - type: ndcg_at_1000 value: 36.991 - type: ndcg_at_3 value: 20.465 - type: ndcg_at_5 value: 17.965999999999998 - type: precision_at_1 value: 23.9 - type: precision_at_10 value: 11.49 - type: precision_at_100 value: 2.444 - type: precision_at_1000 value: 0.392 - type: precision_at_3 value: 19.3 - type: precision_at_5 value: 15.959999999999999 - type: recall_at_1 value: 4.833 - type: recall_at_10 value: 23.294999999999998 - type: recall_at_100 value: 49.63 - type: recall_at_1000 value: 79.49199999999999 - type: recall_at_3 value: 11.732 - type: recall_at_5 value: 16.167 - task: type: STS dataset: name: MTEB SICK-R type: mteb/sickr-sts config: default split: test revision: 20a6d6f312dd54037fe07a32d58e5e168867909d metrics: - type: cos_sim_pearson value: 85.62938108735759 - type: cos_sim_spearman value: 80.30777094408789 - type: euclidean_pearson value: 82.94516686659536 - type: euclidean_spearman value: 80.34489663248169 - type: manhattan_pearson value: 82.85830094736245 - type: manhattan_spearman value: 80.24902623215449 - task: type: STS dataset: name: MTEB STS12 type: mteb/sts12-sts config: default split: test revision: a0d554a64d88156834ff5ae9920b964011b16384 metrics: - type: cos_sim_pearson value: 85.23777464247604 - type: cos_sim_spearman value: 75.75714864112797 - type: euclidean_pearson value: 82.33806918604493 - type: euclidean_spearman value: 75.45282124387357 - type: manhattan_pearson value: 82.32555620660538 - type: manhattan_spearman value: 75.49228731684082 - task: type: STS dataset: name: MTEB STS13 type: mteb/sts13-sts config: default split: test revision: 7e90230a92c190f1bf69ae9002b8cea547a64cca metrics: - type: cos_sim_pearson value: 84.88151620954451 - type: cos_sim_spearman value: 86.08377598473446 - type: euclidean_pearson value: 85.36958329369413 - type: euclidean_spearman value: 86.10274219670679 - type: manhattan_pearson value: 85.25873897594711 - type: manhattan_spearman value: 85.98096461661584 - task: type: STS dataset: name: MTEB STS14 type: mteb/sts14-sts config: default split: test revision: 6031580fec1f6af667f0bd2da0a551cf4f0b2375 metrics: - type: cos_sim_pearson value: 84.29360558735978 - type: cos_sim_spearman value: 82.28284203795577 - type: euclidean_pearson value: 83.81636655536633 - type: euclidean_spearman value: 82.24340438530236 - type: manhattan_pearson value: 83.83914453428608 - type: manhattan_spearman value: 82.28391354080694 - task: type: STS dataset: name: MTEB STS15 type: mteb/sts15-sts config: default split: test revision: ae752c7c21bf194d8b67fd573edf7ae58183cbe3 metrics: - type: cos_sim_pearson value: 87.47344180426744 - type: cos_sim_spearman value: 88.90045649789438 - type: euclidean_pearson value: 88.43020815961273 - type: euclidean_spearman value: 89.0087449011776 - type: manhattan_pearson value: 88.37601826505525 - type: 
manhattan_spearman value: 88.96756360690617 - task: type: STS dataset: name: MTEB STS16 type: mteb/sts16-sts config: default split: test revision: 4d8694f8f0e0100860b497b999b3dbed754a0513 metrics: - type: cos_sim_pearson value: 83.35997025304613 - type: cos_sim_spearman value: 85.18237675717147 - type: euclidean_pearson value: 84.46478196990202 - type: euclidean_spearman value: 85.27748677712205 - type: manhattan_pearson value: 84.29342543953123 - type: manhattan_spearman value: 85.10579612516567 - task: type: STS dataset: name: MTEB STS17 (en-en) type: mteb/sts17-crosslingual-sts config: en-en split: test revision: af5e6fb845001ecf41f4c1e033ce921939a2a68d metrics: - type: cos_sim_pearson value: 88.56668329596836 - type: cos_sim_spearman value: 88.72837234129177 - type: euclidean_pearson value: 89.39395650897828 - type: euclidean_spearman value: 88.82001247906778 - type: manhattan_pearson value: 89.41735354368878 - type: manhattan_spearman value: 88.95159141850039 - task: type: STS dataset: name: MTEB STS22 (en) type: mteb/sts22-crosslingual-sts config: en split: test revision: eea2b4fe26a775864c896887d910b76a8098ad3f metrics: - type: cos_sim_pearson value: 67.466167902991 - type: cos_sim_spearman value: 68.54466147197274 - type: euclidean_pearson value: 69.35551179564695 - type: euclidean_spearman value: 68.75455717749132 - type: manhattan_pearson value: 69.42432368208264 - type: manhattan_spearman value: 68.83203709670562 - task: type: STS dataset: name: MTEB STSBenchmark type: mteb/stsbenchmark-sts config: default split: test revision: b0fddb56ed78048fa8b90373c8a3cfc37b684831 metrics: - type: cos_sim_pearson value: 85.33241300373689 - type: cos_sim_spearman value: 86.97909372129874 - type: euclidean_pearson value: 86.99526113559924 - type: euclidean_spearman value: 87.02644372623219 - type: manhattan_pearson value: 86.78744182759846 - type: manhattan_spearman value: 86.8886180198196 - task: type: Reranking dataset: name: MTEB SciDocsRR type: mteb/scidocs-reranking config: default split: test revision: d3c5e1fc0b855ab6097bf1cda04dd73947d7caab metrics: - type: map value: 86.18374413668717 - type: mrr value: 95.93213068703264 - task: type: Retrieval dataset: name: MTEB SciFact type: mteb/scifact config: default split: test revision: 0228b52cf27578f30900b9e5271d331663a030d7 metrics: - type: map_at_1 value: 58.31699999999999 - type: map_at_10 value: 67.691 - type: map_at_100 value: 68.201 - type: map_at_1000 value: 68.232 - type: map_at_3 value: 64.47800000000001 - type: map_at_5 value: 66.51 - type: mrr_at_1 value: 61.0 - type: mrr_at_10 value: 68.621 - type: mrr_at_100 value: 68.973 - type: mrr_at_1000 value: 69.002 - type: mrr_at_3 value: 66.111 - type: mrr_at_5 value: 67.578 - type: ndcg_at_1 value: 61.0 - type: ndcg_at_10 value: 72.219 - type: ndcg_at_100 value: 74.397 - type: ndcg_at_1000 value: 75.021 - type: ndcg_at_3 value: 66.747 - type: ndcg_at_5 value: 69.609 - type: precision_at_1 value: 61.0 - type: precision_at_10 value: 9.6 - type: precision_at_100 value: 1.08 - type: precision_at_1000 value: 0.11299999999999999 - type: precision_at_3 value: 25.667 - type: precision_at_5 value: 17.267 - type: recall_at_1 value: 58.31699999999999 - type: recall_at_10 value: 85.233 - type: recall_at_100 value: 95.167 - type: recall_at_1000 value: 99.667 - type: recall_at_3 value: 70.589 - type: recall_at_5 value: 77.628 - task: type: PairClassification dataset: name: MTEB SprintDuplicateQuestions type: mteb/sprintduplicatequestions-pairclassification config: default split: test revision: 
d66bd1f72af766a5cc4b0ca5e00c162f89e8cc46 metrics: - type: cos_sim_accuracy value: 99.83267326732673 - type: cos_sim_ap value: 96.13707107038228 - type: cos_sim_f1 value: 91.48830263812842 - type: cos_sim_precision value: 91.0802775024777 - type: cos_sim_recall value: 91.9 - type: dot_accuracy value: 99.83069306930693 - type: dot_ap value: 96.21199069147254 - type: dot_f1 value: 91.36295556665004 - type: dot_precision value: 91.22632103688933 - type: dot_recall value: 91.5 - type: euclidean_accuracy value: 99.83267326732673 - type: euclidean_ap value: 96.08957801367436 - type: euclidean_f1 value: 91.33004926108374 - type: euclidean_precision value: 90.0 - type: euclidean_recall value: 92.7 - type: manhattan_accuracy value: 99.83564356435643 - type: manhattan_ap value: 96.10534946461945 - type: manhattan_f1 value: 91.74950298210736 - type: manhattan_precision value: 91.20553359683794 - type: manhattan_recall value: 92.30000000000001 - type: max_accuracy value: 99.83564356435643 - type: max_ap value: 96.21199069147254 - type: max_f1 value: 91.74950298210736 - task: type: Clustering dataset: name: MTEB StackExchangeClustering type: mteb/stackexchange-clustering config: default split: test revision: 6cbc1f7b2bc0622f2e39d2c77fa502909748c259 metrics: - type: v_measure value: 62.045718843534736 - task: type: Clustering dataset: name: MTEB StackExchangeClusteringP2P type: mteb/stackexchange-clustering-p2p config: default split: test revision: 815ca46b2622cec33ccafc3735d572c266efdb44 metrics: - type: v_measure value: 36.6501777041092 - task: type: Reranking dataset: name: MTEB StackOverflowDupQuestions type: mteb/stackoverflowdupquestions-reranking config: default split: test revision: e185fbe320c72810689fc5848eb6114e1ef5ec69 metrics: - type: map value: 52.963913408053955 - type: mrr value: 53.87972423818012 - task: type: Summarization dataset: name: MTEB SummEval type: mteb/summeval config: default split: test revision: cda12ad7615edc362dbf25a00fdd61d3b1eaf93c metrics: - type: cos_sim_pearson value: 30.44195730764998 - type: cos_sim_spearman value: 30.59626288679397 - type: dot_pearson value: 30.22974492404086 - type: dot_spearman value: 29.345245972906497 - task: type: Retrieval dataset: name: MTEB TRECCOVID type: mteb/trec-covid config: default split: test revision: bb9466bac8153a0349341eb1b22e06409e78ef4e metrics: - type: map_at_1 value: 0.24 - type: map_at_10 value: 2.01 - type: map_at_100 value: 11.928999999999998 - type: map_at_1000 value: 29.034 - type: map_at_3 value: 0.679 - type: map_at_5 value: 1.064 - type: mrr_at_1 value: 92.0 - type: mrr_at_10 value: 96.0 - type: mrr_at_100 value: 96.0 - type: mrr_at_1000 value: 96.0 - type: mrr_at_3 value: 96.0 - type: mrr_at_5 value: 96.0 - type: ndcg_at_1 value: 87.0 - type: ndcg_at_10 value: 80.118 - type: ndcg_at_100 value: 60.753 - type: ndcg_at_1000 value: 54.632999999999996 - type: ndcg_at_3 value: 83.073 - type: ndcg_at_5 value: 80.733 - type: precision_at_1 value: 92.0 - type: precision_at_10 value: 84.8 - type: precision_at_100 value: 62.019999999999996 - type: precision_at_1000 value: 24.028 - type: precision_at_3 value: 87.333 - type: precision_at_5 value: 85.2 - type: recall_at_1 value: 0.24 - type: recall_at_10 value: 2.205 - type: recall_at_100 value: 15.068000000000001 - type: recall_at_1000 value: 51.796 - type: recall_at_3 value: 0.698 - type: recall_at_5 value: 1.1199999999999999 - task: type: Retrieval dataset: name: MTEB Touche2020 type: mteb/touche2020 config: default split: test revision: 
a34f9a33db75fa0cbb21bb5cfc3dae8dc8bec93f metrics: - type: map_at_1 value: 3.066 - type: map_at_10 value: 9.219 - type: map_at_100 value: 15.387 - type: map_at_1000 value: 16.957 - type: map_at_3 value: 5.146 - type: map_at_5 value: 6.6739999999999995 - type: mrr_at_1 value: 40.816 - type: mrr_at_10 value: 50.844 - type: mrr_at_100 value: 51.664 - type: mrr_at_1000 value: 51.664 - type: mrr_at_3 value: 46.259 - type: mrr_at_5 value: 49.116 - type: ndcg_at_1 value: 37.755 - type: ndcg_at_10 value: 23.477 - type: ndcg_at_100 value: 36.268 - type: ndcg_at_1000 value: 47.946 - type: ndcg_at_3 value: 25.832 - type: ndcg_at_5 value: 24.235 - type: precision_at_1 value: 40.816 - type: precision_at_10 value: 20.204 - type: precision_at_100 value: 7.611999999999999 - type: precision_at_1000 value: 1.543 - type: precision_at_3 value: 25.169999999999998 - type: precision_at_5 value: 23.265 - type: recall_at_1 value: 3.066 - type: recall_at_10 value: 14.985999999999999 - type: recall_at_100 value: 47.902 - type: recall_at_1000 value: 83.56400000000001 - type: recall_at_3 value: 5.755 - type: recall_at_5 value: 8.741999999999999 - task: type: Classification dataset: name: MTEB ToxicConversationsClassification type: mteb/toxic_conversations_50k config: default split: test revision: edfaf9da55d3dd50d43143d90c1ac476895ae6de metrics: - type: accuracy value: 69.437 - type: ap value: 12.844066827082706 - type: f1 value: 52.74974809872495 - task: type: Classification dataset: name: MTEB TweetSentimentExtractionClassification type: mteb/tweet_sentiment_extraction config: default split: test revision: d604517c81ca91fe16a244d1248fc021f9ecee7a metrics: - type: accuracy value: 61.26768534238823 - type: f1 value: 61.65100187399282 - task: type: Clustering dataset: name: MTEB TwentyNewsgroupsClustering type: mteb/twentynewsgroups-clustering config: default split: test revision: 6125ec4e24fa026cec8a478383ee943acfbd5449 metrics: - type: v_measure value: 49.860968711078804 - task: type: PairClassification dataset: name: MTEB TwitterSemEval2015 type: mteb/twittersemeval2015-pairclassification config: default split: test revision: 70970daeab8776df92f5ea462b6173c0b46fd2d1 metrics: - type: cos_sim_accuracy value: 85.7423854085951 - type: cos_sim_ap value: 73.47560303339571 - type: cos_sim_f1 value: 67.372778183589 - type: cos_sim_precision value: 62.54520795660036 - type: cos_sim_recall value: 73.00791556728232 - type: dot_accuracy value: 85.36091077069798 - type: dot_ap value: 72.42521572307255 - type: dot_f1 value: 66.90576304724215 - type: dot_precision value: 62.96554934823091 - type: dot_recall value: 71.37203166226914 - type: euclidean_accuracy value: 85.76026703224653 - type: euclidean_ap value: 73.44852563860128 - type: euclidean_f1 value: 67.3 - type: euclidean_precision value: 63.94299287410926 - type: euclidean_recall value: 71.02902374670185 - type: manhattan_accuracy value: 85.7423854085951 - type: manhattan_ap value: 73.2635034755551 - type: manhattan_f1 value: 67.3180263800684 - type: manhattan_precision value: 62.66484765802638 - type: manhattan_recall value: 72.71767810026385 - type: max_accuracy value: 85.76026703224653 - type: max_ap value: 73.47560303339571 - type: max_f1 value: 67.372778183589 - task: type: PairClassification dataset: name: MTEB TwitterURLCorpus type: mteb/twitterurlcorpus-pairclassification config: default split: test revision: 8b6510b0b1fa4e4c4f879467980e9be563ec1cdf metrics: - type: cos_sim_accuracy value: 88.67543757519307 - type: cos_sim_ap value: 85.35516518531304 - type: 
cos_sim_f1 value: 77.58197635511934 - type: cos_sim_precision value: 75.01078360891445 - type: cos_sim_recall value: 80.33569448721897 - type: dot_accuracy value: 87.61400240617844 - type: dot_ap value: 83.0774968268665 - type: dot_f1 value: 75.68229012162561 - type: dot_precision value: 72.99713876967095 - type: dot_recall value: 78.57252848783493 - type: euclidean_accuracy value: 88.73753250281368 - type: euclidean_ap value: 85.48043564821317 - type: euclidean_f1 value: 77.75975862719216 - type: euclidean_precision value: 76.21054187920456 - type: euclidean_recall value: 79.37326763166 - type: manhattan_accuracy value: 88.75111576823068 - type: manhattan_ap value: 85.44993439423668 - type: manhattan_f1 value: 77.6861329994845 - type: manhattan_precision value: 74.44601270289344 - type: manhattan_recall value: 81.22112719433323 - type: max_accuracy value: 88.75111576823068 - type: max_ap value: 85.48043564821317 - type: max_f1 value: 77.75975862719216
---

<h1 align="center">NoInstruct small Embedding v0</h1>

*NoInstruct Embedding: Asymmetric Pooling is All You Need*

This model improves retrieval performance over the [avsolatorio/GIST-small-Embedding-v0](https://huggingface.co/avsolatorio/GIST-small-Embedding-v0) model. Retrieval is one area where the `GIST` family of models fell short. We propose a method that improves retrieval performance without relying on crafted instructions when encoding a query, a trending paradigm in embedding models for retrieval tasks. Technical details of the model will be published shortly.

# Usage

```Python
from typing import Union

import torch
import torch.nn.functional as F
from transformers import AutoModel, AutoTokenizer

model = AutoModel.from_pretrained("avsolatorio/NoInstruct-small-Embedding-v0")
tokenizer = AutoTokenizer.from_pretrained("avsolatorio/NoInstruct-small-Embedding-v0")


def get_embedding(text: Union[str, list[str]], mode: str = "sentence"):
    model.eval()

    assert mode in ("query", "sentence"), f"mode={mode} was passed but only `query` and `sentence` are the supported modes."

    if isinstance(text, str):
        text = [text]

    inp = tokenizer(text, return_tensors="pt", padding=True, truncation=True)

    with torch.no_grad():
        output = model(**inp)

    # The model is optimized to use the mean pooling for queries,
    # while the sentence / document embedding uses the [CLS] representation.
    if mode == "query":
        vectors = output.last_hidden_state * inp["attention_mask"].unsqueeze(2)
        vectors = vectors.sum(dim=1) / inp["attention_mask"].sum(dim=-1).view(-1, 1)
    else:
        vectors = output.last_hidden_state[:, 0, :]

    return vectors


texts = [
    "Illustration of the REaLTabFormer model. The left block shows the non-relational tabular data model using GPT-2 with a causal LM head. In contrast, the right block shows how a relational dataset's child table is modeled using a sequence-to-sequence (Seq2Seq) model. The Seq2Seq model uses the observations in the parent table to condition the generation of the observations in the child table. The trained GPT-2 model on the parent table, with weights frozen, is also used as the encoder in the Seq2Seq model.",
    "Predicting human mobility holds significant practical value, with applications ranging from enhancing disaster risk planning to simulating epidemic spread. In this paper, we present the GeoFormer, a decoder-only transformer model adapted from the GPT architecture to forecast human mobility.",
    "As the economies of Southeast Asia continue adopting digital technologies, policy makers increasingly ask how to prepare the workforce for emerging labor demands. However, little is known about the skills that workers need to adapt to these changes"
]

# Compute embeddings
embeddings = get_embedding(texts, mode="sentence")

# Compute cosine-similarity for each pair of sentences
scores = F.cosine_similarity(embeddings.unsqueeze(1), embeddings.unsqueeze(0), dim=-1)
print(scores.cpu().numpy())

# Test the retrieval performance.
query = get_embedding("Which sentence talks about concept on jobs?", mode="query")

scores = F.cosine_similarity(query, embeddings, dim=-1)
print(scores.cpu().numpy())
```

Support for the Sentence Transformers library will follow soon.
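Building on the snippet above, the minimal sketch below shows one way to turn the asymmetric query/sentence modes into a small retrieval helper. It assumes `get_embedding` and `texts` from the usage example are already in scope; the `rank_documents` helper name is hypothetical and not part of the model's published API.

```Python
import torch
import torch.nn.functional as F


def rank_documents(query_text: str, documents: list[str]) -> list[tuple[float, str]]:
    # Hypothetical helper: queries use mean pooling, documents use the [CLS] vector,
    # as in the `get_embedding` function defined in the usage example above.
    query_vec = get_embedding(query_text, mode="query")        # shape: (1, hidden)
    doc_vecs = get_embedding(documents, mode="sentence")       # shape: (N, hidden)

    # Cosine similarity between the query vector and every document vector.
    scores = F.cosine_similarity(query_vec, doc_vecs, dim=-1)  # shape: (N,)

    # Sort documents from most to least similar.
    order = torch.argsort(scores, descending=True)
    return [(scores[i].item(), documents[i]) for i in order.tolist()]


# Example: rank the `texts` defined above for a job-related question.
for score, doc in rank_documents("Which sentence talks about concept on jobs?", texts):
    print(f"{score:.4f}  {doc[:80]}...")
```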
[ "BIOSSES", "SCIFACT" ]
Snowflake/snowflake-arctic-embed-m-v2.0
Snowflake
sentence-similarity
[ "sentence-transformers", "onnx", "safetensors", "gte", "feature-extraction", "sentence-similarity", "mteb", "arctic", "snowflake-arctic-embed", "transformers.js", "custom_code", "af", "ar", "az", "be", "bg", "bn", "ca", "ceb", "cs", "cy", "da", "de", "el", "en", "es", "et", "eu", "fa", "fi", "fr", "gl", "gu", "he", "hi", "hr", "ht", "hu", "hy", "id", "is", "it", "ja", "jv", "ka", "kk", "km", "kn", "ko", "ky", "lo", "lt", "lv", "mk", "ml", "mn", "mr", "ms", "my", "ne", "nl", "pa", "pl", "pt", "qu", "ro", "ru", "si", "sk", "sl", "so", "sq", "sr", "sv", "sw", "ta", "te", "th", "tl", "tr", "uk", "ur", "vi", "yo", "zh", "arxiv:2412.04506", "license:apache-2.0", "model-index", "autotrain_compatible", "endpoints_compatible", "region:us" ]
"2024-11-08T16:52:25Z"
2024-12-14T00:20:05+00:00
161,691
63
--- language: - af - ar - az - be - bg - bn - ca - ceb - cs - cy - da - de - el - en - es - et - eu - fa - fi - fr - gl - gu - he - hi - hr - ht - hu - hy - id - is - it - ja - jv - ka - kk - km - kn - ko - ky - lo - lt - lv - mk - ml - mn - mr - ms - my - ne - nl - pa - pl - pt - qu - ro - ru - si - sk - sl - so - sq - sr - sv - sw - ta - te - th - tl - tr - uk - ur - vi - yo - zh license: apache-2.0 pipeline_tag: sentence-similarity tags: - sentence-transformers - feature-extraction - sentence-similarity - mteb - arctic - snowflake-arctic-embed - transformers.js model-index: - name: snowflake-arctic-embed-m-v2.0 results: - task: type: Classification dataset: name: MTEB AmazonCounterfactualClassification (en-ext) type: mteb/amazon_counterfactual config: en-ext split: test revision: e8379541af4e31359cca9fbcf4b00f2671dba205 metrics: - type: accuracy value: 66.6867 - type: f1 value: 55.0373 - type: f1_weighted value: 73.07430000000001 - type: ap value: 18.077399999999997 - type: ap_weighted value: 18.077399999999997 - type: main_score value: 66.6867 - task: type: Classification dataset: name: MTEB AmazonCounterfactualClassification (en) type: mteb/amazon_counterfactual config: en split: test revision: e8379541af4e31359cca9fbcf4b00f2671dba205 metrics: - type: accuracy value: 66.194 - type: f1 value: 60.854299999999995 - type: f1_weighted value: 69.57339999999999 - type: ap value: 30.279099999999996 - type: ap_weighted value: 30.279099999999996 - type: main_score value: 66.194 - task: type: Classification dataset: name: MTEB AmazonPolarityClassification (default) type: mteb/amazon_polarity config: default split: test revision: e2d317d38cd51312af73b3d32a06d1a08b442046 metrics: - type: accuracy value: 70.3589 - type: f1 value: 70.0409 - type: f1_weighted value: 70.0409 - type: ap value: 64.81949999999999 - type: ap_weighted value: 64.81949999999999 - type: main_score value: 70.3589 - task: type: Classification dataset: name: MTEB AmazonReviewsClassification (en) type: mteb/amazon_reviews_multi config: en split: test revision: 1399c76144fd37290681b995c656ef9b2e06e26d metrics: - type: accuracy value: 33.766 - type: f1 value: 33.3656 - type: f1_weighted value: 33.3656 - type: main_score value: 33.766 - task: type: Retrieval dataset: name: MTEB ArguAna (default) type: mteb/arguana config: default split: test revision: c22ab2a51041ffd869aaddef7af8d8215647e41a metrics: - type: ndcg_at_1 value: 33.144 - type: ndcg_at_3 value: 47.909 - type: ndcg_at_5 value: 52.932 - type: ndcg_at_10 value: 58.011 - type: ndcg_at_20 value: 60.168 - type: ndcg_at_100 value: 60.928000000000004 - type: ndcg_at_1000 value: 61.046 - type: map_at_1 value: 33.144 - type: map_at_3 value: 44.156 - type: map_at_5 value: 46.951 - type: map_at_10 value: 49.071999999999996 - type: map_at_20 value: 49.692 - type: map_at_100 value: 49.809 - type: map_at_1000 value: 49.815 - type: recall_at_1 value: 33.144 - type: recall_at_3 value: 58.819 - type: recall_at_5 value: 70.982 - type: recall_at_10 value: 86.558 - type: recall_at_20 value: 94.879 - type: recall_at_100 value: 98.791 - type: recall_at_1000 value: 99.644 - type: precision_at_1 value: 33.144 - type: precision_at_3 value: 19.606 - type: precision_at_5 value: 14.196 - type: precision_at_10 value: 8.656 - type: precision_at_20 value: 4.744000000000001 - type: precision_at_100 value: 0.988 - type: precision_at_1000 value: 0.1 - type: mrr_at_1 value: 33.4993 - type: mrr_at_3 value: 44.393100000000004 - type: mrr_at_5 value: 47.131299999999996 - type: mrr_at_10 value: 
49.264599999999994 - type: mrr_at_20 value: 49.8707 - type: mrr_at_100 value: 49.987700000000004 - type: mrr_at_1000 value: 49.993700000000004 - type: nauc_ndcg_at_1_max value: -10.8287 - type: nauc_ndcg_at_1_std value: -17.1177 - type: nauc_ndcg_at_1_diff1 value: 14.4508 - type: nauc_ndcg_at_3_max value: -7.7004 - type: nauc_ndcg_at_3_std value: -16.6705 - type: nauc_ndcg_at_3_diff1 value: 10.0448 - type: nauc_ndcg_at_5_max value: -7.0436 - type: nauc_ndcg_at_5_std value: -15.8744 - type: nauc_ndcg_at_5_diff1 value: 9.1132 - type: nauc_ndcg_at_10_max value: -7.4729 - type: nauc_ndcg_at_10_std value: -14.9349 - type: nauc_ndcg_at_10_diff1 value: 8.527700000000001 - type: nauc_ndcg_at_20_max value: -6.997000000000001 - type: nauc_ndcg_at_20_std value: -14.688399999999998 - type: nauc_ndcg_at_20_diff1 value: 9.7605 - type: nauc_ndcg_at_100_max value: -7.5599 - type: nauc_ndcg_at_100_std value: -15.0565 - type: nauc_ndcg_at_100_diff1 value: 10.2688 - type: nauc_ndcg_at_1000_max value: -7.675800000000001 - type: nauc_ndcg_at_1000_std value: -15.223500000000001 - type: nauc_ndcg_at_1000_diff1 value: 10.32 - type: nauc_map_at_1_max value: -10.8287 - type: nauc_map_at_1_std value: -17.1177 - type: nauc_map_at_1_diff1 value: 14.4508 - type: nauc_map_at_3_max value: -8.5473 - type: nauc_map_at_3_std value: -16.6674 - type: nauc_map_at_3_diff1 value: 11.1004 - type: nauc_map_at_5_max value: -8.1927 - type: nauc_map_at_5_std value: -16.2275 - type: nauc_map_at_5_diff1 value: 10.678600000000001 - type: nauc_map_at_10_max value: -8.3855 - type: nauc_map_at_10_std value: -15.8309 - type: nauc_map_at_10_diff1 value: 10.5414 - type: nauc_map_at_20_max value: -8.277700000000001 - type: nauc_map_at_20_std value: -15.824 - type: nauc_map_at_20_diff1 value: 10.8494 - type: nauc_map_at_100_max value: -8.3178 - type: nauc_map_at_100_std value: -15.848300000000002 - type: nauc_map_at_100_diff1 value: 10.9384 - type: nauc_map_at_1000_max value: -8.319799999999999 - type: nauc_map_at_1000_std value: -15.8522 - type: nauc_map_at_1000_diff1 value: 10.9401 - type: nauc_recall_at_1_max value: -10.8287 - type: nauc_recall_at_1_std value: -17.1177 - type: nauc_recall_at_1_diff1 value: 14.4508 - type: nauc_recall_at_3_max value: -5.0587 - type: nauc_recall_at_3_std value: -16.730800000000002 - type: nauc_recall_at_3_diff1 value: 6.8079 - type: nauc_recall_at_5_max value: -2.6783 - type: nauc_recall_at_5_std value: -14.5046 - type: nauc_recall_at_5_diff1 value: 3.096 - type: nauc_recall_at_10_max value: -1.5855000000000001 - type: nauc_recall_at_10_std value: -8.2276 - type: nauc_recall_at_10_diff1 value: -6.1741 - type: nauc_recall_at_20_max value: 15.754299999999999 - type: nauc_recall_at_20_std value: 8.1974 - type: nauc_recall_at_20_diff1 value: -4.9207 - type: nauc_recall_at_100_max value: 20.4574 - type: nauc_recall_at_100_std value: 36.3741 - type: nauc_recall_at_100_diff1 value: -7.9483 - type: nauc_recall_at_1000_max value: 21.6023 - type: nauc_recall_at_1000_std value: 68.7296 - type: nauc_recall_at_1000_diff1 value: -24.9261 - type: nauc_precision_at_1_max value: -10.8287 - type: nauc_precision_at_1_std value: -17.1177 - type: nauc_precision_at_1_diff1 value: 14.4508 - type: nauc_precision_at_3_max value: -5.0587 - type: nauc_precision_at_3_std value: -16.730800000000002 - type: nauc_precision_at_3_diff1 value: 6.8079 - type: nauc_precision_at_5_max value: -2.6783 - type: nauc_precision_at_5_std value: -14.5046 - type: nauc_precision_at_5_diff1 value: 3.096 - type: nauc_precision_at_10_max value: 
-1.5855000000000001 - type: nauc_precision_at_10_std value: -8.2276 - type: nauc_precision_at_10_diff1 value: -6.1741 - type: nauc_precision_at_20_max value: 15.754299999999999 - type: nauc_precision_at_20_std value: 8.1974 - type: nauc_precision_at_20_diff1 value: -4.9207 - type: nauc_precision_at_100_max value: 20.4574 - type: nauc_precision_at_100_std value: 36.3741 - type: nauc_precision_at_100_diff1 value: -7.9483 - type: nauc_precision_at_1000_max value: 21.6023 - type: nauc_precision_at_1000_std value: 68.7296 - type: nauc_precision_at_1000_diff1 value: -24.9261 - type: nauc_mrr_at_1_max value: -11.251999999999999 - type: nauc_mrr_at_1_std value: -17.4386 - type: nauc_mrr_at_1_diff1 value: 13.414200000000001 - type: nauc_mrr_at_3_max value: -9.7985 - type: nauc_mrr_at_3_std value: -16.650000000000002 - type: nauc_mrr_at_3_diff1 value: 9.5099 - type: nauc_mrr_at_5_max value: -9.064 - type: nauc_mrr_at_5_std value: -16.4409 - type: nauc_mrr_at_5_diff1 value: 9.4773 - type: nauc_mrr_at_10_max value: -9.310400000000001 - type: nauc_mrr_at_10_std value: -16.0546 - type: nauc_mrr_at_10_diff1 value: 9.2528 - type: nauc_mrr_at_20_max value: -9.223099999999999 - type: nauc_mrr_at_20_std value: -16.0659 - type: nauc_mrr_at_20_diff1 value: 9.5259 - type: nauc_mrr_at_100_max value: -9.2678 - type: nauc_mrr_at_100_std value: -16.0911 - type: nauc_mrr_at_100_diff1 value: 9.608600000000001 - type: nauc_mrr_at_1000_max value: -9.2699 - type: nauc_mrr_at_1000_std value: -16.095100000000002 - type: nauc_mrr_at_1000_diff1 value: 9.6099 - type: main_score value: 58.011 - task: type: Clustering dataset: name: MTEB ArxivClusteringP2P (default) type: mteb/arxiv-clustering-p2p config: default split: test revision: a122ad7f3f0291bf49cc6f4d32aa80929df69d5d metrics: - type: v_measure value: 44.684400000000004 - type: v_measure_std value: 13.5064 - type: main_score value: 44.684400000000004 - task: type: Clustering dataset: name: MTEB ArxivClusteringS2S (default) type: mteb/arxiv-clustering-s2s config: default split: test revision: f910caf1a6075f7329cdf8c1a6135696f37dbd53 metrics: - type: v_measure value: 35.0503 - type: v_measure_std value: 13.9543 - type: main_score value: 35.0503 - task: type: Reranking dataset: name: MTEB AskUbuntuDupQuestions (default) type: mteb/askubuntudupquestions-reranking config: default split: test revision: 2000358ca161889fa9c082cb41daa8dcfb161a54 metrics: - type: map value: 60.648500000000006 - type: mrr value: 74.528 - type: nAUC_map_max value: 19.4239 - type: nAUC_map_std value: 20.0729 - type: nAUC_map_diff1 value: 10.0382 - type: nAUC_mrr_max value: 30.693199999999997 - type: nAUC_mrr_std value: 27.1279 - type: nAUC_mrr_diff1 value: 23.0291 - type: main_score value: 60.648500000000006 - task: type: STS dataset: name: MTEB BIOSSES (default) type: mteb/biosses-sts config: default split: test revision: d3fb88f8f02e40887cd149695127462bbcf29b4a metrics: - type: pearson value: 89.5081 - type: spearman value: 87.0568 - type: cosine_pearson value: 89.5081 - type: cosine_spearman value: 87.0568 - type: manhattan_pearson value: 88.1247 - type: manhattan_spearman value: 87.2556 - type: euclidean_pearson value: 88.3266 - type: euclidean_spearman value: 87.0568 - type: main_score value: 87.0568 - task: type: Classification dataset: name: MTEB Banking77Classification (default) type: mteb/banking77 config: default split: test revision: 0fd18e25b25c072e09e0d92ab615fda904d66300 metrics: - type: accuracy value: 80.18180000000001 - type: f1 value: 79.5538 - type: f1_weighted value: 79.5538 - 
type: main_score value: 80.18180000000001 - task: type: Clustering dataset: name: MTEB BiorxivClusteringP2P (default) type: mteb/biorxiv-clustering-p2p config: default split: test revision: 65b79d1d13f80053f67aca9498d9402c2d9f1f40 metrics: - type: v_measure value: 36.0126 - type: v_measure_std value: 0.47019999999999995 - type: main_score value: 36.0126 - task: type: Clustering dataset: name: MTEB BiorxivClusteringS2S (default) type: mteb/biorxiv-clustering-s2s config: default split: test revision: 258694dd0231531bc1fd9de6ceb52a0853c6d908 metrics: - type: v_measure value: 28.6331 - type: v_measure_std value: 0.8607999999999999 - type: main_score value: 28.6331 - task: type: Retrieval dataset: name: MTEB CQADupstackAndroidRetrieval (default) type: mteb/cqadupstack-android config: default split: test revision: f46a197baaae43b4f621051089b82a364682dfeb metrics: - type: ndcg_at_1 value: 45.207 - type: ndcg_at_3 value: 51.31400000000001 - type: ndcg_at_5 value: 54.093999999999994 - type: ndcg_at_10 value: 56.31 - type: ndcg_at_20 value: 58.378 - type: ndcg_at_100 value: 61.307 - type: ndcg_at_1000 value: 62.724999999999994 - type: map_at_1 value: 37.732 - type: map_at_3 value: 46.263 - type: map_at_5 value: 48.553000000000004 - type: map_at_10 value: 49.984 - type: map_at_20 value: 50.888999999999996 - type: map_at_100 value: 51.568999999999996 - type: map_at_1000 value: 51.666999999999994 - type: recall_at_1 value: 37.732 - type: recall_at_3 value: 53.736 - type: recall_at_5 value: 60.95399999999999 - type: recall_at_10 value: 68.062 - type: recall_at_20 value: 75.149 - type: recall_at_100 value: 88.075 - type: recall_at_1000 value: 96.878 - type: precision_at_1 value: 45.207 - type: precision_at_3 value: 24.368000000000002 - type: precision_at_5 value: 17.854 - type: precision_at_10 value: 10.558 - type: precision_at_20 value: 6.23 - type: precision_at_100 value: 1.614 - type: precision_at_1000 value: 0.202 - type: mrr_at_1 value: 45.2074 - type: mrr_at_3 value: 52.9804 - type: mrr_at_5 value: 54.718599999999995 - type: mrr_at_10 value: 55.5713 - type: mrr_at_20 value: 55.94 - type: mrr_at_100 value: 56.21699999999999 - type: mrr_at_1000 value: 56.2504 - type: nauc_ndcg_at_1_max value: 43.7697 - type: nauc_ndcg_at_1_std value: -3.9530000000000003 - type: nauc_ndcg_at_1_diff1 value: 57.75320000000001 - type: nauc_ndcg_at_3_max value: 42.7238 - type: nauc_ndcg_at_3_std value: -3.5654 - type: nauc_ndcg_at_3_diff1 value: 53.552299999999995 - type: nauc_ndcg_at_5_max value: 43.115500000000004 - type: nauc_ndcg_at_5_std value: -2.1444 - type: nauc_ndcg_at_5_diff1 value: 53.130500000000005 - type: nauc_ndcg_at_10_max value: 43.0188 - type: nauc_ndcg_at_10_std value: -3.1515 - type: nauc_ndcg_at_10_diff1 value: 53.593199999999996 - type: nauc_ndcg_at_20_max value: 43.4617 - type: nauc_ndcg_at_20_std value: -2.9284 - type: nauc_ndcg_at_20_diff1 value: 53.28000000000001 - type: nauc_ndcg_at_100_max value: 44.0704 - type: nauc_ndcg_at_100_std value: -0.5772 - type: nauc_ndcg_at_100_diff1 value: 53.439899999999994 - type: nauc_ndcg_at_1000_max value: 44.256099999999996 - type: nauc_ndcg_at_1000_std value: -1.1407 - type: nauc_ndcg_at_1000_diff1 value: 53.8728 - type: nauc_map_at_1_max value: 36.613800000000005 - type: nauc_map_at_1_std value: -5.8014 - type: nauc_map_at_1_diff1 value: 59.0186 - type: nauc_map_at_3_max value: 40.8666 - type: nauc_map_at_3_std value: -4.886299999999999 - type: nauc_map_at_3_diff1 value: 55.324600000000004 - type: nauc_map_at_5_max value: 41.9942 - type: nauc_map_at_5_std 
value: -3.9361 - type: nauc_map_at_5_diff1 value: 54.8805 - type: nauc_map_at_10_max value: 42.1621 - type: nauc_map_at_10_std value: -4.3264 - type: nauc_map_at_10_diff1 value: 55.0133 - type: nauc_map_at_20_max value: 42.5837 - type: nauc_map_at_20_std value: -3.8526 - type: nauc_map_at_20_diff1 value: 54.895700000000005 - type: nauc_map_at_100_max value: 42.7645 - type: nauc_map_at_100_std value: -3.4568000000000003 - type: nauc_map_at_100_diff1 value: 54.98030000000001 - type: nauc_map_at_1000_max value: 42.7915 - type: nauc_map_at_1000_std value: -3.4715999999999996 - type: nauc_map_at_1000_diff1 value: 55.0117 - type: nauc_recall_at_1_max value: 36.613800000000005 - type: nauc_recall_at_1_std value: -5.8014 - type: nauc_recall_at_1_diff1 value: 59.0186 - type: nauc_recall_at_3_max value: 39.3588 - type: nauc_recall_at_3_std value: -3.29 - type: nauc_recall_at_3_diff1 value: 50.1633 - type: nauc_recall_at_5_max value: 39.7596 - type: nauc_recall_at_5_std value: 0.4483 - type: nauc_recall_at_5_diff1 value: 47.598600000000005 - type: nauc_recall_at_10_max value: 37.5367 - type: nauc_recall_at_10_std value: -2.5935 - type: nauc_recall_at_10_diff1 value: 46.824799999999996 - type: nauc_recall_at_20_max value: 38.521100000000004 - type: nauc_recall_at_20_std value: -2.5774 - type: nauc_recall_at_20_diff1 value: 44.099 - type: nauc_recall_at_100_max value: 44.043 - type: nauc_recall_at_100_std value: 22.724 - type: nauc_recall_at_100_diff1 value: 40.4973 - type: nauc_recall_at_1000_max value: 59.780100000000004 - type: nauc_recall_at_1000_std value: 52.512 - type: nauc_recall_at_1000_diff1 value: 45.2841 - type: nauc_precision_at_1_max value: 43.7697 - type: nauc_precision_at_1_std value: -3.9530000000000003 - type: nauc_precision_at_1_diff1 value: 57.75320000000001 - type: nauc_precision_at_3_max value: 37.486000000000004 - type: nauc_precision_at_3_std value: -1.0619 - type: nauc_precision_at_3_diff1 value: 28.264699999999998 - type: nauc_precision_at_5_max value: 31.613599999999998 - type: nauc_precision_at_5_std value: 3.6863 - type: nauc_precision_at_5_diff1 value: 16.0838 - type: nauc_precision_at_10_max value: 23.4082 - type: nauc_precision_at_10_std value: 3.3977 - type: nauc_precision_at_10_diff1 value: 7.3632 - type: nauc_precision_at_20_max value: 16.7236 - type: nauc_precision_at_20_std value: 5.7516 - type: nauc_precision_at_20_diff1 value: -0.8460000000000001 - type: nauc_precision_at_100_max value: 3.9043 - type: nauc_precision_at_100_std value: 7.7799 - type: nauc_precision_at_100_diff1 value: -11.0756 - type: nauc_precision_at_1000_max value: -7.728 - type: nauc_precision_at_1000_std value: -1.9303000000000001 - type: nauc_precision_at_1000_diff1 value: -17.025000000000002 - type: nauc_mrr_at_1_max value: 43.7697 - type: nauc_mrr_at_1_std value: -3.9530000000000003 - type: nauc_mrr_at_1_diff1 value: 57.75320000000001 - type: nauc_mrr_at_3_max value: 44.8007 - type: nauc_mrr_at_3_std value: -2.9754 - type: nauc_mrr_at_3_diff1 value: 53.7928 - type: nauc_mrr_at_5_max value: 44.860499999999995 - type: nauc_mrr_at_5_std value: -1.7683 - type: nauc_mrr_at_5_diff1 value: 53.5852 - type: nauc_mrr_at_10_max value: 44.8025 - type: nauc_mrr_at_10_std value: -2.1691 - type: nauc_mrr_at_10_diff1 value: 53.880300000000005 - type: nauc_mrr_at_20_max value: 44.7838 - type: nauc_mrr_at_20_std value: -2.3529 - type: nauc_mrr_at_20_diff1 value: 53.890499999999996 - type: nauc_mrr_at_100_max value: 44.7905 - type: nauc_mrr_at_100_std value: -2.1931 - type: nauc_mrr_at_100_diff1 value: 53.9458 
- type: nauc_mrr_at_1000_max value: 44.7943 - type: nauc_mrr_at_1000_std value: -2.2006 - type: nauc_mrr_at_1000_diff1 value: 53.954800000000006 - type: main_score value: 56.31 - task: type: Retrieval dataset: name: MTEB CQADupstackEnglishRetrieval (default) type: mteb/cqadupstack-english config: default split: test revision: ad9991cb51e31e31e430383c75ffb2885547b5f0 metrics: - type: ndcg_at_1 value: 44.840999999999994 - type: ndcg_at_3 value: 49.217 - type: ndcg_at_5 value: 50.934000000000005 - type: ndcg_at_10 value: 53.142999999999994 - type: ndcg_at_20 value: 54.778000000000006 - type: ndcg_at_100 value: 57.241 - type: ndcg_at_1000 value: 58.967999999999996 - type: map_at_1 value: 35.675000000000004 - type: map_at_3 value: 44.017 - type: map_at_5 value: 45.786 - type: map_at_10 value: 47.204 - type: map_at_20 value: 47.946 - type: map_at_100 value: 48.564 - type: map_at_1000 value: 48.684 - type: recall_at_1 value: 35.675000000000004 - type: recall_at_3 value: 50.641000000000005 - type: recall_at_5 value: 55.897 - type: recall_at_10 value: 62.873999999999995 - type: recall_at_20 value: 68.766 - type: recall_at_100 value: 79.90899999999999 - type: recall_at_1000 value: 90.78399999999999 - type: precision_at_1 value: 44.840999999999994 - type: precision_at_3 value: 23.843 - type: precision_at_5 value: 16.637 - type: precision_at_10 value: 9.968 - type: precision_at_20 value: 5.863 - type: precision_at_100 value: 1.562 - type: precision_at_1000 value: 0.197 - type: mrr_at_1 value: 44.840799999999994 - type: mrr_at_3 value: 51.634800000000006 - type: mrr_at_5 value: 52.746300000000005 - type: mrr_at_10 value: 53.6323 - type: mrr_at_20 value: 53.9565 - type: mrr_at_100 value: 54.198 - type: mrr_at_1000 value: 54.234899999999996 - type: nauc_ndcg_at_1_max value: 50.3827 - type: nauc_ndcg_at_1_std value: -0.8129000000000001 - type: nauc_ndcg_at_1_diff1 value: 59.7518 - type: nauc_ndcg_at_3_max value: 49.6676 - type: nauc_ndcg_at_3_std value: -2.1006 - type: nauc_ndcg_at_3_diff1 value: 52.7373 - type: nauc_ndcg_at_5_max value: 50.5186 - type: nauc_ndcg_at_5_std value: -1.5242 - type: nauc_ndcg_at_5_diff1 value: 53.234300000000005 - type: nauc_ndcg_at_10_max value: 50.5247 - type: nauc_ndcg_at_10_std value: -1.2392 - type: nauc_ndcg_at_10_diff1 value: 53.1045 - type: nauc_ndcg_at_20_max value: 51.3292 - type: nauc_ndcg_at_20_std value: -0.06570000000000001 - type: nauc_ndcg_at_20_diff1 value: 53.48349999999999 - type: nauc_ndcg_at_100_max value: 51.588100000000004 - type: nauc_ndcg_at_100_std value: 1.9398 - type: nauc_ndcg_at_100_diff1 value: 52.755399999999995 - type: nauc_ndcg_at_1000_max value: 51.5558 - type: nauc_ndcg_at_1000_std value: 2.3446000000000002 - type: nauc_ndcg_at_1000_diff1 value: 52.9377 - type: nauc_map_at_1_max value: 40.0957 - type: nauc_map_at_1_std value: -11.972 - type: nauc_map_at_1_diff1 value: 61.88249999999999 - type: nauc_map_at_3_max value: 45.6088 - type: nauc_map_at_3_std value: -9.249699999999999 - type: nauc_map_at_3_diff1 value: 56.260299999999994 - type: nauc_map_at_5_max value: 47.2279 - type: nauc_map_at_5_std value: -7.407500000000001 - type: nauc_map_at_5_diff1 value: 55.7894 - type: nauc_map_at_10_max value: 48.0167 - type: nauc_map_at_10_std value: -6.1371 - type: nauc_map_at_10_diff1 value: 55.4646 - type: nauc_map_at_20_max value: 48.6024 - type: nauc_map_at_20_std value: -5.1559 - type: nauc_map_at_20_diff1 value: 55.338100000000004 - type: nauc_map_at_100_max value: 48.993700000000004 - type: nauc_map_at_100_std value: -4.1873000000000005 - type: 
nauc_map_at_100_diff1 value: 55.1214 - type: nauc_map_at_1000_max value: 49.054500000000004 - type: nauc_map_at_1000_std value: -4.0072 - type: nauc_map_at_1000_diff1 value: 55.109300000000005 - type: nauc_recall_at_1_max value: 40.0957 - type: nauc_recall_at_1_std value: -11.972 - type: nauc_recall_at_1_diff1 value: 61.88249999999999 - type: nauc_recall_at_3_max value: 44.188 - type: nauc_recall_at_3_std value: -8.3756 - type: nauc_recall_at_3_diff1 value: 48.6817 - type: nauc_recall_at_5_max value: 46.6706 - type: nauc_recall_at_5_std value: -4.1561 - type: nauc_recall_at_5_diff1 value: 47.6738 - type: nauc_recall_at_10_max value: 47.614200000000004 - type: nauc_recall_at_10_std value: -1.1676 - type: nauc_recall_at_10_diff1 value: 45.628099999999996 - type: nauc_recall_at_20_max value: 51.490100000000005 - type: nauc_recall_at_20_std value: 5.111000000000001 - type: nauc_recall_at_20_diff1 value: 45.730199999999996 - type: nauc_recall_at_100_max value: 54.0635 - type: nauc_recall_at_100_std value: 19.8381 - type: nauc_recall_at_100_diff1 value: 39.1924 - type: nauc_recall_at_1000_max value: 56.3672 - type: nauc_recall_at_1000_std value: 33.9274 - type: nauc_recall_at_1000_diff1 value: 38.1103 - type: nauc_precision_at_1_max value: 50.3827 - type: nauc_precision_at_1_std value: -0.8129000000000001 - type: nauc_precision_at_1_diff1 value: 59.7518 - type: nauc_precision_at_3_max value: 46.281299999999995 - type: nauc_precision_at_3_std value: 14.7166 - type: nauc_precision_at_3_diff1 value: 24.211 - type: nauc_precision_at_5_max value: 44.466899999999995 - type: nauc_precision_at_5_std value: 22.5103 - type: nauc_precision_at_5_diff1 value: 15.746099999999998 - type: nauc_precision_at_10_max value: 38.0804 - type: nauc_precision_at_10_std value: 29.677999999999997 - type: nauc_precision_at_10_diff1 value: 4.886299999999999 - type: nauc_precision_at_20_max value: 32.302 - type: nauc_precision_at_20_std value: 34.8443 - type: nauc_precision_at_20_diff1 value: -2.9212 - type: nauc_precision_at_100_max value: 21.4725 - type: nauc_precision_at_100_std value: 41.8747 - type: nauc_precision_at_100_diff1 value: -14.976600000000001 - type: nauc_precision_at_1000_max value: 10.3891 - type: nauc_precision_at_1000_std value: 39.4181 - type: nauc_precision_at_1000_diff1 value: -21.9914 - type: nauc_mrr_at_1_max value: 50.3827 - type: nauc_mrr_at_1_std value: -0.8129000000000001 - type: nauc_mrr_at_1_diff1 value: 59.7518 - type: nauc_mrr_at_3_max value: 51.9937 - type: nauc_mrr_at_3_std value: 2.1604 - type: nauc_mrr_at_3_diff1 value: 54.58539999999999 - type: nauc_mrr_at_5_max value: 52.39319999999999 - type: nauc_mrr_at_5_std value: 2.8171 - type: nauc_mrr_at_5_diff1 value: 54.825100000000006 - type: nauc_mrr_at_10_max value: 52.2047 - type: nauc_mrr_at_10_std value: 2.6525 - type: nauc_mrr_at_10_diff1 value: 54.703500000000005 - type: nauc_mrr_at_20_max value: 52.251999999999995 - type: nauc_mrr_at_20_std value: 2.7842 - type: nauc_mrr_at_20_diff1 value: 54.76689999999999 - type: nauc_mrr_at_100_max value: 52.2776 - type: nauc_mrr_at_100_std value: 2.9701999999999997 - type: nauc_mrr_at_100_diff1 value: 54.712799999999994 - type: nauc_mrr_at_1000_max value: 52.274699999999996 - type: nauc_mrr_at_1000_std value: 2.9652000000000003 - type: nauc_mrr_at_1000_diff1 value: 54.7296 - type: main_score value: 53.142999999999994 - task: type: Retrieval dataset: name: MTEB CQADupstackGamingRetrieval (default) type: mteb/cqadupstack-gaming config: default split: test revision: 
4885aa143210c98657558c04aaf3dc47cfb54340 metrics: - type: ndcg_at_1 value: 53.542 - type: ndcg_at_3 value: 60.098 - type: ndcg_at_5 value: 62.515 - type: ndcg_at_10 value: 65.315 - type: ndcg_at_20 value: 66.683 - type: ndcg_at_100 value: 68.47800000000001 - type: ndcg_at_1000 value: 69.329 - type: map_at_1 value: 47.135 - type: map_at_3 value: 56.548 - type: map_at_5 value: 58.306000000000004 - type: map_at_10 value: 59.819 - type: map_at_20 value: 60.328 - type: map_at_100 value: 60.653999999999996 - type: map_at_1000 value: 60.699000000000005 - type: recall_at_1 value: 47.135 - type: recall_at_3 value: 64.371 - type: recall_at_5 value: 70.293 - type: recall_at_10 value: 78.346 - type: recall_at_20 value: 83.369 - type: recall_at_100 value: 92.04599999999999 - type: recall_at_1000 value: 97.933 - type: precision_at_1 value: 53.542 - type: precision_at_3 value: 26.395000000000003 - type: precision_at_5 value: 17.806 - type: precision_at_10 value: 10.238 - type: precision_at_20 value: 5.586 - type: precision_at_100 value: 1.266 - type: precision_at_1000 value: 0.13799999999999998 - type: mrr_at_1 value: 53.5423 - type: mrr_at_3 value: 60.595600000000005 - type: mrr_at_5 value: 61.931000000000004 - type: mrr_at_10 value: 62.8406 - type: mrr_at_20 value: 63.1667 - type: mrr_at_100 value: 63.347699999999996 - type: mrr_at_1000 value: 63.368100000000005 - type: nauc_ndcg_at_1_max value: 50.004599999999996 - type: nauc_ndcg_at_1_std value: -4.3123000000000005 - type: nauc_ndcg_at_1_diff1 value: 61.1973 - type: nauc_ndcg_at_3_max value: 48.65 - type: nauc_ndcg_at_3_std value: -6.0419 - type: nauc_ndcg_at_3_diff1 value: 56.712700000000005 - type: nauc_ndcg_at_5_max value: 50.0908 - type: nauc_ndcg_at_5_std value: -4.4674 - type: nauc_ndcg_at_5_diff1 value: 56.216 - type: nauc_ndcg_at_10_max value: 50.578 - type: nauc_ndcg_at_10_std value: -2.661 - type: nauc_ndcg_at_10_diff1 value: 55.9162 - type: nauc_ndcg_at_20_max value: 51.3801 - type: nauc_ndcg_at_20_std value: -0.8059999999999999 - type: nauc_ndcg_at_20_diff1 value: 55.8654 - type: nauc_ndcg_at_100_max value: 51.4594 - type: nauc_ndcg_at_100_std value: -0.3524 - type: nauc_ndcg_at_100_diff1 value: 56.131699999999995 - type: nauc_ndcg_at_1000_max value: 51.6105 - type: nauc_ndcg_at_1000_std value: -0.8832 - type: nauc_ndcg_at_1000_diff1 value: 56.6507 - type: nauc_map_at_1_max value: 42.7316 - type: nauc_map_at_1_std value: -6.979100000000001 - type: nauc_map_at_1_diff1 value: 61.6382 - type: nauc_map_at_3_max value: 47.6139 - type: nauc_map_at_3_std value: -7.0931 - type: nauc_map_at_3_diff1 value: 58.2923 - type: nauc_map_at_5_max value: 48.6039 - type: nauc_map_at_5_std value: -5.9601 - type: nauc_map_at_5_diff1 value: 57.7052 - type: nauc_map_at_10_max value: 49.2631 - type: nauc_map_at_10_std value: -4.808 - type: nauc_map_at_10_diff1 value: 57.5979 - type: nauc_map_at_20_max value: 49.6783 - type: nauc_map_at_20_std value: -4.0106 - type: nauc_map_at_20_diff1 value: 57.5781 - type: nauc_map_at_100_max value: 49.775000000000006 - type: nauc_map_at_100_std value: -3.8082 - type: nauc_map_at_100_diff1 value: 57.6013 - type: nauc_map_at_1000_max value: 49.8135 - type: nauc_map_at_1000_std value: -3.7974 - type: nauc_map_at_1000_diff1 value: 57.6323 - type: nauc_recall_at_1_max value: 42.7316 - type: nauc_recall_at_1_std value: -6.979100000000001 - type: nauc_recall_at_1_diff1 value: 61.6382 - type: nauc_recall_at_3_max value: 46.1138 - type: nauc_recall_at_3_std value: -8.6906 - type: nauc_recall_at_3_diff1 value: 52.6263 - type: 
nauc_recall_at_5_max value: 49.074200000000005 - type: nauc_recall_at_5_std value: -4.5975 - type: nauc_recall_at_5_diff1 value: 49.994 - type: nauc_recall_at_10_max value: 49.696 - type: nauc_recall_at_10_std value: 2.049 - type: nauc_recall_at_10_diff1 value: 46.7897 - type: nauc_recall_at_20_max value: 54.03980000000001 - type: nauc_recall_at_20_std value: 14.4898 - type: nauc_recall_at_20_diff1 value: 43.8642 - type: nauc_recall_at_100_max value: 57.23629999999999 - type: nauc_recall_at_100_std value: 32.6507 - type: nauc_recall_at_100_diff1 value: 38.4662 - type: nauc_recall_at_1000_max value: 81.5918 - type: nauc_recall_at_1000_std value: 67.0848 - type: nauc_recall_at_1000_diff1 value: 40.5123 - type: nauc_precision_at_1_max value: 50.004599999999996 - type: nauc_precision_at_1_std value: -4.3123000000000005 - type: nauc_precision_at_1_diff1 value: 61.1973 - type: nauc_precision_at_3_max value: 41.0359 - type: nauc_precision_at_3_std value: 2.2363 - type: nauc_precision_at_3_diff1 value: 26.9914 - type: nauc_precision_at_5_max value: 38.3114 - type: nauc_precision_at_5_std value: 8.7643 - type: nauc_precision_at_5_diff1 value: 17.0673 - type: nauc_precision_at_10_max value: 31.1391 - type: nauc_precision_at_10_std value: 17.1411 - type: nauc_precision_at_10_diff1 value: 4.9287 - type: nauc_precision_at_20_max value: 27.7595 - type: nauc_precision_at_20_std value: 25.470399999999998 - type: nauc_precision_at_20_diff1 value: -2.6803 - type: nauc_precision_at_100_max value: 18.2146 - type: nauc_precision_at_100_std value: 29.244300000000003 - type: nauc_precision_at_100_diff1 value: -13.083 - type: nauc_precision_at_1000_max value: 13.5621 - type: nauc_precision_at_1000_std value: 26.3405 - type: nauc_precision_at_1000_diff1 value: -15.398200000000001 - type: nauc_mrr_at_1_max value: 50.004599999999996 - type: nauc_mrr_at_1_std value: -4.3123000000000005 - type: nauc_mrr_at_1_diff1 value: 61.1973 - type: nauc_mrr_at_3_max value: 50.114599999999996 - type: nauc_mrr_at_3_std value: -4.7759 - type: nauc_mrr_at_3_diff1 value: 57.9624 - type: nauc_mrr_at_5_max value: 50.956900000000005 - type: nauc_mrr_at_5_std value: -3.7144999999999997 - type: nauc_mrr_at_5_diff1 value: 57.784400000000005 - type: nauc_mrr_at_10_max value: 50.8112 - type: nauc_mrr_at_10_std value: -3.3526 - type: nauc_mrr_at_10_diff1 value: 57.674499999999995 - type: nauc_mrr_at_20_max value: 50.9425 - type: nauc_mrr_at_20_std value: -2.9598 - type: nauc_mrr_at_20_diff1 value: 57.6704 - type: nauc_mrr_at_100_max value: 50.901799999999994 - type: nauc_mrr_at_100_std value: -3.0112 - type: nauc_mrr_at_100_diff1 value: 57.736200000000004 - type: nauc_mrr_at_1000_max value: 50.901399999999995 - type: nauc_mrr_at_1000_std value: -3.0314 - type: nauc_mrr_at_1000_diff1 value: 57.747400000000006 - type: main_score value: 65.315 - task: type: Retrieval dataset: name: MTEB CQADupstackGisRetrieval (default) type: mteb/cqadupstack-gis config: default split: test revision: 5003b3064772da1887988e05400cf3806fe491f2 metrics: - type: ndcg_at_1 value: 33.898 - type: ndcg_at_3 value: 39.875 - type: ndcg_at_5 value: 42.455999999999996 - type: ndcg_at_10 value: 45.4 - type: ndcg_at_20 value: 47.831 - type: ndcg_at_100 value: 50.428 - type: ndcg_at_1000 value: 52.037 - type: map_at_1 value: 31.357000000000003 - type: map_at_3 value: 37.358999999999995 - type: map_at_5 value: 38.948 - type: map_at_10 value: 40.243 - type: map_at_20 value: 40.98 - type: map_at_100 value: 41.349999999999994 - type: map_at_1000 value: 41.418 - type: recall_at_1 
value: 31.357000000000003 - type: recall_at_3 value: 44.324000000000005 - type: recall_at_5 value: 50.449 - type: recall_at_10 value: 59.17400000000001 - type: recall_at_20 value: 68.272 - type: recall_at_100 value: 81.672 - type: recall_at_1000 value: 93.572 - type: precision_at_1 value: 33.898 - type: precision_at_3 value: 16.648 - type: precision_at_5 value: 11.503 - type: precision_at_10 value: 6.847 - type: precision_at_20 value: 3.9890000000000003 - type: precision_at_100 value: 0.9809999999999999 - type: precision_at_1000 value: 0.11499999999999999 - type: mrr_at_1 value: 33.8983 - type: mrr_at_3 value: 39.8117 - type: mrr_at_5 value: 41.2354 - type: mrr_at_10 value: 42.4212 - type: mrr_at_20 value: 43.0404 - type: mrr_at_100 value: 43.3429 - type: mrr_at_1000 value: 43.3894 - type: nauc_ndcg_at_1_max value: 36.1482 - type: nauc_ndcg_at_1_std value: -4.471 - type: nauc_ndcg_at_1_diff1 value: 44.1333 - type: nauc_ndcg_at_3_max value: 35.404 - type: nauc_ndcg_at_3_std value: -4.487 - type: nauc_ndcg_at_3_diff1 value: 40.3399 - type: nauc_ndcg_at_5_max value: 35.0036 - type: nauc_ndcg_at_5_std value: -4.0964 - type: nauc_ndcg_at_5_diff1 value: 38.2164 - type: nauc_ndcg_at_10_max value: 34.7255 - type: nauc_ndcg_at_10_std value: -2.9356 - type: nauc_ndcg_at_10_diff1 value: 37.3216 - type: nauc_ndcg_at_20_max value: 35.5433 - type: nauc_ndcg_at_20_std value: -1.8858 - type: nauc_ndcg_at_20_diff1 value: 36.6106 - type: nauc_ndcg_at_100_max value: 35.9643 - type: nauc_ndcg_at_100_std value: -1.6303 - type: nauc_ndcg_at_100_diff1 value: 37.515100000000004 - type: nauc_ndcg_at_1000_max value: 35.9222 - type: nauc_ndcg_at_1000_std value: -2.1452999999999998 - type: nauc_ndcg_at_1000_diff1 value: 37.472100000000005 - type: nauc_map_at_1_max value: 32.413599999999995 - type: nauc_map_at_1_std value: -7.391300000000001 - type: nauc_map_at_1_diff1 value: 45.5299 - type: nauc_map_at_3_max value: 34.1688 - type: nauc_map_at_3_std value: -5.6375 - type: nauc_map_at_3_diff1 value: 41.5371 - type: nauc_map_at_5_max value: 34.2057 - type: nauc_map_at_5_std value: -5.4512 - type: nauc_map_at_5_diff1 value: 40.3839 - type: nauc_map_at_10_max value: 34.3355 - type: nauc_map_at_10_std value: -4.7743 - type: nauc_map_at_10_diff1 value: 40.1027 - type: nauc_map_at_20_max value: 34.638400000000004 - type: nauc_map_at_20_std value: -4.4951 - type: nauc_map_at_20_diff1 value: 39.8905 - type: nauc_map_at_100_max value: 34.6621 - type: nauc_map_at_100_std value: -4.4568 - type: nauc_map_at_100_diff1 value: 39.9854 - type: nauc_map_at_1000_max value: 34.6674 - type: nauc_map_at_1000_std value: -4.4651000000000005 - type: nauc_map_at_1000_diff1 value: 39.9739 - type: nauc_recall_at_1_max value: 32.413599999999995 - type: nauc_recall_at_1_std value: -7.391300000000001 - type: nauc_recall_at_1_diff1 value: 45.5299 - type: nauc_recall_at_3_max value: 34.374500000000005 - type: nauc_recall_at_3_std value: -3.8977999999999997 - type: nauc_recall_at_3_diff1 value: 36.9855 - type: nauc_recall_at_5_max value: 33.5608 - type: nauc_recall_at_5_std value: -2.9009 - type: nauc_recall_at_5_diff1 value: 31.9638 - type: nauc_recall_at_10_max value: 32.1813 - type: nauc_recall_at_10_std value: 0.8024999999999999 - type: nauc_recall_at_10_diff1 value: 28.3153 - type: nauc_recall_at_20_max value: 35.0617 - type: nauc_recall_at_20_std value: 6.531199999999999 - type: nauc_recall_at_20_diff1 value: 23.6762 - type: nauc_recall_at_100_max value: 38.9147 - type: nauc_recall_at_100_std value: 12.4753 - type: nauc_recall_at_100_diff1 
value: 26.1627 - type: nauc_recall_at_1000_max value: 45.8191 - type: nauc_recall_at_1000_std value: 17.1419 - type: nauc_recall_at_1000_diff1 value: 13.2284 - type: nauc_precision_at_1_max value: 36.1482 - type: nauc_precision_at_1_std value: -4.471 - type: nauc_precision_at_1_diff1 value: 44.1333 - type: nauc_precision_at_3_max value: 38.315 - type: nauc_precision_at_3_std value: -0.16019999999999998 - type: nauc_precision_at_3_diff1 value: 32.4158 - type: nauc_precision_at_5_max value: 36.3912 - type: nauc_precision_at_5_std value: 0.9605 - type: nauc_precision_at_5_diff1 value: 25.7513 - type: nauc_precision_at_10_max value: 34.043 - type: nauc_precision_at_10_std value: 5.6308 - type: nauc_precision_at_10_diff1 value: 20.5638 - type: nauc_precision_at_20_max value: 34.5796 - type: nauc_precision_at_20_std value: 10.0006 - type: nauc_precision_at_20_diff1 value: 13.069500000000001 - type: nauc_precision_at_100_max value: 27.5607 - type: nauc_precision_at_100_std value: 13.173399999999999 - type: nauc_precision_at_100_diff1 value: 6.1834 - type: nauc_precision_at_1000_max value: 15.5825 - type: nauc_precision_at_1000_std value: 9.9148 - type: nauc_precision_at_1000_diff1 value: -8.7873 - type: nauc_mrr_at_1_max value: 36.1482 - type: nauc_mrr_at_1_std value: -4.471 - type: nauc_mrr_at_1_diff1 value: 44.1333 - type: nauc_mrr_at_3_max value: 37.059799999999996 - type: nauc_mrr_at_3_std value: -2.7984999999999998 - type: nauc_mrr_at_3_diff1 value: 40.3801 - type: nauc_mrr_at_5_max value: 36.921 - type: nauc_mrr_at_5_std value: -2.5107 - type: nauc_mrr_at_5_diff1 value: 39.3331 - type: nauc_mrr_at_10_max value: 36.5977 - type: nauc_mrr_at_10_std value: -2.3744 - type: nauc_mrr_at_10_diff1 value: 38.851200000000006 - type: nauc_mrr_at_20_max value: 36.7083 - type: nauc_mrr_at_20_std value: -2.164 - type: nauc_mrr_at_20_diff1 value: 38.729200000000006 - type: nauc_mrr_at_100_max value: 36.7448 - type: nauc_mrr_at_100_std value: -2.1399999999999997 - type: nauc_mrr_at_100_diff1 value: 38.8403 - type: nauc_mrr_at_1000_max value: 36.742200000000004 - type: nauc_mrr_at_1000_std value: -2.1506999999999996 - type: nauc_mrr_at_1000_diff1 value: 38.8393 - type: main_score value: 45.4 - task: type: Retrieval dataset: name: MTEB CQADupstackMathematicaRetrieval (default) type: mteb/cqadupstack-mathematica config: default split: test revision: 90fceea13679c63fe563ded68f3b6f06e50061de metrics: - type: ndcg_at_1 value: 25.124000000000002 - type: ndcg_at_3 value: 29.798000000000002 - type: ndcg_at_5 value: 32.112 - type: ndcg_at_10 value: 34.926 - type: ndcg_at_20 value: 37.317 - type: ndcg_at_100 value: 40.903 - type: ndcg_at_1000 value: 43.18 - type: map_at_1 value: 20.279 - type: map_at_3 value: 26.551000000000002 - type: map_at_5 value: 28.051 - type: map_at_10 value: 29.37 - type: map_at_20 value: 30.085 - type: map_at_100 value: 30.668 - type: map_at_1000 value: 30.774 - type: recall_at_1 value: 20.279 - type: recall_at_3 value: 33.043 - type: recall_at_5 value: 38.991 - type: recall_at_10 value: 47.355999999999995 - type: recall_at_20 value: 55.873 - type: recall_at_100 value: 72.90100000000001 - type: recall_at_1000 value: 88.678 - type: precision_at_1 value: 25.124000000000002 - type: precision_at_3 value: 14.221 - type: precision_at_5 value: 10.323 - type: precision_at_10 value: 6.381 - type: precision_at_20 value: 3.8739999999999997 - type: precision_at_100 value: 1.082 - type: precision_at_1000 value: 0.13999999999999999 - type: mrr_at_1 value: 25.1244 - type: mrr_at_3 value: 31.3847 - type: 
mrr_at_5 value: 32.9768 - type: mrr_at_10 value: 34.1348 - type: mrr_at_20 value: 34.7501 - type: mrr_at_100 value: 35.1367 - type: mrr_at_1000 value: 35.191 - type: nauc_ndcg_at_1_max value: 27.160600000000002 - type: nauc_ndcg_at_1_std value: 1.7711999999999999 - type: nauc_ndcg_at_1_diff1 value: 39.8547 - type: nauc_ndcg_at_3_max value: 23.7332 - type: nauc_ndcg_at_3_std value: 0.4508 - type: nauc_ndcg_at_3_diff1 value: 34.3668 - type: nauc_ndcg_at_5_max value: 24.6552 - type: nauc_ndcg_at_5_std value: 1.7423000000000002 - type: nauc_ndcg_at_5_diff1 value: 34.8806 - type: nauc_ndcg_at_10_max value: 24.3869 - type: nauc_ndcg_at_10_std value: 1.3054 - type: nauc_ndcg_at_10_diff1 value: 33.7015 - type: nauc_ndcg_at_20_max value: 24.449 - type: nauc_ndcg_at_20_std value: 2.4919000000000002 - type: nauc_ndcg_at_20_diff1 value: 32.9483 - type: nauc_ndcg_at_100_max value: 25.3655 - type: nauc_ndcg_at_100_std value: 2.7169 - type: nauc_ndcg_at_100_diff1 value: 32.8817 - type: nauc_ndcg_at_1000_max value: 25.524599999999996 - type: nauc_ndcg_at_1000_std value: 3.1405000000000003 - type: nauc_ndcg_at_1000_diff1 value: 32.7208 - type: nauc_map_at_1_max value: 24.9051 - type: nauc_map_at_1_std value: 2.788 - type: nauc_map_at_1_diff1 value: 38.9946 - type: nauc_map_at_3_max value: 23.061 - type: nauc_map_at_3_std value: 1.0529 - type: nauc_map_at_3_diff1 value: 35.0109 - type: nauc_map_at_5_max value: 23.704800000000002 - type: nauc_map_at_5_std value: 1.7375999999999998 - type: nauc_map_at_5_diff1 value: 35.2714 - type: nauc_map_at_10_max value: 23.7351 - type: nauc_map_at_10_std value: 1.5004 - type: nauc_map_at_10_diff1 value: 34.8483 - type: nauc_map_at_20_max value: 23.7699 - type: nauc_map_at_20_std value: 1.8925999999999998 - type: nauc_map_at_20_diff1 value: 34.6198 - type: nauc_map_at_100_max value: 23.962600000000002 - type: nauc_map_at_100_std value: 1.9238000000000002 - type: nauc_map_at_100_diff1 value: 34.7253 - type: nauc_map_at_1000_max value: 23.965 - type: nauc_map_at_1000_std value: 1.9339 - type: nauc_map_at_1000_diff1 value: 34.719899999999996 - type: nauc_recall_at_1_max value: 24.9051 - type: nauc_recall_at_1_std value: 2.788 - type: nauc_recall_at_1_diff1 value: 38.9946 - type: nauc_recall_at_3_max value: 21.8415 - type: nauc_recall_at_3_std value: 0.5292 - type: nauc_recall_at_3_diff1 value: 30.811 - type: nauc_recall_at_5_max value: 23.8237 - type: nauc_recall_at_5_std value: 2.5335 - type: nauc_recall_at_5_diff1 value: 31.928800000000003 - type: nauc_recall_at_10_max value: 22.5541 - type: nauc_recall_at_10_std value: 0.9076000000000001 - type: nauc_recall_at_10_diff1 value: 27.8364 - type: nauc_recall_at_20_max value: 22.0853 - type: nauc_recall_at_20_std value: 4.9954 - type: nauc_recall_at_20_diff1 value: 24.2376 - type: nauc_recall_at_100_max value: 26.4301 - type: nauc_recall_at_100_std value: 8.5471 - type: nauc_recall_at_100_diff1 value: 19.2131 - type: nauc_recall_at_1000_max value: 36.3726 - type: nauc_recall_at_1000_std value: 26.9247 - type: nauc_recall_at_1000_diff1 value: 3.8798 - type: nauc_precision_at_1_max value: 27.160600000000002 - type: nauc_precision_at_1_std value: 1.7711999999999999 - type: nauc_precision_at_1_diff1 value: 39.8547 - type: nauc_precision_at_3_max value: 23.8679 - type: nauc_precision_at_3_std value: -1.052 - type: nauc_precision_at_3_diff1 value: 29.999100000000002 - type: nauc_precision_at_5_max value: 24.7345 - type: nauc_precision_at_5_std value: 1.3604 - type: nauc_precision_at_5_diff1 value: 29.8611 - type: 
nauc_precision_at_10_max value: 21.5396 - type: nauc_precision_at_10_std value: -1.0137 - type: nauc_precision_at_10_diff1 value: 23.519000000000002 - type: nauc_precision_at_20_max value: 18.4431 - type: nauc_precision_at_20_std value: 1.5350000000000001 - type: nauc_precision_at_20_diff1 value: 16.5031 - type: nauc_precision_at_100_max value: 13.9255 - type: nauc_precision_at_100_std value: -0.48650000000000004 - type: nauc_precision_at_100_diff1 value: 7.700799999999999 - type: nauc_precision_at_1000_max value: 3.6421 - type: nauc_precision_at_1000_std value: -4.7682 - type: nauc_precision_at_1000_diff1 value: -1.4256 - type: nauc_mrr_at_1_max value: 27.160600000000002 - type: nauc_mrr_at_1_std value: 1.7711999999999999 - type: nauc_mrr_at_1_diff1 value: 39.8547 - type: nauc_mrr_at_3_max value: 25.44 - type: nauc_mrr_at_3_std value: 0.08639999999999999 - type: nauc_mrr_at_3_diff1 value: 35.381800000000005 - type: nauc_mrr_at_5_max value: 26.011899999999997 - type: nauc_mrr_at_5_std value: 0.6948 - type: nauc_mrr_at_5_diff1 value: 36.246 - type: nauc_mrr_at_10_max value: 25.8141 - type: nauc_mrr_at_10_std value: 0.5511 - type: nauc_mrr_at_10_diff1 value: 35.7313 - type: nauc_mrr_at_20_max value: 25.805899999999998 - type: nauc_mrr_at_20_std value: 0.8933 - type: nauc_mrr_at_20_diff1 value: 35.4972 - type: nauc_mrr_at_100_max value: 25.909 - type: nauc_mrr_at_100_std value: 0.8796999999999999 - type: nauc_mrr_at_100_diff1 value: 35.5299 - type: nauc_mrr_at_1000_max value: 25.910800000000002 - type: nauc_mrr_at_1000_std value: 0.9046000000000001 - type: nauc_mrr_at_1000_diff1 value: 35.522999999999996 - type: main_score value: 34.926 - task: type: Retrieval dataset: name: MTEB CQADupstackPhysicsRetrieval (default) type: mteb/cqadupstack-physics config: default split: test revision: 79531abbd1fb92d06c6d6315a0cbbbf5bb247ea4 metrics: - type: ndcg_at_1 value: 42.059999999999995 - type: ndcg_at_3 value: 46.461999999999996 - type: ndcg_at_5 value: 48.662 - type: ndcg_at_10 value: 50.925 - type: ndcg_at_20 value: 53.120999999999995 - type: ndcg_at_100 value: 56.189 - type: ndcg_at_1000 value: 57.972 - type: map_at_1 value: 33.919 - type: map_at_3 value: 41.858000000000004 - type: map_at_5 value: 43.629 - type: map_at_10 value: 45.01 - type: map_at_20 value: 45.781 - type: map_at_100 value: 46.372 - type: map_at_1000 value: 46.477000000000004 - type: recall_at_1 value: 33.919 - type: recall_at_3 value: 49.153999999999996 - type: recall_at_5 value: 55.422000000000004 - type: recall_at_10 value: 62.204 - type: recall_at_20 value: 69.819 - type: recall_at_100 value: 83.67599999999999 - type: recall_at_1000 value: 95.093 - type: precision_at_1 value: 42.059999999999995 - type: precision_at_3 value: 22.201 - type: precision_at_5 value: 15.342 - type: precision_at_10 value: 9.038 - type: precision_at_20 value: 5.244999999999999 - type: precision_at_100 value: 1.348 - type: precision_at_1000 value: 0.168 - type: mrr_at_1 value: 42.0597 - type: mrr_at_3 value: 49.005500000000005 - type: mrr_at_5 value: 50.3673 - type: mrr_at_10 value: 51.14959999999999 - type: mrr_at_20 value: 51.656 - type: mrr_at_100 value: 51.969 - type: mrr_at_1000 value: 52.0088 - type: nauc_ndcg_at_1_max value: 39.321400000000004 - type: nauc_ndcg_at_1_std value: -3.3204 - type: nauc_ndcg_at_1_diff1 value: 50.999300000000005 - type: nauc_ndcg_at_3_max value: 37.6896 - type: nauc_ndcg_at_3_std value: -4.7356 - type: nauc_ndcg_at_3_diff1 value: 48.0551 - type: nauc_ndcg_at_5_max value: 36.9149 - type: nauc_ndcg_at_5_std value: -5.8358 
- type: nauc_ndcg_at_5_diff1 value: 48.4085 - type: nauc_ndcg_at_10_max value: 36.9047 - type: nauc_ndcg_at_10_std value: -5.1284 - type: nauc_ndcg_at_10_diff1 value: 48.3356 - type: nauc_ndcg_at_20_max value: 36.9876 - type: nauc_ndcg_at_20_std value: -4.0274 - type: nauc_ndcg_at_20_diff1 value: 48.0203 - type: nauc_ndcg_at_100_max value: 38.472899999999996 - type: nauc_ndcg_at_100_std value: -1.1645 - type: nauc_ndcg_at_100_diff1 value: 47.734 - type: nauc_ndcg_at_1000_max value: 38.828 - type: nauc_ndcg_at_1000_std value: -1.5388000000000002 - type: nauc_ndcg_at_1000_diff1 value: 47.8951 - type: nauc_map_at_1_max value: 32.8495 - type: nauc_map_at_1_std value: -11.1224 - type: nauc_map_at_1_diff1 value: 52.8561 - type: nauc_map_at_3_max value: 35.2472 - type: nauc_map_at_3_std value: -7.8861 - type: nauc_map_at_3_diff1 value: 49.2087 - type: nauc_map_at_5_max value: 35.5165 - type: nauc_map_at_5_std value: -7.8567 - type: nauc_map_at_5_diff1 value: 49.3185 - type: nauc_map_at_10_max value: 36.2371 - type: nauc_map_at_10_std value: -6.7322999999999995 - type: nauc_map_at_10_diff1 value: 49.3669 - type: nauc_map_at_20_max value: 36.3245 - type: nauc_map_at_20_std value: -6.2256 - type: nauc_map_at_20_diff1 value: 49.242999999999995 - type: nauc_map_at_100_max value: 36.6375 - type: nauc_map_at_100_std value: -5.694599999999999 - type: nauc_map_at_100_diff1 value: 49.1942 - type: nauc_map_at_1000_max value: 36.6734 - type: nauc_map_at_1000_std value: -5.6653 - type: nauc_map_at_1000_diff1 value: 49.1813 - type: nauc_recall_at_1_max value: 32.8495 - type: nauc_recall_at_1_std value: -11.1224 - type: nauc_recall_at_1_diff1 value: 52.8561 - type: nauc_recall_at_3_max value: 33.2098 - type: nauc_recall_at_3_std value: -7.4756 - type: nauc_recall_at_3_diff1 value: 44.6512 - type: nauc_recall_at_5_max value: 32.0734 - type: nauc_recall_at_5_std value: -8.552 - type: nauc_recall_at_5_diff1 value: 43.2098 - type: nauc_recall_at_10_max value: 32.452999999999996 - type: nauc_recall_at_10_std value: -5.631 - type: nauc_recall_at_10_diff1 value: 42.4641 - type: nauc_recall_at_20_max value: 31.660300000000003 - type: nauc_recall_at_20_std value: -1.5259 - type: nauc_recall_at_20_diff1 value: 40.5356 - type: nauc_recall_at_100_max value: 40.3906 - type: nauc_recall_at_100_std value: 22.5792 - type: nauc_recall_at_100_diff1 value: 36.2667 - type: nauc_recall_at_1000_max value: 61.422399999999996 - type: nauc_recall_at_1000_std value: 46.7038 - type: nauc_recall_at_1000_diff1 value: 36.4218 - type: nauc_precision_at_1_max value: 39.321400000000004 - type: nauc_precision_at_1_std value: -3.3204 - type: nauc_precision_at_1_diff1 value: 50.999300000000005 - type: nauc_precision_at_3_max value: 35.7839 - type: nauc_precision_at_3_std value: 7.773199999999999 - type: nauc_precision_at_3_diff1 value: 29.8081 - type: nauc_precision_at_5_max value: 32.7723 - type: nauc_precision_at_5_std value: 9.8457 - type: nauc_precision_at_5_diff1 value: 24.9104 - type: nauc_precision_at_10_max value: 30.6076 - type: nauc_precision_at_10_std value: 16.5018 - type: nauc_precision_at_10_diff1 value: 17.5733 - type: nauc_precision_at_20_max value: 25.8982 - type: nauc_precision_at_20_std value: 20.4936 - type: nauc_precision_at_20_diff1 value: 9.4253 - type: nauc_precision_at_100_max value: 20.5147 - type: nauc_precision_at_100_std value: 28.0537 - type: nauc_precision_at_100_diff1 value: -3.5682 - type: nauc_precision_at_1000_max value: 8.9834 - type: nauc_precision_at_1000_std value: 21.330099999999998 - type: 
nauc_precision_at_1000_diff1 value: -13.9467 - type: nauc_mrr_at_1_max value: 39.321400000000004 - type: nauc_mrr_at_1_std value: -3.3204 - type: nauc_mrr_at_1_diff1 value: 50.999300000000005 - type: nauc_mrr_at_3_max value: 39.537099999999995 - type: nauc_mrr_at_3_std value: -1.8964999999999999 - type: nauc_mrr_at_3_diff1 value: 48.790499999999994 - type: nauc_mrr_at_5_max value: 39.5914 - type: nauc_mrr_at_5_std value: -2.1046 - type: nauc_mrr_at_5_diff1 value: 48.674099999999996 - type: nauc_mrr_at_10_max value: 39.4877 - type: nauc_mrr_at_10_std value: -2.1155 - type: nauc_mrr_at_10_diff1 value: 48.5082 - type: nauc_mrr_at_20_max value: 39.5837 - type: nauc_mrr_at_20_std value: -1.8568999999999998 - type: nauc_mrr_at_20_diff1 value: 48.4835 - type: nauc_mrr_at_100_max value: 39.6439 - type: nauc_mrr_at_100_std value: -1.6681000000000001 - type: nauc_mrr_at_100_diff1 value: 48.4452 - type: nauc_mrr_at_1000_max value: 39.6426 - type: nauc_mrr_at_1000_std value: -1.6824 - type: nauc_mrr_at_1000_diff1 value: 48.4594 - type: main_score value: 50.925 - task: type: Retrieval dataset: name: MTEB CQADupstackProgrammersRetrieval (default) type: mteb/cqadupstack-programmers config: default split: test revision: 6184bc1440d2dbc7612be22b50686b8826d22b32 metrics: - type: ndcg_at_1 value: 38.812999999999995 - type: ndcg_at_3 value: 43.126999999999995 - type: ndcg_at_5 value: 45.269999999999996 - type: ndcg_at_10 value: 48.181000000000004 - type: ndcg_at_20 value: 50.475 - type: ndcg_at_100 value: 53.378 - type: ndcg_at_1000 value: 55.372 - type: map_at_1 value: 31.228 - type: map_at_3 value: 38.727000000000004 - type: map_at_5 value: 40.544000000000004 - type: map_at_10 value: 42.022999999999996 - type: map_at_20 value: 42.815 - type: map_at_100 value: 43.336000000000006 - type: map_at_1000 value: 43.434 - type: recall_at_1 value: 31.228 - type: recall_at_3 value: 46.075 - type: recall_at_5 value: 52.065 - type: recall_at_10 value: 60.86 - type: recall_at_20 value: 68.916 - type: recall_at_100 value: 82.49600000000001 - type: recall_at_1000 value: 95.914 - type: precision_at_1 value: 38.812999999999995 - type: precision_at_3 value: 20.51 - type: precision_at_5 value: 14.405999999999999 - type: precision_at_10 value: 8.676 - type: precision_at_20 value: 5.08 - type: precision_at_100 value: 1.3 - type: precision_at_1000 value: 0.165 - type: mrr_at_1 value: 38.812799999999996 - type: mrr_at_3 value: 45.3957 - type: mrr_at_5 value: 46.8113 - type: mrr_at_10 value: 47.9132 - type: mrr_at_20 value: 48.4148 - type: mrr_at_100 value: 48.694900000000004 - type: mrr_at_1000 value: 48.74 - type: nauc_ndcg_at_1_max value: 46.951100000000004 - type: nauc_ndcg_at_1_std value: 4.750299999999999 - type: nauc_ndcg_at_1_diff1 value: 50.353300000000004 - type: nauc_ndcg_at_3_max value: 44.852 - type: nauc_ndcg_at_3_std value: 5.976 - type: nauc_ndcg_at_3_diff1 value: 44.8003 - type: nauc_ndcg_at_5_max value: 44.7999 - type: nauc_ndcg_at_5_std value: 7.138799999999999 - type: nauc_ndcg_at_5_diff1 value: 43.786 - type: nauc_ndcg_at_10_max value: 45.272800000000004 - type: nauc_ndcg_at_10_std value: 8.318200000000001 - type: nauc_ndcg_at_10_diff1 value: 43.5412 - type: nauc_ndcg_at_20_max value: 45.9439 - type: nauc_ndcg_at_20_std value: 9.5894 - type: nauc_ndcg_at_20_diff1 value: 43.635400000000004 - type: nauc_ndcg_at_100_max value: 46.555800000000005 - type: nauc_ndcg_at_100_std value: 11.4897 - type: nauc_ndcg_at_100_diff1 value: 43.2953 - type: nauc_ndcg_at_1000_max value: 46.4671 - type: nauc_ndcg_at_1000_std value: 
10.198500000000001 - type: nauc_ndcg_at_1000_diff1 value: 43.9655 - type: nauc_map_at_1_max value: 41.2881 - type: nauc_map_at_1_std value: -1.7105 - type: nauc_map_at_1_diff1 value: 52.340900000000005 - type: nauc_map_at_3_max value: 43.2779 - type: nauc_map_at_3_std value: 3.1361 - type: nauc_map_at_3_diff1 value: 46.899499999999996 - type: nauc_map_at_5_max value: 44.034600000000005 - type: nauc_map_at_5_std value: 4.376 - type: nauc_map_at_5_diff1 value: 46.1768 - type: nauc_map_at_10_max value: 44.495200000000004 - type: nauc_map_at_10_std value: 5.1069 - type: nauc_map_at_10_diff1 value: 45.8036 - type: nauc_map_at_20_max value: 44.9796 - type: nauc_map_at_20_std value: 5.6501 - type: nauc_map_at_20_diff1 value: 45.8538 - type: nauc_map_at_100_max value: 45.178000000000004 - type: nauc_map_at_100_std value: 6.1053999999999995 - type: nauc_map_at_100_diff1 value: 45.7785 - type: nauc_map_at_1000_max value: 45.169599999999996 - type: nauc_map_at_1000_std value: 6.0758 - type: nauc_map_at_1000_diff1 value: 45.794200000000004 - type: nauc_recall_at_1_max value: 41.2881 - type: nauc_recall_at_1_std value: -1.7105 - type: nauc_recall_at_1_diff1 value: 52.340900000000005 - type: nauc_recall_at_3_max value: 40.213100000000004 - type: nauc_recall_at_3_std value: 5.0584 - type: nauc_recall_at_3_diff1 value: 39.8885 - type: nauc_recall_at_5_max value: 40.629799999999996 - type: nauc_recall_at_5_std value: 9.2891 - type: nauc_recall_at_5_diff1 value: 36.7529 - type: nauc_recall_at_10_max value: 41.1258 - type: nauc_recall_at_10_std value: 14.056 - type: nauc_recall_at_10_diff1 value: 34.416000000000004 - type: nauc_recall_at_20_max value: 42.2647 - type: nauc_recall_at_20_std value: 19.0659 - type: nauc_recall_at_20_diff1 value: 33.9025 - type: nauc_recall_at_100_max value: 45.4518 - type: nauc_recall_at_100_std value: 38.2567 - type: nauc_recall_at_100_diff1 value: 27.418300000000002 - type: nauc_recall_at_1000_max value: 52.1153 - type: nauc_recall_at_1000_std value: 54.8108 - type: nauc_recall_at_1000_diff1 value: 28.122200000000003 - type: nauc_precision_at_1_max value: 46.951100000000004 - type: nauc_precision_at_1_std value: 4.750299999999999 - type: nauc_precision_at_1_diff1 value: 50.353300000000004 - type: nauc_precision_at_3_max value: 43.3769 - type: nauc_precision_at_3_std value: 15.2362 - type: nauc_precision_at_3_diff1 value: 29.4925 - type: nauc_precision_at_5_max value: 40.0531 - type: nauc_precision_at_5_std value: 18.0719 - type: nauc_precision_at_5_diff1 value: 21.4607 - type: nauc_precision_at_10_max value: 34.558 - type: nauc_precision_at_10_std value: 20.2349 - type: nauc_precision_at_10_diff1 value: 13.0483 - type: nauc_precision_at_20_max value: 30.3112 - type: nauc_precision_at_20_std value: 23.7865 - type: nauc_precision_at_20_diff1 value: 6.678000000000001 - type: nauc_precision_at_100_max value: 15.782599999999999 - type: nauc_precision_at_100_std value: 23.3508 - type: nauc_precision_at_100_diff1 value: -5.356199999999999 - type: nauc_precision_at_1000_max value: -1.203 - type: nauc_precision_at_1000_std value: 9.2771 - type: nauc_precision_at_1000_diff1 value: -12.0167 - type: nauc_mrr_at_1_max value: 46.951100000000004 - type: nauc_mrr_at_1_std value: 4.750299999999999 - type: nauc_mrr_at_1_diff1 value: 50.353300000000004 - type: nauc_mrr_at_3_max value: 47.1661 - type: nauc_mrr_at_3_std value: 7.985 - type: nauc_mrr_at_3_diff1 value: 45.5407 - type: nauc_mrr_at_5_max value: 46.7954 - type: nauc_mrr_at_5_std value: 8.615200000000002 - type: nauc_mrr_at_5_diff1 
value: 44.767 - type: nauc_mrr_at_10_max value: 46.874500000000005 - type: nauc_mrr_at_10_std value: 8.9973 - type: nauc_mrr_at_10_diff1 value: 44.7807 - type: nauc_mrr_at_20_max value: 46.8582 - type: nauc_mrr_at_20_std value: 9.1312 - type: nauc_mrr_at_20_diff1 value: 44.7926 - type: nauc_mrr_at_100_max value: 46.9119 - type: nauc_mrr_at_100_std value: 9.2225 - type: nauc_mrr_at_100_diff1 value: 44.7972 - type: nauc_mrr_at_1000_max value: 46.9139 - type: nauc_mrr_at_1000_std value: 9.1867 - type: nauc_mrr_at_1000_diff1 value: 44.8208 - type: main_score value: 48.181000000000004 - task: type: Retrieval dataset: name: MTEB CQADupstackRetrieval (default) type: CQADupstackRetrieval_is_a_combined_dataset config: default split: test revision: CQADupstackRetrieval_is_a_combined_dataset metrics: - type: main_score value: 47.198 - type: ndcg_at_10 value: 47.198 - task: type: Retrieval dataset: name: MTEB CQADupstackStatsRetrieval (default) type: mteb/cqadupstack-stats config: default split: test revision: 65ac3a16b8e91f9cee4c9828cc7c335575432a2a metrics: - type: ndcg_at_1 value: 32.515 - type: ndcg_at_3 value: 36.754999999999995 - type: ndcg_at_5 value: 38.461 - type: ndcg_at_10 value: 41.113 - type: ndcg_at_20 value: 42.744 - type: ndcg_at_100 value: 45.607 - type: ndcg_at_1000 value: 47.769 - type: map_at_1 value: 28.877999999999997 - type: map_at_3 value: 34.111000000000004 - type: map_at_5 value: 35.296 - type: map_at_10 value: 36.516 - type: map_at_20 value: 37.031 - type: map_at_100 value: 37.455 - type: map_at_1000 value: 37.54 - type: recall_at_1 value: 28.877999999999997 - type: recall_at_3 value: 39.823 - type: recall_at_5 value: 44.074000000000005 - type: recall_at_10 value: 52.138 - type: recall_at_20 value: 58.268 - type: recall_at_100 value: 72.675 - type: recall_at_1000 value: 88.49900000000001 - type: precision_at_1 value: 32.515 - type: precision_at_3 value: 15.491 - type: precision_at_5 value: 10.613 - type: precision_at_10 value: 6.411 - type: precision_at_20 value: 3.604 - type: precision_at_100 value: 0.9390000000000001 - type: precision_at_1000 value: 0.121 - type: mrr_at_1 value: 32.5153 - type: mrr_at_3 value: 37.5256 - type: mrr_at_5 value: 38.507200000000005 - type: mrr_at_10 value: 39.6489 - type: mrr_at_20 value: 40.0734 - type: mrr_at_100 value: 40.408899999999996 - type: mrr_at_1000 value: 40.470600000000005 - type: nauc_ndcg_at_1_max value: 46.9541 - type: nauc_ndcg_at_1_std value: -0.6345 - type: nauc_ndcg_at_1_diff1 value: 56.4747 - type: nauc_ndcg_at_3_max value: 44.595600000000005 - type: nauc_ndcg_at_3_std value: -0.6883 - type: nauc_ndcg_at_3_diff1 value: 51.176100000000005 - type: nauc_ndcg_at_5_max value: 45.0672 - type: nauc_ndcg_at_5_std value: 0.7248 - type: nauc_ndcg_at_5_diff1 value: 50.6661 - type: nauc_ndcg_at_10_max value: 45.3702 - type: nauc_ndcg_at_10_std value: 3.7225 - type: nauc_ndcg_at_10_diff1 value: 48.5914 - type: nauc_ndcg_at_20_max value: 45.134800000000006 - type: nauc_ndcg_at_20_std value: 3.4250999999999996 - type: nauc_ndcg_at_20_diff1 value: 48.0876 - type: nauc_ndcg_at_100_max value: 45.848 - type: nauc_ndcg_at_100_std value: 5.0007 - type: nauc_ndcg_at_100_diff1 value: 48.4221 - type: nauc_ndcg_at_1000_max value: 46.0472 - type: nauc_ndcg_at_1000_std value: 4.8727 - type: nauc_ndcg_at_1000_diff1 value: 48.7787 - type: nauc_map_at_1_max value: 44.2723 - type: nauc_map_at_1_std value: -4.1624 - type: nauc_map_at_1_diff1 value: 56.3666 - type: nauc_map_at_3_max value: 44.368 - type: nauc_map_at_3_std value: -2.2338 - type: 
nauc_map_at_3_diff1 value: 52.662299999999995 - type: nauc_map_at_5_max value: 44.9376 - type: nauc_map_at_5_std value: -0.9258000000000001 - type: nauc_map_at_5_diff1 value: 52.2675 - type: nauc_map_at_10_max value: 45.162600000000005 - type: nauc_map_at_10_std value: 0.5709 - type: nauc_map_at_10_diff1 value: 51.2702 - type: nauc_map_at_20_max value: 45.088899999999995 - type: nauc_map_at_20_std value: 0.5163 - type: nauc_map_at_20_diff1 value: 51.1058 - type: nauc_map_at_100_max value: 45.203700000000005 - type: nauc_map_at_100_std value: 0.7443 - type: nauc_map_at_100_diff1 value: 51.1744 - type: nauc_map_at_1000_max value: 45.2121 - type: nauc_map_at_1000_std value: 0.7443 - type: nauc_map_at_1000_diff1 value: 51.186699999999995 - type: nauc_recall_at_1_max value: 44.2723 - type: nauc_recall_at_1_std value: -4.1624 - type: nauc_recall_at_1_diff1 value: 56.3666 - type: nauc_recall_at_3_max value: 41.484700000000004 - type: nauc_recall_at_3_std value: -1.5438 - type: nauc_recall_at_3_diff1 value: 47.3155 - type: nauc_recall_at_5_max value: 42.7926 - type: nauc_recall_at_5_std value: 2.2485999999999997 - type: nauc_recall_at_5_diff1 value: 45.7287 - type: nauc_recall_at_10_max value: 43.3757 - type: nauc_recall_at_10_std value: 11.1774 - type: nauc_recall_at_10_diff1 value: 38.699 - type: nauc_recall_at_20_max value: 41.9806 - type: nauc_recall_at_20_std value: 9.8464 - type: nauc_recall_at_20_diff1 value: 36.209599999999995 - type: nauc_recall_at_100_max value: 44.935399999999994 - type: nauc_recall_at_100_std value: 22.2528 - type: nauc_recall_at_100_diff1 value: 33.9811 - type: nauc_recall_at_1000_max value: 48.0178 - type: nauc_recall_at_1000_std value: 35.6656 - type: nauc_recall_at_1000_diff1 value: 27.0609 - type: nauc_precision_at_1_max value: 46.9541 - type: nauc_precision_at_1_std value: -0.6345 - type: nauc_precision_at_1_diff1 value: 56.4747 - type: nauc_precision_at_3_max value: 44.8235 - type: nauc_precision_at_3_std value: 6.392399999999999 - type: nauc_precision_at_3_diff1 value: 43.4139 - type: nauc_precision_at_5_max value: 44.1627 - type: nauc_precision_at_5_std value: 12.5801 - type: nauc_precision_at_5_diff1 value: 38.3975 - type: nauc_precision_at_10_max value: 42.2932 - type: nauc_precision_at_10_std value: 21.9445 - type: nauc_precision_at_10_diff1 value: 28.898200000000003 - type: nauc_precision_at_20_max value: 38.3815 - type: nauc_precision_at_20_std value: 21.2644 - type: nauc_precision_at_20_diff1 value: 22.902900000000002 - type: nauc_precision_at_100_max value: 30.0629 - type: nauc_precision_at_100_std value: 25.7938 - type: nauc_precision_at_100_diff1 value: 13.500599999999999 - type: nauc_precision_at_1000_max value: 16.1509 - type: nauc_precision_at_1000_std value: 22.168599999999998 - type: nauc_precision_at_1000_diff1 value: -0.5865 - type: nauc_mrr_at_1_max value: 46.9541 - type: nauc_mrr_at_1_std value: -0.6345 - type: nauc_mrr_at_1_diff1 value: 56.4747 - type: nauc_mrr_at_3_max value: 45.571 - type: nauc_mrr_at_3_std value: 0.5652 - type: nauc_mrr_at_3_diff1 value: 52.2878 - type: nauc_mrr_at_5_max value: 45.9243 - type: nauc_mrr_at_5_std value: 1.4102 - type: nauc_mrr_at_5_diff1 value: 52.0197 - type: nauc_mrr_at_10_max value: 46.090599999999995 - type: nauc_mrr_at_10_std value: 2.5422000000000002 - type: nauc_mrr_at_10_diff1 value: 51.1523 - type: nauc_mrr_at_20_max value: 46.0581 - type: nauc_mrr_at_20_std value: 2.4245 - type: nauc_mrr_at_20_diff1 value: 51.1149 - type: nauc_mrr_at_100_max value: 46.138200000000005 - type: nauc_mrr_at_100_std 
value: 2.5852 - type: nauc_mrr_at_100_diff1 value: 51.19200000000001 - type: nauc_mrr_at_1000_max value: 46.134 - type: nauc_mrr_at_1000_std value: 2.5724 - type: nauc_mrr_at_1000_diff1 value: 51.20099999999999 - type: main_score value: 41.113 - task: type: Retrieval dataset: name: MTEB CQADupstackTexRetrieval (default) type: mteb/cqadupstack-tex config: default split: test revision: 46989137a86843e03a6195de44b09deda022eec7 metrics: - type: ndcg_at_1 value: 26.358999999999998 - type: ndcg_at_3 value: 30.921 - type: ndcg_at_5 value: 33.083 - type: ndcg_at_10 value: 35.669000000000004 - type: ndcg_at_20 value: 37.486999999999995 - type: ndcg_at_100 value: 40.897 - type: ndcg_at_1000 value: 43.492999999999995 - type: map_at_1 value: 21.644 - type: map_at_3 value: 27.638 - type: map_at_5 value: 29.181 - type: map_at_10 value: 30.429000000000002 - type: map_at_20 value: 31.018 - type: map_at_100 value: 31.557000000000002 - type: map_at_1000 value: 31.676 - type: recall_at_1 value: 21.644 - type: recall_at_3 value: 33.727000000000004 - type: recall_at_5 value: 39.402 - type: recall_at_10 value: 47.166000000000004 - type: recall_at_20 value: 53.818 - type: recall_at_100 value: 70.625 - type: recall_at_1000 value: 88.848 - type: precision_at_1 value: 26.358999999999998 - type: precision_at_3 value: 14.602 - type: precision_at_5 value: 10.509 - type: precision_at_10 value: 6.468999999999999 - type: precision_at_20 value: 3.7969999999999997 - type: precision_at_100 value: 1.0619999999999998 - type: precision_at_1000 value: 0.147 - type: mrr_at_1 value: 26.3593 - type: mrr_at_3 value: 32.2379 - type: mrr_at_5 value: 33.5559 - type: mrr_at_10 value: 34.6105 - type: mrr_at_20 value: 35.0733 - type: mrr_at_100 value: 35.4832 - type: mrr_at_1000 value: 35.5508 - type: nauc_ndcg_at_1_max value: 38.821 - type: nauc_ndcg_at_1_std value: -0.9577 - type: nauc_ndcg_at_1_diff1 value: 49.477900000000005 - type: nauc_ndcg_at_3_max value: 36.9651 - type: nauc_ndcg_at_3_std value: 0.5652 - type: nauc_ndcg_at_3_diff1 value: 42.9649 - type: nauc_ndcg_at_5_max value: 36.9433 - type: nauc_ndcg_at_5_std value: 1.4069 - type: nauc_ndcg_at_5_diff1 value: 41.3321 - type: nauc_ndcg_at_10_max value: 37.0556 - type: nauc_ndcg_at_10_std value: 1.983 - type: nauc_ndcg_at_10_diff1 value: 40.6062 - type: nauc_ndcg_at_20_max value: 37.621 - type: nauc_ndcg_at_20_std value: 3.1833 - type: nauc_ndcg_at_20_diff1 value: 40.0768 - type: nauc_ndcg_at_100_max value: 37.5859 - type: nauc_ndcg_at_100_std value: 4.4883 - type: nauc_ndcg_at_100_diff1 value: 39.6131 - type: nauc_ndcg_at_1000_max value: 37.9037 - type: nauc_ndcg_at_1000_std value: 4.3155 - type: nauc_ndcg_at_1000_diff1 value: 40.393 - type: nauc_map_at_1_max value: 34.2335 - type: nauc_map_at_1_std value: -2.5663 - type: nauc_map_at_1_diff1 value: 49.3827 - type: nauc_map_at_3_max value: 35.1539 - type: nauc_map_at_3_std value: -0.4655 - type: nauc_map_at_3_diff1 value: 44.0299 - type: nauc_map_at_5_max value: 35.546499999999995 - type: nauc_map_at_5_std value: -0.0021 - type: nauc_map_at_5_diff1 value: 43.0138 - type: nauc_map_at_10_max value: 35.904799999999994 - type: nauc_map_at_10_std value: 0.367 - type: nauc_map_at_10_diff1 value: 42.762699999999995 - type: nauc_map_at_20_max value: 36.1855 - type: nauc_map_at_20_std value: 0.7818 - type: nauc_map_at_20_diff1 value: 42.6084 - type: nauc_map_at_100_max value: 36.2406 - type: nauc_map_at_100_std value: 0.9825999999999999 - type: nauc_map_at_100_diff1 value: 42.5375 - type: nauc_map_at_1000_max value: 36.2732 - type: 
nauc_map_at_1000_std value: 0.9912000000000001 - type: nauc_map_at_1000_diff1 value: 42.5821 - type: nauc_recall_at_1_max value: 34.2335 - type: nauc_recall_at_1_std value: -2.5663 - type: nauc_recall_at_1_diff1 value: 49.3827 - type: nauc_recall_at_3_max value: 34.2402 - type: nauc_recall_at_3_std value: 1.3011 - type: nauc_recall_at_3_diff1 value: 38.5403 - type: nauc_recall_at_5_max value: 34.2169 - type: nauc_recall_at_5_std value: 3.0383 - type: nauc_recall_at_5_diff1 value: 34.3078 - type: nauc_recall_at_10_max value: 34.2267 - type: nauc_recall_at_10_std value: 4.7303 - type: nauc_recall_at_10_diff1 value: 31.2869 - type: nauc_recall_at_20_max value: 35.6281 - type: nauc_recall_at_20_std value: 8.940199999999999 - type: nauc_recall_at_20_diff1 value: 28.655599999999996 - type: nauc_recall_at_100_max value: 34.0961 - type: nauc_recall_at_100_std value: 18.096799999999998 - type: nauc_recall_at_100_diff1 value: 22.490199999999998 - type: nauc_recall_at_1000_max value: 37.3724 - type: nauc_recall_at_1000_std value: 29.723699999999997 - type: nauc_recall_at_1000_diff1 value: 18.9603 - type: nauc_precision_at_1_max value: 38.821 - type: nauc_precision_at_1_std value: -0.9577 - type: nauc_precision_at_1_diff1 value: 49.477900000000005 - type: nauc_precision_at_3_max value: 38.9589 - type: nauc_precision_at_3_std value: 3.6894000000000005 - type: nauc_precision_at_3_diff1 value: 34.869499999999995 - type: nauc_precision_at_5_max value: 37.9132 - type: nauc_precision_at_5_std value: 6.1095 - type: nauc_precision_at_5_diff1 value: 28.7686 - type: nauc_precision_at_10_max value: 35.5564 - type: nauc_precision_at_10_std value: 7.4825 - type: nauc_precision_at_10_diff1 value: 24.0663 - type: nauc_precision_at_20_max value: 34.3717 - type: nauc_precision_at_20_std value: 10.989 - type: nauc_precision_at_20_diff1 value: 19.0117 - type: nauc_precision_at_100_max value: 25.595000000000002 - type: nauc_precision_at_100_std value: 13.692499999999999 - type: nauc_precision_at_100_diff1 value: 9.7287 - type: nauc_precision_at_1000_max value: 15.6194 - type: nauc_precision_at_1000_std value: 7.9235 - type: nauc_precision_at_1000_diff1 value: 3.5067 - type: nauc_mrr_at_1_max value: 38.821 - type: nauc_mrr_at_1_std value: -0.9577 - type: nauc_mrr_at_1_diff1 value: 49.477900000000005 - type: nauc_mrr_at_3_max value: 39.365899999999996 - type: nauc_mrr_at_3_std value: 0.8999999999999999 - type: nauc_mrr_at_3_diff1 value: 44.8801 - type: nauc_mrr_at_5_max value: 39.339400000000005 - type: nauc_mrr_at_5_std value: 1.6056000000000001 - type: nauc_mrr_at_5_diff1 value: 43.9725 - type: nauc_mrr_at_10_max value: 39.245200000000004 - type: nauc_mrr_at_10_std value: 1.6921 - type: nauc_mrr_at_10_diff1 value: 43.6805 - type: nauc_mrr_at_20_max value: 39.283699999999996 - type: nauc_mrr_at_20_std value: 1.9199000000000002 - type: nauc_mrr_at_20_diff1 value: 43.5636 - type: nauc_mrr_at_100_max value: 39.293299999999995 - type: nauc_mrr_at_100_std value: 2.0535 - type: nauc_mrr_at_100_diff1 value: 43.5431 - type: nauc_mrr_at_1000_max value: 39.299299999999995 - type: nauc_mrr_at_1000_std value: 2.0467 - type: nauc_mrr_at_1000_diff1 value: 43.5649 - type: main_score value: 35.669000000000004 - task: type: Retrieval dataset: name: MTEB CQADupstackUnixRetrieval (default) type: mteb/cqadupstack-unix config: default split: test revision: 6c6430d3a6d36f8d2a829195bc5dc94d7e063e53 metrics: - type: ndcg_at_1 value: 37.407000000000004 - type: ndcg_at_3 value: 43.179 - type: ndcg_at_5 value: 45.540000000000006 - type: ndcg_at_10 
value: 48.189 - type: ndcg_at_20 value: 50.308 - type: ndcg_at_100 value: 53.15800000000001 - type: ndcg_at_1000 value: 55.108999999999995 - type: map_at_1 value: 32.314 - type: map_at_3 value: 39.757 - type: map_at_5 value: 41.448 - type: map_at_10 value: 42.742999999999995 - type: map_at_20 value: 43.438 - type: map_at_100 value: 43.909 - type: map_at_1000 value: 44.005 - type: recall_at_1 value: 32.314 - type: recall_at_3 value: 46.852 - type: recall_at_5 value: 53.15 - type: recall_at_10 value: 60.748000000000005 - type: recall_at_20 value: 68.30199999999999 - type: recall_at_100 value: 81.846 - type: recall_at_1000 value: 94.92399999999999 - type: precision_at_1 value: 37.407000000000004 - type: precision_at_3 value: 19.59 - type: precision_at_5 value: 13.544999999999998 - type: precision_at_10 value: 8.013 - type: precision_at_20 value: 4.627 - type: precision_at_100 value: 1.172 - type: precision_at_1000 value: 0.14400000000000002 - type: mrr_at_1 value: 37.4067 - type: mrr_at_3 value: 43.9832 - type: mrr_at_5 value: 45.4291 - type: mrr_at_10 value: 46.4308 - type: mrr_at_20 value: 46.9435 - type: mrr_at_100 value: 47.2549 - type: mrr_at_1000 value: 47.3064 - type: nauc_ndcg_at_1_max value: 49.5683 - type: nauc_ndcg_at_1_std value: -4.5333 - type: nauc_ndcg_at_1_diff1 value: 59.0792 - type: nauc_ndcg_at_3_max value: 46.881 - type: nauc_ndcg_at_3_std value: -1.9335000000000002 - type: nauc_ndcg_at_3_diff1 value: 50.6091 - type: nauc_ndcg_at_5_max value: 46.596399999999996 - type: nauc_ndcg_at_5_std value: -1.6747 - type: nauc_ndcg_at_5_diff1 value: 50.731 - type: nauc_ndcg_at_10_max value: 47.119699999999995 - type: nauc_ndcg_at_10_std value: -1.8790999999999998 - type: nauc_ndcg_at_10_diff1 value: 50.4398 - type: nauc_ndcg_at_20_max value: 46.931400000000004 - type: nauc_ndcg_at_20_std value: -1.2184 - type: nauc_ndcg_at_20_diff1 value: 50.2302 - type: nauc_ndcg_at_100_max value: 47.4715 - type: nauc_ndcg_at_100_std value: 0.512 - type: nauc_ndcg_at_100_diff1 value: 49.831399999999995 - type: nauc_ndcg_at_1000_max value: 47.4049 - type: nauc_ndcg_at_1000_std value: -0.07730000000000001 - type: nauc_ndcg_at_1000_diff1 value: 50.045399999999994 - type: nauc_map_at_1_max value: 46.3138 - type: nauc_map_at_1_std value: -6.1365 - type: nauc_map_at_1_diff1 value: 59.1901 - type: nauc_map_at_3_max value: 46.4225 - type: nauc_map_at_3_std value: -3.3928 - type: nauc_map_at_3_diff1 value: 53.0394 - type: nauc_map_at_5_max value: 46.634 - type: nauc_map_at_5_std value: -2.8697 - type: nauc_map_at_5_diff1 value: 52.837500000000006 - type: nauc_map_at_10_max value: 46.9634 - type: nauc_map_at_10_std value: -2.8736 - type: nauc_map_at_10_diff1 value: 52.62670000000001 - type: nauc_map_at_20_max value: 46.943 - type: nauc_map_at_20_std value: -2.7709 - type: nauc_map_at_20_diff1 value: 52.525299999999994 - type: nauc_map_at_100_max value: 47.072 - type: nauc_map_at_100_std value: -2.4186 - type: nauc_map_at_100_diff1 value: 52.4223 - type: nauc_map_at_1000_max value: 47.058299999999996 - type: nauc_map_at_1000_std value: -2.4274 - type: nauc_map_at_1000_diff1 value: 52.410000000000004 - type: nauc_recall_at_1_max value: 46.3138 - type: nauc_recall_at_1_std value: -6.1365 - type: nauc_recall_at_1_diff1 value: 59.1901 - type: nauc_recall_at_3_max value: 43.556 - type: nauc_recall_at_3_std value: -1.0473 - type: nauc_recall_at_3_diff1 value: 45.3836 - type: nauc_recall_at_5_max value: 42.8197 - type: nauc_recall_at_5_std value: 0.364 - type: nauc_recall_at_5_diff1 value: 44.0828 - type: 
nauc_recall_at_10_max value: 43.5287 - type: nauc_recall_at_10_std value: -0.16999999999999998 - type: nauc_recall_at_10_diff1 value: 42.2532 - type: nauc_recall_at_20_max value: 41.9415 - type: nauc_recall_at_20_std value: 3.0739 - type: nauc_recall_at_20_diff1 value: 40.6138 - type: nauc_recall_at_100_max value: 43.648199999999996 - type: nauc_recall_at_100_std value: 17.8151 - type: nauc_recall_at_100_diff1 value: 34.7435 - type: nauc_recall_at_1000_max value: 42.9288 - type: nauc_recall_at_1000_std value: 34.9874 - type: nauc_recall_at_1000_diff1 value: 21.8361 - type: nauc_precision_at_1_max value: 49.5683 - type: nauc_precision_at_1_std value: -4.5333 - type: nauc_precision_at_1_diff1 value: 59.0792 - type: nauc_precision_at_3_max value: 40.726 - type: nauc_precision_at_3_std value: 3.6327 - type: nauc_precision_at_3_diff1 value: 32.726 - type: nauc_precision_at_5_max value: 37.575599999999994 - type: nauc_precision_at_5_std value: 5.4281999999999995 - type: nauc_precision_at_5_diff1 value: 26.8851 - type: nauc_precision_at_10_max value: 31.7382 - type: nauc_precision_at_10_std value: 4.0767999999999995 - type: nauc_precision_at_10_diff1 value: 18.174799999999998 - type: nauc_precision_at_20_max value: 25.4159 - type: nauc_precision_at_20_std value: 6.0251 - type: nauc_precision_at_20_diff1 value: 10.059800000000001 - type: nauc_precision_at_100_max value: 13.5296 - type: nauc_precision_at_100_std value: 14.0608 - type: nauc_precision_at_100_diff1 value: -7.792000000000001 - type: nauc_precision_at_1000_max value: -3.7522 - type: nauc_precision_at_1000_std value: 7.536099999999999 - type: nauc_precision_at_1000_diff1 value: -21.2683 - type: nauc_mrr_at_1_max value: 49.5683 - type: nauc_mrr_at_1_std value: -4.5333 - type: nauc_mrr_at_1_diff1 value: 59.0792 - type: nauc_mrr_at_3_max value: 48.3581 - type: nauc_mrr_at_3_std value: -1.8857 - type: nauc_mrr_at_3_diff1 value: 52.5945 - type: nauc_mrr_at_5_max value: 48.2651 - type: nauc_mrr_at_5_std value: -1.5519 - type: nauc_mrr_at_5_diff1 value: 52.323699999999995 - type: nauc_mrr_at_10_max value: 48.346000000000004 - type: nauc_mrr_at_10_std value: -1.7543 - type: nauc_mrr_at_10_diff1 value: 52.278999999999996 - type: nauc_mrr_at_20_max value: 48.2692 - type: nauc_mrr_at_20_std value: -1.5904000000000003 - type: nauc_mrr_at_20_diff1 value: 52.27460000000001 - type: nauc_mrr_at_100_max value: 48.273700000000005 - type: nauc_mrr_at_100_std value: -1.4659 - type: nauc_mrr_at_100_diff1 value: 52.278400000000005 - type: nauc_mrr_at_1000_max value: 48.2811 - type: nauc_mrr_at_1000_std value: -1.4881 - type: nauc_mrr_at_1000_diff1 value: 52.298500000000004 - type: main_score value: 48.189 - task: type: Retrieval dataset: name: MTEB CQADupstackWebmastersRetrieval (default) type: mteb/cqadupstack-webmasters config: default split: test revision: 160c094312a0e1facb97e55eeddb698c0abe3571 metrics: - type: ndcg_at_1 value: 38.141999999999996 - type: ndcg_at_3 value: 42.689 - type: ndcg_at_5 value: 44.318999999999996 - type: ndcg_at_10 value: 47.303 - type: ndcg_at_20 value: 49.236000000000004 - type: ndcg_at_100 value: 53.09700000000001 - type: ndcg_at_1000 value: 55.117000000000004 - type: map_at_1 value: 32.468 - type: map_at_3 value: 38.573 - type: map_at_5 value: 39.926 - type: map_at_10 value: 41.482 - type: map_at_20 value: 42.370000000000005 - type: map_at_100 value: 43.204 - type: map_at_1000 value: 43.425999999999995 - type: recall_at_1 value: 32.468 - type: recall_at_3 value: 44.241 - type: recall_at_5 value: 49.177 - type: recall_at_10 
value: 57.63399999999999 - type: recall_at_20 value: 64.724 - type: recall_at_100 value: 83.817 - type: recall_at_1000 value: 95.91 - type: precision_at_1 value: 38.141999999999996 - type: precision_at_3 value: 19.499 - type: precision_at_5 value: 13.478000000000002 - type: precision_at_10 value: 8.774999999999999 - type: precision_at_20 value: 5.455 - type: precision_at_100 value: 1.6760000000000002 - type: precision_at_1000 value: 0.251 - type: mrr_at_1 value: 38.1423 - type: mrr_at_3 value: 44.005300000000005 - type: mrr_at_5 value: 45.1515 - type: mrr_at_10 value: 46.3542 - type: mrr_at_20 value: 46.7589 - type: mrr_at_100 value: 47.185100000000006 - type: mrr_at_1000 value: 47.2249 - type: nauc_ndcg_at_1_max value: 47.905300000000004 - type: nauc_ndcg_at_1_std value: 7.8307 - type: nauc_ndcg_at_1_diff1 value: 51.3311 - type: nauc_ndcg_at_3_max value: 46.8119 - type: nauc_ndcg_at_3_std value: 6.993099999999999 - type: nauc_ndcg_at_3_diff1 value: 48.3281 - type: nauc_ndcg_at_5_max value: 47.5687 - type: nauc_ndcg_at_5_std value: 8.7295 - type: nauc_ndcg_at_5_diff1 value: 49.106300000000005 - type: nauc_ndcg_at_10_max value: 47.3786 - type: nauc_ndcg_at_10_std value: 8.9795 - type: nauc_ndcg_at_10_diff1 value: 47.5348 - type: nauc_ndcg_at_20_max value: 47.9792 - type: nauc_ndcg_at_20_std value: 10.2734 - type: nauc_ndcg_at_20_diff1 value: 48.3578 - type: nauc_ndcg_at_100_max value: 48.5313 - type: nauc_ndcg_at_100_std value: 11.2393 - type: nauc_ndcg_at_100_diff1 value: 47.497299999999996 - type: nauc_ndcg_at_1000_max value: 48.4189 - type: nauc_ndcg_at_1000_std value: 10.857700000000001 - type: nauc_ndcg_at_1000_diff1 value: 47.9808 - type: nauc_map_at_1_max value: 45.0797 - type: nauc_map_at_1_std value: 1.9601 - type: nauc_map_at_1_diff1 value: 55.33050000000001 - type: nauc_map_at_3_max value: 46.6641 - type: nauc_map_at_3_std value: 3.9848000000000003 - type: nauc_map_at_3_diff1 value: 51.4752 - type: nauc_map_at_5_max value: 47.2652 - type: nauc_map_at_5_std value: 5.0378 - type: nauc_map_at_5_diff1 value: 51.3051 - type: nauc_map_at_10_max value: 47.3629 - type: nauc_map_at_10_std value: 5.4796 - type: nauc_map_at_10_diff1 value: 50.43450000000001 - type: nauc_map_at_20_max value: 47.5858 - type: nauc_map_at_20_std value: 6.4494 - type: nauc_map_at_20_diff1 value: 50.3333 - type: nauc_map_at_100_max value: 47.6506 - type: nauc_map_at_100_std value: 7.1591000000000005 - type: nauc_map_at_100_diff1 value: 50.138000000000005 - type: nauc_map_at_1000_max value: 47.516999999999996 - type: nauc_map_at_1000_std value: 7.2322 - type: nauc_map_at_1000_diff1 value: 50.132299999999994 - type: nauc_recall_at_1_max value: 45.0797 - type: nauc_recall_at_1_std value: 1.9601 - type: nauc_recall_at_1_diff1 value: 55.33050000000001 - type: nauc_recall_at_3_max value: 44.9897 - type: nauc_recall_at_3_std value: 5.6308 - type: nauc_recall_at_3_diff1 value: 46.6793 - type: nauc_recall_at_5_max value: 46.6283 - type: nauc_recall_at_5_std value: 9.998999999999999 - type: nauc_recall_at_5_diff1 value: 45.9247 - type: nauc_recall_at_10_max value: 44.714 - type: nauc_recall_at_10_std value: 10.8319 - type: nauc_recall_at_10_diff1 value: 40.291900000000005 - type: nauc_recall_at_20_max value: 46.361200000000004 - type: nauc_recall_at_20_std value: 17.9809 - type: nauc_recall_at_20_diff1 value: 42.4004 - type: nauc_recall_at_100_max value: 48.9864 - type: nauc_recall_at_100_std value: 31.7118 - type: nauc_recall_at_100_diff1 value: 30.9676 - type: nauc_recall_at_1000_max value: 59.9606 - type: 
nauc_recall_at_1000_std value: 64.66229999999999 - type: nauc_recall_at_1000_diff1 value: 27.669 - type: nauc_precision_at_1_max value: 47.905300000000004 - type: nauc_precision_at_1_std value: 7.8307 - type: nauc_precision_at_1_diff1 value: 51.3311 - type: nauc_precision_at_3_max value: 38.4644 - type: nauc_precision_at_3_std value: 11.7975 - type: nauc_precision_at_3_diff1 value: 27.7451 - type: nauc_precision_at_5_max value: 36.8955 - type: nauc_precision_at_5_std value: 17.702399999999997 - type: nauc_precision_at_5_diff1 value: 24.6268 - type: nauc_precision_at_10_max value: 26.5975 - type: nauc_precision_at_10_std value: 22.3993 - type: nauc_precision_at_10_diff1 value: 8.6213 - type: nauc_precision_at_20_max value: 17.3127 - type: nauc_precision_at_20_std value: 24.7139 - type: nauc_precision_at_20_diff1 value: 1.3941000000000001 - type: nauc_precision_at_100_max value: -0.882 - type: nauc_precision_at_100_std value: 24.5949 - type: nauc_precision_at_100_diff1 value: -10.3409 - type: nauc_precision_at_1000_max value: -15.3829 - type: nauc_precision_at_1000_std value: 15.4108 - type: nauc_precision_at_1000_diff1 value: -19.8547 - type: nauc_mrr_at_1_max value: 47.905300000000004 - type: nauc_mrr_at_1_std value: 7.8307 - type: nauc_mrr_at_1_diff1 value: 51.3311 - type: nauc_mrr_at_3_max value: 46.6702 - type: nauc_mrr_at_3_std value: 8.4343 - type: nauc_mrr_at_3_diff1 value: 47.7232 - type: nauc_mrr_at_5_max value: 47.439 - type: nauc_mrr_at_5_std value: 9.8287 - type: nauc_mrr_at_5_diff1 value: 48.2284 - type: nauc_mrr_at_10_max value: 47.477000000000004 - type: nauc_mrr_at_10_std value: 9.9349 - type: nauc_mrr_at_10_diff1 value: 47.7388 - type: nauc_mrr_at_20_max value: 47.5871 - type: nauc_mrr_at_20_std value: 10.137400000000001 - type: nauc_mrr_at_20_diff1 value: 47.949000000000005 - type: nauc_mrr_at_100_max value: 47.5206 - type: nauc_mrr_at_100_std value: 10.0871 - type: nauc_mrr_at_100_diff1 value: 47.875299999999996 - type: nauc_mrr_at_1000_max value: 47.5212 - type: nauc_mrr_at_1000_std value: 10.0739 - type: nauc_mrr_at_1000_diff1 value: 47.8953 - type: main_score value: 47.303 - task: type: Retrieval dataset: name: MTEB CQADupstackWordpressRetrieval (default) type: mteb/cqadupstack-wordpress config: default split: test revision: 4ffe81d471b1924886b33c7567bfb200e9eec5c4 metrics: - type: ndcg_at_1 value: 29.759999999999998 - type: ndcg_at_3 value: 33.824 - type: ndcg_at_5 value: 36.766 - type: ndcg_at_10 value: 39.902 - type: ndcg_at_20 value: 41.618 - type: ndcg_at_100 value: 44.983000000000004 - type: ndcg_at_1000 value: 46.938 - type: map_at_1 value: 27.181 - type: map_at_3 value: 31.526 - type: map_at_5 value: 33.397 - type: map_at_10 value: 34.766999999999996 - type: map_at_20 value: 35.244 - type: map_at_100 value: 35.757 - type: map_at_1000 value: 35.836 - type: recall_at_1 value: 27.181 - type: recall_at_3 value: 37.19 - type: recall_at_5 value: 44.153999999999996 - type: recall_at_10 value: 53.705000000000005 - type: recall_at_20 value: 60.22 - type: recall_at_100 value: 77.39200000000001 - type: recall_at_1000 value: 91.77 - type: precision_at_1 value: 29.759999999999998 - type: precision_at_3 value: 13.925 - type: precision_at_5 value: 10.24 - type: precision_at_10 value: 6.265999999999999 - type: precision_at_20 value: 3.549 - type: precision_at_100 value: 0.9520000000000001 - type: precision_at_1000 value: 0.122 - type: mrr_at_1 value: 29.7597 - type: mrr_at_3 value: 34.4732 - type: mrr_at_5 value: 35.915 - type: mrr_at_10 value: 37.1488 - type: mrr_at_20 value: 
37.637100000000004 - type: mrr_at_100 value: 38.0403 - type: mrr_at_1000 value: 38.096999999999994 - type: nauc_ndcg_at_1_max value: 35.7865 - type: nauc_ndcg_at_1_std value: 1.9512 - type: nauc_ndcg_at_1_diff1 value: 54.9311 - type: nauc_ndcg_at_3_max value: 32.6952 - type: nauc_ndcg_at_3_std value: 6.2215 - type: nauc_ndcg_at_3_diff1 value: 48.2731 - type: nauc_ndcg_at_5_max value: 33.893 - type: nauc_ndcg_at_5_std value: 5.418 - type: nauc_ndcg_at_5_diff1 value: 47.5903 - type: nauc_ndcg_at_10_max value: 31.5442 - type: nauc_ndcg_at_10_std value: 6.4778 - type: nauc_ndcg_at_10_diff1 value: 46.1388 - type: nauc_ndcg_at_20_max value: 31.613200000000003 - type: nauc_ndcg_at_20_std value: 7.0572 - type: nauc_ndcg_at_20_diff1 value: 46.5949 - type: nauc_ndcg_at_100_max value: 32.8054 - type: nauc_ndcg_at_100_std value: 9.4452 - type: nauc_ndcg_at_100_diff1 value: 46.8179 - type: nauc_ndcg_at_1000_max value: 33.0064 - type: nauc_ndcg_at_1000_std value: 8.8104 - type: nauc_ndcg_at_1000_diff1 value: 47.4082 - type: nauc_map_at_1_max value: 32.9731 - type: nauc_map_at_1_std value: 0.6048 - type: nauc_map_at_1_diff1 value: 53.8662 - type: nauc_map_at_3_max value: 32.1607 - type: nauc_map_at_3_std value: 4.4275 - type: nauc_map_at_3_diff1 value: 49.648900000000005 - type: nauc_map_at_5_max value: 33.0496 - type: nauc_map_at_5_std value: 4.3251 - type: nauc_map_at_5_diff1 value: 49.1433 - type: nauc_map_at_10_max value: 32.2061 - type: nauc_map_at_10_std value: 4.7649 - type: nauc_map_at_10_diff1 value: 48.5962 - type: nauc_map_at_20_max value: 32.2822 - type: nauc_map_at_20_std value: 4.8831 - type: nauc_map_at_20_diff1 value: 48.766799999999996 - type: nauc_map_at_100_max value: 32.521699999999996 - type: nauc_map_at_100_std value: 5.2962 - type: nauc_map_at_100_diff1 value: 48.7986 - type: nauc_map_at_1000_max value: 32.5074 - type: nauc_map_at_1000_std value: 5.2721 - type: nauc_map_at_1000_diff1 value: 48.803000000000004 - type: nauc_recall_at_1_max value: 32.9731 - type: nauc_recall_at_1_std value: 0.6048 - type: nauc_recall_at_1_diff1 value: 53.8662 - type: nauc_recall_at_3_max value: 29.308699999999998 - type: nauc_recall_at_3_std value: 7.6516 - type: nauc_recall_at_3_diff1 value: 42.4534 - type: nauc_recall_at_5_max value: 32.1131 - type: nauc_recall_at_5_std value: 6.260599999999999 - type: nauc_recall_at_5_diff1 value: 40.5131 - type: nauc_recall_at_10_max value: 24.2332 - type: nauc_recall_at_10_std value: 9.7985 - type: nauc_recall_at_10_diff1 value: 34.911500000000004 - type: nauc_recall_at_20_max value: 23.692 - type: nauc_recall_at_20_std value: 12.088799999999999 - type: nauc_recall_at_20_diff1 value: 35.8843 - type: nauc_recall_at_100_max value: 27.729300000000002 - type: nauc_recall_at_100_std value: 31.9796 - type: nauc_recall_at_100_diff1 value: 32.5991 - type: nauc_recall_at_1000_max value: 32.483200000000004 - type: nauc_recall_at_1000_std value: 48.2299 - type: nauc_recall_at_1000_diff1 value: 35.8086 - type: nauc_precision_at_1_max value: 35.7865 - type: nauc_precision_at_1_std value: 1.9512 - type: nauc_precision_at_1_diff1 value: 54.9311 - type: nauc_precision_at_3_max value: 35.729 - type: nauc_precision_at_3_std value: 12.873499999999998 - type: nauc_precision_at_3_diff1 value: 43.6572 - type: nauc_precision_at_5_max value: 35.9285 - type: nauc_precision_at_5_std value: 11.120099999999999 - type: nauc_precision_at_5_diff1 value: 37.458999999999996 - type: nauc_precision_at_10_max value: 29.4037 - type: nauc_precision_at_10_std value: 16.1533 - type: 
nauc_precision_at_10_diff1 value: 30.7829 - type: nauc_precision_at_20_max value: 28.733700000000002 - type: nauc_precision_at_20_std value: 19.4687 - type: nauc_precision_at_20_diff1 value: 29.154999999999998 - type: nauc_precision_at_100_max value: 28.109099999999998 - type: nauc_precision_at_100_std value: 31.4104 - type: nauc_precision_at_100_diff1 value: 17.7183 - type: nauc_precision_at_1000_max value: 5.8763000000000005 - type: nauc_precision_at_1000_std value: 18.5651 - type: nauc_precision_at_1000_diff1 value: -0.5546 - type: nauc_mrr_at_1_max value: 35.7865 - type: nauc_mrr_at_1_std value: 1.9512 - type: nauc_mrr_at_1_diff1 value: 54.9311 - type: nauc_mrr_at_3_max value: 35.371 - type: nauc_mrr_at_3_std value: 6.447700000000001 - type: nauc_mrr_at_3_diff1 value: 50.998900000000006 - type: nauc_mrr_at_5_max value: 36.2682 - type: nauc_mrr_at_5_std value: 5.8895 - type: nauc_mrr_at_5_diff1 value: 50.72879999999999 - type: nauc_mrr_at_10_max value: 35.1719 - type: nauc_mrr_at_10_std value: 6.074199999999999 - type: nauc_mrr_at_10_diff1 value: 50.087 - type: nauc_mrr_at_20_max value: 35.0608 - type: nauc_mrr_at_20_std value: 6.2545 - type: nauc_mrr_at_20_diff1 value: 50.1754 - type: nauc_mrr_at_100_max value: 35.1314 - type: nauc_mrr_at_100_std value: 6.417299999999999 - type: nauc_mrr_at_100_diff1 value: 50.1819 - type: nauc_mrr_at_1000_max value: 35.124 - type: nauc_mrr_at_1000_std value: 6.3942 - type: nauc_mrr_at_1000_diff1 value: 50.1926 - type: main_score value: 39.902 - task: type: Retrieval dataset: name: MTEB ClimateFEVER (default) type: mteb/climate-fever config: default split: test revision: 47f2ac6acb640fc46020b02a5b59fdda04d39380 metrics: - type: ndcg_at_1 value: 40.129999999999995 - type: ndcg_at_3 value: 33.11 - type: ndcg_at_5 value: 34.721999999999994 - type: ndcg_at_10 value: 38.314 - type: ndcg_at_20 value: 41.006 - type: ndcg_at_100 value: 44.651 - type: ndcg_at_1000 value: 47.262 - type: map_at_1 value: 17.72 - type: map_at_3 value: 24.807000000000002 - type: map_at_5 value: 26.931 - type: map_at_10 value: 28.923 - type: map_at_20 value: 29.970999999999997 - type: map_at_100 value: 30.720999999999997 - type: map_at_1000 value: 30.866 - type: recall_at_1 value: 17.72 - type: recall_at_3 value: 29.421000000000003 - type: recall_at_5 value: 35.089 - type: recall_at_10 value: 42.962 - type: recall_at_20 value: 50.46000000000001 - type: recall_at_100 value: 64.39399999999999 - type: recall_at_1000 value: 78.93599999999999 - type: precision_at_1 value: 40.129999999999995 - type: precision_at_3 value: 24.407999999999998 - type: precision_at_5 value: 17.954 - type: precision_at_10 value: 11.375 - type: precision_at_20 value: 6.857 - type: precision_at_100 value: 1.812 - type: precision_at_1000 value: 0.231 - type: mrr_at_1 value: 40.130300000000005 - type: mrr_at_3 value: 48.7296 - type: mrr_at_5 value: 50.3583 - type: mrr_at_10 value: 51.415299999999995 - type: mrr_at_20 value: 51.831700000000005 - type: mrr_at_100 value: 52.0518 - type: mrr_at_1000 value: 52.0826 - type: nauc_ndcg_at_1_max value: 40.104299999999995 - type: nauc_ndcg_at_1_std value: 18.0912 - type: nauc_ndcg_at_1_diff1 value: 37.8955 - type: nauc_ndcg_at_3_max value: 42.9593 - type: nauc_ndcg_at_3_std value: 19.1131 - type: nauc_ndcg_at_3_diff1 value: 30.6546 - type: nauc_ndcg_at_5_max value: 44.351 - type: nauc_ndcg_at_5_std value: 21.026500000000002 - type: nauc_ndcg_at_5_diff1 value: 29.723100000000002 - type: nauc_ndcg_at_10_max value: 45.1246 - type: nauc_ndcg_at_10_std value: 23.4349 - type: 
nauc_ndcg_at_10_diff1 value: 29.488599999999998 - type: nauc_ndcg_at_20_max value: 45.2818 - type: nauc_ndcg_at_20_std value: 24.904899999999998 - type: nauc_ndcg_at_20_diff1 value: 28.9215 - type: nauc_ndcg_at_100_max value: 46.7221 - type: nauc_ndcg_at_100_std value: 28.011799999999997 - type: nauc_ndcg_at_100_diff1 value: 29.6544 - type: nauc_ndcg_at_1000_max value: 46.7951 - type: nauc_ndcg_at_1000_std value: 28.5671 - type: nauc_ndcg_at_1000_diff1 value: 29.7716 - type: nauc_map_at_1_max value: 41.754400000000004 - type: nauc_map_at_1_std value: 11.7817 - type: nauc_map_at_1_diff1 value: 39.7588 - type: nauc_map_at_3_max value: 43.086 - type: nauc_map_at_3_std value: 16.2776 - type: nauc_map_at_3_diff1 value: 31.2632 - type: nauc_map_at_5_max value: 43.8303 - type: nauc_map_at_5_std value: 18.2317 - type: nauc_map_at_5_diff1 value: 30.451099999999997 - type: nauc_map_at_10_max value: 44.1511 - type: nauc_map_at_10_std value: 19.9622 - type: nauc_map_at_10_diff1 value: 30.1447 - type: nauc_map_at_20_max value: 44.2367 - type: nauc_map_at_20_std value: 20.6727 - type: nauc_map_at_20_diff1 value: 29.7979 - type: nauc_map_at_100_max value: 44.6514 - type: nauc_map_at_100_std value: 21.451999999999998 - type: nauc_map_at_100_diff1 value: 29.9572 - type: nauc_map_at_1000_max value: 44.6665 - type: nauc_map_at_1000_std value: 21.507 - type: nauc_map_at_1000_diff1 value: 29.9788 - type: nauc_recall_at_1_max value: 41.754400000000004 - type: nauc_recall_at_1_std value: 11.7817 - type: nauc_recall_at_1_diff1 value: 39.7588 - type: nauc_recall_at_3_max value: 42.1306 - type: nauc_recall_at_3_std value: 17.397299999999998 - type: nauc_recall_at_3_diff1 value: 26.3229 - type: nauc_recall_at_5_max value: 41.9516 - type: nauc_recall_at_5_std value: 20.566699999999997 - type: nauc_recall_at_5_diff1 value: 23.4934 - type: nauc_recall_at_10_max value: 41.260400000000004 - type: nauc_recall_at_10_std value: 24.0061 - type: nauc_recall_at_10_diff1 value: 21.6158 - type: nauc_recall_at_20_max value: 39.8437 - type: nauc_recall_at_20_std value: 26.892100000000003 - type: nauc_recall_at_20_diff1 value: 19.1214 - type: nauc_recall_at_100_max value: 42.9589 - type: nauc_recall_at_100_std value: 37.7833 - type: nauc_recall_at_100_diff1 value: 19.575899999999997 - type: nauc_recall_at_1000_max value: 43.292500000000004 - type: nauc_recall_at_1000_std value: 46.5189 - type: nauc_recall_at_1000_diff1 value: 16.3096 - type: nauc_precision_at_1_max value: 40.104299999999995 - type: nauc_precision_at_1_std value: 18.0912 - type: nauc_precision_at_1_diff1 value: 37.8955 - type: nauc_precision_at_3_max value: 37.2383 - type: nauc_precision_at_3_std value: 24.0517 - type: nauc_precision_at_3_diff1 value: 19.169800000000002 - type: nauc_precision_at_5_max value: 34.6764 - type: nauc_precision_at_5_std value: 26.4407 - type: nauc_precision_at_5_diff1 value: 14.188 - type: nauc_precision_at_10_max value: 31.1544 - type: nauc_precision_at_10_std value: 28.997099999999996 - type: nauc_precision_at_10_diff1 value: 11.4475 - type: nauc_precision_at_20_max value: 27.065499999999997 - type: nauc_precision_at_20_std value: 29.658099999999997 - type: nauc_precision_at_20_diff1 value: 7.388999999999999 - type: nauc_precision_at_100_max value: 22.5635 - type: nauc_precision_at_100_std value: 35.1885 - type: nauc_precision_at_100_diff1 value: 4.612900000000001 - type: nauc_precision_at_1000_max value: 9.4366 - type: nauc_precision_at_1000_std value: 29.399399999999996 - type: nauc_precision_at_1000_diff1 value: -2.8055 - type: 
nauc_mrr_at_1_max value: 40.104299999999995 - type: nauc_mrr_at_1_std value: 18.0912 - type: nauc_mrr_at_1_diff1 value: 37.8955 - type: nauc_mrr_at_3_max value: 43.088300000000004 - type: nauc_mrr_at_3_std value: 21.658 - type: nauc_mrr_at_3_diff1 value: 34.4445 - type: nauc_mrr_at_5_max value: 43.2876 - type: nauc_mrr_at_5_std value: 22.6188 - type: nauc_mrr_at_5_diff1 value: 34.143699999999995 - type: nauc_mrr_at_10_max value: 43.4627 - type: nauc_mrr_at_10_std value: 22.7775 - type: nauc_mrr_at_10_diff1 value: 34.3108 - type: nauc_mrr_at_20_max value: 43.5013 - type: nauc_mrr_at_20_std value: 22.825599999999998 - type: nauc_mrr_at_20_diff1 value: 34.4236 - type: nauc_mrr_at_100_max value: 43.543 - type: nauc_mrr_at_100_std value: 22.8566 - type: nauc_mrr_at_100_diff1 value: 34.5171 - type: nauc_mrr_at_1000_max value: 43.5287 - type: nauc_mrr_at_1000_std value: 22.8398 - type: nauc_mrr_at_1000_diff1 value: 34.5149 - type: main_score value: 38.314 - task: type: Retrieval dataset: name: MTEB DBPedia (default) type: mteb/dbpedia config: default split: test revision: c0f706b76e590d620bd6618b3ca8efdd34e2d659 metrics: - type: ndcg_at_1 value: 57.875 - type: ndcg_at_3 value: 48.424 - type: ndcg_at_5 value: 45.907 - type: ndcg_at_10 value: 43.881 - type: ndcg_at_20 value: 43.047000000000004 - type: ndcg_at_100 value: 47.892 - type: ndcg_at_1000 value: 55.175 - type: map_at_1 value: 9.705 - type: map_at_3 value: 14.984 - type: map_at_5 value: 17.579 - type: map_at_10 value: 20.901 - type: map_at_20 value: 24.244 - type: map_at_100 value: 29.263 - type: map_at_1000 value: 30.953000000000003 - type: recall_at_1 value: 9.705 - type: recall_at_3 value: 16.136 - type: recall_at_5 value: 20.4 - type: recall_at_10 value: 26.3 - type: recall_at_20 value: 33.719 - type: recall_at_100 value: 53.080000000000005 - type: recall_at_1000 value: 75.732 - type: precision_at_1 value: 70.75 - type: precision_at_3 value: 51.833 - type: precision_at_5 value: 44.2 - type: precision_at_10 value: 34.8 - type: precision_at_20 value: 26.174999999999997 - type: precision_at_100 value: 10.879999999999999 - type: precision_at_1000 value: 2.073 - type: mrr_at_1 value: 70.75 - type: mrr_at_3 value: 76.66669999999999 - type: mrr_at_5 value: 77.7667 - type: mrr_at_10 value: 78.2846 - type: mrr_at_20 value: 78.4431 - type: mrr_at_100 value: 78.5246 - type: mrr_at_1000 value: 78.5325 - type: nauc_ndcg_at_1_max value: 47.8626 - type: nauc_ndcg_at_1_std value: 29.184500000000003 - type: nauc_ndcg_at_1_diff1 value: 51.1817 - type: nauc_ndcg_at_3_max value: 40.4824 - type: nauc_ndcg_at_3_std value: 27.226899999999997 - type: nauc_ndcg_at_3_diff1 value: 29.3703 - type: nauc_ndcg_at_5_max value: 38.145 - type: nauc_ndcg_at_5_std value: 27.050600000000003 - type: nauc_ndcg_at_5_diff1 value: 27.043 - type: nauc_ndcg_at_10_max value: 36.7997 - type: nauc_ndcg_at_10_std value: 25.5961 - type: nauc_ndcg_at_10_diff1 value: 26.062800000000003 - type: nauc_ndcg_at_20_max value: 33.0901 - type: nauc_ndcg_at_20_std value: 21.3937 - type: nauc_ndcg_at_20_diff1 value: 24.8751 - type: nauc_ndcg_at_100_max value: 36.032199999999996 - type: nauc_ndcg_at_100_std value: 26.6399 - type: nauc_ndcg_at_100_diff1 value: 25.341399999999997 - type: nauc_ndcg_at_1000_max value: 42.1806 - type: nauc_ndcg_at_1000_std value: 36.6225 - type: nauc_ndcg_at_1000_diff1 value: 26.957700000000003 - type: nauc_map_at_1_max value: -1.8065000000000002 - type: nauc_map_at_1_std value: -23.1418 - type: nauc_map_at_1_diff1 value: 26.009700000000002 - type: nauc_map_at_3_max 
value: 4.5538 - type: nauc_map_at_3_std value: -19.7685 - type: nauc_map_at_3_diff1 value: 18.431900000000002 - type: nauc_map_at_5_max value: 7.6586 - type: nauc_map_at_5_std value: -15.1836 - type: nauc_map_at_5_diff1 value: 17.1768 - type: nauc_map_at_10_max value: 12.3345 - type: nauc_map_at_10_std value: -7.3311 - type: nauc_map_at_10_diff1 value: 16.467399999999998 - type: nauc_map_at_20_max value: 16.9535 - type: nauc_map_at_20_std value: 2.3999 - type: nauc_map_at_20_diff1 value: 16.1074 - type: nauc_map_at_100_max value: 24.238699999999998 - type: nauc_map_at_100_std value: 17.0193 - type: nauc_map_at_100_diff1 value: 17.179 - type: nauc_map_at_1000_max value: 26.147199999999998 - type: nauc_map_at_1000_std value: 20.597199999999997 - type: nauc_map_at_1000_diff1 value: 17.3145 - type: nauc_recall_at_1_max value: -1.8065000000000002 - type: nauc_recall_at_1_std value: -23.1418 - type: nauc_recall_at_1_diff1 value: 26.009700000000002 - type: nauc_recall_at_3_max value: 1.7474 - type: nauc_recall_at_3_std value: -21.331 - type: nauc_recall_at_3_diff1 value: 14.844899999999999 - type: nauc_recall_at_5_max value: 3.9203 - type: nauc_recall_at_5_std value: -17.225299999999997 - type: nauc_recall_at_5_diff1 value: 13.3026 - type: nauc_recall_at_10_max value: 7.484399999999999 - type: nauc_recall_at_10_std value: -10.879800000000001 - type: nauc_recall_at_10_diff1 value: 11.187 - type: nauc_recall_at_20_max value: 12.327499999999999 - type: nauc_recall_at_20_std value: -1.7592 - type: nauc_recall_at_20_diff1 value: 12.3485 - type: nauc_recall_at_100_max value: 26.868799999999997 - type: nauc_recall_at_100_std value: 23.4846 - type: nauc_recall_at_100_diff1 value: 16.4859 - type: nauc_recall_at_1000_max value: 35.4478 - type: nauc_recall_at_1000_std value: 42.7445 - type: nauc_recall_at_1000_diff1 value: 17.108 - type: nauc_precision_at_1_max value: 59.8572 - type: nauc_precision_at_1_std value: 39.1 - type: nauc_precision_at_1_diff1 value: 57.475 - type: nauc_precision_at_3_max value: 42.9945 - type: nauc_precision_at_3_std value: 41.5933 - type: nauc_precision_at_3_diff1 value: 12.3299 - type: nauc_precision_at_5_max value: 39.8975 - type: nauc_precision_at_5_std value: 46.3626 - type: nauc_precision_at_5_diff1 value: 7.990600000000001 - type: nauc_precision_at_10_max value: 37.501200000000004 - type: nauc_precision_at_10_std value: 51.9395 - type: nauc_precision_at_10_diff1 value: 4.8036 - type: nauc_precision_at_20_max value: 34.9806 - type: nauc_precision_at_20_std value: 53.513999999999996 - type: nauc_precision_at_20_diff1 value: 3.8808000000000002 - type: nauc_precision_at_100_max value: 29.6714 - type: nauc_precision_at_100_std value: 50.9404 - type: nauc_precision_at_100_diff1 value: 1.7782 - type: nauc_precision_at_1000_max value: 4.9528 - type: nauc_precision_at_1000_std value: 23.0701 - type: nauc_precision_at_1000_diff1 value: -11.6606 - type: nauc_mrr_at_1_max value: 59.8572 - type: nauc_mrr_at_1_std value: 39.1 - type: nauc_mrr_at_1_diff1 value: 57.475 - type: nauc_mrr_at_3_max value: 61.6508 - type: nauc_mrr_at_3_std value: 43.013400000000004 - type: nauc_mrr_at_3_diff1 value: 55.14170000000001 - type: nauc_mrr_at_5_max value: 61.8982 - type: nauc_mrr_at_5_std value: 42.4903 - type: nauc_mrr_at_5_diff1 value: 55.880300000000005 - type: nauc_mrr_at_10_max value: 61.6843 - type: nauc_mrr_at_10_std value: 42.8332 - type: nauc_mrr_at_10_diff1 value: 55.7773 - type: nauc_mrr_at_20_max value: 61.7877 - type: nauc_mrr_at_20_std value: 42.6655 - type: nauc_mrr_at_20_diff1 value: 
55.9627 - type: nauc_mrr_at_100_max value: 61.755300000000005 - type: nauc_mrr_at_100_std value: 42.681799999999996 - type: nauc_mrr_at_100_diff1 value: 55.97410000000001 - type: nauc_mrr_at_1000_max value: 61.7454 - type: nauc_mrr_at_1000_std value: 42.6813 - type: nauc_mrr_at_1000_diff1 value: 55.9732 - type: main_score value: 43.881 - task: type: Classification dataset: name: MTEB EmotionClassification (default) type: mteb/emotion config: default split: test revision: 4f58c6b202a23cf9a4da393831edf4f9183cad37 metrics: - type: accuracy value: 42.385 - type: f1 value: 38.2581 - type: f1_weighted value: 44.6657 - type: main_score value: 42.385 - task: type: Retrieval dataset: name: MTEB FEVER (default) type: mteb/fever config: default split: test revision: bea83ef9e8fb933d90a2f1d5515737465d613e12 metrics: - type: ndcg_at_1 value: 89.81400000000001 - type: ndcg_at_3 value: 90.789 - type: ndcg_at_5 value: 91.266 - type: ndcg_at_10 value: 91.552 - type: ndcg_at_20 value: 91.759 - type: ndcg_at_100 value: 92.04 - type: ndcg_at_1000 value: 92.264 - type: map_at_1 value: 83.343 - type: map_at_3 value: 88.293 - type: map_at_5 value: 88.709 - type: map_at_10 value: 88.895 - type: map_at_20 value: 88.985 - type: map_at_100 value: 89.046 - type: map_at_1000 value: 89.059 - type: recall_at_1 value: 83.343 - type: recall_at_3 value: 92.545 - type: recall_at_5 value: 93.944 - type: recall_at_10 value: 94.82300000000001 - type: recall_at_20 value: 95.48100000000001 - type: recall_at_100 value: 96.64 - type: recall_at_1000 value: 97.989 - type: precision_at_1 value: 89.81400000000001 - type: precision_at_3 value: 33.698 - type: precision_at_5 value: 20.602999999999998 - type: precision_at_10 value: 10.453 - type: precision_at_20 value: 5.299 - type: precision_at_100 value: 1.091 - type: precision_at_1000 value: 0.11299999999999999 - type: mrr_at_1 value: 89.81400000000001 - type: mrr_at_3 value: 93.7594 - type: mrr_at_5 value: 94.0144 - type: mrr_at_10 value: 94.073 - type: mrr_at_20 value: 94.0835 - type: mrr_at_100 value: 94.0871 - type: mrr_at_1000 value: 94.0873 - type: nauc_ndcg_at_1_max value: 23.8983 - type: nauc_ndcg_at_1_std value: -16.226 - type: nauc_ndcg_at_1_diff1 value: 78.4902 - type: nauc_ndcg_at_3_max value: 15.106 - type: nauc_ndcg_at_3_std value: -11.4 - type: nauc_ndcg_at_3_diff1 value: 41.9768 - type: nauc_ndcg_at_5_max value: 14.6485 - type: nauc_ndcg_at_5_std value: -9.5441 - type: nauc_ndcg_at_5_diff1 value: 39.7958 - type: nauc_ndcg_at_10_max value: 14.241100000000001 - type: nauc_ndcg_at_10_std value: -8.4259 - type: nauc_ndcg_at_10_diff1 value: 38.8701 - type: nauc_ndcg_at_20_max value: 14.211199999999998 - type: nauc_ndcg_at_20_std value: -7.916399999999999 - type: nauc_ndcg_at_20_diff1 value: 39.3907 - type: nauc_ndcg_at_100_max value: 14.871400000000001 - type: nauc_ndcg_at_100_std value: -7.4491000000000005 - type: nauc_ndcg_at_100_diff1 value: 40.7175 - type: nauc_ndcg_at_1000_max value: 15.386800000000001 - type: nauc_ndcg_at_1000_std value: -7.939100000000001 - type: nauc_ndcg_at_1000_diff1 value: 42.1499 - type: nauc_map_at_1_max value: 13.431199999999999 - type: nauc_map_at_1_std value: -10.2714 - type: nauc_map_at_1_diff1 value: 50.8151 - type: nauc_map_at_3_max value: 13.2276 - type: nauc_map_at_3_std value: -9.8315 - type: nauc_map_at_3_diff1 value: 39.6441 - type: nauc_map_at_5_max value: 13.4859 - type: nauc_map_at_5_std value: -9.284 - type: nauc_map_at_5_diff1 value: 39.4358 - type: nauc_map_at_10_max value: 13.578399999999998 - type: nauc_map_at_10_std value: 
-8.828800000000001 - type: nauc_map_at_10_diff1 value: 39.338499999999996 - type: nauc_map_at_20_max value: 13.600200000000001 - type: nauc_map_at_20_std value: -8.6524 - type: nauc_map_at_20_diff1 value: 39.5327 - type: nauc_map_at_100_max value: 13.7266 - type: nauc_map_at_100_std value: -8.583 - type: nauc_map_at_100_diff1 value: 39.749 - type: nauc_map_at_1000_max value: 13.7522 - type: nauc_map_at_1000_std value: -8.5978 - type: nauc_map_at_1000_diff1 value: 39.8105 - type: nauc_recall_at_1_max value: 13.431199999999999 - type: nauc_recall_at_1_std value: -10.2714 - type: nauc_recall_at_1_diff1 value: 50.8151 - type: nauc_recall_at_3_max value: 7.7703999999999995 - type: nauc_recall_at_3_std value: -7.5428999999999995 - type: nauc_recall_at_3_diff1 value: 14.6511 - type: nauc_recall_at_5_max value: 7.7514 - type: nauc_recall_at_5_std value: -0.9165 - type: nauc_recall_at_5_diff1 value: 5.1985 - type: nauc_recall_at_10_max value: 5.4695 - type: nauc_recall_at_10_std value: 4.8362 - type: nauc_recall_at_10_diff1 value: -2.3994 - type: nauc_recall_at_20_max value: 3.7693 - type: nauc_recall_at_20_std value: 9.4046 - type: nauc_recall_at_20_diff1 value: -5.3729 - type: nauc_recall_at_100_max value: 4.6496 - type: nauc_recall_at_100_std value: 19.605700000000002 - type: nauc_recall_at_100_diff1 value: -9.1885 - type: nauc_recall_at_1000_max value: 7.266 - type: nauc_recall_at_1000_std value: 25.461699999999997 - type: nauc_recall_at_1000_diff1 value: -11.698699999999999 - type: nauc_precision_at_1_max value: 23.8983 - type: nauc_precision_at_1_std value: -16.226 - type: nauc_precision_at_1_diff1 value: 78.4902 - type: nauc_precision_at_3_max value: 14.686399999999999 - type: nauc_precision_at_3_std value: -5.6663 - type: nauc_precision_at_3_diff1 value: 0.5428999999999999 - type: nauc_precision_at_5_max value: 12.9569 - type: nauc_precision_at_5_std value: 1.145 - type: nauc_precision_at_5_diff1 value: -10.0661 - type: nauc_precision_at_10_max value: 9.8558 - type: nauc_precision_at_10_std value: 6.1638 - type: nauc_precision_at_10_diff1 value: -14.3308 - type: nauc_precision_at_20_max value: 7.1591000000000005 - type: nauc_precision_at_20_std value: 8.4559 - type: nauc_precision_at_20_diff1 value: -12.226099999999999 - type: nauc_precision_at_100_max value: 7.6160000000000005 - type: nauc_precision_at_100_std value: 8.6876 - type: nauc_precision_at_100_diff1 value: -5.8182 - type: nauc_precision_at_1000_max value: 7.3231 - type: nauc_precision_at_1000_std value: 4.929399999999999 - type: nauc_precision_at_1000_diff1 value: -1.187 - type: nauc_mrr_at_1_max value: 23.8983 - type: nauc_mrr_at_1_std value: -16.226 - type: nauc_mrr_at_1_diff1 value: 78.4902 - type: nauc_mrr_at_3_max value: 25.2759 - type: nauc_mrr_at_3_std value: -20.4713 - type: nauc_mrr_at_3_diff1 value: 77.55030000000001 - type: nauc_mrr_at_5_max value: 25.709799999999998 - type: nauc_mrr_at_5_std value: -19.3177 - type: nauc_mrr_at_5_diff1 value: 77.7659 - type: nauc_mrr_at_10_max value: 25.4059 - type: nauc_mrr_at_10_std value: -19.128600000000002 - type: nauc_mrr_at_10_diff1 value: 77.78580000000001 - type: nauc_mrr_at_20_max value: 25.303399999999996 - type: nauc_mrr_at_20_std value: -19.137999999999998 - type: nauc_mrr_at_20_diff1 value: 77.7914 - type: nauc_mrr_at_100_max value: 25.2918 - type: nauc_mrr_at_100_std value: -19.1132 - type: nauc_mrr_at_100_diff1 value: 77.7997 - type: nauc_mrr_at_1000_max value: 25.2892 - type: nauc_mrr_at_1000_std value: -19.1172 - type: nauc_mrr_at_1000_diff1 value: 77.7992 - type: 
main_score value: 91.552 - task: type: Retrieval dataset: name: MTEB FiQA2018 (default) type: mteb/fiqa config: default split: test revision: 27a168819829fe9bcd655c2df245fb19452e8e06 metrics: - type: ndcg_at_1 value: 44.907000000000004 - type: ndcg_at_3 value: 40.095 - type: ndcg_at_5 value: 41.464 - type: ndcg_at_10 value: 43.958999999999996 - type: ndcg_at_20 value: 46.931 - type: ndcg_at_100 value: 50.656 - type: ndcg_at_1000 value: 53.474999999999994 - type: map_at_1 value: 22.846 - type: map_at_3 value: 31.533 - type: map_at_5 value: 34.175 - type: map_at_10 value: 36.105 - type: map_at_20 value: 37.232 - type: map_at_100 value: 37.993 - type: map_at_1000 value: 38.171 - type: recall_at_1 value: 22.846 - type: recall_at_3 value: 36.065000000000005 - type: recall_at_5 value: 42.754999999999995 - type: recall_at_10 value: 50.595 - type: recall_at_20 value: 59.85 - type: recall_at_100 value: 75.08 - type: recall_at_1000 value: 91.685 - type: precision_at_1 value: 44.907000000000004 - type: precision_at_3 value: 26.183 - type: precision_at_5 value: 19.29 - type: precision_at_10 value: 11.883000000000001 - type: precision_at_20 value: 7.191 - type: precision_at_100 value: 1.8870000000000002 - type: precision_at_1000 value: 0.23900000000000002 - type: mrr_at_1 value: 44.907399999999996 - type: mrr_at_3 value: 50.10289999999999 - type: mrr_at_5 value: 51.5303 - type: mrr_at_10 value: 52.61169999999999 - type: mrr_at_20 value: 53.13290000000001 - type: mrr_at_100 value: 53.3809 - type: mrr_at_1000 value: 53.4181 - type: nauc_ndcg_at_1_max value: 50.2672 - type: nauc_ndcg_at_1_std value: -5.858 - type: nauc_ndcg_at_1_diff1 value: 55.1067 - type: nauc_ndcg_at_3_max value: 40.9279 - type: nauc_ndcg_at_3_std value: -6.954000000000001 - type: nauc_ndcg_at_3_diff1 value: 43.9096 - type: nauc_ndcg_at_5_max value: 38.406400000000005 - type: nauc_ndcg_at_5_std value: -5.951 - type: nauc_ndcg_at_5_diff1 value: 42.9537 - type: nauc_ndcg_at_10_max value: 40.1602 - type: nauc_ndcg_at_10_std value: -3.486 - type: nauc_ndcg_at_10_diff1 value: 43.693 - type: nauc_ndcg_at_20_max value: 40.3159 - type: nauc_ndcg_at_20_std value: -1.6125 - type: nauc_ndcg_at_20_diff1 value: 43.0649 - type: nauc_ndcg_at_100_max value: 42.5543 - type: nauc_ndcg_at_100_std value: 0.133 - type: nauc_ndcg_at_100_diff1 value: 44.263799999999996 - type: nauc_ndcg_at_1000_max value: 43.520399999999995 - type: nauc_ndcg_at_1000_std value: -0.49300000000000005 - type: nauc_ndcg_at_1000_diff1 value: 44.550200000000004 - type: nauc_map_at_1_max value: 26.930300000000003 - type: nauc_map_at_1_std value: -6.8881 - type: nauc_map_at_1_diff1 value: 45.905499999999996 - type: nauc_map_at_3_max value: 32.3991 - type: nauc_map_at_3_std value: -8.1954 - type: nauc_map_at_3_diff1 value: 42.9392 - type: nauc_map_at_5_max value: 34.0031 - type: nauc_map_at_5_std value: -6.9963999999999995 - type: nauc_map_at_5_diff1 value: 42.7737 - type: nauc_map_at_10_max value: 36.38 - type: nauc_map_at_10_std value: -5.663 - type: nauc_map_at_10_diff1 value: 43.1583 - type: nauc_map_at_20_max value: 36.6981 - type: nauc_map_at_20_std value: -4.9736 - type: nauc_map_at_20_diff1 value: 42.924800000000005 - type: nauc_map_at_100_max value: 37.268699999999995 - type: nauc_map_at_100_std value: -4.6967 - type: nauc_map_at_100_diff1 value: 43.024 - type: nauc_map_at_1000_max value: 37.3818 - type: nauc_map_at_1000_std value: -4.7077 - type: nauc_map_at_1000_diff1 value: 43.0575 - type: nauc_recall_at_1_max value: 26.930300000000003 - type: nauc_recall_at_1_std value: 
-6.8881 - type: nauc_recall_at_1_diff1 value: 45.905499999999996 - type: nauc_recall_at_3_max value: 27.860200000000003 - type: nauc_recall_at_3_std value: -7.8473 - type: nauc_recall_at_3_diff1 value: 36.569 - type: nauc_recall_at_5_max value: 27.1751 - type: nauc_recall_at_5_std value: -5.0796 - type: nauc_recall_at_5_diff1 value: 33.9236 - type: nauc_recall_at_10_max value: 32.0004 - type: nauc_recall_at_10_std value: 1.0071 - type: nauc_recall_at_10_diff1 value: 33.1849 - type: nauc_recall_at_20_max value: 30.6595 - type: nauc_recall_at_20_std value: 7.3179 - type: nauc_recall_at_20_diff1 value: 29.751300000000004 - type: nauc_recall_at_100_max value: 35.9924 - type: nauc_recall_at_100_std value: 21.691399999999998 - type: nauc_recall_at_100_diff1 value: 31.397100000000002 - type: nauc_recall_at_1000_max value: 47.176899999999996 - type: nauc_recall_at_1000_std value: 37.8536 - type: nauc_recall_at_1000_diff1 value: 30.2447 - type: nauc_precision_at_1_max value: 50.2672 - type: nauc_precision_at_1_std value: -5.858 - type: nauc_precision_at_1_diff1 value: 55.1067 - type: nauc_precision_at_3_max value: 44.4071 - type: nauc_precision_at_3_std value: -4.4772 - type: nauc_precision_at_3_diff1 value: 32.6195 - type: nauc_precision_at_5_max value: 42.6336 - type: nauc_precision_at_5_std value: -0.9528 - type: nauc_precision_at_5_diff1 value: 27.821299999999997 - type: nauc_precision_at_10_max value: 45.5267 - type: nauc_precision_at_10_std value: 4.0484 - type: nauc_precision_at_10_diff1 value: 23.8886 - type: nauc_precision_at_20_max value: 41.7389 - type: nauc_precision_at_20_std value: 9.3544 - type: nauc_precision_at_20_diff1 value: 16.236700000000003 - type: nauc_precision_at_100_max value: 38.4564 - type: nauc_precision_at_100_std value: 12.544 - type: nauc_precision_at_100_diff1 value: 10.5924 - type: nauc_precision_at_1000_max value: 31.2525 - type: nauc_precision_at_1000_std value: 10.641399999999999 - type: nauc_precision_at_1000_diff1 value: 1.5966 - type: nauc_mrr_at_1_max value: 50.2672 - type: nauc_mrr_at_1_std value: -5.858 - type: nauc_mrr_at_1_diff1 value: 55.1067 - type: nauc_mrr_at_3_max value: 49.1124 - type: nauc_mrr_at_3_std value: -5.0685 - type: nauc_mrr_at_3_diff1 value: 51.1787 - type: nauc_mrr_at_5_max value: 48.5671 - type: nauc_mrr_at_5_std value: -4.6053999999999995 - type: nauc_mrr_at_5_diff1 value: 50.688599999999994 - type: nauc_mrr_at_10_max value: 49.2018 - type: nauc_mrr_at_10_std value: -3.8524000000000003 - type: nauc_mrr_at_10_diff1 value: 50.4746 - type: nauc_mrr_at_20_max value: 49.2589 - type: nauc_mrr_at_20_std value: -3.5479 - type: nauc_mrr_at_20_diff1 value: 50.4304 - type: nauc_mrr_at_100_max value: 49.3016 - type: nauc_mrr_at_100_std value: -3.5770999999999997 - type: nauc_mrr_at_100_diff1 value: 50.6172 - type: nauc_mrr_at_1000_max value: 49.2911 - type: nauc_mrr_at_1000_std value: -3.6117999999999997 - type: nauc_mrr_at_1000_diff1 value: 50.6268 - type: main_score value: 43.958999999999996 - task: type: Retrieval dataset: name: MTEB HotpotQA (default) type: mteb/hotpotqa config: default split: test revision: ab518f4d6fcca38d87c25209f94beba119d02014 metrics: - type: ndcg_at_1 value: 85.955 - type: ndcg_at_3 value: 68.83 - type: ndcg_at_5 value: 70.894 - type: ndcg_at_10 value: 72.399 - type: ndcg_at_20 value: 73.328 - type: ndcg_at_100 value: 74.765 - type: ndcg_at_1000 value: 75.87899999999999 - type: map_at_1 value: 42.978 - type: map_at_3 value: 61.568 - type: map_at_5 value: 63.241 - type: map_at_10 value: 64.18199999999999 - type: 
map_at_20 value: 64.562 - type: map_at_100 value: 64.865 - type: map_at_1000 value: 64.922 - type: recall_at_1 value: 42.978 - type: recall_at_3 value: 64.801 - type: recall_at_5 value: 68.866 - type: recall_at_10 value: 72.627 - type: recall_at_20 value: 75.625 - type: recall_at_100 value: 81.951 - type: recall_at_1000 value: 89.37899999999999 - type: precision_at_1 value: 85.955 - type: precision_at_3 value: 43.201 - type: precision_at_5 value: 27.546 - type: precision_at_10 value: 14.524999999999999 - type: precision_at_20 value: 7.562 - type: precision_at_100 value: 1.6389999999999998 - type: precision_at_1000 value: 0.179 - type: mrr_at_1 value: 85.9554 - type: mrr_at_3 value: 89.2753 - type: mrr_at_5 value: 89.6838 - type: mrr_at_10 value: 89.8559 - type: mrr_at_20 value: 89.92569999999999 - type: mrr_at_100 value: 89.96600000000001 - type: mrr_at_1000 value: 89.97070000000001 - type: nauc_ndcg_at_1_max value: 57.1837 - type: nauc_ndcg_at_1_std value: -4.2725 - type: nauc_ndcg_at_1_diff1 value: 74.8832 - type: nauc_ndcg_at_3_max value: 13.953399999999998 - type: nauc_ndcg_at_3_std value: 0.9547 - type: nauc_ndcg_at_3_diff1 value: 4.6952 - type: nauc_ndcg_at_5_max value: 12.1892 - type: nauc_ndcg_at_5_std value: 1.7878 - type: nauc_ndcg_at_5_diff1 value: 2.1255 - type: nauc_ndcg_at_10_max value: 11.4909 - type: nauc_ndcg_at_10_std value: 2.9917 - type: nauc_ndcg_at_10_diff1 value: 1.111 - type: nauc_ndcg_at_20_max value: 11.183800000000002 - type: nauc_ndcg_at_20_std value: 3.8205999999999998 - type: nauc_ndcg_at_20_diff1 value: 0.5191 - type: nauc_ndcg_at_100_max value: 11.4582 - type: nauc_ndcg_at_100_std value: 5.2234 - type: nauc_ndcg_at_100_diff1 value: 0.7051 - type: nauc_ndcg_at_1000_max value: 11.8891 - type: nauc_ndcg_at_1000_std value: 5.0018 - type: nauc_ndcg_at_1000_diff1 value: 1.3516 - type: nauc_map_at_1_max value: 57.1837 - type: nauc_map_at_1_std value: -4.2725 - type: nauc_map_at_1_diff1 value: 74.8832 - type: nauc_map_at_3_max value: 8.7588 - type: nauc_map_at_3_std value: 0.8586 - type: nauc_map_at_3_diff1 value: -2.1179 - type: nauc_map_at_5_max value: 7.8513 - type: nauc_map_at_5_std value: 1.4206999999999999 - type: nauc_map_at_5_diff1 value: -3.5381000000000005 - type: nauc_map_at_10_max value: 7.603999999999999 - type: nauc_map_at_10_std value: 2.0785 - type: nauc_map_at_10_diff1 value: -3.9354 - type: nauc_map_at_20_max value: 7.5393 - type: nauc_map_at_20_std value: 2.3233 - type: nauc_map_at_20_diff1 value: -4.0794999999999995 - type: nauc_map_at_100_max value: 7.593500000000001 - type: nauc_map_at_100_std value: 2.5528 - type: nauc_map_at_100_diff1 value: -4.0459000000000005 - type: nauc_map_at_1000_max value: 7.6116 - type: nauc_map_at_1000_std value: 2.5475000000000003 - type: nauc_map_at_1000_diff1 value: -4.0208 - type: nauc_recall_at_1_max value: 57.1837 - type: nauc_recall_at_1_std value: -4.2725 - type: nauc_recall_at_1_diff1 value: 74.8832 - type: nauc_recall_at_3_max value: 5.1265 - type: nauc_recall_at_3_std value: 2.3453999999999997 - type: nauc_recall_at_3_diff1 value: -9.5534 - type: nauc_recall_at_5_max value: 1.3988 - type: nauc_recall_at_5_std value: 3.8738 - type: nauc_recall_at_5_diff1 value: -14.770900000000001 - type: nauc_recall_at_10_max value: -1.1159999999999999 - type: nauc_recall_at_10_std value: 6.7406999999999995 - type: nauc_recall_at_10_diff1 value: -18.08 - type: nauc_recall_at_20_max value: -2.9072 - type: nauc_recall_at_20_std value: 9.6567 - type: nauc_recall_at_20_diff1 value: -21.197 - type: nauc_recall_at_100_max value: 
-4.4864 - type: nauc_recall_at_100_std value: 17.8761 - type: nauc_recall_at_100_diff1 value: -24.5792 - type: nauc_recall_at_1000_max value: -7.9052 - type: nauc_recall_at_1000_std value: 21.7637 - type: nauc_recall_at_1000_diff1 value: -30.4447 - type: nauc_precision_at_1_max value: 57.1837 - type: nauc_precision_at_1_std value: -4.2725 - type: nauc_precision_at_1_diff1 value: 74.8832 - type: nauc_precision_at_3_max value: 5.1265 - type: nauc_precision_at_3_std value: 2.3453999999999997 - type: nauc_precision_at_3_diff1 value: -9.5534 - type: nauc_precision_at_5_max value: 1.3988 - type: nauc_precision_at_5_std value: 3.8738 - type: nauc_precision_at_5_diff1 value: -14.770900000000001 - type: nauc_precision_at_10_max value: -1.1159999999999999 - type: nauc_precision_at_10_std value: 6.7406999999999995 - type: nauc_precision_at_10_diff1 value: -18.08 - type: nauc_precision_at_20_max value: -2.9072 - type: nauc_precision_at_20_std value: 9.6567 - type: nauc_precision_at_20_diff1 value: -21.197 - type: nauc_precision_at_100_max value: -4.4864 - type: nauc_precision_at_100_std value: 17.8761 - type: nauc_precision_at_100_diff1 value: -24.5792 - type: nauc_precision_at_1000_max value: -7.9052 - type: nauc_precision_at_1000_std value: 21.7637 - type: nauc_precision_at_1000_diff1 value: -30.4447 - type: nauc_mrr_at_1_max value: 57.1837 - type: nauc_mrr_at_1_std value: -4.2725 - type: nauc_mrr_at_1_diff1 value: 74.8832 - type: nauc_mrr_at_3_max value: 60.68019999999999 - type: nauc_mrr_at_3_std value: -2.5041 - type: nauc_mrr_at_3_diff1 value: 74.2505 - type: nauc_mrr_at_5_max value: 60.3928 - type: nauc_mrr_at_5_std value: -2.2979 - type: nauc_mrr_at_5_diff1 value: 74.27470000000001 - type: nauc_mrr_at_10_max value: 60.336800000000004 - type: nauc_mrr_at_10_std value: -2.308 - type: nauc_mrr_at_10_diff1 value: 74.4135 - type: nauc_mrr_at_20_max value: 60.317299999999996 - type: nauc_mrr_at_20_std value: -2.1652 - type: nauc_mrr_at_20_diff1 value: 74.3945 - type: nauc_mrr_at_100_max value: 60.283 - type: nauc_mrr_at_100_std value: -2.154 - type: nauc_mrr_at_100_diff1 value: 74.38040000000001 - type: nauc_mrr_at_1000_max value: 60.272099999999995 - type: nauc_mrr_at_1000_std value: -2.1783 - type: nauc_mrr_at_1000_diff1 value: 74.378 - type: main_score value: 72.399 - task: type: Classification dataset: name: MTEB ImdbClassification (default) type: mteb/imdb config: default split: test revision: 3d86128a09e091d6018b6d26cad27f2739fc2db7 metrics: - type: accuracy value: 69.0916 - type: f1 value: 68.9866 - type: f1_weighted value: 68.9866 - type: ap value: 63.3215 - type: ap_weighted value: 63.3215 - type: main_score value: 69.0916 - task: type: Retrieval dataset: name: MTEB MSMARCO (default) type: mteb/msmarco config: default split: dev revision: c5a29a104738b98a9e76336939199e264163d4a0 metrics: - type: ndcg_at_1 value: 24.914 - type: ndcg_at_3 value: 36.479 - type: ndcg_at_5 value: 40.288000000000004 - type: ndcg_at_10 value: 44.043 - type: ndcg_at_20 value: 46.838 - type: ndcg_at_100 value: 49.626999999999995 - type: ndcg_at_1000 value: 50.665000000000006 - type: map_at_1 value: 24.223 - type: map_at_3 value: 33.348 - type: map_at_5 value: 35.494 - type: map_at_10 value: 37.077 - type: map_at_20 value: 37.867 - type: map_at_100 value: 38.279999999999994 - type: map_at_1000 value: 38.323 - type: recall_at_1 value: 24.223 - type: recall_at_3 value: 44.9 - type: recall_at_5 value: 54.010999999999996 - type: recall_at_10 value: 65.399 - type: recall_at_20 value: 76.248 - type: recall_at_100 value: 
90.78 - type: recall_at_1000 value: 98.619 - type: precision_at_1 value: 24.914 - type: precision_at_3 value: 15.501000000000001 - type: precision_at_5 value: 11.238 - type: precision_at_10 value: 6.837 - type: precision_at_20 value: 3.9960000000000004 - type: precision_at_100 value: 0.959 - type: precision_at_1000 value: 0.105 - type: mrr_at_1 value: 24.914 - type: mrr_at_3 value: 34.0043 - type: mrr_at_5 value: 36.1089 - type: mrr_at_10 value: 37.6521 - type: mrr_at_20 value: 38.4106 - type: mrr_at_100 value: 38.7938 - type: mrr_at_1000 value: 38.8316 - type: nauc_ndcg_at_1_max value: 3.9297 - type: nauc_ndcg_at_1_std value: -22.016 - type: nauc_ndcg_at_1_diff1 value: 39.7204 - type: nauc_ndcg_at_3_max value: 4.7672 - type: nauc_ndcg_at_3_std value: -27.0359 - type: nauc_ndcg_at_3_diff1 value: 34.139 - type: nauc_ndcg_at_5_max value: 5.1921 - type: nauc_ndcg_at_5_std value: -28.6425 - type: nauc_ndcg_at_5_diff1 value: 33.671800000000005 - type: nauc_ndcg_at_10_max value: 5.3812999999999995 - type: nauc_ndcg_at_10_std value: -28.7602 - type: nauc_ndcg_at_10_diff1 value: 33.5856 - type: nauc_ndcg_at_20_max value: 5.7039 - type: nauc_ndcg_at_20_std value: -27.578000000000003 - type: nauc_ndcg_at_20_diff1 value: 33.9639 - type: nauc_ndcg_at_100_max value: 5.9491000000000005 - type: nauc_ndcg_at_100_std value: -25.562800000000003 - type: nauc_ndcg_at_100_diff1 value: 34.5177 - type: nauc_ndcg_at_1000_max value: 5.7685 - type: nauc_ndcg_at_1000_std value: -25.796400000000002 - type: nauc_ndcg_at_1000_diff1 value: 34.617 - type: nauc_map_at_1_max value: 3.8164 - type: nauc_map_at_1_std value: -22.1345 - type: nauc_map_at_1_diff1 value: 39.7682 - type: nauc_map_at_3_max value: 4.5438 - type: nauc_map_at_3_std value: -25.990299999999998 - type: nauc_map_at_3_diff1 value: 35.4211 - type: nauc_map_at_5_max value: 4.7521 - type: nauc_map_at_5_std value: -26.9187 - type: nauc_map_at_5_diff1 value: 35.1711 - type: nauc_map_at_10_max value: 4.8275 - type: nauc_map_at_10_std value: -26.962799999999998 - type: nauc_map_at_10_diff1 value: 35.1875 - type: nauc_map_at_20_max value: 4.9247 - type: nauc_map_at_20_std value: -26.622899999999998 - type: nauc_map_at_20_diff1 value: 35.308499999999995 - type: nauc_map_at_100_max value: 4.9704 - type: nauc_map_at_100_std value: -26.3156 - type: nauc_map_at_100_diff1 value: 35.3955 - type: nauc_map_at_1000_max value: 4.9692 - type: nauc_map_at_1000_std value: -26.3098 - type: nauc_map_at_1000_diff1 value: 35.3987 - type: nauc_recall_at_1_max value: 3.8164 - type: nauc_recall_at_1_std value: -22.1345 - type: nauc_recall_at_1_diff1 value: 39.7682 - type: nauc_recall_at_3_max value: 5.2443 - type: nauc_recall_at_3_std value: -29.965000000000003 - type: nauc_recall_at_3_diff1 value: 30.303 - type: nauc_recall_at_5_max value: 6.164499999999999 - type: nauc_recall_at_5_std value: -33.9534 - type: nauc_recall_at_5_diff1 value: 28.9101 - type: nauc_recall_at_10_max value: 6.8656999999999995 - type: nauc_recall_at_10_std value: -35.2711 - type: nauc_recall_at_10_diff1 value: 27.785500000000003 - type: nauc_recall_at_20_max value: 8.7891 - type: nauc_recall_at_20_std value: -31.276 - type: nauc_recall_at_20_diff1 value: 28.048099999999998 - type: nauc_recall_at_100_max value: 15.3546 - type: nauc_recall_at_100_std value: -7.2786 - type: nauc_recall_at_100_diff1 value: 29.0868 - type: nauc_recall_at_1000_max value: 33.858 - type: nauc_recall_at_1000_std value: 42.2189 - type: nauc_recall_at_1000_diff1 value: 18.9862 - type: nauc_precision_at_1_max value: 3.9297 - type: 
nauc_precision_at_1_std value: -22.016 - type: nauc_precision_at_1_diff1 value: 39.7204 - type: nauc_precision_at_3_max value: 5.1912 - type: nauc_precision_at_3_std value: -29.697000000000003 - type: nauc_precision_at_3_diff1 value: 30.089199999999998 - type: nauc_precision_at_5_max value: 6.311400000000001 - type: nauc_precision_at_5_std value: -32.9724 - type: nauc_precision_at_5_diff1 value: 28.0676 - type: nauc_precision_at_10_max value: 6.869400000000001 - type: nauc_precision_at_10_std value: -32.4788 - type: nauc_precision_at_10_diff1 value: 25.6897 - type: nauc_precision_at_20_max value: 9.206 - type: nauc_precision_at_20_std value: -25.3222 - type: nauc_precision_at_20_diff1 value: 23.799500000000002 - type: nauc_precision_at_100_max value: 13.8625 - type: nauc_precision_at_100_std value: 3.3068 - type: nauc_precision_at_100_diff1 value: 14.3806 - type: nauc_precision_at_1000_max value: 11.8588 - type: nauc_precision_at_1000_std value: 17.6676 - type: nauc_precision_at_1000_diff1 value: -3.8201 - type: nauc_mrr_at_1_max value: 3.9297 - type: nauc_mrr_at_1_std value: -22.016 - type: nauc_mrr_at_1_diff1 value: 39.7204 - type: nauc_mrr_at_3_max value: 4.6479 - type: nauc_mrr_at_3_std value: -25.644699999999997 - type: nauc_mrr_at_3_diff1 value: 35.478 - type: nauc_mrr_at_5_max value: 4.986 - type: nauc_mrr_at_5_std value: -26.4206 - type: nauc_mrr_at_5_diff1 value: 35.285 - type: nauc_mrr_at_10_max value: 5.0845 - type: nauc_mrr_at_10_std value: -26.411800000000003 - type: nauc_mrr_at_10_diff1 value: 35.2365 - type: nauc_mrr_at_20_max value: 5.1531 - type: nauc_mrr_at_20_std value: -26.0735 - type: nauc_mrr_at_20_diff1 value: 35.3495 - type: nauc_mrr_at_100_max value: 5.1672 - type: nauc_mrr_at_100_std value: -25.8254 - type: nauc_mrr_at_100_diff1 value: 35.4396 - type: nauc_mrr_at_1000_max value: 5.1629000000000005 - type: nauc_mrr_at_1000_std value: -25.8233 - type: nauc_mrr_at_1000_diff1 value: 35.4444 - type: main_score value: 44.043 - task: type: Classification dataset: name: MTEB MTOPDomainClassification (en) type: mteb/mtop_domain config: en split: test revision: d80d48c1eb48d3562165c59d59d0034df9fff0bf metrics: - type: accuracy value: 92.08619999999999 - type: f1 value: 91.8074 - type: f1_weighted value: 92.0765 - type: main_score value: 92.08619999999999 - task: type: Classification dataset: name: MTEB MTOPIntentClassification (en) type: mteb/mtop_intent config: en split: test revision: ae001d0e6b1228650b7bd1c2c65fb50ad11a8aba metrics: - type: accuracy value: 65.2668 - type: f1 value: 44.499 - type: f1_weighted value: 67.9193 - type: main_score value: 65.2668 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (en) type: mteb/amazon_massive_intent config: en split: test revision: 4672e20407010da34463acc759c162ca9734bca6 metrics: - type: accuracy value: 68.0128 - type: f1 value: 64.4011 - type: f1_weighted value: 67.4705 - type: main_score value: 68.0128 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (en) type: mteb/amazon_massive_scenario config: en split: test revision: fad2c6e8459f9e1c45d9315f4953d921437d70f8 metrics: - type: accuracy value: 72.67320000000001 - type: f1 value: 71.7881 - type: f1_weighted value: 72.9092 - type: main_score value: 72.67320000000001 - task: type: Clustering dataset: name: MTEB MedrxivClusteringP2P (default) type: mteb/medrxiv-clustering-p2p config: default split: test revision: e7a26af6f3ae46b30dde8737f02c07b1505bcc73 metrics: - type: v_measure value: 31.5764 - type: v_measure_std 
value: 1.3743999999999998 - type: main_score value: 31.5764 - task: type: Clustering dataset: name: MTEB MedrxivClusteringS2S (default) type: mteb/medrxiv-clustering-s2s config: default split: test revision: 35191c8c0dca72d8ff3efcd72aa802307d469663 metrics: - type: v_measure value: 28.006999999999998 - type: v_measure_std value: 1.4235 - type: main_score value: 28.006999999999998 - task: type: Reranking dataset: name: MTEB MindSmallReranking (default) type: mteb/mind_small config: default split: test revision: 59042f120c80e8afa9cdbb224f67076cec0fc9a7 metrics: - type: map value: 30.3039 - type: mrr value: 31.168699999999998 - type: nAUC_map_max value: -25.113200000000003 - type: nAUC_map_std value: -8.5652 - type: nAUC_map_diff1 value: 12.437199999999999 - type: nAUC_mrr_max value: -19.5255 - type: nAUC_mrr_std value: -6.1112 - type: nAUC_mrr_diff1 value: 12.1585 - type: main_score value: 30.3039 - task: type: Retrieval dataset: name: MTEB NFCorpus (default) type: mteb/nfcorpus config: default split: test revision: ec0fa4fe99da2ff19ca1214b7966684033a58814 metrics: - type: ndcg_at_1 value: 45.046 - type: ndcg_at_3 value: 41.975 - type: ndcg_at_5 value: 39.421 - type: ndcg_at_10 value: 35.879 - type: ndcg_at_20 value: 32.987 - type: ndcg_at_100 value: 32.107 - type: ndcg_at_1000 value: 40.67 - type: map_at_1 value: 5.854 - type: map_at_3 value: 9.991999999999999 - type: map_at_5 value: 11.405999999999999 - type: map_at_10 value: 13.272 - type: map_at_20 value: 14.604000000000001 - type: map_at_100 value: 16.521 - type: map_at_1000 value: 17.925 - type: recall_at_1 value: 5.854 - type: recall_at_3 value: 11.036999999999999 - type: recall_at_5 value: 13.391 - type: recall_at_10 value: 16.841 - type: recall_at_20 value: 20.522000000000002 - type: recall_at_100 value: 31.733 - type: recall_at_1000 value: 63.525 - type: precision_at_1 value: 46.749 - type: precision_at_3 value: 39.525 - type: precision_at_5 value: 34.056 - type: precision_at_10 value: 26.656000000000002 - type: precision_at_20 value: 19.211 - type: precision_at_100 value: 8.099 - type: precision_at_1000 value: 2.061 - type: mrr_at_1 value: 47.0588 - type: mrr_at_3 value: 53.9732 - type: mrr_at_5 value: 55.443799999999996 - type: mrr_at_10 value: 56.04599999999999 - type: mrr_at_20 value: 56.37799999999999 - type: mrr_at_100 value: 56.6504 - type: mrr_at_1000 value: 56.6866 - type: nauc_ndcg_at_1_max value: 43.5884 - type: nauc_ndcg_at_1_std value: 22.4376 - type: nauc_ndcg_at_1_diff1 value: 34.7846 - type: nauc_ndcg_at_3_max value: 44.7961 - type: nauc_ndcg_at_3_std value: 24.4811 - type: nauc_ndcg_at_3_diff1 value: 25.5747 - type: nauc_ndcg_at_5_max value: 43.5994 - type: nauc_ndcg_at_5_std value: 24.827199999999998 - type: nauc_ndcg_at_5_diff1 value: 23.8874 - type: nauc_ndcg_at_10_max value: 43.126999999999995 - type: nauc_ndcg_at_10_std value: 27.5053 - type: nauc_ndcg_at_10_diff1 value: 23.4832 - type: nauc_ndcg_at_20_max value: 43.1243 - type: nauc_ndcg_at_20_std value: 27.3455 - type: nauc_ndcg_at_20_diff1 value: 23.8534 - type: nauc_ndcg_at_100_max value: 46.5936 - type: nauc_ndcg_at_100_std value: 28.0084 - type: nauc_ndcg_at_100_diff1 value: 29.630200000000002 - type: nauc_ndcg_at_1000_max value: 51.7379 - type: nauc_ndcg_at_1000_std value: 33.2077 - type: nauc_ndcg_at_1000_diff1 value: 30.1522 - type: nauc_map_at_1_max value: 17.2703 - type: nauc_map_at_1_std value: -14.6241 - type: nauc_map_at_1_diff1 value: 46.9767 - type: nauc_map_at_3_max value: 25.562600000000003 - type: nauc_map_at_3_std value: -10.1565 - type: 
nauc_map_at_3_diff1 value: 39.347500000000004 - type: nauc_map_at_5_max value: 28.397299999999998 - type: nauc_map_at_5_std value: -7.0083 - type: nauc_map_at_5_diff1 value: 37.4216 - type: nauc_map_at_10_max value: 31.639400000000002 - type: nauc_map_at_10_std value: -1.9 - type: nauc_map_at_10_diff1 value: 35.9293 - type: nauc_map_at_20_max value: 34.342800000000004 - type: nauc_map_at_20_std value: 2.6614 - type: nauc_map_at_20_diff1 value: 34.7985 - type: nauc_map_at_100_max value: 37.046600000000005 - type: nauc_map_at_100_std value: 9.2072 - type: nauc_map_at_100_diff1 value: 33.2764 - type: nauc_map_at_1000_max value: 37.6597 - type: nauc_map_at_1000_std value: 12.6768 - type: nauc_map_at_1000_diff1 value: 31.773699999999998 - type: nauc_recall_at_1_max value: 17.2703 - type: nauc_recall_at_1_std value: -14.6241 - type: nauc_recall_at_1_diff1 value: 46.9767 - type: nauc_recall_at_3_max value: 24.5473 - type: nauc_recall_at_3_std value: -9.7412 - type: nauc_recall_at_3_diff1 value: 37.8539 - type: nauc_recall_at_5_max value: 27.249200000000002 - type: nauc_recall_at_5_std value: -5.823799999999999 - type: nauc_recall_at_5_diff1 value: 34.06 - type: nauc_recall_at_10_max value: 29.1217 - type: nauc_recall_at_10_std value: -0.21159999999999998 - type: nauc_recall_at_10_diff1 value: 32.3914 - type: nauc_recall_at_20_max value: 31.142999999999997 - type: nauc_recall_at_20_std value: 4.3805 - type: nauc_recall_at_20_diff1 value: 28.852899999999998 - type: nauc_recall_at_100_max value: 32.8751 - type: nauc_recall_at_100_std value: 16.0658 - type: nauc_recall_at_100_diff1 value: 24.8181 - type: nauc_recall_at_1000_max value: 24.5638 - type: nauc_recall_at_1000_std value: 20.822 - type: nauc_recall_at_1000_diff1 value: 13.123099999999999 - type: nauc_precision_at_1_max value: 44.714999999999996 - type: nauc_precision_at_1_std value: 23.2541 - type: nauc_precision_at_1_diff1 value: 33.9092 - type: nauc_precision_at_3_max value: 44.935199999999995 - type: nauc_precision_at_3_std value: 29.0989 - type: nauc_precision_at_3_diff1 value: 14.9816 - type: nauc_precision_at_5_max value: 40.7582 - type: nauc_precision_at_5_std value: 31.049 - type: nauc_precision_at_5_diff1 value: 9.7826 - type: nauc_precision_at_10_max value: 37.8974 - type: nauc_precision_at_10_std value: 38.9576 - type: nauc_precision_at_10_diff1 value: 4.3217 - type: nauc_precision_at_20_max value: 33.254099999999994 - type: nauc_precision_at_20_std value: 42.3527 - type: nauc_precision_at_20_diff1 value: -1.8002 - type: nauc_precision_at_100_max value: 20.6042 - type: nauc_precision_at_100_std value: 46.0314 - type: nauc_precision_at_100_diff1 value: -10.098 - type: nauc_precision_at_1000_max value: 6.8368 - type: nauc_precision_at_1000_std value: 36.4345 - type: nauc_precision_at_1000_diff1 value: -16.1738 - type: nauc_mrr_at_1_max value: 44.1317 - type: nauc_mrr_at_1_std value: 22.794900000000002 - type: nauc_mrr_at_1_diff1 value: 33.071600000000004 - type: nauc_mrr_at_3_max value: 49.8647 - type: nauc_mrr_at_3_std value: 28.821600000000004 - type: nauc_mrr_at_3_diff1 value: 31.1845 - type: nauc_mrr_at_5_max value: 50.3448 - type: nauc_mrr_at_5_std value: 28.721799999999998 - type: nauc_mrr_at_5_diff1 value: 31.6681 - type: nauc_mrr_at_10_max value: 50.601 - type: nauc_mrr_at_10_std value: 29.461199999999998 - type: nauc_mrr_at_10_diff1 value: 31.5519 - type: nauc_mrr_at_20_max value: 50.7861 - type: nauc_mrr_at_20_std value: 29.615000000000002 - type: nauc_mrr_at_20_diff1 value: 31.535200000000003 - type: nauc_mrr_at_100_max 
value: 50.7764 - type: nauc_mrr_at_100_std value: 29.772199999999998 - type: nauc_mrr_at_100_diff1 value: 31.5569 - type: nauc_mrr_at_1000_max value: 50.75150000000001 - type: nauc_mrr_at_1000_std value: 29.747600000000002 - type: nauc_mrr_at_1000_diff1 value: 31.5457 - type: main_score value: 35.879 - task: type: Retrieval dataset: name: MTEB NQ (default) type: mteb/nq config: default split: test revision: b774495ed302d8c44a3a7ea25c90dbce03968f31 metrics: - type: ndcg_at_1 value: 45.394 - type: ndcg_at_3 value: 57.17 - type: ndcg_at_5 value: 61.402 - type: ndcg_at_10 value: 64.59899999999999 - type: ndcg_at_20 value: 66.24600000000001 - type: ndcg_at_100 value: 67.522 - type: ndcg_at_1000 value: 67.849 - type: map_at_1 value: 40.6 - type: map_at_3 value: 53.055 - type: map_at_5 value: 55.67100000000001 - type: map_at_10 value: 57.160999999999994 - type: map_at_20 value: 57.701 - type: map_at_100 value: 57.926 - type: map_at_1000 value: 57.940999999999995 - type: recall_at_1 value: 40.6 - type: recall_at_3 value: 65.766 - type: recall_at_5 value: 75.466 - type: recall_at_10 value: 84.654 - type: recall_at_20 value: 90.60000000000001 - type: recall_at_100 value: 96.854 - type: recall_at_1000 value: 99.232 - type: precision_at_1 value: 45.394 - type: precision_at_3 value: 25.521 - type: precision_at_5 value: 17.781 - type: precision_at_10 value: 10.098 - type: precision_at_20 value: 5.4559999999999995 - type: precision_at_100 value: 1.176 - type: precision_at_1000 value: 0.121 - type: mrr_at_1 value: 45.394 - type: mrr_at_3 value: 56.3104 - type: mrr_at_5 value: 58.36130000000001 - type: mrr_at_10 value: 59.5005 - type: mrr_at_20 value: 59.866299999999995 - type: mrr_at_100 value: 59.9998 - type: mrr_at_1000 value: 60.0097 - type: nauc_ndcg_at_1_max value: 26.4568 - type: nauc_ndcg_at_1_std value: -5.4489 - type: nauc_ndcg_at_1_diff1 value: 39.8496 - type: nauc_ndcg_at_3_max value: 31.1415 - type: nauc_ndcg_at_3_std value: -7.0855 - type: nauc_ndcg_at_3_diff1 value: 36.4212 - type: nauc_ndcg_at_5_max value: 32.819199999999995 - type: nauc_ndcg_at_5_std value: -5.775 - type: nauc_ndcg_at_5_diff1 value: 35.7043 - type: nauc_ndcg_at_10_max value: 33.0741 - type: nauc_ndcg_at_10_std value: -4.5213 - type: nauc_ndcg_at_10_diff1 value: 36.19 - type: nauc_ndcg_at_20_max value: 33.266400000000004 - type: nauc_ndcg_at_20_std value: -3.5874 - type: nauc_ndcg_at_20_diff1 value: 36.2496 - type: nauc_ndcg_at_100_max value: 32.7922 - type: nauc_ndcg_at_100_std value: -3.2738000000000005 - type: nauc_ndcg_at_100_diff1 value: 36.5649 - type: nauc_ndcg_at_1000_max value: 32.237500000000004 - type: nauc_ndcg_at_1000_std value: -3.9578 - type: nauc_ndcg_at_1000_diff1 value: 36.717499999999994 - type: nauc_map_at_1_max value: 24.3328 - type: nauc_map_at_1_std value: -7.889799999999999 - type: nauc_map_at_1_diff1 value: 40.0251 - type: nauc_map_at_3_max value: 29.6774 - type: nauc_map_at_3_std value: -7.5739 - type: nauc_map_at_3_diff1 value: 37.459900000000005 - type: nauc_map_at_5_max value: 30.6947 - type: nauc_map_at_5_std value: -6.7940000000000005 - type: nauc_map_at_5_diff1 value: 37.0909 - type: nauc_map_at_10_max value: 30.723899999999997 - type: nauc_map_at_10_std value: -6.2581999999999995 - type: nauc_map_at_10_diff1 value: 37.1775 - type: nauc_map_at_20_max value: 30.7861 - type: nauc_map_at_20_std value: -5.9957 - type: nauc_map_at_20_diff1 value: 37.209900000000005 - type: nauc_map_at_100_max value: 30.7336 - type: nauc_map_at_100_std value: -5.909 - type: nauc_map_at_100_diff1 value: 37.2446 - 
type: nauc_map_at_1000_max value: 30.7142 - type: nauc_map_at_1000_std value: -5.9306 - type: nauc_map_at_1000_diff1 value: 37.25 - type: nauc_recall_at_1_max value: 24.3328 - type: nauc_recall_at_1_std value: -7.889799999999999 - type: nauc_recall_at_1_diff1 value: 40.0251 - type: nauc_recall_at_3_max value: 34.2412 - type: nauc_recall_at_3_std value: -7.5245999999999995 - type: nauc_recall_at_3_diff1 value: 32.7498 - type: nauc_recall_at_5_max value: 39.6798 - type: nauc_recall_at_5_std value: -4.1992 - type: nauc_recall_at_5_diff1 value: 29.5385 - type: nauc_recall_at_10_max value: 44.5052 - type: nauc_recall_at_10_std value: 2.4045 - type: nauc_recall_at_10_diff1 value: 30.051499999999997 - type: nauc_recall_at_20_max value: 52.8161 - type: nauc_recall_at_20_std value: 14.1647 - type: nauc_recall_at_20_diff1 value: 27.7847 - type: nauc_recall_at_100_max value: 74.644 - type: nauc_recall_at_100_std value: 54.927099999999996 - type: nauc_recall_at_100_diff1 value: 27.507900000000003 - type: nauc_recall_at_1000_max value: 85.1144 - type: nauc_recall_at_1000_std value: 80.0515 - type: nauc_recall_at_1000_diff1 value: 37.028299999999994 - type: nauc_precision_at_1_max value: 26.4568 - type: nauc_precision_at_1_std value: -5.4489 - type: nauc_precision_at_1_diff1 value: 39.8496 - type: nauc_precision_at_3_max value: 30.0271 - type: nauc_precision_at_3_std value: -0.8751 - type: nauc_precision_at_3_diff1 value: 21.8662 - type: nauc_precision_at_5_max value: 28.4063 - type: nauc_precision_at_5_std value: 4.1253 - type: nauc_precision_at_5_diff1 value: 13.1855 - type: nauc_precision_at_10_max value: 22.6524 - type: nauc_precision_at_10_std value: 10.340399999999999 - type: nauc_precision_at_10_diff1 value: 5.4243 - type: nauc_precision_at_20_max value: 18.4481 - type: nauc_precision_at_20_std value: 16.0409 - type: nauc_precision_at_20_diff1 value: -0.9561 - type: nauc_precision_at_100_max value: 9.361600000000001 - type: nauc_precision_at_100_std value: 19.1145 - type: nauc_precision_at_100_diff1 value: -8.0049 - type: nauc_precision_at_1000_max value: 3.0707 - type: nauc_precision_at_1000_std value: 15.259900000000002 - type: nauc_precision_at_1000_diff1 value: -10.190000000000001 - type: nauc_mrr_at_1_max value: 26.4568 - type: nauc_mrr_at_1_std value: -5.4489 - type: nauc_mrr_at_1_diff1 value: 39.8496 - type: nauc_mrr_at_3_max value: 30.262299999999996 - type: nauc_mrr_at_3_std value: -5.428100000000001 - type: nauc_mrr_at_3_diff1 value: 36.878899999999994 - type: nauc_mrr_at_5_max value: 30.813000000000002 - type: nauc_mrr_at_5_std value: -4.7534 - type: nauc_mrr_at_5_diff1 value: 36.5968 - type: nauc_mrr_at_10_max value: 30.857499999999998 - type: nauc_mrr_at_10_std value: -4.4249 - type: nauc_mrr_at_10_diff1 value: 36.973 - type: nauc_mrr_at_20_max value: 30.8228 - type: nauc_mrr_at_20_std value: -4.3275 - type: nauc_mrr_at_20_diff1 value: 37.0266 - type: nauc_mrr_at_100_max value: 30.7442 - type: nauc_mrr_at_100_std value: -4.3408 - type: nauc_mrr_at_100_diff1 value: 37.060500000000005 - type: nauc_mrr_at_1000_max value: 30.7286 - type: nauc_mrr_at_1000_std value: -4.36 - type: nauc_mrr_at_1000_diff1 value: 37.0647 - type: main_score value: 64.59899999999999 - task: type: Retrieval dataset: name: MTEB QuoraRetrieval (default) type: mteb/quora config: default split: test revision: e4e08e0b7dbe3c8700f0daef558ff32256715259 metrics: - type: ndcg_at_1 value: 82.01 - type: ndcg_at_3 value: 86.035 - type: ndcg_at_5 value: 87.628 - type: ndcg_at_10 value: 88.735 - type: ndcg_at_20 value: 89.375 
- type: ndcg_at_100 value: 89.89 - type: ndcg_at_1000 value: 90.001 - type: map_at_1 value: 71.126 - type: map_at_3 value: 82.14399999999999 - type: map_at_5 value: 84.03500000000001 - type: map_at_10 value: 85.064 - type: map_at_20 value: 85.469 - type: map_at_100 value: 85.673 - type: map_at_1000 value: 85.69099999999999 - type: recall_at_1 value: 71.126 - type: recall_at_3 value: 87.76 - type: recall_at_5 value: 92.286 - type: recall_at_10 value: 95.56 - type: recall_at_20 value: 97.655 - type: recall_at_100 value: 99.497 - type: recall_at_1000 value: 99.979 - type: precision_at_1 value: 82.01 - type: precision_at_3 value: 37.653 - type: precision_at_5 value: 24.779999999999998 - type: precision_at_10 value: 13.441 - type: precision_at_20 value: 7.114 - type: precision_at_100 value: 1.524 - type: precision_at_1000 value: 0.157 - type: mrr_at_1 value: 81.96 - type: mrr_at_3 value: 87.105 - type: mrr_at_5 value: 87.779 - type: mrr_at_10 value: 88.02680000000001 - type: mrr_at_20 value: 88.10470000000001 - type: mrr_at_100 value: 88.126 - type: mrr_at_1000 value: 88.127 - type: nauc_ndcg_at_1_max value: 37.866499999999995 - type: nauc_ndcg_at_1_std value: -40.9317 - type: nauc_ndcg_at_1_diff1 value: 78.09089999999999 - type: nauc_ndcg_at_3_max value: 35.4917 - type: nauc_ndcg_at_3_std value: -48.968 - type: nauc_ndcg_at_3_diff1 value: 75.90050000000001 - type: nauc_ndcg_at_5_max value: 35.898799999999994 - type: nauc_ndcg_at_5_std value: -50.5572 - type: nauc_ndcg_at_5_diff1 value: 76.6471 - type: nauc_ndcg_at_10_max value: 36.7786 - type: nauc_ndcg_at_10_std value: -49.6733 - type: nauc_ndcg_at_10_diff1 value: 76.8147 - type: nauc_ndcg_at_20_max value: 37.1374 - type: nauc_ndcg_at_20_std value: -47.9144 - type: nauc_ndcg_at_20_diff1 value: 76.6412 - type: nauc_ndcg_at_100_max value: 37.3452 - type: nauc_ndcg_at_100_std value: -46.0007 - type: nauc_ndcg_at_100_diff1 value: 76.6194 - type: nauc_ndcg_at_1000_max value: 37.4848 - type: nauc_ndcg_at_1000_std value: -45.6578 - type: nauc_ndcg_at_1000_diff1 value: 76.6001 - type: nauc_map_at_1_max value: 26.7109 - type: nauc_map_at_1_std value: -42.9943 - type: nauc_map_at_1_diff1 value: 80.5567 - type: nauc_map_at_3_max value: 32.8491 - type: nauc_map_at_3_std value: -51.64 - type: nauc_map_at_3_diff1 value: 77.29700000000001 - type: nauc_map_at_5_max value: 34.4071 - type: nauc_map_at_5_std value: -51.6503 - type: nauc_map_at_5_diff1 value: 77.28920000000001 - type: nauc_map_at_10_max value: 35.4934 - type: nauc_map_at_10_std value: -50.0995 - type: nauc_map_at_10_diff1 value: 76.9983 - type: nauc_map_at_20_max value: 35.8087 - type: nauc_map_at_20_std value: -48.8069 - type: nauc_map_at_20_diff1 value: 76.8026 - type: nauc_map_at_100_max value: 35.8928 - type: nauc_map_at_100_std value: -48.0561 - type: nauc_map_at_100_diff1 value: 76.7244 - type: nauc_map_at_1000_max value: 35.924499999999995 - type: nauc_map_at_1000_std value: -47.981899999999996 - type: nauc_map_at_1000_diff1 value: 76.7183 - type: nauc_recall_at_1_max value: 26.7109 - type: nauc_recall_at_1_std value: -42.9943 - type: nauc_recall_at_1_diff1 value: 80.5567 - type: nauc_recall_at_3_max value: 29.066300000000002 - type: nauc_recall_at_3_std value: -60.1536 - type: nauc_recall_at_3_diff1 value: 73.32469999999999 - type: nauc_recall_at_5_max value: 30.1025 - type: nauc_recall_at_5_std value: -67.8779 - type: nauc_recall_at_5_diff1 value: 73.13340000000001 - type: nauc_recall_at_10_max value: 33.771699999999996 - type: nauc_recall_at_10_std value: -72.4753 - type: 
nauc_recall_at_10_diff1 value: 74.168 - type: nauc_recall_at_20_max value: 34.8005 - type: nauc_recall_at_20_std value: -68.60579999999999 - type: nauc_recall_at_20_diff1 value: 72.6083 - type: nauc_recall_at_100_max value: 33.394800000000004 - type: nauc_recall_at_100_std value: -49.7417 - type: nauc_recall_at_100_diff1 value: 73.5857 - type: nauc_recall_at_1000_max value: 48.8898 - type: nauc_recall_at_1000_std value: 54.583800000000004 - type: nauc_recall_at_1000_diff1 value: 64.0609 - type: nauc_precision_at_1_max value: 37.866499999999995 - type: nauc_precision_at_1_std value: -40.9317 - type: nauc_precision_at_1_diff1 value: 78.09089999999999 - type: nauc_precision_at_3_max value: 8.2308 - type: nauc_precision_at_3_std value: 5.0732 - type: nauc_precision_at_3_diff1 value: -19.919 - type: nauc_precision_at_5_max value: 3.0249 - type: nauc_precision_at_5_std value: 16.7897 - type: nauc_precision_at_5_diff1 value: -32.0086 - type: nauc_precision_at_10_max value: -0.5459999999999999 - type: nauc_precision_at_10_std value: 27.1262 - type: nauc_precision_at_10_diff1 value: -38.8076 - type: nauc_precision_at_20_max value: -2.7663 - type: nauc_precision_at_20_std value: 34.1696 - type: nauc_precision_at_20_diff1 value: -42.1088 - type: nauc_precision_at_100_max value: -5.0689 - type: nauc_precision_at_100_std value: 40.023599999999995 - type: nauc_precision_at_100_diff1 value: -43.8996 - type: nauc_precision_at_1000_max value: -5.1495 - type: nauc_precision_at_1000_std value: 41.4194 - type: nauc_precision_at_1000_diff1 value: -44.219 - type: nauc_mrr_at_1_max value: 37.7695 - type: nauc_mrr_at_1_std value: -41.0563 - type: nauc_mrr_at_1_diff1 value: 78.1854 - type: nauc_mrr_at_3_max value: 38.3824 - type: nauc_mrr_at_3_std value: -43.7797 - type: nauc_mrr_at_3_diff1 value: 77.0796 - type: nauc_mrr_at_5_max value: 38.5156 - type: nauc_mrr_at_5_std value: -43.8092 - type: nauc_mrr_at_5_diff1 value: 77.31710000000001 - type: nauc_mrr_at_10_max value: 38.523 - type: nauc_mrr_at_10_std value: -43.5039 - type: nauc_mrr_at_10_diff1 value: 77.375 - type: nauc_mrr_at_20_max value: 38.4635 - type: nauc_mrr_at_20_std value: -43.3619 - type: nauc_mrr_at_20_diff1 value: 77.3565 - type: nauc_mrr_at_100_max value: 38.4502 - type: nauc_mrr_at_100_std value: -43.3315 - type: nauc_mrr_at_100_diff1 value: 77.3584 - type: nauc_mrr_at_1000_max value: 38.449 - type: nauc_mrr_at_1000_std value: -43.3339 - type: nauc_mrr_at_1000_diff1 value: 77.3584 - type: main_score value: 88.735 - task: type: Clustering dataset: name: MTEB RedditClustering (default) type: mteb/reddit-clustering config: default split: test revision: 24640382cdbf8abc73003fb0fa6d111a705499eb metrics: - type: v_measure value: 49.1271 - type: v_measure_std value: 4.5517 - type: main_score value: 49.1271 - task: type: Clustering dataset: name: MTEB RedditClusteringP2P (default) type: mteb/reddit-clustering-p2p config: default split: test revision: 385e3cb46b4cfa89021f56c4380204149d0efe33 metrics: - type: v_measure value: 61.0626 - type: v_measure_std value: 12.6364 - type: main_score value: 61.0626 - task: type: Retrieval dataset: name: MTEB SCIDOCS (default) type: mteb/scidocs config: default split: test revision: f8c2fcf00f625baaa80f62ec5bd9e1fff3b8ae88 metrics: - type: ndcg_at_1 value: 23.7 - type: ndcg_at_3 value: 19.346 - type: ndcg_at_5 value: 17.044999999999998 - type: ndcg_at_10 value: 20.347 - type: ndcg_at_20 value: 23.237 - type: ndcg_at_100 value: 27.923 - type: ndcg_at_1000 value: 32.891999999999996 - type: map_at_1 value: 4.813 - type: 
map_at_3 value: 8.688 - type: map_at_5 value: 10.41 - type: map_at_10 value: 12.107999999999999 - type: map_at_20 value: 13.187 - type: map_at_100 value: 14.113000000000001 - type: map_at_1000 value: 14.383000000000001 - type: recall_at_1 value: 4.813 - type: recall_at_3 value: 11.022 - type: recall_at_5 value: 15.242 - type: recall_at_10 value: 21.308 - type: recall_at_20 value: 28.1 - type: recall_at_100 value: 43.335 - type: recall_at_1000 value: 67.672 - type: precision_at_1 value: 23.7 - type: precision_at_3 value: 18.099999999999998 - type: precision_at_5 value: 15.0 - type: precision_at_10 value: 10.48 - type: precision_at_20 value: 6.909999999999999 - type: precision_at_100 value: 2.133 - type: precision_at_1000 value: 0.333 - type: mrr_at_1 value: 23.7 - type: mrr_at_3 value: 31.35 - type: mrr_at_5 value: 33.650000000000006 - type: mrr_at_10 value: 34.9399 - type: mrr_at_20 value: 35.5429 - type: mrr_at_100 value: 35.9342 - type: mrr_at_1000 value: 35.9943 - type: nauc_ndcg_at_1_max value: 20.214499999999997 - type: nauc_ndcg_at_1_std value: 7.2459999999999996 - type: nauc_ndcg_at_1_diff1 value: 26.8353 - type: nauc_ndcg_at_3_max value: 23.3459 - type: nauc_ndcg_at_3_std value: 10.9732 - type: nauc_ndcg_at_3_diff1 value: 21.0618 - type: nauc_ndcg_at_5_max value: 24.5147 - type: nauc_ndcg_at_5_std value: 13.309000000000001 - type: nauc_ndcg_at_5_diff1 value: 20.0975 - type: nauc_ndcg_at_10_max value: 27.0937 - type: nauc_ndcg_at_10_std value: 16.4516 - type: nauc_ndcg_at_10_diff1 value: 19.9585 - type: nauc_ndcg_at_20_max value: 28.503600000000002 - type: nauc_ndcg_at_20_std value: 19.1956 - type: nauc_ndcg_at_20_diff1 value: 19.508200000000002 - type: nauc_ndcg_at_100_max value: 30.7317 - type: nauc_ndcg_at_100_std value: 23.2169 - type: nauc_ndcg_at_100_diff1 value: 19.7085 - type: nauc_ndcg_at_1000_max value: 30.3307 - type: nauc_ndcg_at_1000_std value: 24.7664 - type: nauc_ndcg_at_1000_diff1 value: 19.0469 - type: nauc_map_at_1_max value: 20.3702 - type: nauc_map_at_1_std value: 7.219200000000001 - type: nauc_map_at_1_diff1 value: 27.0193 - type: nauc_map_at_3_max value: 23.0558 - type: nauc_map_at_3_std value: 9.411999999999999 - type: nauc_map_at_3_diff1 value: 21.3691 - type: nauc_map_at_5_max value: 23.763 - type: nauc_map_at_5_std value: 11.228 - type: nauc_map_at_5_diff1 value: 20.4299 - type: nauc_map_at_10_max value: 25.6655 - type: nauc_map_at_10_std value: 14.0481 - type: nauc_map_at_10_diff1 value: 19.7937 - type: nauc_map_at_20_max value: 26.5994 - type: nauc_map_at_20_std value: 15.820400000000001 - type: nauc_map_at_20_diff1 value: 19.476499999999998 - type: nauc_map_at_100_max value: 27.4895 - type: nauc_map_at_100_std value: 17.262 - type: nauc_map_at_100_diff1 value: 19.4661 - type: nauc_map_at_1000_max value: 27.5301 - type: nauc_map_at_1000_std value: 17.4927 - type: nauc_map_at_1000_diff1 value: 19.4691 - type: nauc_recall_at_1_max value: 20.3702 - type: nauc_recall_at_1_std value: 7.219200000000001 - type: nauc_recall_at_1_diff1 value: 27.0193 - type: nauc_recall_at_3_max value: 23.6476 - type: nauc_recall_at_3_std value: 11.9176 - type: nauc_recall_at_3_diff1 value: 18.1657 - type: nauc_recall_at_5_max value: 24.8053 - type: nauc_recall_at_5_std value: 15.5205 - type: nauc_recall_at_5_diff1 value: 16.4924 - type: nauc_recall_at_10_max value: 27.9864 - type: nauc_recall_at_10_std value: 20.1496 - type: nauc_recall_at_10_diff1 value: 16.0154 - type: nauc_recall_at_20_max value: 29.0157 - type: nauc_recall_at_20_std value: 24.374100000000002 - type: 
nauc_recall_at_20_diff1 value: 14.174800000000001 - type: nauc_recall_at_100_max value: 31.245299999999997 - type: nauc_recall_at_100_std value: 32.161699999999996 - type: nauc_recall_at_100_diff1 value: 12.9714 - type: nauc_recall_at_1000_max value: 25.6486 - type: nauc_recall_at_1000_std value: 37.1526 - type: nauc_recall_at_1000_diff1 value: 6.0907 - type: nauc_precision_at_1_max value: 20.214499999999997 - type: nauc_precision_at_1_std value: 7.2459999999999996 - type: nauc_precision_at_1_diff1 value: 26.8353 - type: nauc_precision_at_3_max value: 23.8245 - type: nauc_precision_at_3_std value: 12.2589 - type: nauc_precision_at_3_diff1 value: 18.192800000000002 - type: nauc_precision_at_5_max value: 25.3681 - type: nauc_precision_at_5_std value: 15.947700000000001 - type: nauc_precision_at_5_diff1 value: 16.6931 - type: nauc_precision_at_10_max value: 28.2682 - type: nauc_precision_at_10_std value: 20.2673 - type: nauc_precision_at_10_diff1 value: 15.8977 - type: nauc_precision_at_20_max value: 29.3989 - type: nauc_precision_at_20_std value: 24.5769 - type: nauc_precision_at_20_diff1 value: 14.1994 - type: nauc_precision_at_100_max value: 31.418000000000003 - type: nauc_precision_at_100_std value: 32.0978 - type: nauc_precision_at_100_diff1 value: 12.768199999999998 - type: nauc_precision_at_1000_max value: 25.501099999999997 - type: nauc_precision_at_1000_std value: 36.477399999999996 - type: nauc_precision_at_1000_diff1 value: 5.5335 - type: nauc_mrr_at_1_max value: 20.214499999999997 - type: nauc_mrr_at_1_std value: 7.2459999999999996 - type: nauc_mrr_at_1_diff1 value: 26.8353 - type: nauc_mrr_at_3_max value: 22.7925 - type: nauc_mrr_at_3_std value: 10.6945 - type: nauc_mrr_at_3_diff1 value: 23.6308 - type: nauc_mrr_at_5_max value: 23.427799999999998 - type: nauc_mrr_at_5_std value: 11.8634 - type: nauc_mrr_at_5_diff1 value: 23.0875 - type: nauc_mrr_at_10_max value: 24.0918 - type: nauc_mrr_at_10_std value: 12.4753 - type: nauc_mrr_at_10_diff1 value: 23.352999999999998 - type: nauc_mrr_at_20_max value: 24.078 - type: nauc_mrr_at_20_std value: 12.5849 - type: nauc_mrr_at_20_diff1 value: 23.3351 - type: nauc_mrr_at_100_max value: 24.0858 - type: nauc_mrr_at_100_std value: 12.5772 - type: nauc_mrr_at_100_diff1 value: 23.4778 - type: nauc_mrr_at_1000_max value: 24.058799999999998 - type: nauc_mrr_at_1000_std value: 12.549 - type: nauc_mrr_at_1000_diff1 value: 23.4713 - type: main_score value: 20.347 - task: type: STS dataset: name: MTEB SICK-R (default) type: mteb/sickr-sts config: default split: test revision: 20a6d6f312dd54037fe07a32d58e5e168867909d metrics: - type: pearson value: 75.7747 - type: spearman value: 71.3142 - type: cosine_pearson value: 75.7747 - type: cosine_spearman value: 71.3142 - type: manhattan_pearson value: 73.8759 - type: manhattan_spearman value: 71.1003 - type: euclidean_pearson value: 74.088 - type: euclidean_spearman value: 71.3142 - type: main_score value: 71.3142 - task: type: STS dataset: name: MTEB STS12 (default) type: mteb/sts12-sts config: default split: test revision: a0d554a64d88156834ff5ae9920b964011b16384 metrics: - type: pearson value: 72.5903 - type: spearman value: 70.6581 - type: cosine_pearson value: 72.5903 - type: cosine_spearman value: 70.6581 - type: manhattan_pearson value: 69.2077 - type: manhattan_spearman value: 70.4521 - type: euclidean_pearson value: 69.41720000000001 - type: euclidean_spearman value: 70.6581 - type: main_score value: 70.6581 - task: type: STS dataset: name: MTEB STS13 (default) type: mteb/sts13-sts config: default 
split: test revision: 7e90230a92c190f1bf69ae9002b8cea547a64cca metrics: - type: pearson value: 73.1686 - type: spearman value: 77.4225 - type: cosine_pearson value: 73.1686 - type: cosine_spearman value: 77.4225 - type: manhattan_pearson value: 76.2481 - type: manhattan_spearman value: 77.325 - type: euclidean_pearson value: 76.3568 - type: euclidean_spearman value: 77.4225 - type: main_score value: 77.4225 - task: type: STS dataset: name: MTEB STS14 (default) type: mteb/sts14-sts config: default split: test revision: 6031580fec1f6af667f0bd2da0a551cf4f0b2375 metrics: - type: pearson value: 74.46340000000001 - type: spearman value: 72.9162 - type: cosine_pearson value: 74.46340000000001 - type: cosine_spearman value: 72.9162 - type: manhattan_pearson value: 73.8079 - type: manhattan_spearman value: 72.8704 - type: euclidean_pearson value: 73.8244 - type: euclidean_spearman value: 72.9162 - type: main_score value: 72.9162 - task: type: STS dataset: name: MTEB STS15 (default) type: mteb/sts15-sts config: default split: test revision: ae752c7c21bf194d8b67fd573edf7ae58183cbe3 metrics: - type: pearson value: 80.1161 - type: spearman value: 81.83200000000001 - type: cosine_pearson value: 80.1161 - type: cosine_spearman value: 81.83200000000001 - type: manhattan_pearson value: 81.573 - type: manhattan_spearman value: 81.807 - type: euclidean_pearson value: 81.59490000000001 - type: euclidean_spearman value: 81.83200000000001 - type: main_score value: 81.83200000000001 - task: type: STS dataset: name: MTEB STS16 (default) type: mteb/sts16-sts config: default split: test revision: 4d8694f8f0e0100860b497b999b3dbed754a0513 metrics: - type: pearson value: 78.8244 - type: spearman value: 81.2262 - type: cosine_pearson value: 78.8244 - type: cosine_spearman value: 81.2262 - type: manhattan_pearson value: 80.6177 - type: manhattan_spearman value: 81.1361 - type: euclidean_pearson value: 80.7347 - type: euclidean_spearman value: 81.2262 - type: main_score value: 81.2262 - task: type: STS dataset: name: MTEB STS17 (es-en) type: mteb/sts17-crosslingual-sts config: es-en split: test revision: faeb762787bd10488a50c8b5be4a3b82e411949c metrics: - type: pearson value: 67.9751 - type: spearman value: 68.92099999999999 - type: cosine_pearson value: 67.9751 - type: cosine_spearman value: 68.92099999999999 - type: manhattan_pearson value: 68.9355 - type: manhattan_spearman value: 68.777 - type: euclidean_pearson value: 69.11410000000001 - type: euclidean_spearman value: 68.92099999999999 - type: main_score value: 68.92099999999999 - task: type: STS dataset: name: MTEB STS17 (fr-en) type: mteb/sts17-crosslingual-sts config: fr-en split: test revision: faeb762787bd10488a50c8b5be4a3b82e411949c metrics: - type: pearson value: 72.08449999999999 - type: spearman value: 74.6931 - type: cosine_pearson value: 72.08449999999999 - type: cosine_spearman value: 74.6931 - type: manhattan_pearson value: 73.52 - type: manhattan_spearman value: 74.7097 - type: euclidean_pearson value: 73.62180000000001 - type: euclidean_spearman value: 74.6931 - type: main_score value: 74.6931 - task: type: STS dataset: name: MTEB STS17 (en-en) type: mteb/sts17-crosslingual-sts config: en-en split: test revision: faeb762787bd10488a50c8b5be4a3b82e411949c metrics: - type: pearson value: 80.528 - type: spearman value: 84.10459999999999 - type: cosine_pearson value: 80.528 - type: cosine_spearman value: 84.10459999999999 - type: manhattan_pearson value: 83.1537 - type: manhattan_spearman value: 84.0952 - type: euclidean_pearson value: 83.337 - type: 
euclidean_spearman value: 84.10459999999999 - type: main_score value: 84.10459999999999 - task: type: STS dataset: name: MTEB STS17 (en-tr) type: mteb/sts17-crosslingual-sts config: en-tr split: test revision: faeb762787bd10488a50c8b5be4a3b82e411949c metrics: - type: pearson value: 49.641400000000004 - type: spearman value: 48.9413 - type: cosine_pearson value: 49.641400000000004 - type: cosine_spearman value: 48.9413 - type: manhattan_pearson value: 51.434000000000005 - type: manhattan_spearman value: 49.1595 - type: euclidean_pearson value: 50.867799999999995 - type: euclidean_spearman value: 48.9413 - type: main_score value: 48.9413 - task: type: STS dataset: name: MTEB STS17 (it-en) type: mteb/sts17-crosslingual-sts config: it-en split: test revision: faeb762787bd10488a50c8b5be4a3b82e411949c metrics: - type: pearson value: 71.2577 - type: spearman value: 73.82419999999999 - type: cosine_pearson value: 71.2577 - type: cosine_spearman value: 73.82419999999999 - type: manhattan_pearson value: 71.9329 - type: manhattan_spearman value: 73.4651 - type: euclidean_pearson value: 72.2771 - type: euclidean_spearman value: 73.82419999999999 - type: main_score value: 73.82419999999999 - task: type: STS dataset: name: MTEB STS17 (nl-en) type: mteb/sts17-crosslingual-sts config: nl-en split: test revision: faeb762787bd10488a50c8b5be4a3b82e411949c metrics: - type: pearson value: 64.1562 - type: spearman value: 64.8766 - type: cosine_pearson value: 64.1562 - type: cosine_spearman value: 64.8766 - type: manhattan_pearson value: 64.16579999999999 - type: manhattan_spearman value: 64.1931 - type: euclidean_pearson value: 64.6169 - type: euclidean_spearman value: 64.8766 - type: main_score value: 64.8766 - task: type: STS dataset: name: MTEB STS17 (en-ar) type: mteb/sts17-crosslingual-sts config: en-ar split: test revision: faeb762787bd10488a50c8b5be4a3b82e411949c metrics: - type: pearson value: 42.257400000000004 - type: spearman value: 43.2176 - type: cosine_pearson value: 42.257400000000004 - type: cosine_spearman value: 43.2176 - type: manhattan_pearson value: 43.5359 - type: manhattan_spearman value: 42.4143 - type: euclidean_pearson value: 43.6717 - type: euclidean_spearman value: 43.2176 - type: main_score value: 43.2176 - task: type: STS dataset: name: MTEB STS17 (en-de) type: mteb/sts17-crosslingual-sts config: en-de split: test revision: faeb762787bd10488a50c8b5be4a3b82e411949c metrics: - type: pearson value: 74.0088 - type: spearman value: 75.8687 - type: cosine_pearson value: 74.0088 - type: cosine_spearman value: 75.8687 - type: manhattan_pearson value: 74.8505 - type: manhattan_spearman value: 75.6101 - type: euclidean_pearson value: 75.1303 - type: euclidean_spearman value: 75.8687 - type: main_score value: 75.8687 - task: type: STS dataset: name: MTEB STS22 (zh-en) type: mteb/sts22-crosslingual-sts config: zh-en split: test revision: de9d86b3b84231dc21f76c7b7af1f28e2f57f6e3 metrics: - type: pearson value: 68.0842 - type: spearman value: 69.4346 - type: cosine_pearson value: 68.0842 - type: cosine_spearman value: 69.4346 - type: manhattan_pearson value: 69.9982 - type: manhattan_spearman value: 69.8952 - type: euclidean_pearson value: 69.6375 - type: euclidean_spearman value: 69.4346 - type: main_score value: 69.4346 - task: type: STS dataset: name: MTEB STS22 (es-en) type: mteb/sts22-crosslingual-sts config: es-en split: test revision: de9d86b3b84231dc21f76c7b7af1f28e2f57f6e3 metrics: - type: pearson value: 76.3695 - type: spearman value: 78.88730000000001 - type: cosine_pearson value: 
76.3695 - type: cosine_spearman value: 78.88730000000001 - type: manhattan_pearson value: 79.0721 - type: manhattan_spearman value: 79.1151 - type: euclidean_pearson value: 78.783 - type: euclidean_spearman value: 78.88730000000001 - type: main_score value: 78.88730000000001 - task: type: STS dataset: name: MTEB STS22 (de-en) type: mteb/sts22-crosslingual-sts config: de-en split: test revision: de9d86b3b84231dc21f76c7b7af1f28e2f57f6e3 metrics: - type: pearson value: 60.59139999999999 - type: spearman value: 52.692099999999996 - type: cosine_pearson value: 60.59139999999999 - type: cosine_spearman value: 52.692099999999996 - type: manhattan_pearson value: 64.66499999999999 - type: manhattan_spearman value: 53.09009999999999 - type: euclidean_pearson value: 64.5541 - type: euclidean_spearman value: 52.692099999999996 - type: main_score value: 52.692099999999996 - task: type: STS dataset: name: MTEB STS22 (pl-en) type: mteb/sts22-crosslingual-sts config: pl-en split: test revision: de9d86b3b84231dc21f76c7b7af1f28e2f57f6e3 metrics: - type: pearson value: 77.8405 - type: spearman value: 76.6188 - type: cosine_pearson value: 77.8405 - type: cosine_spearman value: 76.6188 - type: manhattan_pearson value: 76.6598 - type: manhattan_spearman value: 76.3583 - type: euclidean_pearson value: 77.1442 - type: euclidean_spearman value: 76.6188 - type: main_score value: 76.6188 - task: type: STS dataset: name: MTEB STS22 (en) type: mteb/sts22-crosslingual-sts config: en split: test revision: de9d86b3b84231dc21f76c7b7af1f28e2f57f6e3 metrics: - type: pearson value: 69.8017 - type: spearman value: 68.7734 - type: cosine_pearson value: 69.8017 - type: cosine_spearman value: 68.7734 - type: manhattan_pearson value: 70.6884 - type: manhattan_spearman value: 68.2974 - type: euclidean_pearson value: 70.7968 - type: euclidean_spearman value: 68.7734 - type: main_score value: 68.7734 - task: type: STS dataset: name: MTEB STSBenchmark (default) type: mteb/stsbenchmark-sts config: default split: test revision: b0fddb56ed78048fa8b90373c8a3cfc37b684831 metrics: - type: pearson value: 73.3293 - type: spearman value: 76.00919999999999 - type: cosine_pearson value: 73.3293 - type: cosine_spearman value: 76.00919999999999 - type: manhattan_pearson value: 75.0184 - type: manhattan_spearman value: 75.8014 - type: euclidean_pearson value: 75.2638 - type: euclidean_spearman value: 76.00919999999999 - type: main_score value: 76.00919999999999 - task: type: Reranking dataset: name: MTEB SciDocsRR (default) type: mteb/scidocs-reranking config: default split: test revision: d3c5e1fc0b855ab6097bf1cda04dd73947d7caab metrics: - type: map value: 77.3669 - type: mrr value: 93.5985 - type: nAUC_map_max value: 50.2355 - type: nAUC_map_std value: 65.5401 - type: nAUC_map_diff1 value: 9.6333 - type: nAUC_mrr_max value: 76.5201 - type: nAUC_mrr_std value: 74.7401 - type: nAUC_mrr_diff1 value: 53.170899999999996 - type: main_score value: 77.3669 - task: type: Retrieval dataset: name: MTEB SciFact (default) type: mteb/scifact config: default split: test revision: 0228b52cf27578f30900b9e5271d331663a030d7 metrics: - type: ndcg_at_1 value: 61.0 - type: ndcg_at_3 value: 67.589 - type: ndcg_at_5 value: 68.948 - type: ndcg_at_10 value: 71.8 - type: ndcg_at_20 value: 72.595 - type: ndcg_at_100 value: 74.138 - type: ndcg_at_1000 value: 74.83800000000001 - type: map_at_1 value: 57.74399999999999 - type: map_at_3 value: 64.866 - type: map_at_5 value: 66.018 - type: map_at_10 value: 67.535 - type: map_at_20 value: 67.77 - type: map_at_100 value: 68.011 - 
type: map_at_1000 value: 68.042 - type: recall_at_1 value: 57.74399999999999 - type: recall_at_3 value: 71.906 - type: recall_at_5 value: 75.344 - type: recall_at_10 value: 83.2 - type: recall_at_20 value: 86.26700000000001 - type: recall_at_100 value: 94.333 - type: recall_at_1000 value: 99.667 - type: precision_at_1 value: 61.0 - type: precision_at_3 value: 26.111 - type: precision_at_5 value: 16.8 - type: precision_at_10 value: 9.5 - type: precision_at_20 value: 4.933 - type: precision_at_100 value: 1.073 - type: precision_at_1000 value: 0.11299999999999999 - type: mrr_at_1 value: 61.0 - type: mrr_at_3 value: 67.4444 - type: mrr_at_5 value: 68.0778 - type: mrr_at_10 value: 69.0483 - type: mrr_at_20 value: 69.2333 - type: mrr_at_100 value: 69.4403 - type: mrr_at_1000 value: 69.4708 - type: nauc_ndcg_at_1_max value: 53.481500000000004 - type: nauc_ndcg_at_1_std value: 8.227 - type: nauc_ndcg_at_1_diff1 value: 72.0771 - type: nauc_ndcg_at_3_max value: 57.0147 - type: nauc_ndcg_at_3_std value: 5.2435 - type: nauc_ndcg_at_3_diff1 value: 68.8841 - type: nauc_ndcg_at_5_max value: 57.4675 - type: nauc_ndcg_at_5_std value: 8.4709 - type: nauc_ndcg_at_5_diff1 value: 67.2977 - type: nauc_ndcg_at_10_max value: 60.3957 - type: nauc_ndcg_at_10_std value: 11.3174 - type: nauc_ndcg_at_10_diff1 value: 67.8332 - type: nauc_ndcg_at_20_max value: 60.3607 - type: nauc_ndcg_at_20_std value: 11.9948 - type: nauc_ndcg_at_20_diff1 value: 68.1122 - type: nauc_ndcg_at_100_max value: 59.5293 - type: nauc_ndcg_at_100_std value: 11.697799999999999 - type: nauc_ndcg_at_100_diff1 value: 68.453 - type: nauc_ndcg_at_1000_max value: 58.8931 - type: nauc_ndcg_at_1000_std value: 10.876199999999999 - type: nauc_ndcg_at_1000_diff1 value: 68.5746 - type: nauc_map_at_1_max value: 49.762299999999996 - type: nauc_map_at_1_std value: -0.2785 - type: nauc_map_at_1_diff1 value: 71.9072 - type: nauc_map_at_3_max value: 54.108599999999996 - type: nauc_map_at_3_std value: 2.0995 - type: nauc_map_at_3_diff1 value: 69.3459 - type: nauc_map_at_5_max value: 55.257 - type: nauc_map_at_5_std value: 5.5776 - type: nauc_map_at_5_diff1 value: 68.3314 - type: nauc_map_at_10_max value: 57.1506 - type: nauc_map_at_10_std value: 7.4561 - type: nauc_map_at_10_diff1 value: 68.8482 - type: nauc_map_at_20_max value: 57.126200000000004 - type: nauc_map_at_20_std value: 7.6833 - type: nauc_map_at_20_diff1 value: 68.9132 - type: nauc_map_at_100_max value: 56.9874 - type: nauc_map_at_100_std value: 7.7405 - type: nauc_map_at_100_diff1 value: 68.9371 - type: nauc_map_at_1000_max value: 56.959199999999996 - type: nauc_map_at_1000_std value: 7.709499999999999 - type: nauc_map_at_1000_diff1 value: 68.9444 - type: nauc_recall_at_1_max value: 49.762299999999996 - type: nauc_recall_at_1_std value: -0.2785 - type: nauc_recall_at_1_diff1 value: 71.9072 - type: nauc_recall_at_3_max value: 58.22580000000001 - type: nauc_recall_at_3_std value: 2.3135 - type: nauc_recall_at_3_diff1 value: 65.5868 - type: nauc_recall_at_5_max value: 60.4096 - type: nauc_recall_at_5_std value: 11.7662 - type: nauc_recall_at_5_diff1 value: 61.5815 - type: nauc_recall_at_10_max value: 72.74629999999999 - type: nauc_recall_at_10_std value: 22.148 - type: nauc_recall_at_10_diff1 value: 62.2401 - type: nauc_recall_at_20_max value: 74.9625 - type: nauc_recall_at_20_std value: 28.1358 - type: nauc_recall_at_20_diff1 value: 63.240700000000004 - type: nauc_recall_at_100_max value: 79.15910000000001 - type: nauc_recall_at_100_std value: 39.4162 - type: nauc_recall_at_100_diff1 value: 65.733 - 
type: nauc_recall_at_1000_max value: 100.0 - type: nauc_recall_at_1000_std value: 72.2222 - type: nauc_recall_at_1000_diff1 value: 72.2222 - type: nauc_precision_at_1_max value: 53.481500000000004 - type: nauc_precision_at_1_std value: 8.227 - type: nauc_precision_at_1_diff1 value: 72.0771 - type: nauc_precision_at_3_max value: 55.675799999999995 - type: nauc_precision_at_3_std value: 23.9615 - type: nauc_precision_at_3_diff1 value: 48.1199 - type: nauc_precision_at_5_max value: 50.503299999999996 - type: nauc_precision_at_5_std value: 36.9259 - type: nauc_precision_at_5_diff1 value: 31.769399999999997 - type: nauc_precision_at_10_max value: 45.4878 - type: nauc_precision_at_10_std value: 44.0469 - type: nauc_precision_at_10_diff1 value: 16.666900000000002 - type: nauc_precision_at_20_max value: 40.2908 - type: nauc_precision_at_20_std value: 47.330600000000004 - type: nauc_precision_at_20_diff1 value: 11.0043 - type: nauc_precision_at_100_max value: 27.4643 - type: nauc_precision_at_100_std value: 53.0014 - type: nauc_precision_at_100_diff1 value: -4.8238 - type: nauc_precision_at_1000_max value: 15.755099999999999 - type: nauc_precision_at_1000_std value: 56.634499999999996 - type: nauc_precision_at_1000_diff1 value: -21.124100000000002 - type: nauc_mrr_at_1_max value: 53.481500000000004 - type: nauc_mrr_at_1_std value: 8.227 - type: nauc_mrr_at_1_diff1 value: 72.0771 - type: nauc_mrr_at_3_max value: 57.6662 - type: nauc_mrr_at_3_std value: 9.2816 - type: nauc_mrr_at_3_diff1 value: 69.8276 - type: nauc_mrr_at_5_max value: 57.6565 - type: nauc_mrr_at_5_std value: 10.422099999999999 - type: nauc_mrr_at_5_diff1 value: 69.0964 - type: nauc_mrr_at_10_max value: 58.000099999999996 - type: nauc_mrr_at_10_std value: 10.957600000000001 - type: nauc_mrr_at_10_diff1 value: 69.0098 - type: nauc_mrr_at_20_max value: 58.0066 - type: nauc_mrr_at_20_std value: 11.0139 - type: nauc_mrr_at_20_diff1 value: 69.1278 - type: nauc_mrr_at_100_max value: 57.9072 - type: nauc_mrr_at_100_std value: 10.9621 - type: nauc_mrr_at_100_diff1 value: 69.1925 - type: nauc_mrr_at_1000_max value: 57.87949999999999 - type: nauc_mrr_at_1000_std value: 10.934199999999999 - type: nauc_mrr_at_1000_diff1 value: 69.2004 - type: main_score value: 71.8 - task: type: PairClassification dataset: name: MTEB SprintDuplicateQuestions (default) type: mteb/sprintduplicatequestions-pairclassification config: default split: test revision: d66bd1f72af766a5cc4b0ca5e00c162f89e8cc46 metrics: - type: similarity_accuracy value: 99.8248 - type: similarity_accuracy_threshold value: 74.6155 - type: similarity_f1 value: 91.12780000000001 - type: similarity_f1_threshold value: 74.2422 - type: similarity_precision value: 91.3568 - type: similarity_recall value: 90.9 - type: similarity_ap value: 96.00319999999999 - type: cosine_accuracy value: 99.8248 - type: cosine_accuracy_threshold value: 74.6155 - type: cosine_f1 value: 91.12780000000001 - type: cosine_f1_threshold value: 74.2422 - type: cosine_precision value: 91.3568 - type: cosine_recall value: 90.9 - type: cosine_ap value: 96.00319999999999 - type: manhattan_accuracy value: 99.8257 - type: manhattan_accuracy_threshold value: 1574.1653 - type: manhattan_f1 value: 91.1531 - type: manhattan_f1_threshold value: 1595.7924 - type: manhattan_precision value: 90.6126 - type: manhattan_recall value: 91.7 - type: manhattan_ap value: 95.9848 - type: euclidean_accuracy value: 99.8248 - type: euclidean_accuracy_threshold value: 71.2523 - type: euclidean_f1 value: 91.12780000000001 - type: 
euclidean_f1_threshold value: 71.7744 - type: euclidean_precision value: 91.3568 - type: euclidean_recall value: 90.9 - type: euclidean_ap value: 96.00319999999999 - type: dot_accuracy value: 99.8248 - type: dot_accuracy_threshold value: 74.6155 - type: dot_f1 value: 91.12780000000001 - type: dot_f1_threshold value: 74.2422 - type: dot_precision value: 91.3568 - type: dot_recall value: 90.9 - type: dot_ap value: 96.00319999999999 - type: max_accuracy value: 99.8257 - type: max_f1 value: 91.1531 - type: max_precision value: 91.3568 - type: max_recall value: 91.7 - type: max_ap value: 96.00319999999999 - type: main_score value: 96.00319999999999 - task: type: Clustering dataset: name: MTEB StackExchangeClustering (default) type: mteb/stackexchange-clustering config: default split: test revision: 6cbc1f7b2bc0622f2e39d2c77fa502909748c259 metrics: - type: v_measure value: 61.3985 - type: v_measure_std value: 5.2151000000000005 - type: main_score value: 61.3985 - task: type: Clustering dataset: name: MTEB StackExchangeClusteringP2P (default) type: mteb/stackexchange-clustering-p2p config: default split: test revision: 815ca46b2622cec33ccafc3735d572c266efdb44 metrics: - type: v_measure value: 36.1433 - type: v_measure_std value: 1.5853 - type: main_score value: 36.1433 - task: type: Reranking dataset: name: MTEB StackOverflowDupQuestions (default) type: mteb/stackoverflowdupquestions-reranking config: default split: test revision: e185fbe320c72810689fc5848eb6114e1ef5ec69 metrics: - type: map value: 50.47580000000001 - type: mrr value: 51.221399999999996 - type: nAUC_map_max value: 10.1311 - type: nAUC_map_std value: 6.239999999999999 - type: nAUC_map_diff1 value: 36.3486 - type: nAUC_mrr_max value: 10.9306 - type: nAUC_mrr_std value: 6.7909 - type: nAUC_mrr_diff1 value: 36.5536 - type: main_score value: 50.47580000000001 - task: type: Summarization dataset: name: MTEB SummEval (default) type: mteb/summeval config: default split: test revision: cda12ad7615edc362dbf25a00fdd61d3b1eaf93c metrics: - type: pearson value: 29.8474 - type: spearman value: 29.391099999999998 - type: cosine_spearman value: 29.391099999999998 - type: cosine_pearson value: 29.8474 - type: dot_spearman value: 29.391099999999998 - type: dot_pearson value: 29.8474 - type: main_score value: 29.391099999999998 - task: type: Retrieval dataset: name: MTEB TRECCOVID (default) type: mteb/trec-covid config: default split: test revision: bb9466bac8153a0349341eb1b22e06409e78ef4e metrics: - type: ndcg_at_1 value: 85.0 - type: ndcg_at_3 value: 84.58099999999999 - type: ndcg_at_5 value: 83.573 - type: ndcg_at_10 value: 80.285 - type: ndcg_at_20 value: 77.469 - type: ndcg_at_100 value: 63.524 - type: ndcg_at_1000 value: 56.839 - type: map_at_1 value: 0.22799999999999998 - type: map_at_3 value: 0.656 - type: map_at_5 value: 1.078 - type: map_at_10 value: 2.0389999999999997 - type: map_at_20 value: 3.7670000000000003 - type: map_at_100 value: 12.8 - type: map_at_1000 value: 31.575999999999997 - type: recall_at_1 value: 0.22799999999999998 - type: recall_at_3 value: 0.695 - type: recall_at_5 value: 1.151 - type: recall_at_10 value: 2.215 - type: recall_at_20 value: 4.232 - type: recall_at_100 value: 15.828000000000001 - type: recall_at_1000 value: 53.516 - type: precision_at_1 value: 90.0 - type: precision_at_3 value: 89.333 - type: precision_at_5 value: 88.8 - type: precision_at_10 value: 84.6 - type: precision_at_20 value: 81.6 - type: precision_at_100 value: 65.64 - type: precision_at_1000 value: 25.380000000000003 - type: mrr_at_1 value: 
90.0 - type: mrr_at_3 value: 94.6667 - type: mrr_at_5 value: 94.6667 - type: mrr_at_10 value: 94.6667 - type: mrr_at_20 value: 94.6667 - type: mrr_at_100 value: 94.6667 - type: mrr_at_1000 value: 94.6667 - type: nauc_ndcg_at_1_max value: -5.4637 - type: nauc_ndcg_at_1_std value: 14.5981 - type: nauc_ndcg_at_1_diff1 value: 13.6414 - type: nauc_ndcg_at_3_max value: 10.9521 - type: nauc_ndcg_at_3_std value: 39.8204 - type: nauc_ndcg_at_3_diff1 value: -13.839799999999999 - type: nauc_ndcg_at_5_max value: 20.9664 - type: nauc_ndcg_at_5_std value: 50.876999999999995 - type: nauc_ndcg_at_5_diff1 value: -15.3559 - type: nauc_ndcg_at_10_max value: 34.053 - type: nauc_ndcg_at_10_std value: 59.1102 - type: nauc_ndcg_at_10_diff1 value: -23.3868 - type: nauc_ndcg_at_20_max value: 39.5081 - type: nauc_ndcg_at_20_std value: 70.287 - type: nauc_ndcg_at_20_diff1 value: -36.7999 - type: nauc_ndcg_at_100_max value: 38.8671 - type: nauc_ndcg_at_100_std value: 80.5875 - type: nauc_ndcg_at_100_diff1 value: -28.766599999999997 - type: nauc_ndcg_at_1000_max value: 45.4017 - type: nauc_ndcg_at_1000_std value: 73.1799 - type: nauc_ndcg_at_1000_diff1 value: -13.5374 - type: nauc_map_at_1_max value: -15.7901 - type: nauc_map_at_1_std value: -14.5481 - type: nauc_map_at_1_diff1 value: 35.3307 - type: nauc_map_at_3_max value: -4.8114 - type: nauc_map_at_3_std value: -8.3704 - type: nauc_map_at_3_diff1 value: 26.2918 - type: nauc_map_at_5_max value: -0.9780000000000001 - type: nauc_map_at_5_std value: -3.4821 - type: nauc_map_at_5_diff1 value: 25.469 - type: nauc_map_at_10_max value: 4.2075000000000005 - type: nauc_map_at_10_std value: 1.5897999999999999 - type: nauc_map_at_10_diff1 value: 20.0578 - type: nauc_map_at_20_max value: 11.1623 - type: nauc_map_at_20_std value: 13.4387 - type: nauc_map_at_20_diff1 value: 12.9992 - type: nauc_map_at_100_max value: 21.7341 - type: nauc_map_at_100_std value: 51.2629 - type: nauc_map_at_100_diff1 value: 6.3333 - type: nauc_map_at_1000_max value: 45.7524 - type: nauc_map_at_1000_std value: 79.5106 - type: nauc_map_at_1000_diff1 value: -16.2395 - type: nauc_recall_at_1_max value: -15.7901 - type: nauc_recall_at_1_std value: -14.5481 - type: nauc_recall_at_1_diff1 value: 35.3307 - type: nauc_recall_at_3_max value: -3.9641 - type: nauc_recall_at_3_std value: -11.6408 - type: nauc_recall_at_3_diff1 value: 26.243 - type: nauc_recall_at_5_max value: -1.3654 - type: nauc_recall_at_5_std value: -7.7433000000000005 - type: nauc_recall_at_5_diff1 value: 25.5058 - type: nauc_recall_at_10_max value: 0.6649999999999999 - type: nauc_recall_at_10_std value: -5.8116 - type: nauc_recall_at_10_diff1 value: 23.0906 - type: nauc_recall_at_20_max value: 4.398 - type: nauc_recall_at_20_std value: 2.5343999999999998 - type: nauc_recall_at_20_diff1 value: 17.0552 - type: nauc_recall_at_100_max value: 12.8082 - type: nauc_recall_at_100_std value: 32.912400000000005 - type: nauc_recall_at_100_diff1 value: 14.6836 - type: nauc_recall_at_1000_max value: 42.261500000000005 - type: nauc_recall_at_1000_std value: 60.5793 - type: nauc_recall_at_1000_diff1 value: -6.1521 - type: nauc_precision_at_1_max value: -7.077500000000001 - type: nauc_precision_at_1_std value: 19.7572 - type: nauc_precision_at_1_diff1 value: 21.9141 - type: nauc_precision_at_3_max value: 30.758799999999997 - type: nauc_precision_at_3_std value: 53.897099999999995 - type: nauc_precision_at_3_diff1 value: -25.885399999999997 - type: nauc_precision_at_5_max value: 43.5162 - type: nauc_precision_at_5_std value: 66.8874 - type: 
nauc_precision_at_5_diff1 value: -20.7483 - type: nauc_precision_at_10_max value: 46.7798 - type: nauc_precision_at_10_std value: 63.677499999999995 - type: nauc_precision_at_10_diff1 value: -21.1182 - type: nauc_precision_at_20_max value: 49.8621 - type: nauc_precision_at_20_std value: 79.1937 - type: nauc_precision_at_20_diff1 value: -38.9691 - type: nauc_precision_at_100_max value: 42.8699 - type: nauc_precision_at_100_std value: 83.7695 - type: nauc_precision_at_100_diff1 value: -26.794 - type: nauc_precision_at_1000_max value: 42.7819 - type: nauc_precision_at_1000_std value: 53.815900000000006 - type: nauc_precision_at_1000_diff1 value: -34.4047 - type: nauc_mrr_at_1_max value: -7.077500000000001 - type: nauc_mrr_at_1_std value: 19.7572 - type: nauc_mrr_at_1_diff1 value: 21.9141 - type: nauc_mrr_at_3_max value: -2.1212999999999997 - type: nauc_mrr_at_3_std value: 21.9859 - type: nauc_mrr_at_3_diff1 value: 25.0584 - type: nauc_mrr_at_5_max value: -2.1212999999999997 - type: nauc_mrr_at_5_std value: 21.9859 - type: nauc_mrr_at_5_diff1 value: 25.0584 - type: nauc_mrr_at_10_max value: -2.1212999999999997 - type: nauc_mrr_at_10_std value: 21.9859 - type: nauc_mrr_at_10_diff1 value: 25.0584 - type: nauc_mrr_at_20_max value: -2.1212999999999997 - type: nauc_mrr_at_20_std value: 21.9859 - type: nauc_mrr_at_20_diff1 value: 25.0584 - type: nauc_mrr_at_100_max value: -2.1212999999999997 - type: nauc_mrr_at_100_std value: 21.9859 - type: nauc_mrr_at_100_diff1 value: 25.0584 - type: nauc_mrr_at_1000_max value: -2.1212999999999997 - type: nauc_mrr_at_1000_std value: 21.9859 - type: nauc_mrr_at_1000_diff1 value: 25.0584 - type: main_score value: 80.285 - task: type: Retrieval dataset: name: MTEB Touche2020 (default) type: mteb/touche2020 config: default split: test revision: a34f9a33db75fa0cbb21bb5cfc3dae8dc8bec93f metrics: - type: ndcg_at_1 value: 33.672999999999995 - type: ndcg_at_3 value: 34.392 - type: ndcg_at_5 value: 32.606 - type: ndcg_at_10 value: 29.767 - type: ndcg_at_20 value: 30.353 - type: ndcg_at_100 value: 41.094 - type: ndcg_at_1000 value: 51.937 - type: map_at_1 value: 2.64 - type: map_at_3 value: 6.428000000000001 - type: map_at_5 value: 8.792 - type: map_at_10 value: 11.882 - type: map_at_20 value: 14.818000000000001 - type: map_at_100 value: 18.613 - type: map_at_1000 value: 20.233 - type: recall_at_1 value: 2.64 - type: recall_at_3 value: 7.951999999999999 - type: recall_at_5 value: 11.898 - type: recall_at_10 value: 18.782 - type: recall_at_20 value: 27.488 - type: recall_at_100 value: 51.337999999999994 - type: recall_at_1000 value: 84.399 - type: precision_at_1 value: 36.735 - type: precision_at_3 value: 36.735 - type: precision_at_5 value: 33.061 - type: precision_at_10 value: 26.122 - type: precision_at_20 value: 19.898 - type: precision_at_100 value: 8.429 - type: precision_at_1000 value: 1.5650000000000002 - type: mrr_at_1 value: 36.7347 - type: mrr_at_3 value: 51.7007 - type: mrr_at_5 value: 54.65989999999999 - type: mrr_at_10 value: 55.8868 - type: mrr_at_20 value: 56.2944 - type: mrr_at_100 value: 56.360200000000006 - type: mrr_at_1000 value: 56.360200000000006 - type: nauc_ndcg_at_1_max value: -23.0012 - type: nauc_ndcg_at_1_std value: -9.474 - type: nauc_ndcg_at_1_diff1 value: 15.5991 - type: nauc_ndcg_at_3_max value: -16.1454 - type: nauc_ndcg_at_3_std value: -26.226100000000002 - type: nauc_ndcg_at_3_diff1 value: 22.9111 - type: nauc_ndcg_at_5_max value: -20.3259 - type: nauc_ndcg_at_5_std value: -23.3106 - type: nauc_ndcg_at_5_diff1 value: 20.112199999999998 - 
type: nauc_ndcg_at_10_max value: -17.4616 - type: nauc_ndcg_at_10_std value: -15.5791 - type: nauc_ndcg_at_10_diff1 value: 13.2876 - type: nauc_ndcg_at_20_max value: -20.0683 - type: nauc_ndcg_at_20_std value: -10.979899999999999 - type: nauc_ndcg_at_20_diff1 value: 5.929 - type: nauc_ndcg_at_100_max value: -21.096899999999998 - type: nauc_ndcg_at_100_std value: 13.212399999999999 - type: nauc_ndcg_at_100_diff1 value: 3.9886 - type: nauc_ndcg_at_1000_max value: -14.1544 - type: nauc_ndcg_at_1000_std value: 19.5979 - type: nauc_ndcg_at_1000_diff1 value: 1.2742 - type: nauc_map_at_1_max value: -18.123900000000003 - type: nauc_map_at_1_std value: -17.8031 - type: nauc_map_at_1_diff1 value: 21.032899999999998 - type: nauc_map_at_3_max value: -6.7797 - type: nauc_map_at_3_std value: -28.810299999999998 - type: nauc_map_at_3_diff1 value: 16.2912 - type: nauc_map_at_5_max value: -7.620699999999999 - type: nauc_map_at_5_std value: -27.6982 - type: nauc_map_at_5_diff1 value: 14.813100000000002 - type: nauc_map_at_10_max value: -5.1492 - type: nauc_map_at_10_std value: -23.885 - type: nauc_map_at_10_diff1 value: 6.9926 - type: nauc_map_at_20_max value: -9.6331 - type: nauc_map_at_20_std value: -19.215 - type: nauc_map_at_20_diff1 value: 0.6491 - type: nauc_map_at_100_max value: -9.7297 - type: nauc_map_at_100_std value: -6.9502999999999995 - type: nauc_map_at_100_diff1 value: -1.5897999999999999 - type: nauc_map_at_1000_max value: -8.9517 - type: nauc_map_at_1000_std value: -3.9941999999999998 - type: nauc_map_at_1000_diff1 value: -2.8158 - type: nauc_recall_at_1_max value: -18.123900000000003 - type: nauc_recall_at_1_std value: -17.8031 - type: nauc_recall_at_1_diff1 value: 21.032899999999998 - type: nauc_recall_at_3_max value: -12.1006 - type: nauc_recall_at_3_std value: -35.3199 - type: nauc_recall_at_3_diff1 value: 12.044 - type: nauc_recall_at_5_max value: -15.7192 - type: nauc_recall_at_5_std value: -30.7299 - type: nauc_recall_at_5_diff1 value: 8.3249 - type: nauc_recall_at_10_max value: -13.3968 - type: nauc_recall_at_10_std value: -19.2107 - type: nauc_recall_at_10_diff1 value: 0.1315 - type: nauc_recall_at_20_max value: -19.5043 - type: nauc_recall_at_20_std value: -10.005500000000001 - type: nauc_recall_at_20_diff1 value: -7.197299999999999 - type: nauc_recall_at_100_max value: -21.4032 - type: nauc_recall_at_100_std value: 33.5358 - type: nauc_recall_at_100_diff1 value: -10.4876 - type: nauc_recall_at_1000_max value: 1.8395000000000001 - type: nauc_recall_at_1000_std value: 70.462 - type: nauc_recall_at_1000_diff1 value: -23.4072 - type: nauc_precision_at_1_max value: -23.0917 - type: nauc_precision_at_1_std value: -8.036999999999999 - type: nauc_precision_at_1_diff1 value: 19.354599999999998 - type: nauc_precision_at_3_max value: -11.3547 - type: nauc_precision_at_3_std value: -30.2495 - type: nauc_precision_at_3_diff1 value: 20.3126 - type: nauc_precision_at_5_max value: -17.2545 - type: nauc_precision_at_5_std value: -24.8896 - type: nauc_precision_at_5_diff1 value: 15.6276 - type: nauc_precision_at_10_max value: -11.5796 - type: nauc_precision_at_10_std value: -2.3662 - type: nauc_precision_at_10_diff1 value: 3.8091 - type: nauc_precision_at_20_max value: -11.9042 - type: nauc_precision_at_20_std value: 15.6577 - type: nauc_precision_at_20_diff1 value: -8.8878 - type: nauc_precision_at_100_max value: -0.5217 - type: nauc_precision_at_100_std value: 71.8387 - type: nauc_precision_at_100_diff1 value: -16.8714 - type: nauc_precision_at_1000_max value: 36.234300000000005 - type: 
nauc_precision_at_1000_std value: 37.5447 - type: nauc_precision_at_1000_diff1 value: -20.7229 - type: nauc_mrr_at_1_max value: -23.0917 - type: nauc_mrr_at_1_std value: -8.036999999999999 - type: nauc_mrr_at_1_diff1 value: 19.354599999999998 - type: nauc_mrr_at_3_max value: -27.9937 - type: nauc_mrr_at_3_std value: -26.519900000000003 - type: nauc_mrr_at_3_diff1 value: 20.288 - type: nauc_mrr_at_5_max value: -33.218599999999995 - type: nauc_mrr_at_5_std value: -23.857400000000002 - type: nauc_mrr_at_5_diff1 value: 15.978200000000001 - type: nauc_mrr_at_10_max value: -31.7904 - type: nauc_mrr_at_10_std value: -19.169900000000002 - type: nauc_mrr_at_10_diff1 value: 17.762700000000002 - type: nauc_mrr_at_20_max value: -30.44 - type: nauc_mrr_at_20_std value: -20.2867 - type: nauc_mrr_at_20_diff1 value: 18.895500000000002 - type: nauc_mrr_at_100_max value: -30.5404 - type: nauc_mrr_at_100_std value: -20.5699 - type: nauc_mrr_at_100_diff1 value: 18.7046 - type: nauc_mrr_at_1000_max value: -30.5404 - type: nauc_mrr_at_1000_std value: -20.5699 - type: nauc_mrr_at_1000_diff1 value: 18.7046 - type: main_score value: 29.767 - task: type: Classification dataset: name: MTEB ToxicConversationsClassification (default) type: mteb/toxic_conversations_50k config: default split: test revision: edfaf9da55d3dd50d43143d90c1ac476895ae6de metrics: - type: accuracy value: 64.8096 - type: f1 value: 49.844300000000004 - type: f1_weighted value: 72.5251 - type: ap value: 11.7519 - type: ap_weighted value: 11.7519 - type: main_score value: 64.8096 - task: type: Classification dataset: name: MTEB TweetSentimentExtractionClassification (default) type: mteb/tweet_sentiment_extraction config: default split: test revision: d604517c81ca91fe16a244d1248fc021f9ecee7a metrics: - type: accuracy value: 58.1692 - type: f1 value: 58.4408 - type: f1_weighted value: 57.565599999999996 - type: main_score value: 58.1692 - task: type: Clustering dataset: name: MTEB TwentyNewsgroupsClustering (default) type: mteb/twentynewsgroups-clustering config: default split: test revision: 6125ec4e24fa026cec8a478383ee943acfbd5449 metrics: - type: v_measure value: 39.293 - type: v_measure_std value: 1.5684 - type: main_score value: 39.293 - task: type: PairClassification dataset: name: MTEB TwitterSemEval2015 (default) type: mteb/twittersemeval2015-pairclassification config: default split: test revision: 70970daeab8776df92f5ea462b6173c0b46fd2d1 metrics: - type: similarity_accuracy value: 83.29260000000001 - type: similarity_accuracy_threshold value: 78.2732 - type: similarity_f1 value: 60.656600000000005 - type: similarity_f1_threshold value: 73.4961 - type: similarity_precision value: 59.007 - type: similarity_recall value: 62.4011 - type: similarity_ap value: 64.7501 - type: cosine_accuracy value: 83.29260000000001 - type: cosine_accuracy_threshold value: 78.2732 - type: cosine_f1 value: 60.656600000000005 - type: cosine_f1_threshold value: 73.4961 - type: cosine_precision value: 59.007 - type: cosine_recall value: 62.4011 - type: cosine_ap value: 64.7501 - type: manhattan_accuracy value: 83.2986 - type: manhattan_accuracy_threshold value: 1476.7148 - type: manhattan_f1 value: 60.7459 - type: manhattan_f1_threshold value: 1607.9180000000001 - type: manhattan_precision value: 59.0581 - type: manhattan_recall value: 62.53300000000001 - type: manhattan_ap value: 64.76859999999999 - type: euclidean_accuracy value: 83.29260000000001 - type: euclidean_accuracy_threshold value: 65.9194 - type: euclidean_f1 value: 60.656600000000005 - type: 
euclidean_f1_threshold value: 72.8065 - type: euclidean_precision value: 59.007 - type: euclidean_recall value: 62.4011 - type: euclidean_ap value: 64.7501 - type: dot_accuracy value: 83.29260000000001 - type: dot_accuracy_threshold value: 78.2731 - type: dot_f1 value: 60.656600000000005 - type: dot_f1_threshold value: 73.4961 - type: dot_precision value: 59.007 - type: dot_recall value: 62.4011 - type: dot_ap value: 64.7501 - type: max_accuracy value: 83.2986 - type: max_f1 value: 60.7459 - type: max_precision value: 59.0581 - type: max_recall value: 62.53300000000001 - type: max_ap value: 64.76859999999999 - type: main_score value: 64.76859999999999 - task: type: PairClassification dataset: name: MTEB TwitterURLCorpus (default) type: mteb/twitterurlcorpus-pairclassification config: default split: test revision: 8b6510b0b1fa4e4c4f879467980e9be563ec1cdf metrics: - type: similarity_accuracy value: 89.0247 - type: similarity_accuracy_threshold value: 69.271 - type: similarity_f1 value: 78.24419999999999 - type: similarity_f1_threshold value: 66.2183 - type: similarity_precision value: 76.616 - type: similarity_recall value: 79.943 - type: similarity_ap value: 85.9494 - type: cosine_accuracy value: 89.0247 - type: cosine_accuracy_threshold value: 69.271 - type: cosine_f1 value: 78.24419999999999 - type: cosine_f1_threshold value: 66.2183 - type: cosine_precision value: 76.616 - type: cosine_recall value: 79.943 - type: cosine_ap value: 85.9494 - type: manhattan_accuracy value: 89.0267 - type: manhattan_accuracy_threshold value: 1750.3544000000002 - type: manhattan_f1 value: 78.2188 - type: manhattan_f1_threshold value: 1837.7304 - type: manhattan_precision value: 75.1472 - type: manhattan_recall value: 81.5522 - type: manhattan_ap value: 85.9496 - type: euclidean_accuracy value: 89.0247 - type: euclidean_accuracy_threshold value: 78.3951 - type: euclidean_f1 value: 78.24419999999999 - type: euclidean_f1_threshold value: 82.197 - type: euclidean_precision value: 76.616 - type: euclidean_recall value: 79.943 - type: euclidean_ap value: 85.9494 - type: dot_accuracy value: 89.0247 - type: dot_accuracy_threshold value: 69.271 - type: dot_f1 value: 78.24419999999999 - type: dot_f1_threshold value: 66.2183 - type: dot_precision value: 76.616 - type: dot_recall value: 79.943 - type: dot_ap value: 85.9494 - type: max_accuracy value: 89.0267 - type: max_f1 value: 78.24419999999999 - type: max_precision value: 76.616 - type: max_recall value: 81.5522 - type: max_ap value: 85.9496 - type: main_score value: 85.9496 --- <h1 align="center">Snowflake's Arctic-embed-m-v2.0</h1> <h4 align="center"> <p> <a href=#news>News</a> | <a href=#models>Models</a> | <a href=#usage>Usage</a> | <a href="#evaluation">Evaluation</a> | <a href="#contact">Contact</a> | <a href="#faq">FAQ</a> <a href="#license">License</a> | <a href="#acknowledgement">Acknowledgement</a> <p> </h4> <img referrerpolicy="no-referrer-when-downgrade" src="https://static.scarf.sh/a.png?x-pxid=d5cb84e7-4b3a-4d82-85a1-19ec3721c447" /> ## News - 12/11/2024: Release of [Technical Report](https://arxiv.org/abs/2412.04506) - 12/04/2024: Release of [snowflake-arctic-embed-l-v2.0](https://huggingface.co/Snowflake/snowflake-arctic-embed-l-v2.0) and [snowflake-arctic-embed-m-v2.0](https://huggingface.co/Snowflake/snowflake-arctic-embed-m-v2.0) our newest models with multilingual workloads in mind. 
## Models Snowflake arctic-embed-m-v2.0 is the newest addition to the suite of embedding models Snowflake has released, optimized for retrieval performance and inference efficiency. Arctic Embed 2.0 introduces a new standard for multilingual embedding models, delivering high-quality multilingual text retrieval without sacrificing performance in English. Released under the permissive Apache 2.0 license, Arctic Embed 2.0 is ideal for applications that demand reliable, enterprise-grade multilingual search and retrieval at scale. Key Features: 1. Multilingual without compromise: Excels in English and non-English retrieval, outperforming leading open-source and proprietary models on benchmarks like MTEB Retrieval, CLEF, and MIRACL. 2. Inference efficiency: With only 113M non-embedding parameters, inference is fast and efficient at any scale. 3. Compression-friendly: Achieves high-quality retrieval with embeddings as small as 128 bytes/vector using Matryoshka Representation Learning (MRL) and quantization-aware embedding training. 4. Long Context Support: arctic-embed-m-v2.0 builds on [GTE-multilingual-base](https://huggingface.co/Alibaba-NLP/gte-multilingual-base), which can support a context window of up to 8192 tokens via the use of RoPE. ### Quality Benchmarks Unlike most other open-source models, Arctic-embed-m-v2.0 excels across English (via MTEB Retrieval) and multilingual (via MIRACL and CLEF) retrieval. You no longer need to support multiple models to deliver high-quality English and multilingual retrieval. All numbers mentioned below are the average NDCG@10 across the dataset being discussed. | Model Name | # params | # non-emb params | # dimensions | BEIR (15) | MIRACL (4) | CLEF (Focused) | CLEF (Full) | |---|:---:|:---:|:---:|:---:|:---:|:---:|:---:| | **snowflake-arctic-m-v2.0** | 305M | 113M | 768 | **55.4** | 55.2 | **51.7** | **53.9** | | snowflake-arctic-m | 109M | 86M | 768 | 54.9 | 24.9 | 34.4 | 29.1 | | me5 base | 560M | 303M | 1024 | 51.4 | 54.0 | 43.0 | 34.6 | | bge-m3 (BAAI) | 568M | 303M | 1024 | 48.8 | **56.8** | 40.8 | 41.3 | | gte (Alibaba) | 305M | 113M | 768 | 51.1 | 52.3 | 47.7 | 53.1 | Aside from high-quality retrieval, arctic delivers embeddings that are easily compressible. Vector truncation via MRL decreases vector size by 3x with only about 3% degradation in quality, and combining MRL-truncated vectors with Int4 vector compression powers retrieval at 128 bytes per document (a minimal truncation sketch is included at the end of this card, after the usage examples). | Model | # dimensions | BEIR (15) | Relative Performance | MIRACL (4) | Relative Performance | CLEF (5) | Relative Performance | CLEF (Full) | Relative Performance | |---|---|:---:|:---:|:---:|:---:|:---:|---|---|---| | snowflake-arctic-m-v2.0 | 768 | 55.4 | N/A | 55.2 | N/A | 51.7 | N/A | 53.9 | N/A | | snowflake-arctic-m-v2.0 | 256 | 54.4 | -1.81% | 54.0 | -2.17% | 50.6 | -2.13% | 52.3 | -3.06% | ## Usage ### Using Sentence Transformers ```python from sentence_transformers import SentenceTransformer # Load the model model_name = 'Snowflake/snowflake-arctic-embed-m-v2.0' model = SentenceTransformer(model_name, trust_remote_code=True) # Define the queries and documents queries = ['what is snowflake?', 'Where can I get the best tacos?'] documents = ['The Data Cloud!', 'Mexico City of Course!'] # Compute embeddings: use `prompt_name="query"` to encode queries!
query_embeddings = model.encode(queries, prompt_name="query") document_embeddings = model.encode(documents) # Compute cosine similarity scores scores = model.similarity(query_embeddings, document_embeddings) # Output the results for query, query_scores in zip(queries, scores): doc_score_pairs = list(zip(documents, query_scores)) doc_score_pairs = sorted(doc_score_pairs, key=lambda x: x[1], reverse=True) print("Query:", query) for document, score in doc_score_pairs: print(score, document) ``` ### Using Huggingface Transformers You can use the transformers package to use Snowflake's arctic-embed model, as shown below. For optimal retrieval quality, use the CLS token to embed each text portion and use the query prefix below (just on the query). ```python import torch from transformers import AutoModel, AutoTokenizer model_name = 'Snowflake/snowflake-arctic-embed-m-v2.0' tokenizer = AutoTokenizer.from_pretrained(model_name) model = AutoModel.from_pretrained(model_name, add_pooling_layer=False, trust_remote_code=True) model.eval() query_prefix = 'query: ' queries = ['what is snowflake?', 'Where can I get the best tacos?'] queries_with_prefix = ["{}{}".format(query_prefix, i) for i in queries] query_tokens = tokenizer(queries_with_prefix, padding=True, truncation=True, return_tensors='pt', max_length=8192) documents = ['The Data Cloud!', 'Mexico City of Course!'] document_tokens = tokenizer(documents, padding=True, truncation=True, return_tensors='pt', max_length=8192) # Compute token embeddings with torch.no_grad(): query_embeddings = model(**query_tokens)[0][:, 0] document_embeddings = model(**document_tokens)[0][:, 0] # normalize embeddings query_embeddings = torch.nn.functional.normalize(query_embeddings, p=2, dim=1) document_embeddings = torch.nn.functional.normalize(document_embeddings, p=2, dim=1) scores = torch.mm(query_embeddings, document_embeddings.transpose(0, 1)) for query, query_scores in zip(queries, scores): doc_score_pairs = list(zip(documents, query_scores)) doc_score_pairs = sorted(doc_score_pairs, key=lambda x: x[1], reverse=True) #Output passages & scores print("Query:", query) for document, score in doc_score_pairs: print(score, document) ``` ### Using Huggingface Transformers.js If you haven't already, you can install the [Transformers.js](https://huggingface.co/docs/transformers.js) JavaScript library from [NPM](https://www.npmjs.com/package/@huggingface/transformers) using: ```bash npm i @huggingface/transformers ``` You can then use the model for retrieval, as follows: ```js import { pipeline, dot } from '@huggingface/transformers'; // Create feature extraction pipeline const extractor = await pipeline('feature-extraction', 'Snowflake/snowflake-arctic-embed-m-v2.0'); // Generate sentence embeddings const sentences = [ 'query: what is snowflake?', 'The Data Cloud!', 'Mexico City of Course!', ] const output = await extractor(sentences, { normalize: true, pooling: 'cls' }); // Compute similarity scores const [source_embeddings, ...document_embeddings ] = output.tolist(); const similarities = document_embeddings.map(x => dot(source_embeddings, x)); console.log(similarities); // [0.32719788157046004, 0.06960141111667434] ``` ## Contact Feel free to open an issue or pull request if you have any questions or suggestions about this project. You also can email Daniel Campos([email protected]). ## License Arctic is licensed under the [Apache-2](https://www.apache.org/licenses/LICENSE-2.0). The released models can be used for commercial purposes free of charge.
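As an addendum to the compression discussion in the Quality Benchmarks section above, the sketch below illustrates MRL-style truncation to the 256-dimensional prefix reported in that table. It is a minimal, illustrative example rather than an official recipe: the `truncate` helper is defined here purely for demonstration, and re-normalization after slicing is assumed so that dot products remain cosine similarities.

```python
import numpy as np
from sentence_transformers import SentenceTransformer

model = SentenceTransformer('Snowflake/snowflake-arctic-embed-m-v2.0', trust_remote_code=True)

queries = ['what is snowflake?', 'Where can I get the best tacos?']
documents = ['The Data Cloud!', 'Mexico City of Course!']

# Full 768-dimensional embeddings (queries use the query prompt, as above)
query_embeddings = model.encode(queries, prompt_name="query")
document_embeddings = model.encode(documents)

# Keep only the first 256 dimensions and re-normalize (illustrative helper)
def truncate(embeddings, dim=256):
    embeddings = embeddings[:, :dim]
    return embeddings / np.linalg.norm(embeddings, axis=1, keepdims=True)

scores = truncate(query_embeddings) @ truncate(document_embeddings).T
print(scores)
```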
[ "BIOSSES", "SCIFACT" ]
cointegrated/rubert-base-cased-nli-threeway
cointegrated
zero-shot-classification
[ "transformers", "pytorch", "onnx", "safetensors", "bert", "text-classification", "rubert", "russian", "nli", "rte", "zero-shot-classification", "ru", "dataset:cointegrated/nli-rus-translated-v2021", "autotrain_compatible", "endpoints_compatible", "region:us" ]
"2022-03-02T23:29:05Z"
2024-04-05T09:31:57+00:00
159,831
33
--- datasets: - cointegrated/nli-rus-translated-v2021 language: ru pipeline_tag: zero-shot-classification tags: - rubert - russian - nli - rte - zero-shot-classification widget: - text: Я хочу поехать в Австралию candidate_labels: спорт,путешествия,музыка,кино,книги,наука,политика hypothesis_template: Тема текста - {}. --- # RuBERT for NLI (natural language inference) This is the [DeepPavlov/rubert-base-cased](https://huggingface.co/DeepPavlov/rubert-base-cased) model fine-tuned to predict the logical relationship between two short texts: entailment, contradiction, or neutral. ## Usage How to run the model for NLI: ```python # !pip install transformers sentencepiece --quiet import torch from transformers import AutoTokenizer, AutoModelForSequenceClassification model_checkpoint = 'cointegrated/rubert-base-cased-nli-threeway' tokenizer = AutoTokenizer.from_pretrained(model_checkpoint) model = AutoModelForSequenceClassification.from_pretrained(model_checkpoint) if torch.cuda.is_available(): model.cuda() text1 = 'Сократ - человек, а все люди смертны.' text2 = 'Сократ никогда не умрёт.' with torch.inference_mode(): out = model(**tokenizer(text1, text2, return_tensors='pt').to(model.device)) proba = torch.softmax(out.logits, -1).cpu().numpy()[0] print({v: proba[k] for k, v in model.config.id2label.items()}) # {'entailment': 0.009525929, 'contradiction': 0.9332064, 'neutral': 0.05726764} ``` You can also use this model for zero-shot short text classification (by labels only), e.g. for sentiment analysis: ```python def predict_zero_shot(text, label_texts, model, tokenizer, label='entailment', normalize=True): tokens = tokenizer([text] * len(label_texts), label_texts, truncation=True, return_tensors='pt', padding=True) with torch.inference_mode(): result = torch.softmax(model(**tokens.to(model.device)).logits, -1) proba = result[:, model.config.label2id[label]].cpu().numpy() if normalize: proba /= sum(proba) return proba classes = ['Я доволен', 'Я недоволен'] predict_zero_shot('Какая гадость эта ваша заливная рыба!', classes, model, tokenizer) # array([0.05609814, 0.9439019 ], dtype=float32) predict_zero_shot('Какая вкусная эта ваша заливная рыба!', classes, model, tokenizer) # array([0.9059292 , 0.09407079], dtype=float32) ``` Alternatively, you can use [Huggingface pipelines](https://huggingface.co/transformers/main_classes/pipelines.html) for inference; a minimal pipeline sketch is shown after the Sources section below. ## Sources The model has been trained on a series of NLI datasets automatically translated to Russian from English. Most datasets were taken [from the repo of Felipe Salvatore](https://github.com/felipessalvatore/NLI_datasets): [JOCI](https://github.com/sheng-z/JOCI), [MNLI](https://cims.nyu.edu/~sbowman/multinli/), [MPE](https://aclanthology.org/I17-1011/), [SICK](http://www.lrec-conf.org/proceedings/lrec2014/pdf/363_Paper.pdf), [SNLI](https://nlp.stanford.edu/projects/snli/). Some datasets were obtained from the original sources: [ANLI](https://github.com/facebookresearch/anli), [NLI-style FEVER](https://github.com/easonnie/combine-FEVER-NSMN/blob/master/other_resources/nli_fever.md), [IMPPRES](https://github.com/facebookresearch/Imppres).
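As mentioned in the Usage section above, the model can also be run through the Hugging Face `zero-shot-classification` pipeline. Below is a minimal sketch that reuses the hypothesis template and candidate labels from the widget configuration in this card's metadata; the exact scores are not reproduced here.

```python
from transformers import pipeline

classifier = pipeline(
    'zero-shot-classification',
    model='cointegrated/rubert-base-cased-nli-threeway',
)

result = classifier(
    'Я хочу поехать в Австралию',  # "I want to go to Australia"
    candidate_labels=['спорт', 'путешествия', 'музыка', 'кино', 'книги', 'наука', 'политика'],
    hypothesis_template='Тема текста - {}.',  # "The topic of the text is {}."
)
print(result['labels'][0], result['scores'][0])  # the travel-related label is expected to rank highly
```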
## Performance The table below shows ROC AUC (one class vs rest) for five models on the corresponding *dev* sets: - [tiny](https://huggingface.co/cointegrated/rubert-tiny-bilingual-nli): a small BERT predicting entailment vs not_entailment - [twoway](https://huggingface.co/cointegrated/rubert-base-cased-nli-twoway): a base-sized BERT predicting entailment vs not_entailment - [threeway](https://huggingface.co/cointegrated/rubert-base-cased-nli-threeway) (**this model**): a base-sized BERT predicting entailment vs contradiction vs neutral - [vicgalle-xlm](https://huggingface.co/vicgalle/xlm-roberta-large-xnli-anli): a large multilingual NLI model - [facebook-bart](https://huggingface.co/facebook/bart-large-mnli): a large multilingual NLI model |model |add_one_rte|anli_r1|anli_r2|anli_r3|copa|fever|help|iie |imppres|joci|mnli |monli|mpe |scitail|sick|snli|terra|total | |------------------------|-----------|-------|-------|-------|----|-----|----|-----|-------|----|-----|-----|----|-------|----|----|-----|------| |n_observations |387 |1000 |1000 |1200 |200 |20474|3355|31232|7661 |939 |19647|269 |1000|2126 |500 |9831|307 |101128| |tiny/entailment |0.77 |0.59 |0.52 |0.53 |0.53|0.90 |0.81|0.78 |0.93 |0.81|0.82 |0.91 |0.81|0.78 |0.93|0.95|0.67 |0.77 | |twoway/entailment |0.89 |0.73 |0.61 |0.62 |0.58|0.96 |0.92|0.87 |0.99 |0.90|0.90 |0.99 |0.91|0.96 |0.97|0.97|0.87 |0.86 | |threeway/entailment |0.91 |0.75 |0.61 |0.61 |0.57|0.96 |0.56|0.61 |0.99 |0.90|0.91 |0.67 |0.92|0.84 |0.98|0.98|0.90 |0.80 | |vicgalle-xlm/entailment |0.88 |0.79 |0.63 |0.66 |0.57|0.93 |0.56|0.62 |0.77 |0.80|0.90 |0.70 |0.83|0.84 |0.91|0.93|0.93 |0.78 | |facebook-bart/entailment|0.51 |0.41 |0.43 |0.47 |0.50|0.74 |0.55|0.57 |0.60 |0.63|0.70 |0.52 |0.56|0.68 |0.67|0.72|0.64 |0.58 | |threeway/contradiction | |0.71 |0.64 |0.61 | |0.97 | | |1.00 |0.77|0.92 | |0.89| |0.99|0.98| |0.85 | |threeway/neutral | |0.79 |0.70 |0.62 | |0.91 | | |0.99 |0.68|0.86 | |0.79| |0.96|0.96| |0.83 | For evaluation (and for training of the [tiny](https://huggingface.co/cointegrated/rubert-tiny-bilingual-nli) and [twoway](https://huggingface.co/cointegrated/rubert-base-cased-nli-twoway) models), some extra datasets were used: [Add-one RTE](https://cs.brown.edu/people/epavlick/papers/ans.pdf), [CoPA](https://people.ict.usc.edu/~gordon/copa.html), [IIE](https://aclanthology.org/I17-1100), and [SCITAIL](https://allenai.org/data/scitail) taken from [the repo of Felipe Salvatore](https://github.com/felipessalvatore/NLI_datasets) and translatted, [HELP](https://github.com/verypluming/HELP) and [MoNLI](https://github.com/atticusg/MoNLI) taken from the original sources and translated, and Russian [TERRa](https://russiansuperglue.com/ru/tasks/task_info/TERRa).
[ "SCITAIL" ]
Alibaba-NLP/gte-reranker-modernbert-base
Alibaba-NLP
sentence-similarity
[ "transformers", "onnx", "safetensors", "modernbert", "text-classification", "sentence-transformers", "transformers.js", "sentence-similarity", "en", "arxiv:2308.03281", "base_model:answerdotai/ModernBERT-base", "base_model:finetune:answerdotai/ModernBERT-base", "license:apache-2.0", "autotrain_compatible", "endpoints_compatible", "region:us" ]
"2025-01-20T05:46:18Z"
2025-01-24T12:31:47+00:00
148,999
46
--- base_model: - answerdotai/ModernBERT-base language: - en library_name: transformers license: apache-2.0 pipeline_tag: sentence-similarity tags: - sentence-transformers - transformers.js base_model_relation: finetune --- # gte-reranker-modernbert-base We are excited to introduce the `gte-modernbert` series of models, which are built upon the latest modernBERT pre-trained encoder-only foundation models. The `gte-modernbert` series models include both text embedding models and rerank models. The `gte-modernbert` models demonstrates competitive performance in several text embedding and text retrieval evaluation tasks when compared to similar-scale models from the current open-source community. This includes assessments such as **MTEB**, **LoCO**, and **COIR** evaluation. ## Model Overview - Developed by: Tongyi Lab, Alibaba Group - Model Type: Text reranker - Primary Language: English - Model Size: 149M - Max Input Length: 8192 tokens ### Model list | Models | Language | Model Type | Model Size | Max Seq. Length | Dimension | MTEB-en | BEIR | LoCo | CoIR | |:--------------------------------------------------------------------------------------:|:--------:|:----------------------:|:----------:|:---------------:|:---------:|:-------:|:----:|:----:|:----:| | [`gte-modernbert-base`](https://huggingface.co/Alibaba-NLP/gte-modernbert-base) | English | text embedding | 149M | 8192 | 768 | 64.38 | 55.33 | 87.57 | 79.31 | | [`gte-reranker-modernbert-base`](https://huggingface.co/Alibaba-NLP/gte-reranker-modernbert-base) | English | text reranker | 149M | 8192 | - | - | 56.19 | 90.68 | 79.99 | ## Usage > [!TIP] > For `transformers` and `sentence-transformers`, if your GPU supports it, the efficient Flash Attention 2 will be used automatically if you have `flash_attn` installed. It is not mandatory. > > ```bash > pip install flash_attn > ``` Use with `transformers` ```python # Requires transformers>=4.48.0 import torch from transformers import AutoModelForSequenceClassification, AutoTokenizer model_name_or_path = "Alibaba-NLP/gte-reranker-modernbert-base" tokenizer = AutoTokenizer.from_pretrained(model_name_or_path) model = AutoModelForSequenceClassification.from_pretrained( model_name_or_path, torch_dtype=torch.float16, ) model.eval() pairs = [ ["what is the capital of China?", "Beijing"], ["how to implement quick sort in python?", "Introduction of quick sort"], ["how to implement quick sort in python?", "The weather is nice today"], ] with torch.no_grad(): inputs = tokenizer(pairs, padding=True, truncation=True, return_tensors='pt', max_length=512) scores = model(**inputs, return_dict=True).logits.view(-1, ).float() print(scores) # tensor([ 2.1387, 2.4609, -1.6729]) ``` Use with `sentence-transformers`: Before you start, install the sentence-transformers libraries: ``` pip install sentence-transformers ``` ```python # Requires transformers>=4.48.0 from sentence_transformers import CrossEncoder model = CrossEncoder( "Alibaba-NLP/gte-reranker-modernbert-base", automodel_args={"torch_dtype": "auto"}, ) pairs = [ ["what is the capital of China?", "Beijing"], ["how to implement quick sort in python?","Introduction of quick sort"], ["how to implement quick sort in python?", "The weather is nice today"], ] scores = model.predict(pairs) print(scores) # [0.8945664 0.9213594 0.15742092] # NOTE: Sentence Transformers calls Softmax over the outputs by default, hence the scores are in [0, 1] range. 
``` Use with `transformers.js` ```js import { AutoTokenizer, AutoModelForSequenceClassification, } from "@huggingface/transformers"; const model_id = "Alibaba-NLP/gte-reranker-modernbert-base"; const model = await AutoModelForSequenceClassification.from_pretrained( model_id, { dtype: "fp32" }, // Supported options: "fp32", "fp16", "q8", "q4", "q4f16" ); const tokenizer = await AutoTokenizer.from_pretrained(model_id); const pairs = [ ["what is the capital of China?", "Beijing"], ["how to implement quick sort in python?", "Introduction of quick sort"], ["how to implement quick sort in python?", "The weather is nice today"], ]; const inputs = tokenizer( pairs.map((x) => x[0]), { text_pair: pairs.map((x) => x[1]), padding: true, truncation: true, }, ); const { logits } = await model(inputs); console.log(logits.tolist()); // [[2.138258218765259], [2.4609625339508057], [-1.6775450706481934]] ``` ## Training Details The `gte-modernbert` series of models follows the training scheme of the previous [GTE models](https://huggingface.co/collections/Alibaba-NLP/gte-models-6680f0b13f885cb431e6d469), with the only difference being that the pre-training language model base has been replaced from [GTE-MLM](https://huggingface.co/Alibaba-NLP/gte-en-mlm-base) to [ModernBert](https://huggingface.co/answerdotai/ModernBERT-base). For more training details, please refer to our paper: [mGTE: Generalized Long-Context Text Representation and Reranking Models for Multilingual Text Retrieval](https://aclanthology.org/2024.emnlp-industry.103/) ## Evaluation ### MTEB The results of other models are retrieved from [MTEB leaderboard](https://huggingface.co/spaces/mteb/leaderboard). Given that all models in the `gte-modernbert` series have a size of less than 1B parameters, we focused exclusively on the results of models under 1B from the MTEB leaderboard. | Model Name | Param Size (M) | Dimension | Sequence Length | Average (56) | Class. (12) | Clust. (11) | Pair Class. (3) | Reran. (4) | Retr. (15) | STS (10) | Summ. 
(1) | |:------------------------------------------------------------------------------------------------:|:--------------:|:---------:|:---------------:|:------------:|:-----------:|:---:|:---:|:---:|:---:|:-----------:|:--------:| | [mxbai-embed-large-v1](https://huggingface.co/mixedbread-ai/mxbai-embed-large-v1) | 335 | 1024 | 512 | 64.68 | 75.64 | 46.71 | 87.2 | 60.11 | 54.39 | 85 | 32.71 | | [multilingual-e5-large-instruct](https://huggingface.co/intfloat/multilingual-e5-large-instruct) | 560 | 1024 | 514 | 64.41 | 77.56 | 47.1 | 86.19 | 58.58 | 52.47 | 84.78 | 30.39 | | [bge-large-en-v1.5](https://huggingface.co/BAAI/bge-large-en-v1.5) | 335 | 1024 | 512 | 64.23 | 75.97 | 46.08 | 87.12 | 60.03 | 54.29 | 83.11 | 31.61 | | [gte-base-en-v1.5](https://huggingface.co/Alibaba-NLP/gte-base-en-v1.5) | 137 | 768 | 8192 | 64.11 | 77.17 | 46.82 | 85.33 | 57.66 | 54.09 | 81.97 | 31.17 | | [bge-base-en-v1.5](https://huggingface.co/BAAI/bge-base-en-v1.5) | 109 | 768 | 512 | 63.55 | 75.53 | 45.77 | 86.55 | 58.86 | 53.25 | 82.4 | 31.07 | | [gte-large-en-v1.5](https://huggingface.co/Alibaba-NLP/gte-large-en-v1.5) | 409 | 1024 | 8192 | 65.39 | 77.75 | 47.95 | 84.63 | 58.50 | 57.91 | 81.43 | 30.91 | | [modernbert-embed-base](https://huggingface.co/nomic-ai/modernbert-embed-base) | 149 | 768 | 8192 | 62.62 | 74.31 | 44.98 | 83.96 | 56.42 | 52.89 | 81.78 | 31.39 | | [nomic-embed-text-v1.5](https://huggingface.co/nomic-ai/nomic-embed-text-v1.5) | | 768 | 8192 | 62.28 | 73.55 | 43.93 | 84.61 | 55.78 | 53.01| 81.94 | 30.4 | | [gte-multilingual-base](https://huggingface.co/Alibaba-NLP/gte-multilingual-base) | 305 | 768 | 8192 | 61.4 | 70.89 | 44.31 | 84.24 | 57.47 |51.08 | 82.11 | 30.58 | | [jina-embeddings-v3](https://huggingface.co/jinaai/jina-embeddings-v3) | 572 | 1024 | 8192 | 65.51 | 82.58 |45.21 |84.01 |58.13 |53.88 | 85.81 | 29.71 | | [**gte-modernbert-base**](https://huggingface.co/Alibaba-NLP/gte-modernbert-base) | 149 | 768 | 8192 | **64.38** | **76.99** | **46.47** | **85.93** | **59.24** | **55.33** | **81.57** | **30.68** | ### LoCo (Long Document Retrieval) | Model Name | Dimension | Sequence Length | Average (5) | QsmsumRetrieval | SummScreenRetrieval | QasperAbastractRetrieval | QasperTitleRetrieval | GovReportRetrieval | |:----:|:---:|:---:|:---:|:---:|:---:|:---:|:---:|:---:| | [gte-qwen1.5-7b](https://huggingface.co/Alibaba-NLP/gte-qwen1.5-7b) | 4096 | 32768 | 87.57 | 49.37 | 93.10 | 99.67 | 97.54 | 98.21 | | [gte-large-v1.5](https://huggingface.co/Alibaba-NLP/gte-large-v1.5) |1024 | 8192 | 86.71 | 44.55 | 92.61 | 99.82 | 97.81 | 98.74 | | [gte-base-v1.5](https://huggingface.co/Alibaba-NLP/gte-base-v1.5) | 768 | 8192 | 87.44 | 49.91 | 91.78 | 99.82 | 97.13 | 98.58 | | [gte-modernbert-base](https://huggingface.co/Alibaba-NLP/gte-modernbert-base) | 768 | 8192 | 88.88 | 54.45 | 93.00 | 99.82 | 98.03 | 98.70 | | [gte-reranker-modernbert-base](https://huggingface.co/Alibaba-NLP/gte-reranker-modernbert-base) | - | 8192 | 90.68 | 70.86 | 94.06 | 99.73 | 99.11 | 89.67 | ### COIR (Code Retrieval Task) | Model Name | Dimension | Sequence Length | Average(20) | CodeSearchNet-ccr-go | CodeSearchNet-ccr-java | CodeSearchNet-ccr-javascript | CodeSearchNet-ccr-php | CodeSearchNet-ccr-python | CodeSearchNet-ccr-ruby | CodeSearchNet-go | CodeSearchNet-java | CodeSearchNet-javascript | CodeSearchNet-php | CodeSearchNet-python | CodeSearchNet-ruby | apps | codefeedback-mt | codefeedback-st | codetrans-contest | codetrans-dl | cosqa | stackoverflow-qa | synthetic-text2sql | |:----:|:---:|:---:|:---:|:---:| 
:---: | :---: | :---: | :---: | :---: | :---: | :---: | :---: | :---: | :---: | :---: | :---: | :---: | :---: | :---: | :---: | :---: | :---: | :---: | | [gte-modernbert-base](https://huggingface.co/Alibaba-NLP/gte-modernbert-base) | 768 | 8192 | 79.31 | 94.15 | 93.57 | 94.27 | 91.51 | 93.93 | 90.63 | 88.32 | 83.27 | 76.05 | 85.12 | 88.16 | 77.59 | 57.54 | 82.34 | 85.95 | 71.89 | 35.46 | 43.47 | 91.2 | 61.87 | | [gte-reranker-modernbert-base](https://huggingface.co/Alibaba-NLP/gte-reranker-modernbert-base) | - | 8192 | 79.99 | 96.43 | 96.88 | 98.32 | 91.81 | 97.7 | 91.96 | 88.81 | 79.71 | 76.27 | 89.39 | 98.37 | 84.11 | 47.57 | 83.37 | 88.91 | 49.66 | 36.36 | 44.37 | 89.58 | 64.21 | ### BEIR | Model Name | Dimension | Sequence Length | Average(15) | ArguAna | ClimateFEVER | CQADupstackAndroidRetrieval | DBPedia | FEVER | FiQA2018 | HotpotQA | MSMARCO | NFCorpus | NQ | QuoraRetrieval | SCIDOCS | SciFact | Touche2020 | TRECCOVID | | :---: | :---: | :---: | :---: | :---: | :---: | :---: | :---: | :---: | :---: | :---: | :---: | :---: | :---: | :---: | :---: | :---: | :---: | :---: | | [gte-modernbert-base](https://huggingface.co/Alibaba-NLP/gte-modernbert-base) | 768 | 8192 | 55.33 | 72.68 | 37.74 | 42.63 | 41.79 | 91.03 | 48.81 | 69.47 | 40.9 | 36.44 | 57.62 | 88.55 | 21.29 | 77.4 | 21.68 | 81.95 | | [gte-reranker-modernbert-base](https://huggingface.co/Alibaba-NLP/gte-reranker-modernbert-base) | - | 8192 | 56.73 | 69.03 | 37.79 | 44.68 | 47.23 | 94.54 | 49.81 | 78.16 | 45.38 | 30.69 | 64.57 | 87.77 | 20.60 | 73.57 | 27.36 | 79.89 | ## Hiring We have open positions for **Research Interns** and **Full-Time Researchers** to join our team at Tongyi Lab. We are seeking passionate individuals with expertise in representation learning, LLM-driven information retrieval, Retrieval-Augmented Generation (RAG), and agent-based systems. Our team is located in the vibrant cities of **Beijing** and **Hangzhou**. If you are driven by curiosity and eager to make a meaningful impact through your work, we would love to hear from you. Please submit your resume along with a brief introduction to <a href="mailto:[email protected]">[email protected]</a>. ## Citation If you find our paper or models helpful, feel free to give us a cite. ``` @inproceedings{zhang2024mgte, title={mGTE: Generalized Long-Context Text Representation and Reranking Models for Multilingual Text Retrieval}, author={Zhang, Xin and Zhang, Yanzhao and Long, Dingkun and Xie, Wen and Dai, Ziqi and Tang, Jialong and Lin, Huan and Yang, Baosong and Xie, Pengjun and Huang, Fei and others}, booktitle={Proceedings of the 2024 Conference on Empirical Methods in Natural Language Processing: Industry Track}, pages={1393--1412}, year={2024} } @article{li2023towards, title={Towards general text embeddings with multi-stage contrastive learning}, author={Li, Zehan and Zhang, Xin and Zhang, Yanzhao and Long, Dingkun and Xie, Pengjun and Zhang, Meishan}, journal={arXiv preprint arXiv:2308.03281}, year={2023} } ```
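As a supplement to the Usage section above: each query-document pair yields a single relevance logit, so the raw `transformers` logits can be mapped onto the same [0, 1] range as the `CrossEncoder` scores by applying a sigmoid. A small post-processing sketch using the logit values printed in the `transformers` example:

```python
import torch

# Logits from the transformers example above
logits = torch.tensor([2.1387, 2.4609, -1.6729])

# A sigmoid over each single-logit output reproduces the CrossEncoder scores
# shown in the sentence-transformers example (~0.894, ~0.921, ~0.157).
scores = torch.sigmoid(logits)
print(scores)
```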
[ "SCIFACT" ]
microsoft/BiomedNLP-BiomedBERT-base-uncased-abstract
microsoft
fill-mask
[ "transformers", "pytorch", "jax", "bert", "fill-mask", "exbert", "en", "arxiv:2007.15779", "license:mit", "autotrain_compatible", "endpoints_compatible", "region:us" ]
"2022-03-02T23:29:05Z"
2023-11-06T18:04:15+00:00
134,643
72
--- language: en license: mit tags: - exbert widget: - text: '[MASK] is a tyrosine kinase inhibitor.' --- ## MSR BiomedBERT (abstracts only) <div style="border: 2px solid orange; border-radius:10px; padding:0px 10px; width: fit-content;"> * This model was previously named **"PubMedBERT (abstracts)"**. * You can either adopt the new model name "microsoft/BiomedNLP-BiomedBERT-base-uncased-abstract" or update your `transformers` library to version 4.22+ if you need to refer to the old name. </div> Pretraining large neural language models, such as BERT, has led to impressive gains on many natural language processing (NLP) tasks. However, most pretraining efforts focus on general domain corpora, such as newswire and Web. A prevailing assumption is that even domain-specific pretraining can benefit by starting from general-domain language models. [Recent work](https://arxiv.org/abs/2007.15779) shows that for domains with abundant unlabeled text, such as biomedicine, pretraining language models from scratch results in substantial gains over continual pretraining of general-domain language models. This BiomedBERT is pretrained from scratch using _abstracts_ from [PubMed](https://pubmed.ncbi.nlm.nih.gov/). This model achieves state-of-the-art performance on several biomedical NLP tasks, as shown on the [Biomedical Language Understanding and Reasoning Benchmark](https://aka.ms/BLURB). ## Citation If you find BiomedBERT useful in your research, please cite the following paper: ```latex @misc{pubmedbert, author = {Yu Gu and Robert Tinn and Hao Cheng and Michael Lucas and Naoto Usuyama and Xiaodong Liu and Tristan Naumann and Jianfeng Gao and Hoifung Poon}, title = {Domain-Specific Language Model Pretraining for Biomedical Natural Language Processing}, year = {2020}, eprint = {arXiv:2007.15779}, } ``` <a href="https://huggingface.co/exbert/?model=microsoft/BiomedNLP-PubMedBERT-base-uncased-abstract&modelKind=bidirectional&sentence=Gefitinib%20is%20an%20EGFR%20tyrosine%20kinase%20inhibitor,%20which%20is%20often%20used%20for%20breast%20cancer%20and%20NSCLC%20treatment.&layer=10&heads=..0,1,2,3,4,5,6,7,8,9,10,11&threshold=0.7&tokenInd=17&tokenSide=right&maskInds=..&hideClsSep=true"> <img width="300px" src="https://cdn-media.huggingface.co/exbert/button.png"> </a>
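For reference, a minimal fill-mask sketch that uses the widget prompt from this card's metadata; the top predictions are not reproduced here and will depend on the checkpoint.

```python
from transformers import pipeline

unmasker = pipeline(
    'fill-mask',
    model='microsoft/BiomedNLP-BiomedBERT-base-uncased-abstract',
)

# Prompt taken from the widget example in the model card metadata
for prediction in unmasker('[MASK] is a tyrosine kinase inhibitor.'):
    print(prediction['token_str'], round(prediction['score'], 3))
```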
[ "BLURB" ]
Alibaba-NLP/gte-modernbert-base
Alibaba-NLP
sentence-similarity
[ "transformers", "pytorch", "onnx", "safetensors", "modernbert", "feature-extraction", "sentence-transformers", "mteb", "embedding", "transformers.js", "sentence-similarity", "en", "arxiv:2308.03281", "base_model:answerdotai/ModernBERT-base", "base_model:finetune:answerdotai/ModernBERT-base", "license:apache-2.0", "endpoints_compatible", "region:us" ]
"2025-01-20T03:03:48Z"
2025-01-24T12:31:23+00:00
134,607
119
--- base_model: - answerdotai/ModernBERT-base language: - en library_name: transformers license: apache-2.0 pipeline_tag: sentence-similarity tags: - sentence-transformers - mteb - embedding - transformers.js base_model_relation: finetune --- # gte-modernbert-base We are excited to introduce the `gte-modernbert` series of models, which are built upon the latest modernBERT pre-trained encoder-only foundation models. The `gte-modernbert` series models include both text embedding models and rerank models. The `gte-modernbert` models demonstrates competitive performance in several text embedding and text retrieval evaluation tasks when compared to similar-scale models from the current open-source community. This includes assessments such as MTEB, LoCO, and COIR evaluation. ## Model Overview - Developed by: Tongyi Lab, Alibaba Group - Model Type: Text Embedding - Primary Language: English - Model Size: 149M - Max Input Length: 8192 tokens - Output Dimension: 768 ### Model list | Models | Language | Model Type | Model Size | Max Seq. Length | Dimension | MTEB-en | BEIR | LoCo | CoIR | |:--------------------------------------------------------------------------------------:|:--------:|:----------------------:|:----------:|:---------------:|:---------:|:-------:|:----:|:----:|:----:| | [`gte-modernbert-base`](https://huggingface.co/Alibaba-NLP/gte-modernbert-base) | English | text embedding | 149M | 8192 | 768 | 64.38 | 55.33 | 87.57 | 79.31 | | [`gte-reranker-modernbert-base`](https://huggingface.co/Alibaba-NLP/gte-reranker-modernbert-base) | English | text reranker | 149M | 8192 | - | - | 56.19 | 90.68 | 79.99 | ## Usage > [!TIP] > For `transformers` and `sentence-transformers`, if your GPU supports it, the efficient Flash Attention 2 will be used automatically if you have `flash_attn` installed. It is not mandatory. 
> > ```bash > pip install flash_attn > ``` Use with `transformers` ```python # Requires transformers>=4.48.0 import torch.nn.functional as F from transformers import AutoModel, AutoTokenizer input_texts = [ "what is the capital of China?", "how to implement quick sort in python?", "Beijing", "sorting algorithms" ] model_path = "Alibaba-NLP/gte-modernbert-base" tokenizer = AutoTokenizer.from_pretrained(model_path) model = AutoModel.from_pretrained(model_path) # Tokenize the input texts batch_dict = tokenizer(input_texts, max_length=8192, padding=True, truncation=True, return_tensors='pt') outputs = model(**batch_dict) embeddings = outputs.last_hidden_state[:, 0] # (Optionally) normalize embeddings embeddings = F.normalize(embeddings, p=2, dim=1) scores = (embeddings[:1] @ embeddings[1:].T) * 100 print(scores.tolist()) # [[42.89073944091797, 71.30911254882812, 33.664554595947266]] ``` Use with `sentence-transformers`: ```python # Requires transformers>=4.48.0 from sentence_transformers import SentenceTransformer from sentence_transformers.util import cos_sim input_texts = [ "what is the capital of China?", "how to implement quick sort in python?", "Beijing", "sorting algorithms" ] model = SentenceTransformer("Alibaba-NLP/gte-modernbert-base") embeddings = model.encode(input_texts) print(embeddings.shape) # (4, 768) similarities = cos_sim(embeddings[0], embeddings[1:]) print(similarities) # tensor([[0.4289, 0.7131, 0.3366]]) ``` Use with `transformers.js`: ```js // npm i @huggingface/transformers import { pipeline, matmul } from "@huggingface/transformers"; // Create a feature extraction pipeline const extractor = await pipeline( "feature-extraction", "Alibaba-NLP/gte-modernbert-base", { dtype: "fp32" }, // Supported options: "fp32", "fp16", "q8", "q4", "q4f16" ); // Embed queries and documents const embeddings = await extractor( [ "what is the capital of China?", "how to implement quick sort in python?", "Beijing", "sorting algorithms", ], { pooling: "cls", normalize: true }, ); // Compute similarity scores const similarities = (await matmul(embeddings.slice([0, 1]), embeddings.slice([1, null]).transpose(1, 0))).mul(100); console.log(similarities.tolist()); // [[42.89077377319336, 71.30916595458984, 33.66455841064453]] ``` ## Training Details The `gte-modernbert` series of models follows the training scheme of the previous [GTE models](https://huggingface.co/collections/Alibaba-NLP/gte-models-6680f0b13f885cb431e6d469), with the only difference being that the pre-training language model base has been replaced from [GTE-MLM](https://huggingface.co/Alibaba-NLP/gte-en-mlm-base) to [ModernBert](https://huggingface.co/answerdotai/ModernBERT-base). For more training details, please refer to our paper: [mGTE: Generalized Long-Context Text Representation and Reranking Models for Multilingual Text Retrieval](https://aclanthology.org/2024.emnlp-industry.103/) ## Evaluation ### MTEB The results of other models are retrieved from [MTEB leaderboard](https://huggingface.co/spaces/mteb/leaderboard). Given that all models in the `gte-modernbert` series have a size of less than 1B parameters, we focused exclusively on the results of models under 1B from the MTEB leaderboard. | Model Name | Param Size (M) | Dimension | Sequence Length | Average (56) | Class. (12) | Clust. (11) | Pair Class. (3) | Reran. (4) | Retr. (15) | STS (10) | Summ. 
(1) | |:------------------------------------------------------------------------------------------------:|:--------------:|:---------:|:---------------:|:------------:|:-----------:|:---:|:---:|:---:|:---:|:-----------:|:--------:| | [mxbai-embed-large-v1](https://huggingface.co/mixedbread-ai/mxbai-embed-large-v1) | 335 | 1024 | 512 | 64.68 | 75.64 | 46.71 | 87.2 | 60.11 | 54.39 | 85 | 32.71 | | [multilingual-e5-large-instruct](https://huggingface.co/intfloat/multilingual-e5-large-instruct) | 560 | 1024 | 514 | 64.41 | 77.56 | 47.1 | 86.19 | 58.58 | 52.47 | 84.78 | 30.39 | | [bge-large-en-v1.5](https://huggingface.co/BAAI/bge-large-en-v1.5) | 335 | 1024 | 512 | 64.23 | 75.97 | 46.08 | 87.12 | 60.03 | 54.29 | 83.11 | 31.61 | | [gte-base-en-v1.5](https://huggingface.co/Alibaba-NLP/gte-base-en-v1.5) | 137 | 768 | 8192 | 64.11 | 77.17 | 46.82 | 85.33 | 57.66 | 54.09 | 81.97 | 31.17 | | [bge-base-en-v1.5](https://huggingface.co/BAAI/bge-base-en-v1.5) | 109 | 768 | 512 | 63.55 | 75.53 | 45.77 | 86.55 | 58.86 | 53.25 | 82.4 | 31.07 | | [gte-large-en-v1.5](https://huggingface.co/Alibaba-NLP/gte-large-en-v1.5) | 409 | 1024 | 8192 | 65.39 | 77.75 | 47.95 | 84.63 | 58.50 | 57.91 | 81.43 | 30.91 | | [modernbert-embed-base](https://huggingface.co/nomic-ai/modernbert-embed-base) | 149 | 768 | 8192 | 62.62 | 74.31 | 44.98 | 83.96 | 56.42 | 52.89 | 81.78 | 31.39 | | [nomic-embed-text-v1.5](https://huggingface.co/nomic-ai/nomic-embed-text-v1.5) | | 768 | 8192 | 62.28 | 73.55 | 43.93 | 84.61 | 55.78 | 53.01| 81.94 | 30.4 | | [gte-multilingual-base](https://huggingface.co/Alibaba-NLP/gte-multilingual-base) | 305 | 768 | 8192 | 61.4 | 70.89 | 44.31 | 84.24 | 57.47 |51.08 | 82.11 | 30.58 | | [jina-embeddings-v3](https://huggingface.co/jinaai/jina-embeddings-v3) | 572 | 1024 | 8192 | 65.51 | 82.58 |45.21 |84.01 |58.13 |53.88 | 85.81 | 29.71 | | [**gte-modernbert-base**](https://huggingface.co/Alibaba-NLP/gte-modernbert-base) | 149 | 768 | 8192 | **64.38** | **76.99** | **46.47** | **85.93** | **59.24** | **55.33** | **81.57** | **30.68** | ### LoCo (Long Document Retrieval)(NDCG@10) | Model Name | Dimension | Sequence Length | Average (5) | QsmsumRetrieval | SummScreenRetrieval | QasperAbastractRetrieval | QasperTitleRetrieval | GovReportRetrieval | |:----:|:---:|:---:|:---:|:---:|:---:|:---:|:---:|:---:| | [gte-qwen1.5-7b](https://huggingface.co/Alibaba-NLP/gte-qwen1.5-7b) | 4096 | 32768 | 87.57 | 49.37 | 93.10 | 99.67 | 97.54 | 98.21 | | [gte-large-v1.5](https://huggingface.co/Alibaba-NLP/gte-large-v1.5) |1024 | 8192 | 86.71 | 44.55 | 92.61 | 99.82 | 97.81 | 98.74 | | [gte-base-v1.5](https://huggingface.co/Alibaba-NLP/gte-base-v1.5) | 768 | 8192 | 87.44 | 49.91 | 91.78 | 99.82 | 97.13 | 98.58 | | [gte-modernbert-base](https://huggingface.co/Alibaba-NLP/gte-modernbert-base) | 768 | 8192 | 88.88 | 54.45 | 93.00 | 99.82 | 98.03 | 98.70 | | [gte-reranker-modernbert-base](https://huggingface.co/Alibaba-NLP/gte-reranker-modernbert-base) | - | 8192 | 90.68 | 70.86 | 94.06 | 99.73 | 99.11 | 89.67 | ### COIR (Code Retrieval Task)(NDCG@10) | Model Name | Dimension | Sequence Length | Average(20) | CodeSearchNet-ccr-go | CodeSearchNet-ccr-java | CodeSearchNet-ccr-javascript | CodeSearchNet-ccr-php | CodeSearchNet-ccr-python | CodeSearchNet-ccr-ruby | CodeSearchNet-go | CodeSearchNet-java | CodeSearchNet-javascript | CodeSearchNet-php | CodeSearchNet-python | CodeSearchNet-ruby | apps | codefeedback-mt | codefeedback-st | codetrans-contest | codetrans-dl | cosqa | stackoverflow-qa | synthetic-text2sql | 
|:----:|:---:|:---:|:---:|:---:| :---: | :---: | :---: | :---: | :---: | :---: | :---: | :---: | :---: | :---: | :---: | :---: | :---: | :---: | :---: | :---: | :---: | :---: | :---: | | [gte-modernbert-base](https://huggingface.co/Alibaba-NLP/gte-modernbert-base) | 768 | 8192 | 79.31 | 94.15 | 93.57 | 94.27 | 91.51 | 93.93 | 90.63 | 88.32 | 83.27 | 76.05 | 85.12 | 88.16 | 77.59 | 57.54 | 82.34 | 85.95 | 71.89 | 35.46 | 43.47 | 91.2 | 61.87 | | [gte-reranker-modernbert-base](https://huggingface.co/Alibaba-NLP/gte-reranker-modernbert-base) | - | 8192 | 79.99 | 96.43 | 96.88 | 98.32 | 91.81 | 97.7 | 91.96 | 88.81 | 79.71 | 76.27 | 89.39 | 98.37 | 84.11 | 47.57 | 83.37 | 88.91 | 49.66 | 36.36 | 44.37 | 89.58 | 64.21 | ### BEIR(NDCG@10) | Model Name | Dimension | Sequence Length | Average(15) | ArguAna | ClimateFEVER | CQADupstackAndroidRetrieval | DBPedia | FEVER | FiQA2018 | HotpotQA | MSMARCO | NFCorpus | NQ | QuoraRetrieval | SCIDOCS | SciFact | Touche2020 | TRECCOVID | | :---: | :---: | :---: | :---: | :---: | :---: | :---: | :---: | :---: | :---: | :---: | :---: | :---: | :---: | :---: | :---: | :---: | :---: | :---: | | [gte-modernbert-base](https://huggingface.co/Alibaba-NLP/gte-modernbert-base) | 768 | 8192 | 55.33 | 72.68 | 37.74 | 42.63 | 41.79 | 91.03 | 48.81 | 69.47 | 40.9 | 36.44 | 57.62 | 88.55 | 21.29 | 77.4 | 21.68 | 81.95 | | [gte-reranker-modernbert-base](https://huggingface.co/Alibaba-NLP/gte-reranker-modernbert-base) | - | 8192 | 56.73 | 69.03 | 37.79 | 44.68 | 47.23 | 94.54 | 49.81 | 78.16 | 45.38 | 30.69 | 64.57 | 87.77 | 20.60 | 73.57 | 27.36 | 79.89 | ## Hiring We have open positions for **Research Interns** and **Full-Time Researchers** to join our team at Tongyi Lab. We are seeking passionate individuals with expertise in representation learning, LLM-driven information retrieval, Retrieval-Augmented Generation (RAG), and agent-based systems. Our team is located in the vibrant cities of **Beijing** and **Hangzhou**. If you are driven by curiosity and eager to make a meaningful impact through your work, we would love to hear from you. Please submit your resume along with a brief introduction to <a href="mailto:[email protected]">[email protected]</a>. ## Citation If you find our paper or models helpful, feel free to give us a cite. ``` @inproceedings{zhang2024mgte, title={mGTE: Generalized Long-Context Text Representation and Reranking Models for Multilingual Text Retrieval}, author={Zhang, Xin and Zhang, Yanzhao and Long, Dingkun and Xie, Wen and Dai, Ziqi and Tang, Jialong and Lin, Huan and Yang, Baosong and Xie, Pengjun and Huang, Fei and others}, booktitle={Proceedings of the 2024 Conference on Empirical Methods in Natural Language Processing: Industry Track}, pages={1393--1412}, year={2024} } @article{li2023towards, title={Towards general text embeddings with multi-stage contrastive learning}, author={Li, Zehan and Zhang, Xin and Zhang, Yanzhao and Long, Dingkun and Xie, Pengjun and Zhang, Meishan}, journal={arXiv preprint arXiv:2308.03281}, year={2023} } ```
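Since the model list above pairs this embedding model with `gte-reranker-modernbert-base`, a minimal retrieve-then-rerank sketch combining the two is shown below. This is an illustrative workflow, not an official recipe; the document set and the top-k value are arbitrary.

```python
from sentence_transformers import SentenceTransformer, CrossEncoder
from sentence_transformers.util import cos_sim

embedder = SentenceTransformer("Alibaba-NLP/gte-modernbert-base")
reranker = CrossEncoder("Alibaba-NLP/gte-reranker-modernbert-base")

query = "how to implement quick sort in python?"
docs = ["Introduction of quick sort", "The weather is nice today", "sorting algorithms"]

# Stage 1: dense retrieval with the embedding model
query_emb = embedder.encode([query])
doc_emb = embedder.encode(docs)
top_k = cos_sim(query_emb, doc_emb)[0].argsort(descending=True)[:2].tolist()

# Stage 2: rerank the retrieved candidates with the cross-encoder
pairs = [[query, docs[i]] for i in top_k]
scores = reranker.predict(pairs).tolist()
for score, doc in sorted(zip(scores, [docs[i] for i in top_k]), reverse=True):
    print(round(score, 3), doc)
```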
[ "SCIFACT" ]
EleutherAI/pythia-160m
EleutherAI
text-generation
[ "transformers", "pytorch", "safetensors", "gpt_neox", "text-generation", "causal-lm", "pythia", "en", "dataset:EleutherAI/pile", "arxiv:2304.01373", "arxiv:2101.00027", "arxiv:2201.07311", "license:apache-2.0", "autotrain_compatible", "text-generation-inference", "endpoints_compatible", "region:us" ]
"2023-02-08T19:25:46Z"
2023-07-09T15:52:09+00:00
134,206
30
--- datasets: - EleutherAI/pile language: - en license: apache-2.0 tags: - pytorch - causal-lm - pythia --- The *Pythia Scaling Suite* is a collection of models developed to facilitate interpretability research [(see paper)](https://arxiv.org/pdf/2304.01373.pdf). It contains two sets of eight models of sizes 70M, 160M, 410M, 1B, 1.4B, 2.8B, 6.9B, and 12B. For each size, there are two models: one trained on the Pile, and one trained on the Pile after the dataset has been globally deduplicated. All 8 model sizes are trained on the exact same data, in the exact same order. We also provide 154 intermediate checkpoints per model, hosted on Hugging Face as branches. The Pythia model suite was deliberately designed to promote scientific research on large language models, especially interpretability research. Despite not centering downstream performance as a design goal, we find the models <a href="#evaluations">match or exceed</a> the performance of similar and same-sized models, such as those in the OPT and GPT-Neo suites. <details> <summary style="font-weight:600">Details on previous early release and naming convention.</summary> Previously, we released an early version of the Pythia suite to the public. However, we decided to retrain the model suite to address a few hyperparameter discrepancies. This model card <a href="#changelog">lists the changes</a>; see appendix B in the Pythia paper for further discussion. We found no difference in benchmark performance between the two Pythia versions. The old models are [still available](https://huggingface.co/models?other=pythia_v0), but we suggest the retrained suite if you are just starting to use Pythia.<br> **This is the current release.** Please note that all models in the *Pythia* suite were renamed in January 2023. For clarity, a <a href="#naming-convention-and-parameter-count">table comparing the old and new names</a> is provided in this model card, together with exact parameter counts. </details> <br> # Pythia-160M ## Model Details - Developed by: [EleutherAI](http://eleuther.ai) - Model type: Transformer-based Language Model - Language: English - Learn more: [Pythia's GitHub repository](https://github.com/EleutherAI/pythia) for training procedure, config files, and details on how to use. [See paper](https://arxiv.org/pdf/2304.01373.pdf) for more evals and implementation details. - Library: [GPT-NeoX](https://github.com/EleutherAI/gpt-neox) - License: Apache 2.0 - Contact: to ask questions about this model, join the [EleutherAI Discord](https://discord.gg/zBGx3azzUn), and post them in `#release-discussion`. Please read the existing *Pythia* documentation before asking about it in the EleutherAI Discord. For general correspondence: [contact@eleuther. ai](mailto:[email protected]). 
<figure> | Pythia model | Non-Embedding Params | Layers | Model Dim | Heads | Batch Size | Learning Rate | Equivalent Models | | -----------: | -------------------: | :----: | :-------: | :---: | :--------: | :-------------------: | :--------------------: | | 70M | 18,915,328 | 6 | 512 | 8 | 2M | 1.0 x 10<sup>-3</sup> | — | | 160M | 85,056,000 | 12 | 768 | 12 | 2M | 6.0 x 10<sup>-4</sup> | GPT-Neo 125M, OPT-125M | | 410M | 302,311,424 | 24 | 1024 | 16 | 2M | 3.0 x 10<sup>-4</sup> | OPT-350M | | 1.0B | 805,736,448 | 16 | 2048 | 8 | 2M | 3.0 x 10<sup>-4</sup> | — | | 1.4B | 1,208,602,624 | 24 | 2048 | 16 | 2M | 2.0 x 10<sup>-4</sup> | GPT-Neo 1.3B, OPT-1.3B | | 2.8B | 2,517,652,480 | 32 | 2560 | 32 | 2M | 1.6 x 10<sup>-4</sup> | GPT-Neo 2.7B, OPT-2.7B | | 6.9B | 6,444,163,072 | 32 | 4096 | 32 | 2M | 1.2 x 10<sup>-4</sup> | OPT-6.7B | | 12B | 11,327,027,200 | 36 | 5120 | 40 | 2M | 1.2 x 10<sup>-4</sup> | — | <figcaption>Engineering details for the <i>Pythia Suite</i>. Deduped and non-deduped models of a given size have the same hyperparameters. “Equivalent” models have <b>exactly</b> the same architecture, and the same number of non-embedding parameters.</figcaption> </figure> ## Uses and Limitations ### Intended Use The primary intended use of Pythia is research on the behavior, functionality, and limitations of large language models. This suite is intended to provide a controlled setting for performing scientific experiments. We also provide 154 checkpoints per model: initial `step0`, 10 log-spaced checkpoints `step{1,2,4...512}`, and 143 evenly-spaced checkpoints from `step1000` to `step143000`. These checkpoints are hosted on Hugging Face as branches. Note that branch `143000` corresponds exactly to the model checkpoint on the `main` branch of each model. You may also further fine-tune and adapt Pythia-160M for deployment, as long as your use is in accordance with the Apache 2.0 license. Pythia models work with the Hugging Face [Transformers Library](https://huggingface.co/docs/transformers/index). If you decide to use pre-trained Pythia-160M as a basis for your fine-tuned model, please conduct your own risk and bias assessment. ### Out-of-scope use The Pythia Suite is **not** intended for deployment. It is not a in itself a product and cannot be used for human-facing interactions. For example, the model may generate harmful or offensive text. Please evaluate the risks associated with your particular use case. Pythia models are English-language only, and are not suitable for translation or generating text in other languages. Pythia-160M has not been fine-tuned for downstream contexts in which language models are commonly deployed, such as writing genre prose, or commercial chatbots. This means Pythia-160M will **not** respond to a given prompt the way a product like ChatGPT does. This is because, unlike this model, ChatGPT was fine-tuned using methods such as Reinforcement Learning from Human Feedback (RLHF) to better “follow” human instructions. ### Limitations and biases The core functionality of a large language model is to take a string of text and predict the next token. The token used by the model need not produce the most “accurate” text. Never rely on Pythia-160M to produce factually accurate output. This model was trained on [the Pile](https://pile.eleuther.ai/), a dataset known to contain profanity and texts that are lewd or otherwise offensive. 
See [Section 6 of the Pile paper](https://arxiv.org/abs/2101.00027) for a discussion of documented biases with regards to gender, religion, and race. Pythia-160M may produce socially unacceptable or undesirable text, *even if* the prompt itself does not include anything explicitly offensive. If you plan on using text generated through, for example, the Hosted Inference API, we recommend having a human curate the outputs of this language model before presenting it to other people. Please inform your audience that the text was generated by Pythia-160M. ### Quickstart Pythia models can be loaded and used via the following code, demonstrated here for the third `pythia-70m-deduped` checkpoint: ```python from transformers import GPTNeoXForCausalLM, AutoTokenizer model = GPTNeoXForCausalLM.from_pretrained( "EleutherAI/pythia-70m-deduped", revision="step3000", cache_dir="./pythia-70m-deduped/step3000", ) tokenizer = AutoTokenizer.from_pretrained( "EleutherAI/pythia-70m-deduped", revision="step3000", cache_dir="./pythia-70m-deduped/step3000", ) inputs = tokenizer("Hello, I am", return_tensors="pt") tokens = model.generate(**inputs) tokenizer.decode(tokens[0]) ``` Revision/branch `step143000` corresponds exactly to the model checkpoint on the `main` branch of each model.<br> For more information on how to use all Pythia models, see [documentation on GitHub](https://github.com/EleutherAI/pythia). ## Training ### Training data [The Pile](https://pile.eleuther.ai/) is a 825GiB general-purpose dataset in English. It was created by EleutherAI specifically for training large language models. It contains texts from 22 diverse sources, roughly broken down into five categories: academic writing (e.g. arXiv), internet (e.g. CommonCrawl), prose (e.g. Project Gutenberg), dialogue (e.g. YouTube subtitles), and miscellaneous (e.g. GitHub, Enron Emails). See [the Pile paper](https://arxiv.org/abs/2101.00027) for a breakdown of all data sources, methodology, and a discussion of ethical implications. Consult [the datasheet](https://arxiv.org/abs/2201.07311) for more detailed documentation about the Pile and its component datasets. The Pile can be downloaded from the [official website](https://pile.eleuther.ai/), or from a [community mirror](https://the-eye.eu/public/AI/pile/).<br> The Pile was **not** deduplicated before being used to train Pythia-160M. ### Training procedure All models were trained on the exact same data, in the exact same order. Each model saw 299,892,736,000 tokens during training, and 143 checkpoints for each model are saved every 2,097,152,000 tokens, spaced evenly throughout training, from `step1000` to `step143000` (which is the same as `main`). In addition, we also provide frequent early checkpoints: `step0` and `step{1,2,4...512}`. This corresponds to training for just under 1 epoch on the Pile for non-deduplicated models, and about 1.5 epochs on the deduplicated Pile. All *Pythia* models trained for 143000 steps at a batch size of 2M (2,097,152 tokens).<br> See [GitHub](https://github.com/EleutherAI/pythia) for more details on training procedure, including [how to reproduce it](https://github.com/EleutherAI/pythia/blob/main/README.md#reproducing-training).<br> Pythia uses the same tokenizer as [GPT-NeoX- 20B](https://huggingface.co/EleutherAI/gpt-neox-20b). ## Evaluations All 16 *Pythia* models were evaluated using the [LM Evaluation Harness](https://github.com/EleutherAI/lm-evaluation-harness). 
You can access the results by model and step at `results/json/*` in the [GitHub repository](https://github.com/EleutherAI/pythia/tree/main/results/json/).<br> Expand the sections below to see plots of evaluation results for all Pythia and Pythia-deduped models compared with OPT and BLOOM. <details> <summary>LAMBADA – OpenAI</summary> <img src="/EleutherAI/pythia-12b/resolve/main/eval_plots/lambada_openai_v1.png" style="width:auto"/> </details> <details> <summary>Physical Interaction: Question Answering (PIQA)</summary> <img src="/EleutherAI/pythia-12b/resolve/main/eval_plots/piqa_v1.png" style="width:auto"/> </details> <details> <summary>WinoGrande</summary> <img src="/EleutherAI/pythia-12b/resolve/main/eval_plots/winogrande_v1.png" style="width:auto"/> </details> <details> <summary>AI2 Reasoning Challenge—Easy Set</summary> <img src="/EleutherAI/pythia-12b/resolve/main/eval_plots/arc_easy_v1.png" style="width:auto"/> </details> <details> <summary>SciQ</summary> <img src="/EleutherAI/pythia-12b/resolve/main/eval_plots/sciq_v1.png" style="width:auto"/> </details> ## Changelog This section compares differences between previously released [Pythia v0](https://huggingface.co/models?other=pythia_v0) and the current models. See Appendix B of the Pythia paper for further discussion of these changes and the motivation behind them. We found that retraining Pythia had no impact on benchmark performance. - All model sizes are now trained with uniform batch size of 2M tokens. Previously, the models of size 160M, 410M, and 1.4B parameters were trained with batch sizes of 4M tokens. - We added checkpoints at initialization (step 0) and steps {1,2,4,8,16,32,64, 128,256,512} in addition to every 1000 training steps. - Flash Attention was used in the new retrained suite. - We remedied a minor inconsistency that existed in the original suite: all models of size 2.8B parameters or smaller had a learning rate (LR) schedule which decayed to a minimum LR of 10% the starting LR rate, but the 6.9B and 12B models all used an LR schedule which decayed to a minimum LR of 0. In the redone training runs, we rectified this inconsistency: all models now were trained with LR decaying to a minimum of 0.1× their maximum LR. ### Naming convention and parameter count *Pythia* models were renamed in January 2023. It is possible that the old naming convention still persists in some documentation by accident. The current naming convention (70M, 160M, etc.) is based on total parameter count. <figure style="width:32em"> | current Pythia suffix | old suffix | total params | non-embedding params | | --------------------: | ---------: | -------------: | -------------------: | | 70M | 19M | 70,426,624 | 18,915,328 | | 160M | 125M | 162,322,944 | 85,056,000 | | 410M | 350M | 405,334,016 | 302,311,424 | | 1B | 800M | 1,011,781,632 | 805,736,448 | | 1.4B | 1.3B | 1,414,647,808 | 1,208,602,624 | | 2.8B | 2.7B | 2,775,208,960 | 2,517,652,480 | | 6.9B | 6.7B | 6,857,302,016 | 6,444,163,072 | | 12B | 13B | 11,846,072,320 | 11,327,027,200 | </figure>
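Because every checkpoint is published as a branch, comparing the same model across training steps only requires changing the `revision` argument used in the Quickstart above. A minimal, illustrative sketch (the chosen steps and the prompt are arbitrary):

```python
import torch
from transformers import GPTNeoXForCausalLM, AutoTokenizer

model_name = "EleutherAI/pythia-160m"
tokenizer = AutoTokenizer.from_pretrained(model_name)
inputs = tokenizer("The Pile is a large English text corpus.", return_tensors="pt")

# Compare an early checkpoint with the final one (branch `main` == `step143000`)
for revision in ["step1000", "step143000"]:
    model = GPTNeoXForCausalLM.from_pretrained(model_name, revision=revision)
    with torch.no_grad():
        loss = model(**inputs, labels=inputs["input_ids"]).loss
    print(revision, round(loss.item(), 3))
```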
[ "SCIQ" ]
nomic-ai/modernbert-embed-base
nomic-ai
sentence-similarity
[ "sentence-transformers", "onnx", "safetensors", "modernbert", "feature-extraction", "sentence-similarity", "mteb", "transformers.js", "en", "arxiv:2402.01613", "base_model:answerdotai/ModernBERT-base", "base_model:finetune:answerdotai/ModernBERT-base", "license:apache-2.0", "model-index", "autotrain_compatible", "endpoints_compatible", "region:us" ]
"2024-12-29T23:51:30Z"
2025-01-24T15:31:46+00:00
132,008
199
--- base_model: - answerdotai/ModernBERT-base - nomic-ai/modernbert-embed-unsupervised language: - en license: apache-2.0 pipeline_tag: sentence-similarity tags: - sentence-transformers - feature-extraction - sentence-similarity - mteb - transformers.js base_model_relation: finetune model-index: - name: binarize_False results: - task: type: Classification dataset: name: MTEB AmazonCounterfactualClassification (en) type: None config: en split: test revision: e8379541af4e31359cca9fbcf4b00f2671dba205 metrics: - type: accuracy value: 78.13432835820896 - type: ap value: 42.190424731303246 - type: f1 value: 72.34446401534811 - task: type: Classification dataset: name: MTEB AmazonPolarityClassification type: None config: default split: test revision: e2d317d38cd51312af73b3d32a06d1a08b442046 metrics: - type: accuracy value: 93.093825 - type: ap value: 90.03727505544286 - type: f1 value: 93.0874055138833 - task: type: Classification dataset: name: MTEB AmazonReviewsClassification (en) type: None config: en split: test revision: 1399c76144fd37290681b995c656ef9b2e06e26d metrics: - type: accuracy value: 48.428000000000004 - type: f1 value: 47.74311520203536 - task: type: Retrieval dataset: name: MTEB ArguAna type: None config: default split: test revision: c22ab2a51041ffd869aaddef7af8d8215647e41a metrics: - type: map_at_1 value: 23.898 - type: map_at_10 value: 39.775 - type: map_at_100 value: 40.827000000000005 - type: map_at_1000 value: 40.837 - type: map_at_20 value: 40.604 - type: map_at_3 value: 34.519 - type: map_at_5 value: 37.307 - type: mrr_at_1 value: 24.395 - type: mrr_at_10 value: 39.963 - type: mrr_at_100 value: 41.014 - type: mrr_at_1000 value: 41.024 - type: mrr_at_20 value: 40.791 - type: mrr_at_3 value: 34.732 - type: mrr_at_5 value: 37.480999999999995 - type: ndcg_at_1 value: 23.898 - type: ndcg_at_10 value: 48.962 - type: ndcg_at_100 value: 53.386 - type: ndcg_at_1000 value: 53.634 - type: ndcg_at_20 value: 51.898999999999994 - type: ndcg_at_3 value: 38.034 - type: ndcg_at_5 value: 43.036 - type: precision_at_1 value: 23.898 - type: precision_at_10 value: 7.852 - type: precision_at_100 value: 0.9769999999999999 - type: precision_at_1000 value: 0.1 - type: precision_at_20 value: 4.4990000000000006 - type: precision_at_3 value: 16.073999999999998 - type: precision_at_5 value: 12.063 - type: recall_at_1 value: 23.898 - type: recall_at_10 value: 78.521 - type: recall_at_100 value: 97.724 - type: recall_at_1000 value: 99.644 - type: recall_at_20 value: 89.972 - type: recall_at_3 value: 48.222 - type: recall_at_5 value: 60.313 - task: type: Clustering dataset: name: MTEB ArxivClusteringP2P type: None config: default split: test revision: a122ad7f3f0291bf49cc6f4d32aa80929df69d5d metrics: - type: v_measure value: 47.69067314293749 - type: v_measures value: - 0.4953006738713271 - 0.500982950617211 - 0.490168788349858 - 0.4924060458428337 - 0.475176328561399 - 0.47446297663785564 - 0.46948807073019405 - 0.4772028638329531 - 0.48735189935310713 - 0.48641173887761663 - 0.5575029526712674 - 0.5574020390232136 - 0.5536066904942645 - 0.5536169413675474 - 0.5566938602585987 - 0.5561143054736898 - 0.561846457174852 - 0.5511643632282144 - 0.5514762015499715 - 0.551824471283655 - 0.5148077891863135 - 0.29015461701593837 - 0.4430422977323321 - 0.40857527197890686 - 0.3479983114229163 - 0.27582001934225003 - 0.29595564003512503 - 0.22528676611734755 - 0.3073271865740206 - 1.0 - 0.2749401557058413 - 0.4953006738713271 - 0.500982950617211 - 0.490168788349858 - 0.4924060458428337 - 0.475176328561399 - 
0.48735189935310713 - 0.48641173887761663 - 0.5575029526712674 - 0.5574020390232136 - 0.5536066904942645 - 0.5536169413675474 - 0.5566938602585987 - 0.5561143054736898 - 0.561846457174852 - 0.5511643632282144 - 0.5514762015499715 - 0.551824471283655 - 0.5148077891863135 - 0.29015461701593837 - 0.4430422977323321 - 0.40857527197890686 - 0.3479983114229163 - 0.27582001934225003 - 0.29595564003512503 - 0.22528676611734755 - 0.3073271865740206 - 1.0 - 0.2749401557058413 - 0.4953006738713271 - 0.500982950617211 - 0.490168788349858 - 0.4924060458428337 - 0.475176328561399 - 0.47446297663785564 - 0.46948807073019405 - 0.4772028638329531 - 0.48735189935310713 - 0.48641173887761663 - 0.5575029526712674 - 0.5574020390232136 - 0.5536066904942645 - 0.5536169413675474 - 0.5566938602585987 - 0.5561143054736898 - 0.561846457174852 - 0.5511643632282144 - 0.5514762015499715 - 0.551824471283655 - 0.5148077891863135 - 0.29015461701593837 - 0.4430422977323321 - 0.40857527197890686 - 0.3479983114229163 - 0.27582001934225003 - 0.29595564003512503 - 0.22528676611734755 - 0.3073271865740206 - 1.0 - 0.2749401557058413 - 0.4953006738713271 - 0.500982950617211 - 0.490168788349858 - 0.4924060458428337 - 0.475176328561399 - 0.47446297663785564 - 0.46948807073019405 - 0.4772028638329531 - 0.48735189935310713 - 0.48641173887761663 - 0.5575029526712674 - 0.5574020390232136 - 0.5536066904942645 - 0.5536169413675474 - 0.5566938602585987 - 0.5561143054736898 - 0.561846457174852 - 0.5511643632282144 - 0.5514762015499715 - 0.551824471283655 - 0.5148077891863135 - 0.29015461701593837 - 0.4430422977323321 - 0.40857527197890686 - 0.3479983114229163 - 0.27582001934225003 - 0.29595564003512503 - 0.22528676611734755 - 0.3073271865740206 - 1.0 - 0.2749401557058413 - 0.4953006738713271 - 0.500982950617211 - 0.490168788349858 - 0.4924060458428337 - 0.475176328561399 - 0.47446297663785564 - 0.46948807073019405 - 0.4772028638329531 - 0.48735189935310713 - 0.48641173887761663 - 0.5575029526712674 - 0.5574020390232136 - 0.5536066904942645 - 0.5536169413675474 - 0.5566938602585987 - 0.5561143054736898 - 0.561846457174852 - 0.5511643632282144 - 0.5514762015499715 - 0.551824471283655 - 0.5148077891863135 - 0.29015461701593837 - 0.4430422977323321 - 0.40857527197890686 - 0.3479983114229163 - 0.27582001934225003 - 0.29595564003512503 - 0.22528676611734755 - 0.3073271865740206 - 1.0 - 0.2749401557058413 - 0.4953006738713271 - 0.500982950617211 - 0.490168788349858 - 0.4924060458428337 - 0.475176328561399 - 0.47446297663785564 - 0.46948807073019405 - 0.4772028638329531 - 0.48735189935310713 - 0.48641173887761663 - 0.5575029526712674 - 0.5574020390232136 - 0.5536066904942645 - 0.5536169413675474 - 0.5566938602585987 - 0.5561143054736898 - 0.561846457174852 - 0.5511643632282144 - 0.5514762015499715 - 0.551824471283655 - 0.5148077891863135 - 0.29015461701593837 - 0.4430422977323321 - 0.40857527197890686 - 0.3479983114229163 - 0.27582001934225003 - 0.29595564003512503 - 0.22528676611734755 - 0.3073271865740206 - 1.0 - 0.2749401557058413 - task: type: Clustering dataset: name: MTEB ArxivClusteringS2S type: None config: default split: test revision: f910caf1a6075f7329cdf8c1a6135696f37dbd53 metrics: - type: v_measure value: 38.0916537995626 - type: v_measures value: - 0.37814352051854533 - 0.39235658929084877 - 0.3871170834588581 - 0.4042678213739614 - 0.3918486409557737 - 0.38473003463452093 - 0.35622070034791886 - 0.3911472272128115 - 0.3986923912337426 - 0.39040109467533013 - 0.4370949482641744 - 0.4414023630938724 - 0.4351473848532441 - 
      - 0.4401176389499172
      - 0.4423731097742471
      - 0.438309696145818
      - 0.43410597641884624
      - 0.43900908630646696
      - 0.44081346534023286
      - 0.4386000014888906
      - 0.4047539306032343
      - 0.21697191913450847
      - 0.29241358200068185
      - 0.3390740154458194
      - 0.2793967439904601
      - 0.20383792346854981
      - 0.23904022437429004
      - 0.14733601126565044
      - 0.22946888289524586
      - 1.0
      - 0.19422067034794377
0.21697191913450847 - 0.29241358200068185 - 0.3390740154458194 - 0.2793967439904601 - 0.20383792346854981 - 0.23904022437429004 - 0.14733601126565044 - 0.22946888289524586 - 1.0 - 0.19422067034794377 - 0.37814352051854533 - 0.39235658929084877 - 0.3871170834588581 - 0.4042678213739614 - 0.3918486409557737 - 0.38473003463452093 - 0.35622070034791886 - 0.3911472272128115 - 0.3986923912337426 - 0.39040109467533013 - 0.4370949482641744 - 0.4414023630938724 - 0.4351473848532441 - 0.4401176389499172 - 0.4423731097742471 - 0.438309696145818 - 0.43410597641884624 - 0.43900908630646696 - 0.44081346534023286 - 0.4386000014888906 - 0.4047539306032343 - 0.21697191913450847 - 0.29241358200068185 - 0.3390740154458194 - 0.2793967439904601 - 0.20383792346854981 - 0.23904022437429004 - 0.14733601126565044 - 0.22946888289524586 - 1.0 - 0.19422067034794377 - 0.37814352051854533 - 0.39235658929084877 - 0.3871170834588581 - 0.4042678213739614 - 0.3918486409557737 - 0.38473003463452093 - 0.35622070034791886 - 0.3911472272128115 - 0.3986923912337426 - 0.39040109467533013 - 0.4370949482641744 - 0.4414023630938724 - 0.4351473848532441 - 0.4401176389499172 - 0.4423731097742471 - 0.438309696145818 - 0.43410597641884624 - 0.43900908630646696 - 0.44081346534023286 - 0.4386000014888906 - 0.4047539306032343 - 0.21697191913450847 - 0.29241358200068185 - 0.3390740154458194 - 0.2793967439904601 - 0.20383792346854981 - 0.23904022437429004 - 0.14733601126565044 - 0.22946888289524586 - 1.0 - 0.19422067034794377 - 0.37814352051854533 - 0.39235658929084877 - 0.3871170834588581 - 0.4042678213739614 - 0.3918486409557737 - 0.38473003463452093 - 0.35622070034791886 - 0.3911472272128115 - 0.3986923912337426 - 0.39040109467533013 - 0.4370949482641744 - 0.4414023630938724 - 0.4351473848532441 - 0.4401176389499172 - 0.4423731097742471 - 0.438309696145818 - 0.43410597641884624 - 0.43900908630646696 - 0.44081346534023286 - 0.4386000014888906 - 0.4047539306032343 - 0.21697191913450847 - 0.29241358200068185 - 0.3390740154458194 - 0.2793967439904601 - 0.20383792346854981 - 0.23904022437429004 - 0.14733601126565044 - 0.22946888289524586 - 1.0 - 0.19422067034794377 - 0.37814352051854533 - 0.39235658929084877 - 0.3871170834588581 - 0.4042678213739614 - 0.3918486409557737 - 0.38473003463452093 - 0.35622070034791886 - 0.3911472272128115 - 0.3986923912337426 - 0.39040109467533013 - 0.4370949482641744 - 0.4414023630938724 - 0.4351473848532441 - 0.4401176389499172 - 0.4423731097742471 - 0.438309696145818 - 0.43410597641884624 - 0.43900908630646696 - 0.44081346534023286 - 0.4386000014888906 - 0.4047539306032343 - 0.21697191913450847 - 0.29241358200068185 - 0.3390740154458194 - 0.2793967439904601 - 0.20383792346854981 - 0.23904022437429004 - 0.14733601126565044 - 0.22946888289524586 - 1.0 - 0.19422067034794377 - 0.37814352051854533 - 0.39235658929084877 - 0.3871170834588581 - 0.4042678213739614 - 0.3918486409557737 - 0.38473003463452093 - 0.35622070034791886 - 0.3911472272128115 - 0.3986923912337426 - 0.39040109467533013 - 0.4370949482641744 - 0.4414023630938724 - 0.4351473848532441 - 0.4401176389499172 - 0.4423731097742471 - 0.438309696145818 - 0.43410597641884624 - 0.43900908630646696 - 0.44081346534023286 - 0.4386000014888906 - 0.4047539306032343 - 0.21697191913450847 - 0.29241358200068185 - 0.3390740154458194 - 0.2793967439904601 - 0.20383792346854981 - 0.23904022437429004 - 0.14733601126565044 - 0.22946888289524586 - 1.0 - 0.19422067034794377 - 0.37814352051854533 - 0.39235658929084877 - 0.3871170834588581 - 0.4042678213739614 - 
0.3918486409557737 - 0.38473003463452093 - 0.35622070034791886 - 0.3911472272128115 - 0.3986923912337426 - 0.39040109467533013 - 0.4370949482641744 - 0.4414023630938724 - 0.4351473848532441 - 0.4401176389499172 - 0.4423731097742471 - 0.438309696145818 - 0.43410597641884624 - 0.43900908630646696 - 0.44081346534023286 - 0.4386000014888906 - 0.4047539306032343 - 0.21697191913450847 - 0.29241358200068185 - 0.3390740154458194 - 0.2793967439904601 - 0.20383792346854981 - 0.23904022437429004 - 0.14733601126565044 - 0.22946888289524586 - 1.0 - 0.19422067034794377 - 0.37814352051854533 - 0.39235658929084877 - 0.3871170834588581 - 0.4042678213739614 - 0.3918486409557737 - 0.38473003463452093 - 0.35622070034791886 - 0.3911472272128115 - 0.3986923912337426 - 0.39040109467533013 - 0.4370949482641744 - 0.4414023630938724 - 0.4351473848532441 - 0.4401176389499172 - 0.4423731097742471 - 0.438309696145818 - 0.43410597641884624 - 0.43900908630646696 - 0.44081346534023286 - 0.4386000014888906 - 0.4047539306032343 - 0.21697191913450847 - 0.29241358200068185 - 0.3390740154458194 - 0.2793967439904601 - 0.20383792346854981 - 0.23904022437429004 - 0.14733601126565044 - 0.22946888289524586 - 1.0 - 0.19422067034794377 - 0.37814352051854533 - 0.39235658929084877 - 0.3871170834588581 - 0.4042678213739614 - 0.3918486409557737 - 0.38473003463452093 - 0.35622070034791886 - 0.3911472272128115 - 0.3986923912337426 - 0.39040109467533013 - 0.4370949482641744 - 0.4414023630938724 - 0.4351473848532441 - 0.4401176389499172 - 0.4423731097742471 - 0.438309696145818 - 0.43410597641884624 - 0.43900908630646696 - 0.44081346534023286 - 0.4386000014888906 - 0.4047539306032343 - 0.21697191913450847 - 0.29241358200068185 - 0.3390740154458194 - 0.2793967439904601 - 0.20383792346854981 - 0.23904022437429004 - 0.14733601126565044 - 0.22946888289524586 - 1.0 - 0.19422067034794377 - 0.37814352051854533 - 0.39235658929084877 - 0.3871170834588581 - 0.4042678213739614 - 0.3918486409557737 - 0.38473003463452093 - 0.35622070034791886 - 0.3911472272128115 - 0.3986923912337426 - 0.39040109467533013 - 0.4370949482641744 - 0.4414023630938724 - 0.4351473848532441 - 0.4401176389499172 - 0.4423731097742471 - 0.438309696145818 - 0.43410597641884624 - 0.43900908630646696 - 0.44081346534023286 - 0.4386000014888906 - 0.4047539306032343 - 0.21697191913450847 - 0.29241358200068185 - 0.3390740154458194 - 0.2793967439904601 - 0.20383792346854981 - 0.23904022437429004 - 0.14733601126565044 - 0.22946888289524586 - 1.0 - 0.19422067034794377 - 0.37814352051854533 - 0.39235658929084877 - 0.3871170834588581 - 0.4042678213739614 - 0.3918486409557737 - 0.38473003463452093 - 0.35622070034791886 - 0.3911472272128115 - 0.3986923912337426 - 0.39040109467533013 - 0.4370949482641744 - 0.4414023630938724 - 0.4351473848532441 - 0.4401176389499172 - 0.4423731097742471 - 0.438309696145818 - 0.43410597641884624 - 0.43900908630646696 - 0.44081346534023286 - 0.4386000014888906 - 0.4047539306032343 - 0.21697191913450847 - 0.29241358200068185 - 0.3390740154458194 - 0.2793967439904601 - 0.20383792346854981 - 0.23904022437429004 - 0.14733601126565044 - 0.22946888289524586 - 1.0 - 0.19422067034794377 - 0.37814352051854533 - 0.39235658929084877 - 0.3871170834588581 - 0.4042678213739614 - 0.3918486409557737 - 0.38473003463452093 - 0.35622070034791886 - 0.3911472272128115 - 0.3986923912337426 - 0.39040109467533013 - 0.4370949482641744 - 0.4414023630938724 - 0.4351473848532441 - 0.4401176389499172 - 0.4423731097742471 - 0.438309696145818 - 0.43410597641884624 - 0.43900908630646696 - 
0.44081346534023286 - 0.4386000014888906 - 0.4047539306032343 - 0.21697191913450847 - 0.29241358200068185 - 0.3390740154458194 - 0.2793967439904601 - 0.20383792346854981 - 0.23904022437429004 - 0.14733601126565044 - 0.22946888289524586 - 1.0 - 0.19422067034794377 - 0.37814352051854533 - 0.39235658929084877 - 0.3871170834588581 - 0.4042678213739614 - 0.3918486409557737 - 0.38473003463452093 - 0.35622070034791886 - 0.3911472272128115 - 0.3986923912337426 - 0.39040109467533013 - 0.4370949482641744 - 0.4414023630938724 - 0.4351473848532441 - 0.4401176389499172 - 0.4423731097742471 - 0.438309696145818 - 0.43410597641884624 - 0.43900908630646696 - 0.44081346534023286 - 0.4386000014888906 - 0.4047539306032343 - 0.21697191913450847 - 0.29241358200068185 - 0.3390740154458194 - 0.2793967439904601 - 0.20383792346854981 - 0.23904022437429004 - 0.14733601126565044 - 0.22946888289524586 - 1.0 - 0.19422067034794377 - task: type: Reranking dataset: name: MTEB AskUbuntuDupQuestions type: None config: default split: test revision: 2000358ca161889fa9c082cb41daa8dcfb161a54 metrics: - type: map value: 62.33195643912506 - type: mrr value: 76.43978366970057 - task: type: STS dataset: name: MTEB BIOSSES type: None config: default split: test revision: d3fb88f8f02e40887cd149695127462bbcf29b4a metrics: - type: cos_sim_pearson value: 81.20285894915236 - type: cos_sim_spearman value: 78.16322678527897 - type: euclidean_pearson value: 80.6118408638417 - type: euclidean_spearman value: 78.19033583671204 - type: manhattan_pearson value: 80.41282660275819 - type: manhattan_spearman value: 77.98611431591628 - task: type: Classification dataset: name: MTEB Banking77Classification type: None config: default split: test revision: 0fd18e25b25c072e09e0d92ab615fda904d66300 metrics: - type: accuracy value: 85.25324675324676 - type: f1 value: 85.19854235582687 - task: type: Clustering dataset: name: MTEB BiorxivClusteringP2P type: None config: default split: test revision: 65b79d1d13f80053f67aca9498d9402c2d9f1f40 metrics: - type: v_measure value: 39.65216461057432 - type: v_measures value: - 0.409550367831406 - 0.3943451642663655 - 0.38843873187080014 - 0.40032616646112934 - 0.3956833025503425 - 0.3842865397042604 - 0.3950585966936957 - 0.41669832667987455 - 0.39790986378306964 - 0.3829194012164885 - 0.409550367831406 - 0.3943451642663655 - 0.38843873187080014 - 0.40032616646112934 - 0.3956833025503425 - 0.3842865397042604 - 0.3950585966936957 - 0.41669832667987455 - 0.39790986378306964 - 0.3829194012164885 - 0.409550367831406 - 0.3943451642663655 - 0.38843873187080014 - 0.40032616646112934 - 0.3956833025503425 - 0.3842865397042604 - 0.3950585966936957 - 0.41669832667987455 - 0.39790986378306964 - 0.3829194012164885 - 0.409550367831406 - 0.3943451642663655 - 0.38843873187080014 - 0.40032616646112934 - 0.3956833025503425 - 0.3842865397042604 - 0.3950585966936957 - 0.41669832667987455 - 0.39790986378306964 - 0.3829194012164885 - 0.409550367831406 - 0.3943451642663655 - 0.38843873187080014 - 0.40032616646112934 - 0.3956833025503425 - 0.3842865397042604 - 0.3950585966936957 - 0.41669832667987455 - 0.39790986378306964 - 0.3829194012164885 - 0.409550367831406 - 0.3943451642663655 - 0.38843873187080014 - 0.40032616646112934 - 0.3956833025503425 - 0.3842865397042604 - 0.3950585966936957 - 0.41669832667987455 - 0.39790986378306964 - 0.3829194012164885 - 0.409550367831406 - 0.3943451642663655 - 0.38843873187080014 - 0.40032616646112934 - 0.3956833025503425 - 0.3842865397042604 - 0.3950585966936957 - 0.41669832667987455 - 
  - task:
      type: Clustering
    dataset:
      name: MTEB BiorxivClusteringS2S
      type: None
      config: default
      split: test
      revision: 258694dd0231531bc1fd9de6ceb52a0853c6d908
    metrics:
    - type: v_measure
      value: 33.28787287895752
    - type: v_measures
      value:
      - 0.3235019092705102
      - 0.34053753555843735
      - 0.32485572754337366
      - 0.3149662563474906
      - 0.3326837187664875
      - 0.3229632335470733
      - 0.33078383561261365
      - 0.35111148393509534
      - 0.33383133843449825
      - 0.35355224888017306
  - task:
      type: Retrieval
    dataset:
      name: MTEB CQADupstackAndroidRetrieval
      type: BeIR/cqadupstack
      config: default
      split: test
      revision: f46a197baaae43b4f621051089b82a364682dfeb
    metrics:
    - type: map_at_1
      value: 32.677
    - type: map_at_10
      value: 43.739
    - type: map_at_100
      value: 45.152
    - type: map_at_1000
      value: 45.279
    - type: map_at_20
      value: 44.553
    - type: map_at_3
      value: 40.321
    - type: map_at_5
      value: 42.201
    - type: mrr_at_1
      value: 40.2
    - type: mrr_at_10
      value: 49.755
    - type: mrr_at_100
      value: 50.468
    - type: mrr_at_1000
      value: 50.513
    - type: mrr_at_20
      value: 50.192
    - type: mrr_at_3
      value: 47.163
    - type: mrr_at_5
      value: 48.686
    - type: ndcg_at_1
      value: 40.2
    - type: ndcg_at_10
      value: 49.963
    - type: ndcg_at_100
      value: 54.978
    - type: ndcg_at_1000
      value: 56.979
    - type: ndcg_at_20
      value: 51.983000000000004
    - type: ndcg_at_3
      value: 45.086999999999996
    - type: ndcg_at_5
      value: 47.309
    - type: precision_at_1
      value: 40.2
    - type: precision_at_10
      value: 9.328
    - type: precision_at_100
      value: 1.443
    - type: precision_at_1000
      value: 0.19
    - type: precision_at_20
      value: 5.558
    - type: precision_at_3
      value: 21.364
    - type: precision_at_5
      value: 15.222
    - type: recall_at_1
      value: 32.677
    - type: recall_at_10
      value: 61.71
    - type: recall_at_100
      value: 82.431
    - type: recall_at_1000
      value: 94.896
    - type: recall_at_20
      value: 68.73700000000001
    - type: recall_at_3
      value: 47.431
    - type: recall_at_5
      value: 53.739000000000004
    - type: map_at_1
      value: 27.734166666666667
    - type: map_at_10
      value: 36.858
    - type: map_at_100
      value: 38.043833333333325
    - type: map_at_1000
      value: 38.15541666666667
    - type: map_at_20
      value: 37.521249999999995
    - type: map_at_3
      value: 34.07658333333333
    - type: map_at_5
      value: 35.62683333333333
    - type: mrr_at_1
      value: 32.676249999999996
    - type: mrr_at_10
      value: 40.999
    - type: mrr_at_100
      value: 41.835
    - type: mrr_at_1000
      value: 41.8895
    - type: mrr_at_20
      value: 41.4865
    - type: mrr_at_3
      value: 38.645
    - type: mrr_at_5
      value: 39.99725000000001
    - type: ndcg_at_1
      value: 32.676249999999996
    - type: ndcg_at_10
      value: 42.08016666666666
    - type: ndcg_at_100
      value: 47.082750000000004
    - type: ndcg_at_1000
      value: 49.276583333333335
    - type: ndcg_at_20
      value: 44.04808333333334
    - type: ndcg_at_3
      value: 37.43375
    - type: ndcg_at_5
      value: 39.623000000000005
    - type: precision_at_1
      value: 32.676249999999996
    - type: precision_at_10
      value: 7.271
    - type: precision_at_100
      value: 1.1458333333333333
    - type: precision_at_1000
      value: 0.152
    - type: precision_at_20
      value: 4.282916666666667
    - type: precision_at_3
      value: 17.061416666666666
    - type: precision_at_5
      value: 12.05466666666667
    - type: recall_at_1
      value: 27.734166666666667
    - type: recall_at_10
      value: 53.33574999999999
    - type: recall_at_100
      value: 75.16275
    - type: recall_at_1000
      value: 90.34891666666665
    - type: recall_at_20
      value: 60.4935
    - type: recall_at_3
      value: 40.377916666666664
    - type: recall_at_5
      value: 46.0195
  - task:
      type: Retrieval
    dataset:
      name: MTEB CQADupstackEnglishRetrieval
      type: BeIR/cqadupstack
      config: default
      split: test
      revision: ad9991cb51e31e31e430383c75ffb2885547b5f0
    metrics:
    - type: map_at_1
      value: 32.71
    - type: map_at_10
      value: 43.297000000000004
    - type: map_at_100
      value: 44.607
    - type: map_at_1000
      value: 44.729
    - type: map_at_20
      value: 44.013999999999996
    - type: map_at_3
      value: 40.213
    - type: map_at_5
      value: 42.004000000000005
    - type: mrr_at_1
      value: 40.892
    - type: mrr_at_10
      value: 49.394
    - type: mrr_at_100
      value: 50.005
    - type: mrr_at_1000
      value: 50.043000000000006
    - type: mrr_at_20
      value: 49.764
    - type: mrr_at_3
      value: 47.134
    - type: mrr_at_5
      value: 48.522
    - type: ndcg_at_1
      value: 40.892
    - type: ndcg_at_10
      value: 49.047000000000004
    - type: ndcg_at_100
      value: 53.266999999999996
    - type: ndcg_at_1000
      value: 55.096999999999994
    - type: ndcg_at_20
      value: 50.707
    - type: ndcg_at_3
      value: 44.896
    - type: ndcg_at_5
      value: 46.983000000000004
    - type: precision_at_1
      value: 40.892
    - type: precision_at_10
      value: 9.293
    - type: precision_at_100
      value: 1.473
    - type: precision_at_1000
      value: 0.192
    - type: precision_at_20
      value: 5.446
    - type: precision_at_3
      value: 21.592
    - type: precision_at_5
      value: 15.540999999999999
    - type: recall_at_1
      value: 32.71
    - type: recall_at_10
      value: 58.592999999999996
    - type: recall_at_100
      value: 76.242
    - type: recall_at_1000
      value: 87.717
    - type: recall_at_20
      value: 64.646
    - type: recall_at_3
      value: 46.253
    - type: recall_at_5
      value: 51.946999999999996
  - task:
      type: Retrieval
    dataset:
      name: MTEB CQADupstackGamingRetrieval
      type: BeIR/cqadupstack
      config: default
      split: test
      revision: 4885aa143210c98657558c04aaf3dc47cfb54340
    metrics:
    - type: map_at_1
      value: 41.644999999999996
    - type: map_at_10
      value: 53.825
    - type: map_at_100
      value: 54.82
    - type: map_at_1000
      value: 54.87499999999999
    - type: map_at_20
      value: 54.43
    - type: map_at_3
      value: 50.705
    - type: map_at_5
      value: 52.501
    - type: mrr_at_1
      value: 47.524
    - type: mrr_at_10
      value: 57.260999999999996
    - type: mrr_at_100
      value: 57.902
    - type: mrr_at_1000
      value: 57.931999999999995
    - type: mrr_at_20
      value: 57.689
    - type: mrr_at_3
      value: 55.089
    - type: mrr_at_5
      value: 56.38999999999999
    - type: ndcg_at_1
      value: 47.524
    - type: ndcg_at_10
      value: 59.41499999999999
    - type: ndcg_at_100
      value: 63.258
    - type: ndcg_at_1000
      value: 64.376
    - type: ndcg_at_20
      value: 61.149
    - type: ndcg_at_3
      value: 54.381
    - type: ndcg_at_5
      value: 56.89999999999999
    - type: precision_at_1
      value: 47.524
    - type: precision_at_10
      value: 9.386
    - type: precision_at_100
      value: 1.221
    - type: precision_at_1000
      value: 0.136
    - type: precision_at_20
      value: 5.223
    - type: precision_at_3
      value: 24.096
    - type: precision_at_5
      value: 16.364
    - type: recall_at_1
      value: 41.644999999999996
    - type: recall_at_10
      value: 72.386
    - type: recall_at_100
      value: 88.794
    - type: recall_at_1000
      value: 96.75399999999999
    - type: recall_at_20
      value: 78.74
    - type: recall_at_3
      value: 59.028000000000006
    - type: recall_at_5
      value: 65.197
  - task:
      type: Retrieval
    dataset:
      name: MTEB CQADupstackGisRetrieval
      type: BeIR/cqadupstack
      config: default
      split: test
      revision: 5003b3064772da1887988e05400cf3806fe491f2
    metrics:
    - type: map_at_1
      value: 28.648
    - type: map_at_10
      value: 36.388999999999996
    - type: map_at_100
      value: 37.372
    - type: map_at_1000
      value: 37.457
    - type: map_at_20
      value: 36.912
    - type: map_at_3
      value: 34.076
    - type: map_at_5
      value: 35.415
    - type: mrr_at_1
      value: 30.508000000000003
    - type: mrr_at_10
      value: 38.132
    - type: mrr_at_100
      value: 39.04
    - type: mrr_at_1000
      value: 39.106
    - type: mrr_at_20
      value: 38.643
    - type: mrr_at_3
      value: 35.876000000000005
    - type: mrr_at_5
      value: 37.208999999999996
    - type: ndcg_at_1
      value: 30.508000000000003
    - type: ndcg_at_10
      value: 40.762
    - type: ndcg_at_100
      value: 45.732
    - type: ndcg_at_1000
      value: 47.799
    - type: ndcg_at_20
      value: 42.591
    - type: ndcg_at_3
      value: 36.266999999999996
    - type: ndcg_at_5
      value: 38.58
    - type: precision_at_1
      value: 30.508000000000003
    - type: precision_at_10
      value: 6.010999999999999
    - type: precision_at_100
      value: 0.897
    - type: precision_at_1000
      value: 0.11100000000000002
    - type: precision_at_20
      value: 3.412
    - type: precision_at_3
      value: 14.991
    - type: precision_at_5
      value: 10.328
    - type: recall_at_1
      value: 28.648
    - type: recall_at_10
      value: 52.342999999999996
    - type: recall_at_100
      value: 75.268
    - type: recall_at_1000
      value: 90.641
    - type: recall_at_20
      value: 59.303
    - type: recall_at_3
      value: 40.447
    - type: recall_at_5
      value: 46.117000000000004
  - task:
      type: Retrieval
    dataset:
      name: MTEB CQADupstackMathematicaRetrieval
      type: BeIR/cqadupstack
      config: default
      split: test
      revision: 90fceea13679c63fe563ded68f3b6f06e50061de
    metrics:
    - type: map_at_1
      value: 18.476
    - type: map_at_10
      value: 27.148
    - type: map_at_100
      value: 28.317999999999998
    - type: map_at_1000
      value: 28.427999999999997
    - type: map_at_20
      value: 27.764
    - type: map_at_3
      value: 24.801000000000002
    - type: map_at_5
      value: 26.133
    - type: mrr_at_1
      value: 22.886
    - type: mrr_at_10
      value: 31.741000000000003
    - type: mrr_at_100
      value: 32.708
    - type: mrr_at_1000
      value: 32.769
    -
type: mrr_at_20 value: 32.296 - type: mrr_at_3 value: 29.498 - type: mrr_at_5 value: 30.773 - type: ndcg_at_1 value: 22.886 - type: ndcg_at_10 value: 32.265 - type: ndcg_at_100 value: 37.829 - type: ndcg_at_1000 value: 40.558 - type: ndcg_at_20 value: 34.372 - type: ndcg_at_3 value: 28.105000000000004 - type: ndcg_at_5 value: 30.04 - type: precision_at_1 value: 22.886 - type: precision_at_10 value: 5.808 - type: precision_at_100 value: 0.985 - type: precision_at_1000 value: 0.13699999999999998 - type: precision_at_20 value: 3.495 - type: precision_at_3 value: 13.639999999999999 - type: precision_at_5 value: 9.577 - type: recall_at_1 value: 18.476 - type: recall_at_10 value: 43.442 - type: recall_at_100 value: 67.376 - type: recall_at_1000 value: 86.874 - type: recall_at_20 value: 51.038 - type: recall_at_3 value: 31.785999999999998 - type: recall_at_5 value: 36.858999999999995 - task: type: Retrieval dataset: name: MTEB CQADupstackPhysicsRetrieval type: BeIR/cqadupstack config: default split: test revision: 79531abbd1fb92d06c6d6315a0cbbbf5bb247ea4 metrics: - type: map_at_1 value: 29.098000000000003 - type: map_at_10 value: 38.97 - type: map_at_100 value: 40.293 - type: map_at_1000 value: 40.397 - type: map_at_20 value: 39.778999999999996 - type: map_at_3 value: 35.723 - type: map_at_5 value: 37.519999999999996 - type: mrr_at_1 value: 35.515 - type: mrr_at_10 value: 44.55 - type: mrr_at_100 value: 45.37 - type: mrr_at_1000 value: 45.412 - type: mrr_at_20 value: 45.054 - type: mrr_at_3 value: 41.835 - type: mrr_at_5 value: 43.356 - type: ndcg_at_1 value: 35.515 - type: ndcg_at_10 value: 44.91 - type: ndcg_at_100 value: 50.27700000000001 - type: ndcg_at_1000 value: 52.215 - type: ndcg_at_20 value: 47.235 - type: ndcg_at_3 value: 39.505 - type: ndcg_at_5 value: 42.016 - type: precision_at_1 value: 35.515 - type: precision_at_10 value: 8.152 - type: precision_at_100 value: 1.262 - type: precision_at_1000 value: 0.16 - type: precision_at_20 value: 4.851 - type: precision_at_3 value: 18.447 - type: precision_at_5 value: 13.321 - type: recall_at_1 value: 29.098000000000003 - type: recall_at_10 value: 57.115 - type: recall_at_100 value: 79.467 - type: recall_at_1000 value: 92.162 - type: recall_at_20 value: 65.161 - type: recall_at_3 value: 42.254000000000005 - type: recall_at_5 value: 48.415 - task: type: Retrieval dataset: name: MTEB CQADupstackProgrammersRetrieval type: BeIR/cqadupstack config: default split: test revision: 6184bc1440d2dbc7612be22b50686b8826d22b32 metrics: - type: map_at_1 value: 27.372000000000003 - type: map_at_10 value: 37.781 - type: map_at_100 value: 39.128 - type: map_at_1000 value: 39.238 - type: map_at_20 value: 38.592 - type: map_at_3 value: 34.782999999999994 - type: map_at_5 value: 36.466 - type: mrr_at_1 value: 33.904 - type: mrr_at_10 value: 43.15 - type: mrr_at_100 value: 44.049 - type: mrr_at_1000 value: 44.107 - type: mrr_at_20 value: 43.721 - type: mrr_at_3 value: 40.677 - type: mrr_at_5 value: 42.19 - type: ndcg_at_1 value: 33.904 - type: ndcg_at_10 value: 43.527 - type: ndcg_at_100 value: 49.004999999999995 - type: ndcg_at_1000 value: 51.276999999999994 - type: ndcg_at_20 value: 45.988 - type: ndcg_at_3 value: 38.824999999999996 - type: ndcg_at_5 value: 41.04 - type: precision_at_1 value: 33.904 - type: precision_at_10 value: 7.854 - type: precision_at_100 value: 1.2309999999999999 - type: precision_at_1000 value: 0.16 - type: precision_at_20 value: 4.692 - type: precision_at_3 value: 18.531 - type: precision_at_5 value: 13.150999999999998 - type: recall_at_1 
value: 27.372000000000003 - type: recall_at_10 value: 55.245999999999995 - type: recall_at_100 value: 78.278 - type: recall_at_1000 value: 93.718 - type: recall_at_20 value: 64.095 - type: recall_at_3 value: 41.665 - type: recall_at_5 value: 47.632000000000005 - task: type: Retrieval dataset: name: MTEB CQADupstackStatsRetrieval type: BeIR/cqadupstack config: default split: test revision: 65ac3a16b8e91f9cee4c9828cc7c335575432a2a metrics: - type: map_at_1 value: 25.653 - type: map_at_10 value: 32.151 - type: map_at_100 value: 33.152 - type: map_at_1000 value: 33.243 - type: map_at_20 value: 32.717 - type: map_at_3 value: 30.287 - type: map_at_5 value: 31.25 - type: mrr_at_1 value: 28.988000000000003 - type: mrr_at_10 value: 35.131 - type: mrr_at_100 value: 36.002 - type: mrr_at_1000 value: 36.069 - type: mrr_at_20 value: 35.61 - type: mrr_at_3 value: 33.308 - type: mrr_at_5 value: 34.259 - type: ndcg_at_1 value: 28.988000000000003 - type: ndcg_at_10 value: 35.988 - type: ndcg_at_100 value: 40.764 - type: ndcg_at_1000 value: 43.112 - type: ndcg_at_20 value: 37.852999999999994 - type: ndcg_at_3 value: 32.562000000000005 - type: ndcg_at_5 value: 33.983000000000004 - type: precision_at_1 value: 28.988000000000003 - type: precision_at_10 value: 5.475 - type: precision_at_100 value: 0.8500000000000001 - type: precision_at_1000 value: 0.11199999999999999 - type: precision_at_20 value: 3.229 - type: precision_at_3 value: 13.905999999999999 - type: precision_at_5 value: 9.386999999999999 - type: recall_at_1 value: 25.653 - type: recall_at_10 value: 44.962 - type: recall_at_100 value: 66.405 - type: recall_at_1000 value: 83.88799999999999 - type: recall_at_20 value: 51.79899999999999 - type: recall_at_3 value: 35.144999999999996 - type: recall_at_5 value: 38.814 - task: type: Retrieval dataset: name: MTEB CQADupstackTexRetrieval type: BeIR/cqadupstack config: default split: test revision: 46989137a86843e03a6195de44b09deda022eec7 metrics: - type: map_at_1 value: 17.825 - type: map_at_10 value: 25.592 - type: map_at_100 value: 26.613999999999997 - type: map_at_1000 value: 26.734 - type: map_at_20 value: 26.115 - type: map_at_3 value: 23.119 - type: map_at_5 value: 24.54 - type: mrr_at_1 value: 21.335 - type: mrr_at_10 value: 29.165000000000003 - type: mrr_at_100 value: 30.049 - type: mrr_at_1000 value: 30.121 - type: mrr_at_20 value: 29.639 - type: mrr_at_3 value: 26.863999999999997 - type: mrr_at_5 value: 28.185 - type: ndcg_at_1 value: 21.335 - type: ndcg_at_10 value: 30.357 - type: ndcg_at_100 value: 35.410000000000004 - type: ndcg_at_1000 value: 38.24 - type: ndcg_at_20 value: 32.08 - type: ndcg_at_3 value: 25.95 - type: ndcg_at_5 value: 28.081 - type: precision_at_1 value: 21.335 - type: precision_at_10 value: 5.506 - type: precision_at_100 value: 0.928 - type: precision_at_1000 value: 0.135 - type: precision_at_20 value: 3.2550000000000003 - type: precision_at_3 value: 12.239 - type: precision_at_5 value: 8.885 - type: recall_at_1 value: 17.825 - type: recall_at_10 value: 41.105999999999995 - type: recall_at_100 value: 64.17 - type: recall_at_1000 value: 84.19200000000001 - type: recall_at_20 value: 47.497 - type: recall_at_3 value: 28.862 - type: recall_at_5 value: 34.348 - task: type: Retrieval dataset: name: MTEB CQADupstackUnixRetrieval type: BeIR/cqadupstack config: default split: test revision: 6c6430d3a6d36f8d2a829195bc5dc94d7e063e53 metrics: - type: map_at_1 value: 29.435 - type: map_at_10 value: 38.261 - type: map_at_100 value: 39.242 - type: map_at_1000 value: 39.347 - type: map_at_20 
value: 38.742 - type: map_at_3 value: 35.457 - type: map_at_5 value: 37.043 - type: mrr_at_1 value: 34.235 - type: mrr_at_10 value: 42.24 - type: mrr_at_100 value: 42.988 - type: mrr_at_1000 value: 43.043 - type: mrr_at_20 value: 42.613 - type: mrr_at_3 value: 39.832 - type: mrr_at_5 value: 41.227000000000004 - type: ndcg_at_1 value: 34.235 - type: ndcg_at_10 value: 43.384 - type: ndcg_at_100 value: 48.14 - type: ndcg_at_1000 value: 50.414 - type: ndcg_at_20 value: 44.913 - type: ndcg_at_3 value: 38.454 - type: ndcg_at_5 value: 40.776 - type: precision_at_1 value: 34.235 - type: precision_at_10 value: 7.164 - type: precision_at_100 value: 1.065 - type: precision_at_1000 value: 0.13699999999999998 - type: precision_at_20 value: 4.021 - type: precision_at_3 value: 17.226 - type: precision_at_5 value: 12.071 - type: recall_at_1 value: 29.435 - type: recall_at_10 value: 54.93900000000001 - type: recall_at_100 value: 76.176 - type: recall_at_1000 value: 91.989 - type: recall_at_20 value: 60.451 - type: recall_at_3 value: 41.332 - type: recall_at_5 value: 47.316 - task: type: Retrieval dataset: name: MTEB CQADupstackWebmastersRetrieval type: BeIR/cqadupstack config: default split: test revision: 160c094312a0e1facb97e55eeddb698c0abe3571 metrics: - type: map_at_1 value: 25.605 - type: map_at_10 value: 34.162 - type: map_at_100 value: 35.827999999999996 - type: map_at_1000 value: 36.04 - type: map_at_20 value: 35.016000000000005 - type: map_at_3 value: 30.984 - type: map_at_5 value: 32.717 - type: mrr_at_1 value: 30.435000000000002 - type: mrr_at_10 value: 38.681 - type: mrr_at_100 value: 39.656000000000006 - type: mrr_at_1000 value: 39.71 - type: mrr_at_20 value: 39.208999999999996 - type: mrr_at_3 value: 35.903 - type: mrr_at_5 value: 37.454 - type: ndcg_at_1 value: 30.435000000000002 - type: ndcg_at_10 value: 39.916000000000004 - type: ndcg_at_100 value: 45.958 - type: ndcg_at_1000 value: 48.449999999999996 - type: ndcg_at_20 value: 42.085 - type: ndcg_at_3 value: 34.696 - type: ndcg_at_5 value: 37.147000000000006 - type: precision_at_1 value: 30.435000000000002 - type: precision_at_10 value: 7.767 - type: precision_at_100 value: 1.547 - type: precision_at_1000 value: 0.23800000000000002 - type: precision_at_20 value: 4.941 - type: precision_at_3 value: 16.073999999999998 - type: precision_at_5 value: 11.937000000000001 - type: recall_at_1 value: 25.605 - type: recall_at_10 value: 50.654999999999994 - type: recall_at_100 value: 77.609 - type: recall_at_1000 value: 93.518 - type: recall_at_20 value: 58.845000000000006 - type: recall_at_3 value: 36.272 - type: recall_at_5 value: 42.596000000000004 - task: type: Retrieval dataset: name: MTEB CQADupstackWordpressRetrieval type: BeIR/cqadupstack config: default split: test revision: 4ffe81d471b1924886b33c7567bfb200e9eec5c4 metrics: - type: map_at_1 value: 23.666 - type: map_at_10 value: 30.980999999999998 - type: map_at_100 value: 32.0 - type: map_at_1000 value: 32.098 - type: map_at_20 value: 31.621 - type: map_at_3 value: 28.449999999999996 - type: map_at_5 value: 29.731999999999996 - type: mrr_at_1 value: 25.692999999999998 - type: mrr_at_10 value: 32.788000000000004 - type: mrr_at_100 value: 33.783 - type: mrr_at_1000 value: 33.849000000000004 - type: mrr_at_20 value: 33.408 - type: mrr_at_3 value: 30.561 - type: mrr_at_5 value: 31.716 - type: ndcg_at_1 value: 25.692999999999998 - type: ndcg_at_10 value: 35.428 - type: ndcg_at_100 value: 40.375 - type: ndcg_at_1000 value: 42.802 - type: ndcg_at_20 value: 37.621 - type: ndcg_at_3 value: 
30.476999999999997 - type: ndcg_at_5 value: 32.621 - type: precision_at_1 value: 25.692999999999998 - type: precision_at_10 value: 5.508 - type: precision_at_100 value: 0.848 - type: precision_at_1000 value: 0.116 - type: precision_at_20 value: 3.272 - type: precision_at_3 value: 12.631 - type: precision_at_5 value: 8.872 - type: recall_at_1 value: 23.666 - type: recall_at_10 value: 47.532000000000004 - type: recall_at_100 value: 69.73700000000001 - type: recall_at_1000 value: 87.83800000000001 - type: recall_at_20 value: 55.61000000000001 - type: recall_at_3 value: 34.06 - type: recall_at_5 value: 39.254 - task: type: Retrieval dataset: name: MTEB ClimateFEVER type: None config: default split: test revision: 47f2ac6acb640fc46020b02a5b59fdda04d39380 metrics: - type: map_at_1 value: 16.337 - type: map_at_10 value: 26.488 - type: map_at_100 value: 28.415000000000003 - type: map_at_1000 value: 28.584 - type: map_at_20 value: 27.557 - type: map_at_3 value: 22.665 - type: map_at_5 value: 24.542 - type: mrr_at_1 value: 36.417 - type: mrr_at_10 value: 48.001 - type: mrr_at_100 value: 48.784 - type: mrr_at_1000 value: 48.809000000000005 - type: mrr_at_20 value: 48.507 - type: mrr_at_3 value: 45.103 - type: mrr_at_5 value: 46.843 - type: ndcg_at_1 value: 36.417 - type: ndcg_at_10 value: 35.67 - type: ndcg_at_100 value: 42.716 - type: ndcg_at_1000 value: 45.639 - type: ndcg_at_20 value: 38.471 - type: ndcg_at_3 value: 30.444 - type: ndcg_at_5 value: 32.004 - type: precision_at_1 value: 36.417 - type: precision_at_10 value: 10.73 - type: precision_at_100 value: 1.833 - type: precision_at_1000 value: 0.23800000000000002 - type: precision_at_20 value: 6.596 - type: precision_at_3 value: 22.302 - type: precision_at_5 value: 16.521 - type: recall_at_1 value: 16.337 - type: recall_at_10 value: 40.671 - type: recall_at_100 value: 64.55300000000001 - type: recall_at_1000 value: 80.934 - type: recall_at_20 value: 48.381 - type: recall_at_3 value: 27.279999999999998 - type: recall_at_5 value: 32.621 - task: type: Retrieval dataset: name: MTEB DBPedia type: None config: default split: test revision: c0f706b76e590d620bd6618b3ca8efdd34e2d659 metrics: - type: map_at_1 value: 9.056000000000001 - type: map_at_10 value: 19.419 - type: map_at_100 value: 27.069 - type: map_at_1000 value: 28.666000000000004 - type: map_at_20 value: 22.434 - type: map_at_3 value: 13.895 - type: map_at_5 value: 16.121 - type: mrr_at_1 value: 69.0 - type: mrr_at_10 value: 75.804 - type: mrr_at_100 value: 76.117 - type: mrr_at_1000 value: 76.125 - type: mrr_at_20 value: 76.009 - type: mrr_at_3 value: 74.375 - type: mrr_at_5 value: 75.4 - type: ndcg_at_1 value: 57.49999999999999 - type: ndcg_at_10 value: 41.495 - type: ndcg_at_100 value: 45.208 - type: ndcg_at_1000 value: 52.221 - type: ndcg_at_20 value: 40.617999999999995 - type: ndcg_at_3 value: 46.592 - type: ndcg_at_5 value: 43.559 - type: precision_at_1 value: 69.0 - type: precision_at_10 value: 32.574999999999996 - type: precision_at_100 value: 10.205 - type: precision_at_1000 value: 2.036 - type: precision_at_20 value: 24.687 - type: precision_at_3 value: 49.75 - type: precision_at_5 value: 42.0 - type: recall_at_1 value: 9.056000000000001 - type: recall_at_10 value: 24.866 - type: recall_at_100 value: 50.097 - type: recall_at_1000 value: 72.038 - type: recall_at_20 value: 31.858999999999998 - type: recall_at_3 value: 15.096000000000002 - type: recall_at_5 value: 18.548000000000002 - task: type: Classification dataset: name: MTEB EmotionClassification type: None config: default 
split: test revision: 4f58c6b202a23cf9a4da393831edf4f9183cad37 metrics: - type: accuracy value: 48.259999999999984 - type: f1 value: 43.1498589523159 - task: type: Retrieval dataset: name: MTEB FEVER type: None config: default split: test revision: bea83ef9e8fb933d90a2f1d5515737465d613e12 metrics: - type: map_at_1 value: 74.798 - type: map_at_10 value: 83.454 - type: map_at_100 value: 83.623 - type: map_at_1000 value: 83.635 - type: map_at_20 value: 83.55 - type: map_at_3 value: 82.392 - type: map_at_5 value: 83.167 - type: mrr_at_1 value: 80.708 - type: mrr_at_10 value: 88.377 - type: mrr_at_100 value: 88.411 - type: mrr_at_1000 value: 88.411 - type: mrr_at_20 value: 88.402 - type: mrr_at_3 value: 87.646 - type: mrr_at_5 value: 88.232 - type: ndcg_at_1 value: 80.708 - type: ndcg_at_10 value: 87.35199999999999 - type: ndcg_at_100 value: 87.91600000000001 - type: ndcg_at_1000 value: 88.12299999999999 - type: ndcg_at_20 value: 87.593 - type: ndcg_at_3 value: 85.738 - type: ndcg_at_5 value: 86.845 - type: precision_at_1 value: 80.708 - type: precision_at_10 value: 10.432 - type: precision_at_100 value: 1.091 - type: precision_at_1000 value: 0.11299999999999999 - type: precision_at_20 value: 5.296 - type: precision_at_3 value: 32.778 - type: precision_at_5 value: 20.399 - type: recall_at_1 value: 74.798 - type: recall_at_10 value: 94.459 - type: recall_at_100 value: 96.614 - type: recall_at_1000 value: 97.868 - type: recall_at_20 value: 95.254 - type: recall_at_3 value: 90.144 - type: recall_at_5 value: 92.965 - task: type: Retrieval dataset: name: MTEB FiQA2018 type: None config: default split: test revision: 27a168819829fe9bcd655c2df245fb19452e8e06 metrics: - type: map_at_1 value: 20.008 - type: map_at_10 value: 32.731 - type: map_at_100 value: 34.467999999999996 - type: map_at_1000 value: 34.643 - type: map_at_20 value: 33.717000000000006 - type: map_at_3 value: 28.427999999999997 - type: map_at_5 value: 30.788 - type: mrr_at_1 value: 40.586 - type: mrr_at_10 value: 49.056 - type: mrr_at_100 value: 49.887 - type: mrr_at_1000 value: 49.929 - type: mrr_at_20 value: 49.552 - type: mrr_at_3 value: 46.785 - type: mrr_at_5 value: 48.004000000000005 - type: ndcg_at_1 value: 40.586 - type: ndcg_at_10 value: 40.589999999999996 - type: ndcg_at_100 value: 47.03 - type: ndcg_at_1000 value: 49.994 - type: ndcg_at_20 value: 43.229 - type: ndcg_at_3 value: 37.061 - type: ndcg_at_5 value: 37.992 - type: precision_at_1 value: 40.586 - type: precision_at_10 value: 11.219 - type: precision_at_100 value: 1.781 - type: precision_at_1000 value: 0.232 - type: precision_at_20 value: 6.705 - type: precision_at_3 value: 24.743000000000002 - type: precision_at_5 value: 18.086 - type: recall_at_1 value: 20.008 - type: recall_at_10 value: 47.412 - type: recall_at_100 value: 71.274 - type: recall_at_1000 value: 88.898 - type: recall_at_20 value: 55.706999999999994 - type: recall_at_3 value: 33.346 - type: recall_at_5 value: 39.112 - task: type: Retrieval dataset: name: MTEB HotpotQA type: None config: default split: test revision: ab518f4d6fcca38d87c25209f94beba119d02014 metrics: - type: map_at_1 value: 41.789 - type: map_at_10 value: 57.898 - type: map_at_100 value: 58.632 - type: map_at_1000 value: 58.693 - type: map_at_20 value: 58.314 - type: map_at_3 value: 55.236 - type: map_at_5 value: 56.852999999999994 - type: mrr_at_1 value: 83.57900000000001 - type: mrr_at_10 value: 87.631 - type: mrr_at_100 value: 87.764 - type: mrr_at_1000 value: 87.77000000000001 - type: mrr_at_20 value: 87.70700000000001 - type: mrr_at_3 
value: 87.02499999999999 - type: mrr_at_5 value: 87.34100000000001 - type: ndcg_at_1 value: 83.57900000000001 - type: ndcg_at_10 value: 67.11399999999999 - type: ndcg_at_100 value: 69.686 - type: ndcg_at_1000 value: 70.926 - type: ndcg_at_20 value: 68.119 - type: ndcg_at_3 value: 63.402 - type: ndcg_at_5 value: 65.354 - type: precision_at_1 value: 83.57900000000001 - type: precision_at_10 value: 13.333 - type: precision_at_100 value: 1.537 - type: precision_at_1000 value: 0.16999999999999998 - type: precision_at_20 value: 6.988999999999999 - type: precision_at_3 value: 38.929 - type: precision_at_5 value: 24.897 - type: recall_at_1 value: 41.789 - type: recall_at_10 value: 66.664 - type: recall_at_100 value: 76.833 - type: recall_at_1000 value: 85.14500000000001 - type: recall_at_20 value: 69.892 - type: recall_at_3 value: 58.392999999999994 - type: recall_at_5 value: 62.242 - task: type: Classification dataset: name: MTEB ImdbClassification type: None config: default split: test revision: 3d86128a09e091d6018b6d26cad27f2739fc2db7 metrics: - type: accuracy value: 86.6108 - type: ap value: 81.63890253106925 - type: f1 value: 86.54585789538082 - task: type: Retrieval dataset: name: MTEB MSMARCO type: None config: default split: dev revision: c5a29a104738b98a9e76336939199e264163d4a0 metrics: - type: map_at_1 value: 22.407 - type: map_at_10 value: 34.603 - type: map_at_100 value: 35.808 - type: map_at_1000 value: 35.855 - type: map_at_20 value: 35.368 - type: map_at_3 value: 30.764000000000003 - type: map_at_5 value: 32.964 - type: mrr_at_1 value: 23.009 - type: mrr_at_10 value: 35.136 - type: mrr_at_100 value: 36.284 - type: mrr_at_1000 value: 36.325 - type: mrr_at_20 value: 35.869 - type: mrr_at_3 value: 31.351000000000003 - type: mrr_at_5 value: 33.54 - type: ndcg_at_1 value: 23.009 - type: ndcg_at_10 value: 41.471999999999994 - type: ndcg_at_100 value: 47.211999999999996 - type: ndcg_at_1000 value: 48.361 - type: ndcg_at_20 value: 44.169000000000004 - type: ndcg_at_3 value: 33.646 - type: ndcg_at_5 value: 37.580000000000005 - type: precision_at_1 value: 23.009 - type: precision_at_10 value: 6.54 - type: precision_at_100 value: 0.941 - type: precision_at_1000 value: 0.104 - type: precision_at_20 value: 3.832 - type: precision_at_3 value: 14.283999999999999 - type: precision_at_5 value: 10.564 - type: recall_at_1 value: 22.407 - type: recall_at_10 value: 62.678999999999995 - type: recall_at_100 value: 89.09700000000001 - type: recall_at_1000 value: 97.822 - type: recall_at_20 value: 73.116 - type: recall_at_3 value: 41.4 - type: recall_at_5 value: 50.855 - task: type: Classification dataset: name: MTEB MTOPDomainClassification (en) type: None config: en split: test revision: d80d48c1eb48d3562165c59d59d0034df9fff0bf metrics: - type: accuracy value: 92.94573643410853 - type: f1 value: 92.73148878666994 - task: type: Classification dataset: name: MTEB MTOPIntentClassification (en) type: None config: en split: test revision: ae001d0e6b1228650b7bd1c2c65fb50ad11a8aba metrics: - type: accuracy value: 77.86137710898313 - type: f1 value: 60.360562463738724 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (en) type: None config: en split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 73.83322125084062 - type: f1 value: 71.61864304680206 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (en) type: None config: en split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy 
value: 77.50504371217215 - type: f1 value: 77.52039268347185 - task: type: Clustering dataset: name: MTEB MedrxivClusteringP2P type: None config: default split: test revision: e7a26af6f3ae46b30dde8737f02c07b1505bcc73 metrics: - type: v_measure value: 34.346952648910225 - type: v_measures value: - 0.3246964225451952 - 0.33269208719245646 - 0.3355911472371345 - 0.32978655133380147 - 0.3275090874657499 - 0.3752583186941529 - 0.3494711327267592 - 0.36636134409497156 - 0.3538734420417993 - 0.3394557315590024 - task: type: Clustering dataset: name: MTEB MedrxivClusteringS2S type: None config: default split: test revision: 35191c8c0dca72d8ff3efcd72aa802307d469663 metrics: - type: v_measure value: 32.19992734583148 - type: v_measures value: - 0.31100967211136193 - 0.31302897733611235 - 0.3126922134381441 - 0.30243629014133017 - 0.31564501718268645 - 0.34772968477866795 -
0.32522623268021805 - 0.3410158265159116 - 0.33581770403870503 - 0.31539111636001027 - 0.31100967211136193 - 0.31302897733611235 - 0.3126922134381441 - 0.30243629014133017 - 0.31564501718268645 - 0.34772968477866795 - 0.32522623268021805 - 0.3410158265159116 - 0.33581770403870503 - 0.31539111636001027 - 0.31100967211136193 - 0.31302897733611235 - 0.3126922134381441 - 0.30243629014133017 - 0.31564501718268645 - 0.34772968477866795 - 0.32522623268021805 - 0.3410158265159116 - 0.33581770403870503 - 0.31539111636001027 - 0.31100967211136193 - 0.31302897733611235 - 0.3126922134381441 - 0.30243629014133017 - 0.31564501718268645 - 0.34772968477866795 - 0.32522623268021805 - 0.3410158265159116 - 0.33581770403870503 - 0.31539111636001027 - 0.31100967211136193 - 0.31302897733611235 - 0.3126922134381441 - 0.30243629014133017 - 0.31564501718268645 - 0.34772968477866795 - 0.32522623268021805 - 0.3410158265159116 - 0.33581770403870503 - 0.31539111636001027 - 0.31100967211136193 - 0.31302897733611235 - 0.3126922134381441 - 0.30243629014133017 - 0.31564501718268645 - 0.34772968477866795 - 0.32522623268021805 - 0.3410158265159116 - 0.33581770403870503 - 0.31539111636001027 - 0.31100967211136193 - 0.31302897733611235 - 0.3126922134381441 - 0.30243629014133017 - 0.31564501718268645 - 0.34772968477866795 - 0.32522623268021805 - 0.3410158265159116 - 0.33581770403870503 - 0.31539111636001027 - 0.31100967211136193 - 0.31302897733611235 - 0.3126922134381441 - 0.30243629014133017 - 0.31564501718268645 - 0.34772968477866795 - 0.32522623268021805 - 0.3410158265159116 - 0.33581770403870503 - 0.31539111636001027 - 0.31100967211136193 - 0.31302897733611235 - 0.3126922134381441 - 0.30243629014133017 - 0.31564501718268645 - 0.34772968477866795 - 0.32522623268021805 - 0.3410158265159116 - 0.33581770403870503 - 0.31539111636001027 - 0.31100967211136193 - 0.31302897733611235 - 0.3126922134381441 - 0.30243629014133017 - 0.31564501718268645 - 0.34772968477866795 - 0.32522623268021805 - 0.3410158265159116 - 0.33581770403870503 - 0.31539111636001027 - 0.31100967211136193 - 0.31302897733611235 - 0.3126922134381441 - 0.30243629014133017 - 0.31564501718268645 - 0.34772968477866795 - 0.32522623268021805 - 0.3410158265159116 - 0.33581770403870503 - 0.31539111636001027 - 0.31100967211136193 - 0.31302897733611235 - 0.3126922134381441 - 0.30243629014133017 - 0.31564501718268645 - 0.34772968477866795 - 0.32522623268021805 - 0.3410158265159116 - 0.33581770403870503 - 0.31539111636001027 - 0.31100967211136193 - 0.31302897733611235 - 0.3126922134381441 - 0.30243629014133017 - 0.31564501718268645 - 0.34772968477866795 - 0.32522623268021805 - 0.3410158265159116 - 0.33581770403870503 - 0.31539111636001027 - 0.31100967211136193 - 0.31302897733611235 - 0.3126922134381441 - 0.30243629014133017 - 0.31564501718268645 - 0.34772968477866795 - 0.32522623268021805 - 0.3410158265159116 - 0.33581770403870503 - 0.31539111636001027 - 0.31100967211136193 - 0.31302897733611235 - 0.3126922134381441 - 0.30243629014133017 - 0.31564501718268645 - 0.34772968477866795 - 0.32522623268021805 - 0.3410158265159116 - 0.33581770403870503 - 0.31539111636001027 - 0.31100967211136193 - 0.31302897733611235 - 0.3126922134381441 - 0.30243629014133017 - 0.31564501718268645 - 0.34772968477866795 - 0.32522623268021805 - 0.3410158265159116 - 0.33581770403870503 - 0.31539111636001027 - 0.31100967211136193 - 0.31302897733611235 - 0.3126922134381441 - 0.30243629014133017 - 0.31564501718268645 - 0.34772968477866795 - 0.32522623268021805 - 0.3410158265159116 - 0.33581770403870503 - 
0.31539111636001027 - 0.31100967211136193 - 0.31302897733611235 - 0.3126922134381441 - 0.30243629014133017 - 0.31564501718268645 - 0.34772968477866795 - 0.32522623268021805 - 0.3410158265159116 - 0.33581770403870503 - 0.31539111636001027 - 0.31100967211136193 - 0.31302897733611235 - 0.3126922134381441 - 0.30243629014133017 - 0.31564501718268645 - 0.34772968477866795 - 0.32522623268021805 - 0.3410158265159116 - 0.33581770403870503 - 0.31539111636001027 - 0.31100967211136193 - 0.31302897733611235 - 0.3126922134381441 - 0.30243629014133017 - 0.31564501718268645 - 0.34772968477866795 - 0.32522623268021805 - 0.3410158265159116 - 0.33581770403870503 - 0.31539111636001027 - 0.31100967211136193 - 0.31302897733611235 - 0.3126922134381441 - 0.30243629014133017 - 0.31564501718268645 - 0.34772968477866795 - 0.32522623268021805 - 0.3410158265159116 - 0.33581770403870503 - 0.31539111636001027 - 0.31100967211136193 - 0.31302897733611235 - 0.3126922134381441 - 0.30243629014133017 - 0.31564501718268645 - 0.34772968477866795 - 0.32522623268021805 - 0.3410158265159116 - 0.33581770403870503 - 0.31539111636001027 - 0.31100967211136193 - 0.31302897733611235 - 0.3126922134381441 - 0.30243629014133017 - 0.31564501718268645 - 0.34772968477866795 - 0.32522623268021805 - 0.3410158265159116 - 0.33581770403870503 - 0.31539111636001027 - 0.31100967211136193 - 0.31302897733611235 - 0.3126922134381441 - 0.30243629014133017 - 0.31564501718268645 - 0.34772968477866795 - 0.32522623268021805 - 0.3410158265159116 - 0.33581770403870503 - 0.31539111636001027 - 0.31100967211136193 - 0.31302897733611235 - 0.3126922134381441 - 0.30243629014133017 - 0.31564501718268645 - 0.34772968477866795 - 0.32522623268021805 - 0.3410158265159116 - 0.33581770403870503 - 0.31539111636001027 - 0.31100967211136193 - 0.31302897733611235 - 0.3126922134381441 - 0.30243629014133017 - 0.31564501718268645 - 0.34772968477866795 - 0.32522623268021805 - 0.3410158265159116 - 0.33581770403870503 - 0.31539111636001027 - 0.31100967211136193 - 0.31302897733611235 - 0.3126922134381441 - 0.30243629014133017 - 0.31564501718268645 - 0.34772968477866795 - 0.32522623268021805 - 0.3410158265159116 - 0.33581770403870503 - 0.31539111636001027 - 0.31100967211136193 - 0.31302897733611235 - 0.3126922134381441 - 0.30243629014133017 - 0.31564501718268645 - 0.34772968477866795 - 0.32522623268021805 - 0.3410158265159116 - 0.33581770403870503 - 0.31539111636001027 - 0.31100967211136193 - 0.31302897733611235 - 0.3126922134381441 - 0.30243629014133017 - 0.31564501718268645 - 0.34772968477866795 - 0.32522623268021805 - 0.3410158265159116 - 0.33581770403870503 - 0.31539111636001027 - 0.31100967211136193 - 0.31302897733611235 - 0.3126922134381441 - 0.30243629014133017 - 0.31564501718268645 - 0.34772968477866795 - 0.32522623268021805 - 0.3410158265159116 - 0.33581770403870503 - 0.31539111636001027 - 0.31100967211136193 - 0.31302897733611235 - 0.3126922134381441 - 0.30243629014133017 - 0.31564501718268645 - 0.34772968477866795 - 0.32522623268021805 - 0.3410158265159116 - 0.33581770403870503 - 0.31539111636001027 - 0.31100967211136193 - 0.31302897733611235 - 0.3126922134381441 - 0.30243629014133017 - 0.31564501718268645 - 0.34772968477866795 - 0.32522623268021805 - 0.3410158265159116 - 0.33581770403870503 - 0.31539111636001027 - 0.31100967211136193 - 0.31302897733611235 - 0.3126922134381441 - 0.30243629014133017 - 0.31564501718268645 - 0.34772968477866795 - 0.32522623268021805 - 0.3410158265159116 - 0.33581770403870503 - 0.31539111636001027 - 0.31100967211136193 - 0.31302897733611235 - 
      - 0.31100967211136193
      - 0.31302897733611235
      - 0.3126922134381441
      - 0.30243629014133017
      - 0.31564501718268645
      - 0.34772968477866795
      - 0.32522623268021805
      - 0.3410158265159116
      - 0.33581770403870503
      - 0.31539111636001027
  - task:
      type: Reranking
    dataset:
      name: MTEB MindSmallReranking
      type: None
      config: default
      split: test
      revision: 3bdac13927fdc888b903db93b2ffdbd90b295a69
    metrics:
    - type: map
      value: 30.62309561205373
    - type: mrr
      value: 31.707879717902554
  - task:
      type: Retrieval
    dataset:
      name: MTEB NFCorpus
      type: None
      config: default
      split: test
      revision: ec0fa4fe99da2ff19ca1214b7966684033a58814
    metrics:
    - type: map_at_1
      value: 5.668
    - type: map_at_10
      value: 12.225999999999999
    - type: map_at_100
      value: 15.122
    - type: map_at_1000
      value: 16.422
    - type: map_at_20
      value: 13.361999999999998
    - type: map_at_3
      value: 9.083
    - type: map_at_5
      value: 10.5
    - type: mrr_at_1
      value: 46.44
    - type: mrr_at_10
      value: 53.553
    - type: mrr_at_100
      value: 54.15
    - type: mrr_at_1000
      value: 54.193000000000005
    - type: mrr_at_20
      value: 53.837
    - type: mrr_at_3
      value: 51.702999999999996
    - type: mrr_at_5
      value: 52.647
    - type: ndcg_at_1
      value: 44.272
    - type: ndcg_at_10
      value: 33.395
    - type: ndcg_at_100
      value: 29.976999999999997
    - type: ndcg_at_1000
      value: 38.388
    - type: ndcg_at_20
      value: 30.606
    - type: ndcg_at_3
      value: 39.212
    - type: ndcg_at_5
      value: 36.611
    - type: precision_at_1
      value: 46.129999999999995
    - type: precision_at_10
      value: 24.334
    - type: precision_at_100
      value: 7.553999999999999
    - type: precision_at_1000
      value: 1.994
    - type: precision_at_20
      value: 17.678
    - type: precision_at_3
      value: 36.326
    - type: precision_at_5
      value: 31.330999999999996
    - type: recall_at_1
      value: 5.668
    - type: recall_at_10
      value: 15.837000000000002
    - type: recall_at_100
      value: 29.845
    - type: recall_at_1000
      value: 60.563
    - type: recall_at_20
      value: 18.587999999999997
    - type: recall_at_3
      value: 10.096
    - type: recall_at_5
      value: 12.261
  - task:
      type: Retrieval
    dataset:
      name: MTEB NQ
      type: None
      config: default
      split: test
      revision: b774495ed302d8c44a3a7ea25c90dbce03968f31
    metrics:
    - type: map_at_1
      value: 39.335
    - type: map_at_10
      value: 54.932
    - type: map_at_100
      value: 55.742000000000004
    - type: map_at_1000
      value: 55.766000000000005
    - type: map_at_20
      value: 55.504
    - type: map_at_3
      value: 50.904
    - type: map_at_5
      value: 53.388999999999996
    - type: mrr_at_1
      value: 44.003
    - type: mrr_at_10
      value: 57.419
    - type: mrr_at_100
      value: 57.963
    - type: mrr_at_1000
      value: 57.981
    - type: mrr_at_20
      value: 57.80499999999999
    - type: mrr_at_3
      value: 54.30199999999999
    - type: mrr_at_5
      value: 56.257000000000005
    - type: ndcg_at_1
      value: 43.974999999999994
    - type: ndcg_at_10
      value: 62.153999999999996
    - type: ndcg_at_100
      value: 65.326
    - type: ndcg_at_1000
      value: 65.862
    - type: ndcg_at_20
      value: 63.922999999999995
    - type: ndcg_at_3
      value: 54.834
    - type: ndcg_at_5
      value: 58.857000000000006
    - type: precision_at_1
      value: 43.974999999999994
    - type: precision_at_10
      value: 9.722
    - type: precision_at_100
      value: 1.153
    - type: precision_at_1000
      value: 0.12
    - type: precision_at_20
      value: 5.3
    - type: precision_at_3
      value: 24.392
    - type: precision_at_5
      value: 16.993
    - type: recall_at_1
      value: 39.335
    - type: recall_at_10
      value: 81.501
    - type: recall_at_100
      value: 94.851
    - type: recall_at_1000
      value: 98.817
    - type: recall_at_20
      value: 87.968
    - type: recall_at_3
      value: 62.795
    - type: recall_at_5
      value: 71.985
  - task:
      type: Retrieval
    dataset:
      name: MTEB QuoraRetrieval
      type: None
      config: default
      split: test
      revision: e4e08e0b7dbe3c8700f0daef558ff32256715259
    metrics:
    - type: map_at_1
      value: 71.222
    - type: map_at_10
      value: 85.193
    - type: map_at_100
      value: 85.802
    - type: map_at_1000
      value: 85.81800000000001
    - type: map_at_20
      value: 85.587
    - type: map_at_3
      value: 82.253
    - type: map_at_5
      value: 84.142
    - type: mrr_at_1
      value: 82.04
    - type: mrr_at_10
      value: 88.101
    - type: mrr_at_100
      value: 88.196
    - type: mrr_at_1000
      value: 88.196
    - type: mrr_at_20
      value: 88.175
    - type: mrr_at_3
      value: 87.145
    - type: mrr_at_5
      value: 87.825
    - type: ndcg_at_1
      value: 82.04
    - type: ndcg_at_10
      value: 88.849
    - type: ndcg_at_100
      value: 89.992
    - type: ndcg_at_1000
      value: 90.089
    - type: ndcg_at_20
      value: 89.468
    - type: ndcg_at_3
      value: 86.06899999999999
    - type: ndcg_at_5
      value: 87.669
    - type: precision_at_1
      value: 82.04
    - type: precision_at_10
      value: 13.447000000000001
    - type: precision_at_100
      value: 1.528
    - type: precision_at_1000
      value: 0.157
    - type: precision_at_20
      value: 7.116
    - type: precision_at_3
      value: 37.617
    - type: precision_at_5
      value: 24.776
    - type: recall_at_1
      value: 71.222
    - type: recall_at_10
      value: 95.73899999999999
    - type: recall_at_100
      value: 99.572
    - type: recall_at_1000
      value: 99.988
    - type: recall_at_20
      value: 97.725
    - type: recall_at_3
      value: 87.742
    - type: recall_at_5
      value: 92.23400000000001
  - task:
      type: Clustering
    dataset:
      name: MTEB RedditClustering
      type: None
      config: default
      split: test
      revision: 24640382cdbf8abc73003fb0fa6d111a705499eb
    metrics:
    - type: v_measure
      value: 56.502005725283524
    - type: v_measures
      value:
      - 0.5845673186673394
      - 0.648423996059595
      - 0.5081078446363154
      - 0.577059582267051
      - 0.5449838765447135
      - 0.5255305026550916
      - 0.6001776953894321
      - 0.5075448301528861
      - 0.5238448212279936
      - 0.5329001795025329
      - 0.5112306232092642
      - 0.6002807353254037
      - 0.5525285295615835
      - 0.56281813563348
      - 0.6722346506108504
      - 0.5293879728430999
      - 0.5972632642217942
      - 0.6345018102197326
      - 0.515945887049231
      - 0.5291998092690363
      - 0.5250323799432043
      - 0.538426398169316
      - 0.6954213901632498
      - 0.580008522375662
      - 0.5280806756230237
0.538426398169316 - 0.6954213901632498 - 0.580008522375662 - 0.5280806756230237 - 0.5845673186673394 - 0.648423996059595 - 0.5081078446363154 - 0.577059582267051 - 0.5449838765447135 - 0.5255305026550916 - 0.6001776953894321 - 0.5075448301528861 - 0.5238448212279936 - 0.5329001795025329 - 0.5112306232092642 - 0.6002807353254037 - 0.5525285295615835 - 0.56281813563348 - 0.6722346506108504 - 0.5293879728430999 - 0.5972632642217942 - 0.6345018102197326 - 0.515945887049231 - 0.5291998092690363 - 0.5250323799432043 - task: type: Clustering dataset: name: MTEB RedditClusteringP2P type: None config: default split: test revision: 385e3cb46b4cfa89021f56c4380204149d0efe33 metrics: - type: v_measure value: 63.14989421688691 - type: v_measures value: -
0.711458797825346 - 0.6212317163461291 - 0.4113635660304527 - 0.7394060043565659 - 0.6969073197749642 - 0.7513770750973534 - 0.673210410652684 - 0.6825035243902045 - 0.6275126414823813 - 0.40001836573261074 - 0.711458797825346 - 0.6212317163461291 - 0.4113635660304527 - 0.7394060043565659 - 0.6969073197749642 - 0.7513770750973534 - 0.673210410652684 - 0.6825035243902045 - 0.6275126414823813 - 0.40001836573261074 - 0.711458797825346 - 0.6212317163461291 - 0.4113635660304527 - 0.7394060043565659 - 0.6969073197749642 - 0.7513770750973534 - 0.673210410652684 - 0.6825035243902045 - 0.6275126414823813 - 0.40001836573261074 - 0.711458797825346 - 0.6212317163461291 - 0.4113635660304527 - 0.7394060043565659 - 0.6969073197749642 - 0.7513770750973534 - task: type: Retrieval dataset: name: MTEB SCIDOCS type: None config: default split: test revision: f8c2fcf00f625baaa80f62ec5bd9e1fff3b8ae88 metrics: - type: map_at_1 value: 4.4830000000000005 - type: map_at_10 value: 11.04 - type: map_at_100 value: 12.764000000000001 - type: map_at_1000 value: 13.04 - type: map_at_20 value: 11.953 - type: map_at_3 value: 8.125 - type: map_at_5 value: 9.565999999999999 - type: mrr_at_1 value: 22.1 - type: mrr_at_10 value: 32.494 - type: mrr_at_100 value: 33.525 - type: mrr_at_1000 value: 33.596 - type: mrr_at_20 value: 33.089 - type: mrr_at_3 value: 29.416999999999998 - type: mrr_at_5 value: 31.267 - type: ndcg_at_1 value: 22.1 - type: ndcg_at_10 value: 18.587 - type: ndcg_at_100 value: 25.482 - type: ndcg_at_1000 value: 30.581999999999997 - type: ndcg_at_20 value: 21.077 - type: ndcg_at_3 value: 18.165 - type: ndcg_at_5 value: 15.676000000000002 - type: precision_at_1 value: 22.1 - type: precision_at_10 value: 9.48 - type: precision_at_100 value: 1.942 - type: precision_at_1000 value: 0.316 - type: precision_at_20 value: 6.175 - type: precision_at_3 value: 17.033 - type: precision_at_5 value: 13.719999999999999 - type: recall_at_1 value: 4.4830000000000005 - type: recall_at_10 value: 19.208 - type: recall_at_100 value: 39.417 - type: recall_at_1000 value: 64.235 - type: recall_at_20 value: 25.057000000000002 - type: recall_at_3 value: 10.348 - type: recall_at_5 value: 13.893 - task: type: STS dataset: name: MTEB SICK-R type: None config: default split: test revision: 20a6d6f312dd54037fe07a32d58e5e168867909d metrics: - type: cos_sim_pearson value: 83.50181312649208 - type: cos_sim_spearman value: 79.92900705478993 - type: euclidean_pearson value: 81.13482128094503 - type: euclidean_spearman value: 79.92732266864367 - type: manhattan_pearson value: 81.06702121654993 - type: manhattan_spearman value: 79.86983106619135 - task: type: STS dataset: name: MTEB STS12 type: None config: default split: test revision: a0d554a64d88156834ff5ae9920b964011b16384 metrics: - type: cos_sim_pearson value: 83.85431681906961 - type: cos_sim_spearman value: 77.61671419416626 - type: euclidean_pearson value: 81.30538320520961 - type: euclidean_spearman value: 77.62096481461272 - type: manhattan_pearson value: 81.2306021173407 - type: manhattan_spearman value: 77.58386300715222 - task: type: STS dataset: name: MTEB STS13 type: None config: default split: test revision: 7e90230a92c190f1bf69ae9002b8cea547a64cca metrics: - type: cos_sim_pearson value: 84.98057702322754 - type: cos_sim_spearman value: 86.13305071688859 - type: euclidean_pearson value: 85.70903555966376 - type: euclidean_spearman value: 86.13150222328171 - type: manhattan_pearson value: 85.69380834788831 - type: manhattan_spearman value: 86.10784739081191 - task: type: STS dataset: 
name: MTEB STS14 type: None config: default split: test revision: 6031580fec1f6af667f0bd2da0a551cf4f0b2375 metrics: - type: cos_sim_pearson value: 83.43368314724589 - type: cos_sim_spearman value: 81.26767916144169 - type: euclidean_pearson value: 83.23234690932492 - type: euclidean_spearman value: 81.2671726214706 - type: manhattan_pearson value: 83.2381239261109 - type: manhattan_spearman value: 81.27674961470714 - task: type: STS dataset: name: MTEB STS15 type: None config: default split: test revision: ae752c7c21bf194d8b67fd573edf7ae58183cbe3 metrics: - type: cos_sim_pearson value: 86.8637546411748 - type: cos_sim_spearman value: 88.25330888676139 - type: euclidean_pearson value: 87.81194589390417 - type: euclidean_spearman value: 88.25258669625579 - type: manhattan_pearson value: 87.8131866998459 - type: manhattan_spearman value: 88.26523268929576 - task: type: STS dataset: name: MTEB STS16 type: None config: default split: test revision: 4d8694f8f0e0100860b497b999b3dbed754a0513 metrics: - type: cos_sim_pearson value: 83.83129743147286 - type: cos_sim_spearman value: 85.73732687732624 - type: euclidean_pearson value: 85.18051277328075 - type: euclidean_spearman value: 85.73565846174445 - type: manhattan_pearson value: 85.179029651079 - type: manhattan_spearman value: 85.75709685404729 - task: type: STS dataset: name: MTEB STS17 (en-en) type: None config: en-en split: test revision: af5e6fb845001ecf41f4c1e033ce921939a2a68d metrics: - type: cos_sim_pearson value: 87.04715794253148 - type: cos_sim_spearman value: 87.61577496386343 - type: euclidean_pearson value: 88.34713614361046 - type: euclidean_spearman value: 87.56541901567275 - type: manhattan_pearson value: 88.26010824585985 - type: manhattan_spearman value: 87.35211736948182 - task: type: STS dataset: name: MTEB STS22 (en) type: None config: en split: test revision: eea2b4fe26a775864c896887d910b76a8098ad3f metrics: - type: cos_sim_pearson value: 62.36160793264433 - type: cos_sim_spearman value: 66.07767480051893 - type: euclidean_pearson value: 66.4716471304865 - type: euclidean_spearman value: 66.03999286501872 - type: manhattan_pearson value: 66.46197824372902 - type: manhattan_spearman value: 65.82936468127227 - task: type: STS dataset: name: MTEB STSBenchmark type: None config: default split: test revision: b0fddb56ed78048fa8b90373c8a3cfc37b684831 metrics: - type: cos_sim_pearson value: 85.27768996785856 - type: cos_sim_spearman value: 86.96704639052885 - type: euclidean_pearson value: 86.48753189555983 - type: euclidean_spearman value: 86.96981285751171 - type: manhattan_pearson value: 86.49262465015401 - type: manhattan_spearman value: 86.95378609580054 - task: type: Reranking dataset: name: MTEB SciDocsRR type: None config: default split: test revision: d3c5e1fc0b855ab6097bf1cda04dd73947d7caab metrics: - type: map value: 81.52012853393428 - type: mrr value: 94.70817671798063 - task: type: Retrieval dataset: name: MTEB SciFact type: None config: default split: test revision: 0228b52cf27578f30900b9e5271d331663a030d7 metrics: - type: map_at_1 value: 55.344 - type: map_at_10 value: 64.82900000000001 - type: map_at_100 value: 65.42 - type: map_at_1000 value: 65.443 - type: map_at_20 value: 65.2 - type: map_at_3 value: 61.8 - type: map_at_5 value: 63.510999999999996 - type: mrr_at_1 value: 58.333 - type: mrr_at_10 value: 66.24600000000001 - type: mrr_at_100 value: 66.742 - type: mrr_at_1000 value: 66.762 - type: mrr_at_20 value: 66.549 - type: mrr_at_3 value: 64.056 - type: mrr_at_5 value: 65.372 - type: ndcg_at_1 value: 58.333 
- type: ndcg_at_10 value: 69.626 - type: ndcg_at_100 value: 72.236 - type: ndcg_at_1000 value: 72.872 - type: ndcg_at_20 value: 70.864 - type: ndcg_at_3 value: 64.50399999999999 - type: ndcg_at_5 value: 67.07600000000001 - type: precision_at_1 value: 58.333 - type: precision_at_10 value: 9.4 - type: precision_at_100 value: 1.073 - type: precision_at_1000 value: 0.11299999999999999 - type: precision_at_20 value: 4.983 - type: precision_at_3 value: 25.222 - type: precision_at_5 value: 16.8 - type: recall_at_1 value: 55.344 - type: recall_at_10 value: 82.789 - type: recall_at_100 value: 94.6 - type: recall_at_1000 value: 99.667 - type: recall_at_20 value: 87.533 - type: recall_at_3 value: 69.18299999999999 - type: recall_at_5 value: 75.622 - task: type: PairClassification dataset: name: MTEB SprintDuplicateQuestions type: None config: default split: test revision: d66bd1f72af766a5cc4b0ca5e00c162f89e8cc46 metrics: - type: cos_sim_accuracy value: 99.69405940594059 - type: cos_sim_ap value: 92.03642221694545 - type: cos_sim_f1 value: 84.06395048994327 - type: cos_sim_precision value: 86.79446219382322 - type: cos_sim_recall value: 81.5 - type: dot_accuracy value: 99.6930693069307 - type: dot_ap value: 91.9971441434875 - type: dot_f1 value: 83.8006230529595 - type: dot_precision value: 87.14902807775377 - type: dot_recall value: 80.7 - type: euclidean_accuracy value: 99.69504950495049 - type: euclidean_ap value: 92.03626548389335 - type: euclidean_f1 value: 84.10732714138285 - type: euclidean_precision value: 86.88699360341151 - type: euclidean_recall value: 81.5 - type: manhattan_accuracy value: 99.69504950495049 - type: manhattan_ap value: 92.02049659660081 - type: manhattan_f1 value: 84.34959349593495 - type: manhattan_precision value: 85.74380165289256 - type: manhattan_recall value: 83.0 - type: max_accuracy value: 99.69504950495049 - type: max_ap value: 92.03642221694545 - type: max_f1 value: 84.34959349593495 - task: type: Clustering dataset: name: MTEB StackExchangeClustering type: None config: default split: test revision: 6cbc1f7b2bc0622f2e39d2c77fa502909748c259 metrics: - type: v_measure value: 67.04916654680977 - type: v_measures value: - 0.707614120277991 - 0.694974842783697 - 0.5756359888519659 - 0.6964499615297283 - 0.6547764033608466 - 0.6448470247319567 - 0.6263766967145058 - 0.7139286894225703 - 0.6737195749489034 - 0.6824504575459811 - 0.7667603743275774 - 0.7595788549615426 - 0.7086156082505461 - 0.6624140136843005 - 0.6136884209896801 - 0.6717953455355791 - 0.6494834308652331 - 0.6507885275711466 - 0.6382769468968572 - 0.6556052416453325 - 0.6700496626301571 - 0.6424264693175464 - 0.6400679099051025 - 0.7118398877792876 - 0.6501271821744096 - 0.707614120277991 - 0.694974842783697 - 0.5756359888519659 - 0.6964499615297283 - 0.6547764033608466 - 0.6448470247319567 - 0.6263766967145058 - 0.7139286894225703 - 0.6737195749489034 - 0.6824504575459811 - 0.7667603743275774 - 0.7595788549615426 - 0.7086156082505461 - 0.6624140136843005 - 0.6136884209896801 - 0.6717953455355791 - 0.6494834308652331 - 0.6507885275711466 - 0.6382769468968572 - 0.6556052416453325 - 0.6700496626301571 - 0.6424264693175464 - 0.6400679099051025 - 0.7118398877792876 - 0.6501271821744096 - 0.707614120277991 - 0.694974842783697 - 0.5756359888519659 - 0.6964499615297283 - 0.6547764033608466 - 0.6448470247319567 - 0.6263766967145058 - 0.7139286894225703 - 0.6737195749489034 - 0.6824504575459811 - 0.7667603743275774 - 0.7595788549615426 - 0.7086156082505461 - 0.6624140136843005 - 0.6136884209896801 - 
- 0.6424264693175464 - 0.6400679099051025 - 0.7118398877792876 - 0.6501271821744096 - 0.707614120277991 - 0.694974842783697 - 0.5756359888519659 - 0.6964499615297283 - 0.6547764033608466 - 0.6448470247319567 - 0.6263766967145058 - 0.7139286894225703 - 0.6737195749489034 - 0.6824504575459811 - 0.7667603743275774 - 0.7595788549615426 - 0.7086156082505461 - 0.6624140136843005 - 0.6136884209896801 - 0.6717953455355791 - 0.6494834308652331 - 0.6507885275711466 - 0.6382769468968572 - 0.6556052416453325 - 0.6700496626301571 - 0.6424264693175464 - 0.6400679099051025 - 0.7118398877792876 - 0.6501271821744096 - 0.707614120277991 - 0.694974842783697 - 0.5756359888519659 - 0.6964499615297283 - 0.6547764033608466 - 0.6448470247319567 - 0.6263766967145058 - 0.7139286894225703 - 0.6737195749489034 - 0.6824504575459811 - 0.7667603743275774 - 0.7595788549615426 - 0.7086156082505461 - 0.6624140136843005 - 0.6136884209896801 - 0.6717953455355791 - 0.6494834308652331 - 0.6507885275711466 - 0.6382769468968572 - 0.6556052416453325 - 0.6700496626301571 - 0.6424264693175464 - 0.6400679099051025 - 0.7118398877792876 - 0.6501271821744096 - 0.707614120277991 - 0.694974842783697 - 0.5756359888519659 - 0.6964499615297283 - 0.6547764033608466 - 0.6448470247319567 - 0.6263766967145058 - 0.7139286894225703 - 0.6737195749489034 - 0.6824504575459811 - 0.7667603743275774 - 0.7595788549615426 - 0.7086156082505461 - 0.6624140136843005 - 0.6136884209896801 - 0.6717953455355791 - 0.6494834308652331 - 0.6507885275711466 - 0.6382769468968572 - 0.6556052416453325 - 0.6700496626301571 - 0.6424264693175464 - 0.6400679099051025 - 0.7118398877792876 - 0.6501271821744096 - 0.707614120277991 - 0.694974842783697 - 0.5756359888519659 - 0.6964499615297283 - 0.6547764033608466 - 0.6448470247319567 - 0.6263766967145058 - 0.7139286894225703 - 0.6737195749489034 - 0.6824504575459811 - 0.7667603743275774 - 0.7595788549615426 - 0.7086156082505461 - 0.6624140136843005 - 0.6136884209896801 - 0.6717953455355791 - 0.6494834308652331 - 0.6507885275711466 - 0.6382769468968572 - 0.6556052416453325 - 0.6700496626301571 - 0.6424264693175464 - 0.6400679099051025 - 0.7118398877792876 - 0.6501271821744096 - 0.707614120277991 - 0.694974842783697 - 0.5756359888519659 - 0.6964499615297283 - 0.6547764033608466 - 0.6448470247319567 - 0.6263766967145058 - 0.7139286894225703 - 0.6737195749489034 - 0.6824504575459811 - 0.7667603743275774 - 0.7595788549615426 - 0.7086156082505461 - 0.6624140136843005 - 0.6136884209896801 - 0.6717953455355791 - 0.6494834308652331 - 0.6507885275711466 - 0.6382769468968572 - 0.6556052416453325 - 0.6700496626301571 - 0.6424264693175464 - 0.6400679099051025 - 0.7118398877792876 - 0.6501271821744096 - 0.707614120277991 - 0.694974842783697 - 0.5756359888519659 - 0.6964499615297283 - 0.6547764033608466 - 0.6448470247319567 - 0.6263766967145058 - 0.7139286894225703 - 0.6737195749489034 - 0.6824504575459811 - 0.7667603743275774 - 0.7595788549615426 - 0.7086156082505461 - 0.6624140136843005 - 0.6136884209896801 - 0.6717953455355791 - 0.6494834308652331 - 0.6507885275711466 - 0.6382769468968572 - 0.6556052416453325 - 0.6700496626301571 - 0.6424264693175464 - 0.6400679099051025 - 0.7118398877792876 - 0.6501271821744096 - 0.707614120277991 - 0.694974842783697 - 0.5756359888519659 - 0.6964499615297283 - 0.6547764033608466 - 0.6448470247319567 - 0.6263766967145058 - 0.7139286894225703 - 0.6737195749489034 - 0.6824504575459811 - 0.7667603743275774 - 0.7595788549615426 - 0.7086156082505461 - 0.6624140136843005 - 0.6136884209896801 - 
0.6717953455355791 - 0.6494834308652331 - 0.6507885275711466 - 0.6382769468968572 - 0.6556052416453325 - 0.6700496626301571 - 0.6424264693175464 - 0.6400679099051025 - 0.7118398877792876 - 0.6501271821744096 - 0.707614120277991 - 0.694974842783697 - 0.5756359888519659 - 0.6964499615297283 - 0.6547764033608466 - 0.6448470247319567 - 0.6263766967145058 - 0.7139286894225703 - 0.6737195749489034 - 0.6824504575459811 - 0.7667603743275774 - 0.7595788549615426 - 0.7086156082505461 - 0.6624140136843005 - 0.6136884209896801 - 0.6717953455355791 - 0.6494834308652331 - 0.6507885275711466 - 0.6382769468968572 - 0.6556052416453325 - 0.6700496626301571 - 0.6424264693175464 - 0.6400679099051025 - 0.7118398877792876 - 0.6501271821744096 - 0.707614120277991 - 0.694974842783697 - 0.5756359888519659 - 0.6964499615297283 - 0.6547764033608466 - 0.6448470247319567 - 0.6263766967145058 - 0.7139286894225703 - 0.6737195749489034 - 0.6824504575459811 - 0.7667603743275774 - 0.7595788549615426 - 0.7086156082505461 - 0.6624140136843005 - 0.6136884209896801 - 0.6717953455355791 - 0.6494834308652331 - 0.6507885275711466 - 0.6382769468968572 - 0.6556052416453325 - 0.6700496626301571 - 0.6424264693175464 - 0.6400679099051025 - 0.7118398877792876 - 0.6501271821744096 - 0.707614120277991 - 0.694974842783697 - 0.5756359888519659 - 0.6964499615297283 - 0.6547764033608466 - 0.6448470247319567 - 0.6263766967145058 - 0.7139286894225703 - 0.6737195749489034 - 0.6824504575459811 - 0.7667603743275774 - 0.7595788549615426 - 0.7086156082505461 - 0.6624140136843005 - 0.6136884209896801 - 0.6717953455355791 - 0.6494834308652331 - 0.6507885275711466 - 0.6382769468968572 - 0.6556052416453325 - 0.6700496626301571 - 0.6424264693175464 - 0.6400679099051025 - 0.7118398877792876 - 0.6501271821744096 - 0.707614120277991 - 0.694974842783697 - 0.5756359888519659 - 0.6964499615297283 - 0.6547764033608466 - 0.6448470247319567 - 0.6263766967145058 - 0.7139286894225703 - 0.6737195749489034 - 0.6824504575459811 - 0.7667603743275774 - 0.7595788549615426 - 0.7086156082505461 - 0.6624140136843005 - 0.6136884209896801 - 0.6717953455355791 - 0.6494834308652331 - 0.6507885275711466 - 0.6382769468968572 - 0.6556052416453325 - 0.6700496626301571 - 0.6424264693175464 - 0.6400679099051025 - 0.7118398877792876 - 0.6501271821744096 - 0.707614120277991 - 0.694974842783697 - 0.5756359888519659 - 0.6964499615297283 - 0.6547764033608466 - 0.6448470247319567 - 0.6263766967145058 - 0.7139286894225703 - 0.6737195749489034 - 0.6824504575459811 - 0.7667603743275774 - 0.7595788549615426 - 0.7086156082505461 - 0.6624140136843005 - 0.6136884209896801 - 0.6717953455355791 - 0.6494834308652331 - 0.6507885275711466 - 0.6382769468968572 - 0.6556052416453325 - 0.6700496626301571 - 0.6424264693175464 - 0.6400679099051025 - 0.7118398877792876 - 0.6501271821744096 - 0.707614120277991 - 0.694974842783697 - 0.5756359888519659 - 0.6964499615297283 - 0.6547764033608466 - 0.6448470247319567 - 0.6263766967145058 - 0.7139286894225703 - 0.6737195749489034 - 0.6824504575459811 - 0.7667603743275774 - 0.7595788549615426 - 0.7086156082505461 - 0.6624140136843005 - 0.6136884209896801 - 0.6717953455355791 - 0.6494834308652331 - 0.6507885275711466 - 0.6382769468968572 - 0.6556052416453325 - 0.6700496626301571 - 0.6424264693175464 - 0.6400679099051025 - 0.7118398877792876 - 0.6501271821744096 - 0.707614120277991 - 0.694974842783697 - 0.5756359888519659 - 0.6964499615297283 - 0.6547764033608466 - 0.6448470247319567 - 0.6263766967145058 - 0.7139286894225703 - 0.6737195749489034 - 0.6824504575459811 
- 0.7667603743275774 - 0.7595788549615426 - 0.7086156082505461 - 0.6624140136843005 - 0.6136884209896801 - 0.6717953455355791 - 0.6494834308652331 - 0.6507885275711466 - 0.6382769468968572 - 0.6556052416453325 - 0.6700496626301571 - 0.6424264693175464 - 0.6400679099051025 - 0.7118398877792876 - 0.6501271821744096 - 0.707614120277991 - 0.694974842783697 - 0.5756359888519659 - 0.6964499615297283 - 0.6547764033608466 - 0.6448470247319567 - 0.6263766967145058 - 0.7139286894225703 - 0.6737195749489034 - 0.6824504575459811 - 0.7667603743275774 - 0.7595788549615426 - 0.7086156082505461 - 0.6624140136843005 - 0.6136884209896801 - 0.6717953455355791 - 0.6494834308652331 - 0.6507885275711466 - 0.6382769468968572 - 0.6556052416453325 - 0.6700496626301571 - 0.6424264693175464 - 0.6400679099051025 - 0.7118398877792876 - 0.6501271821744096 - 0.707614120277991 - 0.694974842783697 - 0.5756359888519659 - 0.6964499615297283 - 0.6547764033608466 - 0.6448470247319567 - 0.6263766967145058 - 0.7139286894225703 - 0.6737195749489034 - 0.6824504575459811 - 0.7667603743275774 - 0.7595788549615426 - 0.7086156082505461 - 0.6624140136843005 - 0.6136884209896801 - 0.6717953455355791 - 0.6494834308652331 - 0.6507885275711466 - 0.6382769468968572 - 0.6556052416453325 - 0.6700496626301571 - 0.6424264693175464 - 0.6400679099051025 - 0.7118398877792876 - 0.6501271821744096 - 0.707614120277991 - 0.694974842783697 - 0.5756359888519659 - 0.6964499615297283 - 0.6547764033608466 - 0.6448470247319567 - 0.6263766967145058 - 0.7139286894225703 - 0.6737195749489034 - 0.6824504575459811 - 0.7667603743275774 - 0.7595788549615426 - 0.7086156082505461 - 0.6624140136843005 - 0.6136884209896801 - 0.6717953455355791 - 0.6494834308652331 - 0.6507885275711466 - 0.6382769468968572 - 0.6556052416453325 - 0.6700496626301571 - 0.6424264693175464 - 0.6400679099051025 - 0.7118398877792876 - 0.6501271821744096 - 0.707614120277991 - 0.694974842783697 - 0.5756359888519659 - 0.6964499615297283 - 0.6547764033608466 - 0.6448470247319567 - 0.6263766967145058 - 0.7139286894225703 - 0.6737195749489034 - 0.6824504575459811 - 0.7667603743275774 - 0.7595788549615426 - 0.7086156082505461 - 0.6624140136843005 - 0.6136884209896801 - 0.6717953455355791 - 0.6494834308652331 - 0.6507885275711466 - 0.6382769468968572 - 0.6556052416453325 - 0.6700496626301571 - 0.6424264693175464 - 0.6400679099051025 - 0.7118398877792876 - 0.6501271821744096 - 0.707614120277991 - 0.694974842783697 - 0.5756359888519659 - 0.6964499615297283 - 0.6547764033608466 - 0.6448470247319567 - 0.6263766967145058 - 0.7139286894225703 - 0.6737195749489034 - 0.6824504575459811 - 0.7667603743275774 - 0.7595788549615426 - 0.7086156082505461 - 0.6624140136843005 - 0.6136884209896801 - 0.6717953455355791 - 0.6494834308652331 - 0.6507885275711466 - 0.6382769468968572 - 0.6556052416453325 - 0.6700496626301571 - 0.6424264693175464 - 0.6400679099051025 - 0.7118398877792876 - 0.6501271821744096 - 0.707614120277991 - 0.694974842783697 - 0.5756359888519659 - 0.6964499615297283 - 0.6547764033608466 - 0.6448470247319567 - 0.6263766967145058 - 0.7139286894225703 - 0.6737195749489034 - 0.6824504575459811 - 0.7667603743275774 - 0.7595788549615426 - 0.7086156082505461 - 0.6624140136843005 - 0.6136884209896801 - 0.6717953455355791 - 0.6494834308652331 - 0.6507885275711466 - 0.6382769468968572 - 0.6556052416453325 - 0.6700496626301571 - 0.6424264693175464 - 0.6400679099051025 - 0.7118398877792876 - 0.6501271821744096 - 0.707614120277991 - 0.694974842783697 - 0.5756359888519659 - 0.6964499615297283 - 
0.6547764033608466 - 0.6448470247319567 - 0.6263766967145058 - 0.7139286894225703 - 0.6737195749489034 - 0.6824504575459811 - 0.7667603743275774 - 0.7595788549615426 - 0.7086156082505461 - 0.6624140136843005 - 0.6136884209896801 - 0.6717953455355791 - 0.6494834308652331 - 0.6507885275711466 - 0.6382769468968572 - 0.6556052416453325 - 0.6700496626301571 - 0.6424264693175464 - 0.6400679099051025 - 0.7118398877792876 - 0.6501271821744096 - 0.707614120277991 - 0.694974842783697 - 0.5756359888519659 - 0.6964499615297283 - 0.6547764033608466 - 0.6448470247319567 - 0.6263766967145058 - 0.7139286894225703 - 0.6737195749489034 - 0.6824504575459811 - 0.7667603743275774 - 0.7595788549615426 - 0.7086156082505461 - 0.6624140136843005 - 0.6136884209896801 - 0.6717953455355791 - 0.6494834308652331 - 0.6507885275711466 - 0.6382769468968572 - 0.6556052416453325 - 0.6700496626301571 - 0.6424264693175464 - 0.6400679099051025 - 0.7118398877792876 - 0.6501271821744096 - 0.707614120277991 - 0.694974842783697 - 0.5756359888519659 - 0.6964499615297283 - 0.6547764033608466 - 0.6448470247319567 - 0.6263766967145058 - 0.7139286894225703 - 0.6737195749489034 - 0.6824504575459811 - 0.7667603743275774 - 0.7595788549615426 - 0.7086156082505461 - 0.6624140136843005 - 0.6136884209896801 - 0.6717953455355791 - 0.6494834308652331 - 0.6507885275711466 - 0.6382769468968572 - 0.6556052416453325 - 0.6700496626301571 - 0.6424264693175464 - 0.6400679099051025 - 0.7118398877792876 - 0.6501271821744096 - 0.707614120277991 - 0.694974842783697 - 0.5756359888519659 - 0.6964499615297283 - 0.6547764033608466 - 0.6448470247319567 - 0.6263766967145058 - 0.7139286894225703 - 0.6737195749489034 - 0.6824504575459811 - 0.7667603743275774 - 0.7595788549615426 - 0.7086156082505461 - 0.6624140136843005 - 0.6136884209896801 - 0.6717953455355791 - 0.6494834308652331 - 0.6507885275711466 - 0.6382769468968572 - 0.6556052416453325 - 0.6700496626301571 - 0.6424264693175464 - 0.6400679099051025 - 0.7118398877792876 - 0.6501271821744096 - 0.707614120277991 - 0.694974842783697 - 0.5756359888519659 - 0.6964499615297283 - 0.6547764033608466 - 0.6448470247319567 - 0.6263766967145058 - 0.7139286894225703 - 0.6737195749489034 - 0.6824504575459811 - 0.7667603743275774 - 0.7595788549615426 - 0.7086156082505461 - 0.6624140136843005 - 0.6136884209896801 - 0.6717953455355791 - 0.6494834308652331 - 0.6507885275711466 - 0.6382769468968572 - 0.6556052416453325 - 0.6700496626301571 - 0.6424264693175464 - 0.6400679099051025 - 0.7118398877792876 - 0.6501271821744096 - 0.707614120277991 - 0.694974842783697 - 0.5756359888519659 - 0.6964499615297283 - 0.6547764033608466 - 0.6448470247319567 - 0.6263766967145058 - 0.7139286894225703 - 0.6737195749489034 - 0.6824504575459811 - 0.7667603743275774 - 0.7595788549615426 - 0.7086156082505461 - 0.6624140136843005 - 0.6136884209896801 - 0.6717953455355791 - 0.6494834308652331 - 0.6507885275711466 - 0.6382769468968572 - 0.6556052416453325 - 0.6700496626301571 - 0.6424264693175464 - 0.6400679099051025 - 0.7118398877792876 - 0.6501271821744096 - 0.707614120277991 - 0.694974842783697 - 0.5756359888519659 - 0.6964499615297283 - 0.6547764033608466 - 0.6448470247319567 - 0.6263766967145058 - 0.7139286894225703 - 0.6737195749489034 - 0.6824504575459811 - 0.7667603743275774 - 0.7595788549615426 - 0.7086156082505461 - 0.6624140136843005 - 0.6136884209896801 - 0.6717953455355791 - 0.6494834308652331 - 0.6507885275711466 - 0.6382769468968572 - 0.6556052416453325 - 0.6700496626301571 - 0.6424264693175464 - 0.6400679099051025 - 
0.7118398877792876 - 0.6501271821744096 - 0.707614120277991 - 0.694974842783697 - 0.5756359888519659 - 0.6964499615297283 - 0.6547764033608466 - 0.6448470247319567 - 0.6263766967145058 - 0.7139286894225703 - 0.6737195749489034 - 0.6824504575459811 - 0.7667603743275774 - 0.7595788549615426 - 0.7086156082505461 - 0.6624140136843005 - 0.6136884209896801 - 0.6717953455355791 - 0.6494834308652331 - 0.6507885275711466 - 0.6382769468968572 - 0.6556052416453325 - 0.6700496626301571 - 0.6424264693175464 - 0.6400679099051025 - 0.7118398877792876 - 0.6501271821744096 - 0.707614120277991 - 0.694974842783697 - 0.5756359888519659 - 0.6964499615297283 - 0.6547764033608466 - 0.6448470247319567 - 0.6263766967145058 - 0.7139286894225703 - 0.6737195749489034 - 0.6824504575459811 - 0.7667603743275774 - 0.7595788549615426 - 0.7086156082505461 - 0.6624140136843005 - 0.6136884209896801 - 0.6717953455355791 - 0.6494834308652331 - 0.6507885275711466 - 0.6382769468968572 - 0.6556052416453325 - 0.6700496626301571 - 0.6424264693175464 - 0.6400679099051025 - 0.7118398877792876 - 0.6501271821744096 - 0.707614120277991 - 0.694974842783697 - 0.5756359888519659 - 0.6964499615297283 - 0.6547764033608466 - 0.6448470247319567 - 0.6263766967145058 - 0.7139286894225703 - 0.6737195749489034 - 0.6824504575459811 - 0.7667603743275774 - 0.7595788549615426 - 0.7086156082505461 - 0.6624140136843005 - 0.6136884209896801 - 0.6717953455355791 - 0.6494834308652331 - 0.6507885275711466 - 0.6382769468968572 - 0.6556052416453325 - 0.6700496626301571 - 0.6424264693175464 - 0.6400679099051025 - 0.7118398877792876 - 0.6501271821744096 - 0.707614120277991 - 0.694974842783697 - 0.5756359888519659 - 0.6964499615297283 - 0.6547764033608466 - 0.6448470247319567 - 0.6263766967145058 - 0.7139286894225703 - 0.6737195749489034 - 0.6824504575459811 - 0.7667603743275774 - 0.7595788549615426 - 0.7086156082505461 - 0.6624140136843005 - 0.6136884209896801 - 0.6717953455355791 - 0.6494834308652331 - 0.6507885275711466 - 0.6382769468968572 - 0.6556052416453325 - 0.6700496626301571 - 0.6424264693175464 - 0.6400679099051025 - 0.7118398877792876 - 0.6501271821744096 - 0.707614120277991 - 0.694974842783697 - 0.5756359888519659 - 0.6964499615297283 - 0.6547764033608466 - 0.6448470247319567 - 0.6263766967145058 - 0.7139286894225703 - 0.6737195749489034 - 0.6824504575459811 - 0.7667603743275774 - 0.7595788549615426 - 0.7086156082505461 - 0.6624140136843005 - 0.6136884209896801 - 0.6717953455355791 - 0.6494834308652331 - 0.6507885275711466 - 0.6382769468968572 - 0.6556052416453325 - 0.6700496626301571 - 0.6424264693175464 - 0.6400679099051025 - 0.7118398877792876 - 0.6501271821744096 - 0.707614120277991 - 0.694974842783697 - 0.5756359888519659 - 0.6964499615297283 - 0.6547764033608466 - 0.6448470247319567 - 0.6263766967145058 - 0.7139286894225703 - 0.6737195749489034 - 0.6824504575459811 - 0.7667603743275774 - 0.7595788549615426 - 0.7086156082505461 - 0.6624140136843005 - 0.6136884209896801 - 0.6717953455355791 - 0.6494834308652331 - 0.6507885275711466 - 0.6382769468968572 - 0.6556052416453325 - 0.6700496626301571 - 0.6424264693175464 - 0.6400679099051025 - 0.7118398877792876 - 0.6501271821744096 - 0.707614120277991 - 0.694974842783697 - 0.5756359888519659 - 0.6964499615297283 - 0.6547764033608466 - 0.6448470247319567 - 0.6263766967145058 - 0.7139286894225703 - 0.6737195749489034 - 0.6824504575459811 - 0.7667603743275774 - 0.7595788549615426 - 0.7086156082505461 - 0.6624140136843005 - 0.6136884209896801 - 0.6717953455355791 - 0.6494834308652331 - 0.6507885275711466 
- 0.6382769468968572 - 0.6556052416453325 - 0.6700496626301571 - 0.6424264693175464 - 0.6400679099051025 - 0.7118398877792876 - 0.6501271821744096 - 0.707614120277991 - 0.694974842783697 - 0.5756359888519659 - 0.6964499615297283 - 0.6547764033608466 - 0.6448470247319567 - 0.6263766967145058 - 0.7139286894225703 - 0.6737195749489034 - 0.6824504575459811 - 0.7667603743275774 - 0.7595788549615426 - 0.7086156082505461 - 0.6624140136843005 - 0.6136884209896801 - 0.6717953455355791 - 0.6494834308652331 - 0.6507885275711466 - 0.6382769468968572 - 0.6556052416453325 - 0.6700496626301571 - 0.6424264693175464 - 0.6400679099051025 - 0.7118398877792876 - 0.6501271821744096 - 0.707614120277991 - 0.694974842783697 - 0.5756359888519659 - 0.6964499615297283 - 0.6547764033608466 - 0.6448470247319567 - 0.6263766967145058 - 0.7139286894225703 - 0.6737195749489034 - 0.6824504575459811 - 0.7667603743275774 - 0.7595788549615426 - 0.7086156082505461 - 0.6624140136843005 - 0.6136884209896801 - 0.6717953455355791 - 0.6494834308652331 - 0.6507885275711466 - 0.6382769468968572 - 0.6556052416453325 - 0.6700496626301571 - 0.6424264693175464 - 0.6400679099051025 - 0.7118398877792876 - 0.6501271821744096 - 0.707614120277991 - 0.694974842783697 - 0.5756359888519659 - 0.6964499615297283 - 0.6547764033608466 - 0.6448470247319567 - 0.6263766967145058 - 0.7139286894225703 - 0.6737195749489034 - 0.6824504575459811 - 0.7667603743275774 - 0.7595788549615426 - 0.7086156082505461 - 0.6624140136843005 - 0.6136884209896801 - 0.6717953455355791 - 0.6494834308652331 - 0.6507885275711466 - 0.6382769468968572 - 0.6556052416453325 - 0.6700496626301571 - 0.6424264693175464 - 0.6400679099051025 - 0.7118398877792876 - 0.6501271821744096 - 0.707614120277991 - 0.694974842783697 - 0.5756359888519659 - 0.6964499615297283 - 0.6547764033608466 - 0.6448470247319567 - 0.6263766967145058 - 0.7139286894225703 - 0.6737195749489034 - 0.6824504575459811 - 0.7667603743275774 - 0.7595788549615426 - 0.7086156082505461 - 0.6624140136843005 - 0.6136884209896801 - 0.6717953455355791 - 0.6494834308652331 - 0.6507885275711466 - 0.6382769468968572 - 0.6556052416453325 - 0.6700496626301571 - 0.6424264693175464 - 0.6400679099051025 - 0.7118398877792876 - 0.6501271821744096 - 0.707614120277991 - 0.694974842783697 - 0.5756359888519659 - 0.6964499615297283 - 0.6547764033608466 - 0.6448470247319567 - 0.6263766967145058 - 0.7139286894225703 - 0.6737195749489034 - 0.6824504575459811 - 0.7667603743275774 - 0.7595788549615426 - 0.7086156082505461 - 0.6624140136843005 - 0.6136884209896801 - 0.6717953455355791 - 0.6494834308652331 - 0.6507885275711466 - 0.6382769468968572 - 0.6556052416453325 - 0.6700496626301571 - 0.6424264693175464 - 0.6400679099051025 - 0.7118398877792876 - 0.6501271821744096 - 0.707614120277991 - 0.694974842783697 - 0.5756359888519659 - 0.6964499615297283 - 0.6547764033608466 - 0.6448470247319567 - 0.6263766967145058 - 0.7139286894225703 - 0.6737195749489034 - 0.6824504575459811 - 0.7667603743275774 - 0.7595788549615426 - 0.7086156082505461 - 0.6624140136843005 - 0.6136884209896801 - 0.6717953455355791 - 0.6494834308652331 - 0.6507885275711466 - 0.6382769468968572 - 0.6556052416453325 - 0.6700496626301571 - 0.6424264693175464 - 0.6400679099051025 - 0.7118398877792876 - 0.6501271821744096 - 0.707614120277991 - 0.694974842783697 - 0.5756359888519659 - 0.6964499615297283 - 0.6547764033608466 - 0.6448470247319567 - 0.6263766967145058 - 0.7139286894225703 - 0.6737195749489034 - 0.6824504575459811 - 0.7667603743275774 - 0.7595788549615426 - 
  - task:
      type: Clustering
    dataset:
      name: MTEB StackExchangeClusteringP2P
      type: None
      config: default
      split: test
      revision: 815ca46b2622cec33ccafc3735d572c266efdb44
    metrics:
    - type: v_measure
      value: 33.36641413495258
    - type: v_measures
      value:
      - 0.3245963448931168
      - 0.31882294716748927
      - 0.31975204745764507
      - 0.30752650651575314
      - 0.3191185767616115
      - 0.35880812225202774
      - 0.3427515820677152
      - 0.344097881083346
      - 0.35390675395072985
      - 0.3472606513458235
  - task:
      type: Reranking
    dataset:
      name: MTEB StackOverflowDupQuestions
      type: None
      config: default
      split: test
      revision: e185fbe320c72810689fc5848eb6114e1ef5ec69
    metrics:
    - type: map
      value: 51.19282080158746
    - type: mrr
      value: 51.871100713012474
  - task:
      type: Summarization
    dataset:
      name: MTEB SummEval
      type: None
      config: default
      split: test
      revision: cda12ad7615edc362dbf25a00fdd61d3b1eaf93c
    metrics:
    - type: cos_sim_pearson
      value: 31.437664703708485
    - type: cos_sim_spearman
      value: 31.391119208581575
    - type: dot_pearson
      value: 31.19925970504054
    - type: dot_spearman
      value: 31.38087224016694
  - task:
      type: Retrieval
    dataset:
      name: MTEB TRECCOVID
      type: None
      config: default
      split: test
      revision: bb9466bac8153a0349341eb1b22e06409e78ef4e
    metrics:
    - type: map_at_1
      value: 0.249
    - type: map_at_10
      value: 2.163
    - type: map_at_100
      value: 13.242999999999999
    - type: map_at_1000
      value: 30.866
    - type: map_at_20
      value: 3.9539999999999997
    - type: map_at_3
      value: 0.718
    - type: map_at_5
      value: 1.169
    - type: mrr_at_1
      value: 96.0
    - type: mrr_at_10
      value: 98.0
    - type: mrr_at_100
      value: 98.0
    - type: mrr_at_1000
      value: 98.0
    - type: mrr_at_20
      value: 98.0
    - type: mrr_at_3
      value: 98.0
    - type: mrr_at_5
      value: 98.0
    - type: ndcg_at_1
      value: 92.0
    - type: ndcg_at_10
      value: 84.147
    - type: ndcg_at_100
      value: 65.143
    - type: ndcg_at_1000
      value: 56.038
    - type: ndcg_at_20
      value: 80.869
    - type: ndcg_at_3
      value: 89.11200000000001
    - type: ndcg_at_5
      value: 87.199
    - type: precision_at_1
      value: 96.0
    - type: precision_at_10
      value: 87.8
    - type: precision_at_100
      value: 66.72
    - type: precision_at_1000
      value: 24.684
    - type: precision_at_20
      value: 84.3
    - type: precision_at_3
      value: 94.0
    - type: precision_at_5
      value: 91.2
    - type: recall_at_1
      value: 0.249
    - type: recall_at_10
      value: 2.284
    - type: recall_at_100
      value: 16.025
    - type: recall_at_1000
      value: 52.068999999999996
    - type: recall_at_20
      value: 4.3180000000000005
    - type: recall_at_3
      value: 0.738
    - type: recall_at_5
      value: 1.212
  - task:
      type: Retrieval
    dataset:
      name: MTEB Touche2020
      type: None
      config: default
      split: test
      revision: a34f9a33db75fa0cbb21bb5cfc3dae8dc8bec93f
    metrics:
    - type: map_at_1
      value: 3.4520000000000004
    - type: map_at_10
      value: 13.045000000000002
    - type: map_at_100
      value: 19.442
    - type: map_at_1000
      value: 21.09
    - type: map_at_20
      value: 15.667
    - type: map_at_3
      value: 7.409000000000001
    - type: map_at_5
      value: 9.73
    - type: mrr_at_1
      value: 46.939
    - type: mrr_at_10
      value: 60.295
    - type: mrr_at_100
      value: 60.904
    - type: mrr_at_1000
      value: 60.919000000000004
    - type: mrr_at_20
      value: 60.77
    - type: mrr_at_3
      value: 58.50300000000001
    - type: mrr_at_5
      value: 59.014
    - type: ndcg_at_1
      value: 44.897999999999996
    - type: ndcg_at_10
      value: 31.911
    - type: ndcg_at_100
      value: 41.945
    - type: ndcg_at_1000
      value: 53.181999999999995
    - type: ndcg_at_20
      value: 31.505
    - type: ndcg_at_3
      value: 39.745000000000005
    - type: ndcg_at_5
      value: 35.528999999999996
    - type: precision_at_1
      value: 46.939
    - type: precision_at_10
      value: 26.531
    - type: precision_at_100
      value: 8.163
    - type: precision_at_1000
      value: 1.559
    - type: precision_at_20
      value: 19.387999999999998
    - type: precision_at_3
      value: 40.136
    - type: precision_at_5
      value: 33.878
    - type: recall_at_1
      value: 3.4520000000000004
    - type: recall_at_10
      value: 18.899
    - type: recall_at_100
      value: 50.207
    - type: recall_at_1000
      value: 83.871
    - type: recall_at_20
      value: 26.756999999999998
    - type: recall_at_3
      value: 8.729000000000001
    - type: recall_at_5
      value: 12.084999999999999
  - task:
      type: Classification
    dataset:
      name: MTEB ToxicConversationsClassification
      type: None
      config: default
      split: test
      revision: edfaf9da55d3dd50d43143d90c1ac476895ae6de
    metrics:
    - type: accuracy
      value: 67.4560546875
    - type: ap
      value: 12.720403845355294
    - type: f1
      value: 51.76062666567839
  - task:
      type: Classification
    dataset:
      name: MTEB TweetSentimentExtractionClassification
      type: None
      config: default
      split: test
      revision: d604517c81ca91fe16a244d1248fc021f9ecee7a
    metrics:
    - type: accuracy
      value: 62.36276174306734
    - type: f1
      value: 62.69956906934332
  - task:
      type: Clustering
    dataset:
      name: MTEB TwentyNewsgroupsClustering
      type: None
      config: default
      split: test
      revision: 6125ec4e24fa026cec8a478383ee943acfbd5449
    metrics:
    - type: v_measure
      value: 49.473492910233965
    - type: v_measures
      value:
      - 0.48829262296803855
      - 0.49853262011854643
      - 0.48457750518082765
      - 0.5020774116970983
      - 0.5001897357021557
      - 0.4702417082210781
      - 0.4763216048226018
      - 0.49932879417585735
      - 0.5129628835129124
      - 0.514824404624281
- 0.514824404624281 - 0.48829262296803855 - 0.49853262011854643 - 0.48457750518082765 - 0.5020774116970983 - 0.5001897357021557 - 0.4702417082210781 - 0.4763216048226018 - 0.49932879417585735 - 0.5129628835129124 - 0.514824404624281 - 0.48829262296803855 - 0.49853262011854643 - 0.48457750518082765 - 0.5020774116970983 - 0.5001897357021557 - 0.4702417082210781 - 0.4763216048226018 - 0.49932879417585735 - 0.5129628835129124 - 0.514824404624281 - 0.48829262296803855 - 0.49853262011854643 - 0.48457750518082765 - 0.5020774116970983 - 0.5001897357021557 - 0.4702417082210781 - 0.4763216048226018 - 0.49932879417585735 - 0.5129628835129124 - 0.514824404624281 - 0.48829262296803855 - 0.49853262011854643 - 0.48457750518082765 - 0.5020774116970983 - 0.5001897357021557 - 0.4702417082210781 - 0.4763216048226018 - 0.49932879417585735 - 0.5129628835129124 - 0.514824404624281 - 0.48829262296803855 - 0.49853262011854643 - 0.48457750518082765 - 0.5020774116970983 - 0.5001897357021557 - 0.4702417082210781 - 0.4763216048226018 - 0.49932879417585735 - 0.5129628835129124 - 0.514824404624281 - 0.48829262296803855 - 0.49853262011854643 - 0.48457750518082765 - 0.5020774116970983 - 0.5001897357021557 - 0.4702417082210781 - 0.4763216048226018 - 0.49932879417585735 - 0.5129628835129124 - 0.514824404624281 - 0.48829262296803855 - 0.49853262011854643 - 0.48457750518082765 - 0.5020774116970983 - 0.5001897357021557 - 0.4702417082210781 - 0.4763216048226018 - 0.49932879417585735 - 0.5129628835129124 - 0.514824404624281 - 0.48829262296803855 - 0.49853262011854643 - 0.48457750518082765 - 0.5020774116970983 - 0.5001897357021557 - 0.4702417082210781 - 0.4763216048226018 - 0.49932879417585735 - 0.5129628835129124 - 0.514824404624281 - 0.48829262296803855 - 0.49853262011854643 - 0.48457750518082765 - 0.5020774116970983 - 0.5001897357021557 - 0.4702417082210781 - 0.4763216048226018 - 0.49932879417585735 - 0.5129628835129124 - 0.514824404624281 - 0.48829262296803855 - 0.49853262011854643 - 0.48457750518082765 - 0.5020774116970983 - 0.5001897357021557 - 0.4702417082210781 - 0.4763216048226018 - 0.49932879417585735 - 0.5129628835129124 - 0.514824404624281 - 0.48829262296803855 - 0.49853262011854643 - 0.48457750518082765 - 0.5020774116970983 - 0.5001897357021557 - 0.4702417082210781 - 0.4763216048226018 - 0.49932879417585735 - 0.5129628835129124 - 0.514824404624281 - 0.48829262296803855 - 0.49853262011854643 - 0.48457750518082765 - 0.5020774116970983 - 0.5001897357021557 - 0.4702417082210781 - 0.4763216048226018 - 0.49932879417585735 - 0.5129628835129124 - 0.514824404624281 - 0.48829262296803855 - 0.49853262011854643 - 0.48457750518082765 - 0.5020774116970983 - 0.5001897357021557 - 0.4702417082210781 - 0.4763216048226018 - 0.49932879417585735 - 0.5129628835129124 - 0.514824404624281 - 0.48829262296803855 - 0.49853262011854643 - 0.48457750518082765 - 0.5020774116970983 - 0.5001897357021557 - 0.4702417082210781 - 0.4763216048226018 - 0.49932879417585735 - 0.5129628835129124 - 0.514824404624281 - 0.48829262296803855 - 0.49853262011854643 - 0.48457750518082765 - 0.5020774116970983 - 0.5001897357021557 - 0.4702417082210781 - 0.4763216048226018 - 0.49932879417585735 - 0.5129628835129124 - 0.514824404624281 - 0.48829262296803855 - 0.49853262011854643 - 0.48457750518082765 - 0.5020774116970983 - 0.5001897357021557 - 0.4702417082210781 - 0.4763216048226018 - 0.49932879417585735 - 0.5129628835129124 - 0.514824404624281 - 0.48829262296803855 - 0.49853262011854643 - 0.48457750518082765 - 0.5020774116970983 - 0.5001897357021557 - 
0.4702417082210781 - 0.4763216048226018 - 0.49932879417585735 - 0.5129628835129124 - 0.514824404624281 - 0.48829262296803855 - 0.49853262011854643 - 0.48457750518082765 - 0.5020774116970983 - 0.5001897357021557 - 0.4702417082210781 - 0.4763216048226018 - 0.49932879417585735 - 0.5129628835129124 - 0.514824404624281 - 0.48829262296803855 - 0.49853262011854643 - 0.48457750518082765 - 0.5020774116970983 - 0.5001897357021557 - 0.4702417082210781 - 0.4763216048226018 - 0.49932879417585735 - 0.5129628835129124 - 0.514824404624281 - 0.48829262296803855 - 0.49853262011854643 - 0.48457750518082765 - 0.5020774116970983 - 0.5001897357021557 - 0.4702417082210781 - 0.4763216048226018 - 0.49932879417585735 - 0.5129628835129124 - 0.514824404624281 - 0.48829262296803855 - 0.49853262011854643 - 0.48457750518082765 - 0.5020774116970983 - 0.5001897357021557 - 0.4702417082210781 - 0.4763216048226018 - 0.49932879417585735 - 0.5129628835129124 - 0.514824404624281 - 0.48829262296803855 - 0.49853262011854643 - 0.48457750518082765 - 0.5020774116970983 - 0.5001897357021557 - 0.4702417082210781 - 0.4763216048226018 - 0.49932879417585735 - 0.5129628835129124 - 0.514824404624281 - 0.48829262296803855 - 0.49853262011854643 - 0.48457750518082765 - 0.5020774116970983 - 0.5001897357021557 - 0.4702417082210781 - 0.4763216048226018 - 0.49932879417585735 - 0.5129628835129124 - 0.514824404624281 - 0.48829262296803855 - 0.49853262011854643 - 0.48457750518082765 - 0.5020774116970983 - 0.5001897357021557 - 0.4702417082210781 - 0.4763216048226018 - 0.49932879417585735 - 0.5129628835129124 - 0.514824404624281 - 0.48829262296803855 - 0.49853262011854643 - 0.48457750518082765 - 0.5020774116970983 - 0.5001897357021557 - 0.4702417082210781 - 0.4763216048226018 - 0.49932879417585735 - 0.5129628835129124 - 0.514824404624281 - 0.48829262296803855 - 0.49853262011854643 - 0.48457750518082765 - 0.5020774116970983 - 0.5001897357021557 - 0.4702417082210781 - 0.4763216048226018 - 0.49932879417585735 - 0.5129628835129124 - 0.514824404624281 - 0.48829262296803855 - 0.49853262011854643 - 0.48457750518082765 - 0.5020774116970983 - 0.5001897357021557 - 0.4702417082210781 - 0.4763216048226018 - 0.49932879417585735 - 0.5129628835129124 - 0.514824404624281 - 0.48829262296803855 - 0.49853262011854643 - 0.48457750518082765 - 0.5020774116970983 - 0.5001897357021557 - 0.4702417082210781 - 0.4763216048226018 - 0.49932879417585735 - 0.5129628835129124 - 0.514824404624281 - 0.48829262296803855 - 0.49853262011854643 - 0.48457750518082765 - 0.5020774116970983 - 0.5001897357021557 - 0.4702417082210781 - 0.4763216048226018 - 0.49932879417585735 - 0.5129628835129124 - 0.514824404624281 - 0.48829262296803855 - 0.49853262011854643 - 0.48457750518082765 - 0.5020774116970983 - 0.5001897357021557 - 0.4702417082210781 - 0.4763216048226018 - 0.49932879417585735 - 0.5129628835129124 - 0.514824404624281 - 0.48829262296803855 - 0.49853262011854643 - 0.48457750518082765 - 0.5020774116970983 - 0.5001897357021557 - 0.4702417082210781 - 0.4763216048226018 - 0.49932879417585735 - 0.5129628835129124 - 0.514824404624281 - 0.48829262296803855 - 0.49853262011854643 - 0.48457750518082765 - 0.5020774116970983 - 0.5001897357021557 - 0.4702417082210781 - 0.4763216048226018 - 0.49932879417585735 - 0.5129628835129124 - 0.514824404624281 - 0.48829262296803855 - 0.49853262011854643 - 0.48457750518082765 - 0.5020774116970983 - 0.5001897357021557 - 0.4702417082210781 - 0.4763216048226018 - 0.49932879417585735 - 0.5129628835129124 - 0.514824404624281 - 0.48829262296803855 - 0.49853262011854643 
- 0.48457750518082765 - 0.5020774116970983 - 0.5001897357021557 - 0.4702417082210781 - 0.4763216048226018 - 0.49932879417585735 - 0.5129628835129124 - 0.514824404624281 - 0.48829262296803855 - 0.49853262011854643 - 0.48457750518082765 - 0.5020774116970983 - 0.5001897357021557 - 0.4702417082210781 - 0.4763216048226018 - 0.49932879417585735 - 0.5129628835129124 - 0.514824404624281 - 0.48829262296803855 - 0.49853262011854643 - 0.48457750518082765 - 0.5020774116970983 - 0.5001897357021557 - 0.4702417082210781 - 0.4763216048226018 - 0.49932879417585735 - 0.5129628835129124 - 0.514824404624281 - 0.48829262296803855 - 0.49853262011854643 - 0.48457750518082765 - 0.5020774116970983 - 0.5001897357021557 - 0.4702417082210781 - 0.4763216048226018 - 0.49932879417585735 - 0.5129628835129124 - 0.514824404624281 - 0.48829262296803855 - 0.49853262011854643 - 0.48457750518082765 - 0.5020774116970983 - 0.5001897357021557 - 0.4702417082210781 - 0.4763216048226018 - 0.49932879417585735 - 0.5129628835129124 - 0.514824404624281 - 0.48829262296803855 - 0.49853262011854643 - 0.48457750518082765 - 0.5020774116970983 - 0.5001897357021557 - 0.4702417082210781 - 0.4763216048226018 - 0.49932879417585735 - 0.5129628835129124 - 0.514824404624281 - 0.48829262296803855 - 0.49853262011854643 - 0.48457750518082765 - 0.5020774116970983 - 0.5001897357021557 - 0.4702417082210781 - 0.4763216048226018 - 0.49932879417585735 - 0.5129628835129124 - 0.514824404624281 - 0.48829262296803855 - 0.49853262011854643 - 0.48457750518082765 - 0.5020774116970983 - 0.5001897357021557 - 0.4702417082210781 - 0.4763216048226018 - 0.49932879417585735 - 0.5129628835129124 - 0.514824404624281 - 0.48829262296803855 - 0.49853262011854643 - 0.48457750518082765 - 0.5020774116970983 - 0.5001897357021557 - 0.4702417082210781 - 0.4763216048226018 - 0.49932879417585735 - 0.5129628835129124 - 0.514824404624281 - 0.48829262296803855 - 0.49853262011854643 - 0.48457750518082765 - 0.5020774116970983 - 0.5001897357021557 - 0.4702417082210781 - 0.4763216048226018 - 0.49932879417585735 - 0.5129628835129124 - 0.514824404624281 - 0.48829262296803855 - 0.49853262011854643 - 0.48457750518082765 - 0.5020774116970983 - 0.5001897357021557 - 0.4702417082210781 - 0.4763216048226018 - 0.49932879417585735 - 0.5129628835129124 - 0.514824404624281 - 0.48829262296803855 - 0.49853262011854643 - 0.48457750518082765 - 0.5020774116970983 - 0.5001897357021557 - 0.4702417082210781 - 0.4763216048226018 - 0.49932879417585735 - 0.5129628835129124 - 0.514824404624281 - 0.48829262296803855 - 0.49853262011854643 - 0.48457750518082765 - 0.5020774116970983 - 0.5001897357021557 - 0.4702417082210781 - 0.4763216048226018 - 0.49932879417585735 - 0.5129628835129124 - 0.514824404624281 - 0.48829262296803855 - 0.49853262011854643 - 0.48457750518082765 - 0.5020774116970983 - 0.5001897357021557 - 0.4702417082210781 - 0.4763216048226018 - 0.49932879417585735 - 0.5129628835129124 - 0.514824404624281 - 0.48829262296803855 - 0.49853262011854643 - 0.48457750518082765 - 0.5020774116970983 - 0.5001897357021557 - 0.4702417082210781 - 0.4763216048226018 - 0.49932879417585735 - 0.5129628835129124 - 0.514824404624281 - 0.48829262296803855 - 0.49853262011854643 - 0.48457750518082765 - 0.5020774116970983 - 0.5001897357021557 - 0.4702417082210781 - 0.4763216048226018 - 0.49932879417585735 - 0.5129628835129124 - 0.514824404624281 - 0.48829262296803855 - 0.49853262011854643 - 0.48457750518082765 - 0.5020774116970983 - 0.5001897357021557 - 0.4702417082210781 - 0.4763216048226018 - 0.49932879417585735 - 
0.5129628835129124 - 0.514824404624281 - 0.48829262296803855 - 0.49853262011854643 - 0.48457750518082765 - 0.5020774116970983 - 0.5001897357021557 - 0.4702417082210781 - 0.4763216048226018 - 0.49932879417585735 - 0.5129628835129124 - 0.514824404624281 - 0.48829262296803855 - 0.49853262011854643 - 0.48457750518082765 - 0.5020774116970983 - 0.5001897357021557 - 0.4702417082210781 - 0.4763216048226018 - 0.49932879417585735 - 0.5129628835129124 - 0.514824404624281 - 0.48829262296803855 - 0.49853262011854643 - 0.48457750518082765 - 0.5020774116970983 - 0.5001897357021557 - 0.4702417082210781 - 0.4763216048226018 - 0.49932879417585735 - 0.5129628835129124 - 0.514824404624281 - 0.48829262296803855 - 0.49853262011854643 - 0.48457750518082765 - 0.5020774116970983 - 0.5001897357021557 - 0.4702417082210781 - 0.4763216048226018 - 0.49932879417585735 - 0.5129628835129124 - 0.514824404624281 - 0.48829262296803855 - 0.49853262011854643 - 0.48457750518082765 - 0.5020774116970983 - 0.5001897357021557 - 0.4702417082210781 - 0.4763216048226018 - 0.49932879417585735 - 0.5129628835129124 - 0.514824404624281 - 0.48829262296803855 - 0.49853262011854643 - 0.48457750518082765 - 0.5020774116970983 - 0.5001897357021557 - 0.4702417082210781 - 0.4763216048226018 - 0.49932879417585735 - 0.5129628835129124 - 0.514824404624281 - 0.48829262296803855 - 0.49853262011854643 - 0.48457750518082765 - 0.5020774116970983 - 0.5001897357021557 - 0.4702417082210781 - 0.4763216048226018 - 0.49932879417585735 - 0.5129628835129124 - 0.514824404624281 - 0.48829262296803855 - 0.49853262011854643 - 0.48457750518082765 - 0.5020774116970983 - 0.5001897357021557 - 0.4702417082210781 - 0.4763216048226018 - 0.49932879417585735 - 0.5129628835129124 - 0.514824404624281 - 0.48829262296803855 - 0.49853262011854643 - 0.48457750518082765 - 0.5020774116970983 - 0.5001897357021557 - 0.4702417082210781 - 0.4763216048226018 - 0.49932879417585735 - 0.5129628835129124 - 0.514824404624281 - 0.48829262296803855 - 0.49853262011854643 - 0.48457750518082765 - 0.5020774116970983 - 0.5001897357021557 - 0.4702417082210781 - 0.4763216048226018 - 0.49932879417585735 - 0.5129628835129124 - 0.514824404624281 - 0.48829262296803855 - 0.49853262011854643 - 0.48457750518082765 - 0.5020774116970983 - 0.5001897357021557 - 0.4702417082210781 - 0.4763216048226018 - 0.49932879417585735 - 0.5129628835129124 - 0.514824404624281 - task: type: PairClassification dataset: name: MTEB TwitterSemEval2015 type: None config: default split: test revision: 70970daeab8776df92f5ea462b6173c0b46fd2d1 metrics: - type: cos_sim_accuracy value: 85.75430649102938 - type: cos_sim_ap value: 73.62842656477649 - type: cos_sim_f1 value: 67.76023680315738 - type: cos_sim_precision value: 63.61741547012506 - type: cos_sim_recall value: 72.4802110817942 - type: dot_accuracy value: 85.7423854085951 - type: dot_ap value: 73.59147637253723 - type: dot_f1 value: 67.69498693867396 - type: dot_precision value: 64.03859731701577 - type: dot_recall value: 71.79419525065963 - type: euclidean_accuracy value: 85.7423854085951 - type: euclidean_ap value: 73.6288990409654 - type: euclidean_f1 value: 67.80415430267064 - type: euclidean_precision value: 63.79711493718009 - type: euclidean_recall value: 72.34828496042216 - type: manhattan_accuracy value: 85.69470107885796 - type: manhattan_ap value: 73.49219614602531 - type: manhattan_f1 value: 67.60809797550613 - type: manhattan_precision value: 64.22127255460589 - type: manhattan_recall value: 71.37203166226914 - type: max_accuracy value: 85.75430649102938 - type: 
max_ap value: 73.6288990409654 - type: max_f1 value: 67.80415430267064 - task: type: PairClassification dataset: name: MTEB TwitterURLCorpus type: None config: default split: test revision: 8b6510b0b1fa4e4c4f879467980e9be563ec1cdf metrics: - type: cos_sim_accuracy value: 89.08293553770326 - type: cos_sim_ap value: 86.21246419992926 - type: cos_sim_f1 value: 78.49922526377924 - type: cos_sim_precision value: 75.35769939084857 - type: cos_sim_recall value: 81.9140745303357 - type: dot_accuracy value: 89.08681647067955 - type: dot_ap value: 86.19733517196862 - type: dot_f1 value: 78.51132446157838 - type: dot_precision value: 75.70233755093287 - type: dot_recall value: 81.53680320295658 - type: euclidean_accuracy value: 89.07517367175069 - type: euclidean_ap value: 86.21198725320203 - type: euclidean_f1 value: 78.49867139061116 - type: euclidean_precision value: 75.38276155372839 - type: euclidean_recall value: 81.88327687095781 - type: manhattan_accuracy value: 89.0538285403811 - type: manhattan_ap value: 86.17785515765131 - type: manhattan_f1 value: 78.48184098593084 - type: manhattan_precision value: 74.34396308285694 - type: manhattan_recall value: 83.10748383122882 - type: max_accuracy value: 89.08681647067955 - type: max_ap value: 86.21246419992926 - type: max_f1 value: 78.51132446157838 --- # ModernBERT Embed [![image/png](modernbertembed.png)](https://huggingface.co/nomic-ai/modernbert-embed-base) ModernBERT Embed is an embedding model trained from [ModernBERT-base](https://huggingface.co/answerdotai/ModernBERT-base), bringing the new advances of ModernBERT to embeddings! Trained on the [Nomic Embed](https://arxiv.org/abs/2402.01613) weakly-supervised and supervised datasets, `modernbert-embed` also supports Matryoshka Representation Learning dimensions of 256, reducing memory by 3x with minimal performance loss. ## Performance | Model | Dimensions | Average (56) | Classification (12) | Clustering (11) | Pair Classification (3) | Reranking (4) | Retrieval (15) | STS (10) | Summarization (1) | |-----------------------|------------|--------------|---------------------|-----------------|-------------------------|---------------|----------------|-----------|------------------| | nomic-embed-text-v1 | 768 | 62.4 | 74.1 | 43.9 | **85.2** | 55.7 | 52.8 | 82.1 | 30.1 | | nomic-embed-text-v1.5 | 768 | 62.28 | 73.55 | 43.93 | 84.61 | 55.78 | **53.01** | **81.94** | 30.4 | | modernbert-embed-base | 768 | **62.62** | **74.31** | **44.98** | 83.96 | **56.42** | 52.89 | 81.78 | **31.39** | | nomic-embed-text-v1.5 | 256 | 61.04 | 72.1 | 43.16 | 84.09 | 55.18 | 50.81 | 81.34 | 30.05 | | modernbert-embed-base | 256 | 61.17 | 72.40 | 43.82 | 83.45 | 55.69 | 50.62 | 81.12 | 31.27 | ## Usage You can use these models directly with the latest `transformers` release; doing so requires installing `transformers>=4.48.0`: ```bash pip install transformers>=4.48.0 ``` As a reminder, this model is trained similarly to Nomic Embed and **REQUIRES** prefixes to be added to the input. For more information, see the instructions in [Nomic Embed](https://huggingface.co/nomic-ai/nomic-embed-text-v1.5#task-instruction-prefixes). For most use cases, adding `search_query: ` to the query and `search_document: ` to the documents will be sufficient.
### Sentence Transformers ```python from sentence_transformers import SentenceTransformer model = SentenceTransformer("nomic-ai/modernbert-embed-base") query_embeddings = model.encode([ "search_query: What is TSNE?", "search_query: Who is Laurens van der Maaten?", ]) doc_embeddings = model.encode([ "search_document: TSNE is a dimensionality reduction algorithm created by Laurens van Der Maaten", ]) print(query_embeddings.shape, doc_embeddings.shape) # (2, 768) (1, 768) similarities = model.similarity(query_embeddings, doc_embeddings) print(similarities) # tensor([[0.7214], # [0.3260]]) ``` <details><summary>Click to see Sentence Transformers usage with Matryoshka Truncation</summary> In Sentence Transformers, you can truncate embeddings to a smaller dimension by using the `truncate_dim` parameter when loading the `SentenceTransformer` model. ```python from sentence_transformers import SentenceTransformer model = SentenceTransformer("nomic-ai/modernbert-embed-base", truncate_dim=256) query_embeddings = model.encode([ "search_query: What is TSNE?", "search_query: Who is Laurens van der Maaten?", ]) doc_embeddings = model.encode([ "search_document: TSNE is a dimensionality reduction algorithm created by Laurens van Der Maaten", ]) print(query_embeddings.shape, doc_embeddings.shape) # (2, 256) (1, 256) similarities = model.similarity(query_embeddings, doc_embeddings) print(similarities) # tensor([[0.7759], # [0.3419]]) ``` Note the small differences compared to the full 768-dimensional similarities. </details> ### Transformers ```python import torch import torch.nn.functional as F from transformers import AutoTokenizer, AutoModel def mean_pooling(model_output, attention_mask): token_embeddings = model_output[0] input_mask_expanded = ( attention_mask.unsqueeze(-1).expand(token_embeddings.size()).float() ) return torch.sum(token_embeddings * input_mask_expanded, 1) / torch.clamp( input_mask_expanded.sum(1), min=1e-9 ) queries = ["search_query: What is TSNE?", "search_query: Who is Laurens van der Maaten?"] documents = ["search_document: TSNE is a dimensionality reduction algorithm created by Laurens van Der Maaten"] tokenizer = AutoTokenizer.from_pretrained("nomic-ai/modernbert-embed-base") model = AutoModel.from_pretrained("nomic-ai/modernbert-embed-base") encoded_queries = tokenizer(queries, padding=True, truncation=True, return_tensors="pt") encoded_documents = tokenizer(documents, padding=True, truncation=True, return_tensors="pt") with torch.no_grad(): queries_outputs = model(**encoded_queries) documents_outputs = model(**encoded_documents) query_embeddings = mean_pooling(queries_outputs, encoded_queries["attention_mask"]) query_embeddings = F.normalize(query_embeddings, p=2, dim=1) doc_embeddings = mean_pooling(documents_outputs, encoded_documents["attention_mask"]) doc_embeddings = F.normalize(doc_embeddings, p=2, dim=1) print(query_embeddings.shape, doc_embeddings.shape) # torch.Size([2, 768]) torch.Size([1, 768]) similarities = query_embeddings @ doc_embeddings.T print(similarities) # tensor([[0.7214], # [0.3260]]) ``` <details><summary>Click to see Transformers usage with Matryoshka Truncation</summary> In `transformers`, you can truncate embeddings to a smaller dimension by slicing the mean pooled embeddings, prior to normalization. 
```python import torch import torch.nn.functional as F from transformers import AutoTokenizer, AutoModel def mean_pooling(model_output, attention_mask): token_embeddings = model_output[0] input_mask_expanded = ( attention_mask.unsqueeze(-1).expand(token_embeddings.size()).float() ) return torch.sum(token_embeddings * input_mask_expanded, 1) / torch.clamp( input_mask_expanded.sum(1), min=1e-9 ) queries = ["search_query: What is TSNE?", "search_query: Who is Laurens van der Maaten?"] documents = ["search_document: TSNE is a dimensionality reduction algorithm created by Laurens van Der Maaten"] tokenizer = AutoTokenizer.from_pretrained("nomic-ai/modernbert-embed-base") model = AutoModel.from_pretrained("nomic-ai/modernbert-embed-base") truncate_dim = 256 encoded_queries = tokenizer(queries, padding=True, truncation=True, return_tensors="pt") encoded_documents = tokenizer(documents, padding=True, truncation=True, return_tensors="pt") with torch.no_grad(): queries_outputs = model(**encoded_queries) documents_outputs = model(**encoded_documents) query_embeddings = mean_pooling(queries_outputs, encoded_queries["attention_mask"]) query_embeddings = query_embeddings[:, :truncate_dim] query_embeddings = F.normalize(query_embeddings, p=2, dim=1) doc_embeddings = mean_pooling(documents_outputs, encoded_documents["attention_mask"]) doc_embeddings = doc_embeddings[:, :truncate_dim] doc_embeddings = F.normalize(doc_embeddings, p=2, dim=1) print(query_embeddings.shape, doc_embeddings.shape) # torch.Size([2, 256]) torch.Size([1, 256]) similarities = query_embeddings @ doc_embeddings.T print(similarities) # tensor([[0.7759], # [0.3419]]) ``` Note the small differences compared to the full 768-dimensional similarities. </details> ### Transformers.js If you haven't already, you can install the [Transformers.js](https://huggingface.co/docs/transformers.js) JavaScript library from [NPM](https://www.npmjs.com/package/@huggingface/transformers) using: ```bash npm i @huggingface/transformers ``` Then, you can compute embeddings as follows: ```javascript import { pipeline, matmul } from '@huggingface/transformers'; // Create a feature extraction pipeline const extractor = await pipeline( "feature-extraction", "nomic-ai/modernbert-embed-base", { dtype: "fp32" }, // Supported options: "fp32", "fp16", "q8", "q4", "q4f16" ); // Embed queries and documents const query_embeddings = await extractor([ "search_query: What is TSNE?", "search_query: Who is Laurens van der Maaten?", ], { pooling: "mean", normalize: true }, ); const doc_embeddings = await extractor([ "search_document: TSNE is a dimensionality reduction algorithm created by Laurens van Der Maaten", ], { pooling: "mean", normalize: true }, ); // Compute similarity scores const similarities = await matmul(query_embeddings, doc_embeddings.transpose(1, 0)); console.log(similarities.tolist()); // [[0.721383273601532], [0.3259955644607544]] ``` ## Training Click the Nomic Atlas map below to visualize a 5M sample of our contrastive pretraining data! [![image/webp](https://cdn-uploads.huggingface.co/production/uploads/607997c83a565c15675055b3/pjhJhuNyRfPagRd_c_iUz.webp)](https://atlas.nomic.ai/map/nomic-text-embed-v1-5m-sample) We train our embedder using a multi-stage training pipeline. Starting from a long-context [BERT model](https://huggingface.co/nomic-ai/nomic-bert-2048), the first unsupervised contrastive stage trains on a dataset generated from weakly related text pairs, such as question-answer pairs from forums like StackExchange and Quora, title-body pairs from Amazon reviews, and summarizations from news articles.
In the second finetuning stage, higher quality labeled datasets such as search queries and answers from web searches are leveraged. Data curation and hard-example mining are crucial in this stage. For more details, see the Nomic Embed [Technical Report](https://static.nomic.ai/reports/2024_Nomic_Embed_Text_Technical_Report.pdf) and corresponding [blog post](https://blog.nomic.ai/posts/nomic-embed-text-v1). The training data used to train the models is released in its entirety. For more details, see the `contrastors` [repository](https://github.com/nomic-ai/contrastors). ## Join the Nomic Community - Nomic: [https://nomic.ai](https://nomic.ai) - Discord: [https://discord.gg/myY5YDR8z8](https://discord.gg/myY5YDR8z8) - Twitter: [https://twitter.com/nomic_ai](https://twitter.com/nomic_ai) ## Citation If you find the model, dataset, or training code useful, please cite our work: ```bibtex @misc{nussbaum2024nomic, title={Nomic Embed: Training a Reproducible Long Context Text Embedder}, author={Zach Nussbaum and John X. Morris and Brandon Duderstadt and Andriy Mulyar}, year={2024}, eprint={2402.01613}, archivePrefix={arXiv}, primaryClass={cs.CL} } ```
[ "BIOSSES", "SCIFACT" ]
facebook/mms-1b-all
facebook
automatic-speech-recognition
[ "transformers", "pytorch", "safetensors", "wav2vec2", "automatic-speech-recognition", "mms", "ab", "af", "ak", "am", "ar", "as", "av", "ay", "az", "ba", "bm", "be", "bn", "bi", "bo", "sh", "br", "bg", "ca", "cs", "ce", "cv", "ku", "cy", "da", "de", "dv", "dz", "el", "en", "eo", "et", "eu", "ee", "fo", "fa", "fj", "fi", "fr", "fy", "ff", "ga", "gl", "gn", "gu", "zh", "ht", "ha", "he", "hi", "hu", "hy", "ig", "ia", "ms", "is", "it", "jv", "ja", "kn", "ka", "kk", "kr", "km", "ki", "rw", "ky", "ko", "kv", "lo", "la", "lv", "ln", "lt", "lb", "lg", "mh", "ml", "mr", "mk", "mg", "mt", "mn", "mi", "my", "nl", "no", "ne", "ny", "oc", "om", "or", "os", "pa", "pl", "pt", "ps", "qu", "ro", "rn", "ru", "sg", "sk", "sl", "sm", "sn", "sd", "so", "es", "sq", "su", "sv", "sw", "ta", "tt", "te", "tg", "tl", "th", "ti", "ts", "tr", "uk", "vi", "wo", "xh", "yo", "zu", "za", "dataset:google/fleurs", "arxiv:2305.13516", "license:cc-by-nc-4.0", "endpoints_compatible", "region:us" ]
"2023-05-27T11:43:21Z"
2023-06-15T10:45:44+00:00
130,893
131
--- datasets: - google/fleurs language: - ab - af - ak - am - ar - as - av - ay - az - ba - bm - be - bn - bi - bo - sh - br - bg - ca - cs - ce - cv - ku - cy - da - de - dv - dz - el - en - eo - et - eu - ee - fo - fa - fj - fi - fr - fy - ff - ga - gl - gn - gu - zh - ht - ha - he - hi - sh - hu - hy - ig - ia - ms - is - it - jv - ja - kn - ka - kk - kr - km - ki - rw - ky - ko - kv - lo - la - lv - ln - lt - lb - lg - mh - ml - mr - ms - mk - mg - mt - mn - mi - my - zh - nl - 'no' - 'no' - ne - ny - oc - om - or - os - pa - pl - pt - ms - ps - qu - qu - qu - qu - qu - qu - qu - qu - qu - qu - qu - qu - qu - qu - qu - qu - qu - qu - qu - qu - qu - qu - ro - rn - ru - sg - sk - sl - sm - sn - sd - so - es - sq - su - sv - sw - ta - tt - te - tg - tl - th - ti - ts - tr - uk - ms - vi - wo - xh - ms - yo - ms - zu - za license: cc-by-nc-4.0 metrics: - wer tags: - mms --- # Massively Multilingual Speech (MMS) - Finetuned ASR - ALL This checkpoint is a model fine-tuned for multi-lingual ASR and part of Facebook's [Massive Multilingual Speech project](https://research.facebook.com/publications/scaling-speech-technology-to-1000-languages/). This checkpoint is based on the [Wav2Vec2 architecture](https://huggingface.co/docs/transformers/model_doc/wav2vec2) and makes use of adapter models to transcribe 1000+ languages. The checkpoint consists of **1 billion parameters** and has been fine-tuned from [facebook/mms-1b](https://huggingface.co/facebook/mms-1b) on 1162 languages. ## Table Of Content - [Example](#example) - [Supported Languages](#supported-languages) - [Model details](#model-details) - [Additional links](#additional-links) ## Example This MMS checkpoint can be used with [Transformers](https://github.com/huggingface/transformers) to transcribe audio of 1107 different languages. Let's look at a simple example. First, we install transformers and some other libraries ``` pip install torch accelerate torchaudio datasets pip install --upgrade transformers ```` **Note**: In order to use MMS you need to have at least `transformers >= 4.30` installed. If the `4.30` version is not yet available [on PyPI](https://pypi.org/project/transformers/) make sure to install `transformers` from source: ``` pip install git+https://github.com/huggingface/transformers.git ``` Next, we load a couple of audio samples via `datasets`. Make sure that the audio data is sampled to 16000 kHz. 
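If your audio comes from local files rather than a `datasets` stream, you can resample it yourself before passing it to the processor. The following is a minimal sketch that is not part of the original example; it assumes `torchaudio` is installed, and the file path is a placeholder.

```py
# Hedged local-file sketch (not from the original card): bring a WAV file to the
# 16,000 Hz sampling rate that MMS expects, using torchaudio.
import torchaudio
import torchaudio.functional as AF

waveform, sample_rate = torchaudio.load("my_audio.wav")  # placeholder path
if sample_rate != 16_000:
    # resample from the file's native rate to 16 kHz
    waveform = AF.resample(waveform, orig_freq=sample_rate, new_freq=16_000)
local_sample = waveform[0].numpy()  # first channel as a 1-D array, like the samples below
```

The example below instead streams Common Voice samples with `datasets` and lets `Audio(sampling_rate=16000)` take care of the resampling.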
```py from datasets import load_dataset, Audio # English stream_data = load_dataset("mozilla-foundation/common_voice_13_0", "en", split="test", streaming=True) stream_data = stream_data.cast_column("audio", Audio(sampling_rate=16000)) en_sample = next(iter(stream_data))["audio"]["array"] # French stream_data = load_dataset("mozilla-foundation/common_voice_13_0", "fr", split="test", streaming=True) stream_data = stream_data.cast_column("audio", Audio(sampling_rate=16000)) fr_sample = next(iter(stream_data))["audio"]["array"] ``` Next, we load the model and processor ```py from transformers import Wav2Vec2ForCTC, AutoProcessor import torch model_id = "facebook/mms-1b-all" processor = AutoProcessor.from_pretrained(model_id) model = Wav2Vec2ForCTC.from_pretrained(model_id) ``` Now we process the audio data, pass the processed audio data to the model and transcribe the model output, just like we usually do for Wav2Vec2 models such as [facebook/wav2vec2-base-960h](https://huggingface.co/facebook/wav2vec2-base-960h) ```py inputs = processor(en_sample, sampling_rate=16_000, return_tensors="pt") with torch.no_grad(): outputs = model(**inputs).logits ids = torch.argmax(outputs, dim=-1)[0] transcription = processor.decode(ids) # 'joe keton disapproved of films and buster also had reservations about the media' ``` We can now keep the same model in memory and simply switch out the language adapters by calling the convenient [`load_adapter()`]() function for the model and [`set_target_lang()`]() for the tokenizer. We pass the target language as an input: "fra" for French. ```py processor.tokenizer.set_target_lang("fra") model.load_adapter("fra") inputs = processor(fr_sample, sampling_rate=16_000, return_tensors="pt") with torch.no_grad(): outputs = model(**inputs).logits ids = torch.argmax(outputs, dim=-1)[0] transcription = processor.decode(ids) # "ce dernier est volé tout au long de l'histoire romaine" ``` In the same way, the language can be switched out for all other supported languages. Please have a look at: ```py processor.tokenizer.vocab.keys() ``` For more details, please have a look at [the official docs](https://huggingface.co/docs/transformers/main/en/model_doc/mms). ## Supported Languages This model supports 1162 languages. Click on the following to toggle all supported languages of this checkpoint in [ISO 639-3 code](https://en.wikipedia.org/wiki/ISO_639-3). You can find more details about the languages and their ISO 639-3 codes in the [MMS Language Coverage Overview](https://dl.fbaipublicfiles.com/mms/misc/language_coverage_mms.html).
<details> <summary>Click to toggle</summary> - abi - abk - abp - aca - acd - ace - acf - ach - acn - acr - acu - ade - adh - adj - adx - aeu - afr - agd - agg - agn - agr - agu - agx - aha - ahk - aia - aka - akb - ake - akp - alj - alp - alt - alz - ame - amf - amh - ami - amk - ann - any - aoz - apb - apr - ara - arl - asa - asg - asm - ast - ata - atb - atg - ati - atq - ava - avn - avu - awa - awb - ayo - ayr - ayz - azb - azg - azj-script_cyrillic - azj-script_latin - azz - bak - bam - ban - bao - bas - bav - bba - bbb - bbc - bbo - bcc-script_arabic - bcc-script_latin - bcl - bcw - bdg - bdh - bdq - bdu - bdv - beh - bel - bem - ben - bep - bex - bfa - bfo - bfy - bfz - bgc - bgq - bgr - bgt - bgw - bha - bht - bhz - bib - bim - bis - biv - bjr - bjv - bjw - bjz - bkd - bkv - blh - blt - blx - blz - bmq - bmr - bmu - bmv - bng - bno - bnp - boa - bod - boj - bom - bor - bos - bov - box - bpr - bps - bqc - bqi - bqj - bqp - bre - bru - bsc - bsq - bss - btd - bts - btt - btx - bud - bul - bus - bvc - bvz - bwq - bwu - byr - bzh - bzi - bzj - caa - cab - cac-dialect_sanmateoixtatan - cac-dialect_sansebastiancoatan - cak-dialect_central - cak-dialect_santamariadejesus - cak-dialect_santodomingoxenacoj - cak-dialect_southcentral - cak-dialect_western - cak-dialect_yepocapa - cap - car - cas - cat - cax - cbc - cbi - cbr - cbs - cbt - cbu - cbv - cce - cco - cdj - ceb - ceg - cek - ces - cfm - cgc - che - chf - chv - chz - cjo - cjp - cjs - ckb - cko - ckt - cla - cle - cly - cme - cmn-script_simplified - cmo-script_khmer - cmo-script_latin - cmr - cnh - cni - cnl - cnt - coe - cof - cok - con - cot - cou - cpa - cpb - cpu - crh - crk-script_latin - crk-script_syllabics - crn - crq - crs - crt - csk - cso - ctd - ctg - cto - ctu - cuc - cui - cuk - cul - cwa - cwe - cwt - cya - cym - daa - dah - dan - dar - dbj - dbq - ddn - ded - des - deu - dga - dgi - dgk - dgo - dgr - dhi - did - dig - dik - dip - div - djk - dnj-dialect_blowowest - dnj-dialect_gweetaawueast - dnt - dnw - dop - dos - dsh - dso - dtp - dts - dug - dwr - dyi - dyo - dyu - dzo - eip - eka - ell - emp - enb - eng - enx - epo - ese - ess - est - eus - evn - ewe - eza - fal - fao - far - fas - fij - fin - flr - fmu - fon - fra - frd - fry - ful - gag-script_cyrillic - gag-script_latin - gai - gam - gau - gbi - gbk - gbm - gbo - gde - geb - gej - gil - gjn - gkn - gld - gle - glg - glk - gmv - gna - gnd - gng - gof-script_latin - gog - gor - gqr - grc - gri - grn - grt - gso - gub - guc - gud - guh - guj - guk - gum - guo - guq - guu - gux - gvc - gvl - gwi - gwr - gym - gyr - had - hag - hak - hap - hat - hau - hay - heb - heh - hif - hig - hil - hin - hlb - hlt - hne - hnn - hns - hoc - hoy - hrv - hsb - hto - hub - hui - hun - hus-dialect_centralveracruz - hus-dialect_westernpotosino - huu - huv - hvn - hwc - hye - hyw - iba - ibo - icr - idd - ifa - ifb - ife - ifk - ifu - ify - ign - ikk - ilb - ilo - imo - ina - inb - ind - iou - ipi - iqw - iri - irk - isl - ita - itl - itv - ixl-dialect_sangasparchajul - ixl-dialect_sanjuancotzal - ixl-dialect_santamarianebaj - izr - izz - jac - jam - jav - jbu - jen - jic - jiv - jmc - jmd - jpn - jun - juy - jvn - kaa - kab - kac - kak - kam - kan - kao - kaq - kat - kay - kaz - kbo - kbp - kbq - kbr - kby - kca - kcg - kdc - kde - kdh - kdi - kdj - kdl - kdn - kdt - kea - kek - ken - keo - ker - key - kez - kfb - kff-script_telugu - kfw - kfx - khg - khm - khq - kia - kij - kik - kin - kir - kjb - kje - kjg - kjh - kki - kkj - kle - klu - klv - klw - kma - kmd - kml - 
kmr-script_arabic - kmr-script_cyrillic - kmr-script_latin - kmu - knb - kne - knf - knj - knk - kno - kog - kor - kpq - kps - kpv - kpy - kpz - kqe - kqp - kqr - kqy - krc - kri - krj - krl - krr - krs - kru - ksb - ksr - kss - ktb - ktj - kub - kue - kum - kus - kvn - kvw - kwd - kwf - kwi - kxc - kxf - kxm - kxv - kyb - kyc - kyf - kyg - kyo - kyq - kyu - kyz - kzf - lac - laj - lam - lao - las - lat - lav - law - lbj - lbw - lcp - lee - lef - lem - lew - lex - lgg - lgl - lhu - lia - lid - lif - lin - lip - lis - lit - lje - ljp - llg - lln - lme - lnd - lns - lob - lok - lom - lon - loq - lsi - lsm - ltz - luc - lug - luo - lwo - lww - lzz - maa-dialect_sanantonio - maa-dialect_sanjeronimo - mad - mag - mah - mai - maj - mak - mal - mam-dialect_central - mam-dialect_northern - mam-dialect_southern - mam-dialect_western - maq - mar - maw - maz - mbb - mbc - mbh - mbj - mbt - mbu - mbz - mca - mcb - mcd - mco - mcp - mcq - mcu - mda - mdf - mdv - mdy - med - mee - mej - men - meq - met - mev - mfe - mfh - mfi - mfk - mfq - mfy - mfz - mgd - mge - mgh - mgo - mhi - mhr - mhu - mhx - mhy - mib - mie - mif - mih - mil - mim - min - mio - mip - miq - mit - miy - miz - mjl - mjv - mkd - mkl - mkn - mlg - mlt - mmg - mnb - mnf - mnk - mnw - mnx - moa - mog - mon - mop - mor - mos - mox - moz - mpg - mpm - mpp - mpx - mqb - mqf - mqj - mqn - mri - mrw - msy - mtd - mtj - mto - muh - mup - mur - muv - muy - mvp - mwq - mwv - mxb - mxq - mxt - mxv - mya - myb - myk - myl - myv - myx - myy - mza - mzi - mzj - mzk - mzm - mzw - nab - nag - nan - nas - naw - nca - nch - ncj - ncl - ncu - ndj - ndp - ndv - ndy - ndz - neb - new - nfa - nfr - nga - ngl - ngp - ngu - nhe - nhi - nhu - nhw - nhx - nhy - nia - nij - nim - nin - nko - nlc - nld - nlg - nlk - nmz - nnb - nno - nnq - nnw - noa - nob - nod - nog - not - npi - npl - npy - nso - nst - nsu - ntm - ntr - nuj - nus - nuz - nwb - nxq - nya - nyf - nyn - nyo - nyy - nzi - obo - oci - ojb-script_latin - ojb-script_syllabics - oku - old - omw - onb - ood - orm - ory - oss - ote - otq - ozm - pab - pad - pag - pam - pan - pao - pap - pau - pbb - pbc - pbi - pce - pcm - peg - pez - pib - pil - pir - pis - pjt - pkb - pls - plw - pmf - pny - poh-dialect_eastern - poh-dialect_western - poi - pol - por - poy - ppk - pps - prf - prk - prt - pse - pss - ptu - pui - pus - pwg - pww - pxm - qub - quc-dialect_central - quc-dialect_east - quc-dialect_north - quf - quh - qul - quw - quy - quz - qvc - qve - qvh - qvm - qvn - qvo - qvs - qvw - qvz - qwh - qxh - qxl - qxn - qxo - qxr - rah - rai - rap - rav - raw - rej - rel - rgu - rhg - rif-script_arabic - rif-script_latin - ril - rim - rjs - rkt - rmc-script_cyrillic - rmc-script_latin - rmo - rmy-script_cyrillic - rmy-script_latin - rng - rnl - roh-dialect_sursilv - roh-dialect_vallader - rol - ron - rop - rro - rub - ruf - rug - run - rus - sab - sag - sah - saj - saq - sas - sat - sba - sbd - sbl - sbp - sch - sck - sda - sea - seh - ses - sey - sgb - sgj - sgw - shi - shk - shn - sho - shp - sid - sig - sil - sja - sjm - sld - slk - slu - slv - sml - smo - sna - snd - sne - snn - snp - snw - som - soy - spa - spp - spy - sqi - sri - srm - srn - srp-script_cyrillic - srp-script_latin - srx - stn - stp - suc - suk - sun - sur - sus - suv - suz - swe - swh - sxb - sxn - sya - syl - sza - tac - taj - tam - tao - tap - taq - tat - tav - tbc - tbg - tbk - tbl - tby - tbz - tca - tcc - tcs - tcz - tdj - ted - tee - tel - tem - teo - ter - tes - tew - tex - tfr - tgj - tgk - tgl - tgo - tgp - tha - thk - thl - tih 
- tik - tir - tkr - tlb - tlj - tly - tmc - tmf - tna - tng - tnk - tnn - tnp - tnr - tnt - tob - toc - toh - tom - tos - tpi - tpm - tpp - tpt - trc - tri - trn - trs - tso - tsz - ttc - tte - ttq-script_tifinagh - tue - tuf - tuk-script_arabic - tuk-script_latin - tuo - tur - tvw - twb - twe - twu - txa - txq - txu - tye - tzh-dialect_bachajon - tzh-dialect_tenejapa - tzj-dialect_eastern - tzj-dialect_western - tzo-dialect_chamula - tzo-dialect_chenalho - ubl - ubu - udm - udu - uig-script_arabic - uig-script_cyrillic - ukr - umb - unr - upv - ura - urb - urd-script_arabic - urd-script_devanagari - urd-script_latin - urk - urt - ury - usp - uzb-script_cyrillic - uzb-script_latin - vag - vid - vie - vif - vmw - vmy - vot - vun - vut - wal-script_ethiopic - wal-script_latin - wap - war - waw - way - wba - wlo - wlx - wmw - wob - wol - wsg - wwa - xal - xdy - xed - xer - xho - xmm - xnj - xnr - xog - xon - xrb - xsb - xsm - xsr - xsu - xta - xtd - xte - xtm - xtn - xua - xuo - yaa - yad - yal - yam - yao - yas - yat - yaz - yba - ybb - ycl - ycn - yea - yka - yli - yor - yre - yua - yue-script_traditional - yuz - yva - zaa - zab - zac - zad - zae - zai - zam - zao - zaq - zar - zas - zav - zaw - zca - zga - zim - ziw - zlm - zmz - zne - zos - zpc - zpg - zpi - zpl - zpm - zpo - zpt - zpu - zpz - ztq - zty - zul - zyb - zyp - zza </details> ## Model details - **Developed by:** Vineel Pratap et al. - **Model type:** Multi-Lingual Automatic Speech Recognition model - **Language(s):** 1000+ languages, see [supported languages](#supported-languages) - **License:** CC-BY-NC 4.0 license - **Num parameters**: 1 billion - **Audio sampling rate**: 16,000 kHz - **Cite as:** @article{pratap2023mms, title={Scaling Speech Technology to 1,000+ Languages}, author={Vineel Pratap and Andros Tjandra and Bowen Shi and Paden Tomasello and Arun Babu and Sayani Kundu and Ali Elkahky and Zhaoheng Ni and Apoorv Vyas and Maryam Fazel-Zarandi and Alexei Baevski and Yossi Adi and Xiaohui Zhang and Wei-Ning Hsu and Alexis Conneau and Michael Auli}, journal={arXiv}, year={2023} } ## Additional Links - [Blog post](https://ai.facebook.com/blog/multilingual-model-speech-recognition/) - [Transformers documentation](https://huggingface.co/docs/transformers/main/en/model_doc/mms). - [Paper](https://arxiv.org/abs/2305.13516) - [GitHub Repository](https://github.com/facebookresearch/fairseq/tree/main/examples/mms#asr) - [Other **MMS** checkpoints](https://huggingface.co/models?other=mms) - MMS base checkpoints: - [facebook/mms-1b](https://huggingface.co/facebook/mms-1b) - [facebook/mms-300m](https://huggingface.co/facebook/mms-300m) - [Official Space](https://huggingface.co/spaces/facebook/MMS)
[ "CAS" ]
microsoft/Phi-3-mini-128k-instruct
microsoft
text-generation
[ "transformers", "safetensors", "phi3", "text-generation", "nlp", "code", "conversational", "custom_code", "en", "license:mit", "autotrain_compatible", "text-generation-inference", "endpoints_compatible", "region:us" ]
"2024-04-22T16:26:23Z"
2025-03-02T22:28:37+00:00
128,236
1,637
--- language: - en license: mit license_link: https://huggingface.co/microsoft/Phi-3-mini-128k-instruct/resolve/main/LICENSE pipeline_tag: text-generation tags: - nlp - code widget: - messages: - role: user content: Can you provide ways to eat combinations of bananas and dragonfruits? --- 🎉**Phi-4**: [[multimodal-instruct](https://huggingface.co/microsoft/Phi-4-multimodal-instruct) | [onnx](https://huggingface.co/microsoft/Phi-4-multimodal-instruct-onnx)]; [[mini-instruct](https://huggingface.co/microsoft/Phi-4-mini-instruct) | [onnx](https://huggingface.co/microsoft/Phi-4-mini-instruct-onnx)] ## Model Summary The Phi-3-Mini-128K-Instruct is a 3.8 billion-parameter, lightweight, state-of-the-art open model trained using the Phi-3 datasets. This dataset includes both synthetic data and filtered publicly available website data, with an emphasis on high-quality and reasoning-dense properties. The model belongs to the Phi-3 family with the Mini version in two variants [4K](https://huggingface.co/microsoft/Phi-3-mini-4k-instruct) and [128K](https://huggingface.co/microsoft/Phi-3-mini-128k-instruct) which is the context length (in tokens) that it can support. After initial training, the model underwent a post-training process that involved supervised fine-tuning and direct preference optimization to enhance its ability to follow instructions and adhere to safety measures. When evaluated against benchmarks that test common sense, language understanding, mathematics, coding, long-term context, and logical reasoning, the Phi-3 Mini-128K-Instruct demonstrated robust and state-of-the-art performance among models with fewer than 13 billion parameters. Resources and Technical Documentation: 🏡 [Phi-3 Portal](https://azure.microsoft.com/en-us/products/phi-3) <br> 📰 [Phi-3 Microsoft Blog](https://aka.ms/Phi-3Build2024) <br> 📖 [Phi-3 Technical Report](https://aka.ms/phi3-tech-report) <br> 🛠️ [Phi-3 on Azure AI Studio](https://aka.ms/phi3-azure-ai) <br> 👩‍🍳 [Phi-3 Cookbook](https://github.com/microsoft/Phi-3CookBook) <br> 🖥️ [Try It](https://aka.ms/try-phi3) | | Short Context | Long Context | | :- | :- | :- | | Mini | 4K [[HF]](https://huggingface.co/microsoft/Phi-3-mini-4k-instruct) ; [[ONNX]](https://huggingface.co/microsoft/Phi-3-mini-4k-instruct-onnx) ; [[GGUF]](https://huggingface.co/microsoft/Phi-3-mini-4k-instruct-gguf) | 128K [[HF]](https://huggingface.co/microsoft/Phi-3-mini-128k-instruct) ; [[ONNX]](https://huggingface.co/microsoft/Phi-3-mini-128k-instruct-onnx)| | Small | 8K [[HF]](https://huggingface.co/microsoft/Phi-3-small-8k-instruct) ; [[ONNX]](https://huggingface.co/microsoft/Phi-3-small-8k-instruct-onnx-cuda) | 128K [[HF]](https://huggingface.co/microsoft/Phi-3-small-128k-instruct) ; [[ONNX]](https://huggingface.co/microsoft/Phi-3-small-128k-instruct-onnx-cuda)| | Medium | 4K [[HF]](https://huggingface.co/microsoft/Phi-3-medium-4k-instruct) ; [[ONNX]](https://huggingface.co/microsoft/Phi-3-medium-4k-instruct-onnx-cuda) | 128K [[HF]](https://huggingface.co/microsoft/Phi-3-medium-128k-instruct) ; [[ONNX]](https://huggingface.co/microsoft/Phi-3-medium-128k-instruct-onnx-cuda)| | Vision | | 128K [[HF]](https://huggingface.co/microsoft/Phi-3-vision-128k-instruct) ; [[ONNX]](https://huggingface.co/microsoft/Phi-3-vision-128k-instruct-onnx-cuda)| ## Intended Uses **Primary use cases** The model is intended for commercial and research use in English. 
The model is well suited for applications which require: 1) Memory/compute constrained environments 2) Latency bound scenarios 3) Strong reasoning (especially code, math and logic) Our model is designed to accelerate research on language and multimodal models, for use as a building block for generative AI powered features. **Use case considerations** Our models are not specifically designed or evaluated for all downstream purposes. Developers should consider common limitations of language models as they select use cases, and evaluate and mitigate for accuracy, safety, and fairness before using within a specific downstream use case, particularly for high risk scenarios. Developers should be aware of and adhere to applicable laws or regulations (including privacy, trade compliance laws, etc.) that are relevant to their use case. Nothing contained in this Model Card should be interpreted as or deemed a restriction or modification to the license the model is released under. ## Release Notes This is an update over the original instruction-tuned Phi-3-mini release based on valuable customer feedback. The model used additional post-training data leading to substantial gains on long-context understanding, instruction following, and structured output. We also improved multi-turn conversation quality, added explicit support for the <|system|> tag, and significantly improved reasoning capability. We believe most use cases will benefit from this release, but we encourage users to test in their particular AI applications. We appreciate the enthusiastic adoption of the Phi-3 model family, and continue to welcome all feedback from the community. The tables below highlight the improvements in instruction following, structured output, reasoning, and long-context understanding of the new release on our public and internal benchmark datasets. | Benchmarks | Original | June 2024 Update | | :- | :- | :- | | Instruction Extra Hard | 5.7 | 5.9 | | Instruction Hard | 5.0 | 5.2 | | JSON Structure Output | 1.9 | 60.1 | | XML Structure Output | 47.8 | 52.9 | | GPQA | 25.9 | 29.7 | | MMLU | 68.1 | 69.7 | | **Average** | **25.7** | **37.3** | RULER: a retrieval-based benchmark for long context understanding | Model | 4K | 8K | 16K | 32K | 64K | 128K | Average | | :-------------------| :------| :------| :------| :------| :------| :------| :---------| | Original | 86.7 | 78.1 | 75.6 | 70.3 | 58.9 | 43.3 | **68.8** | | June 2024 Update | 92.4 | 91.1 | 90.8 | 87.9 | 79.8 | 65.6 | **84.6** | RepoQA: a benchmark for long context code understanding | Model | Python | C++ | Rust | Java | TypeScript | Average | | :-------------------| :--------| :-----| :------| :------| :------------| :---------| | Original | 27 | 29 | 40 | 33 | 33 | **32.4** | | June 2024 Update | 85 | 63 | 72 | 93 | 72 | **77** | Note: if users would like to check out the previous version, use the git commit id **bb5bf1e4001277a606e11debca0ef80323e5f824**. For the model conversion, e.g. GGUF and other formats, we invite the community to experiment with various approaches and share their valuable feedback. Let's innovate together! ## How to Use Phi-3 Mini-128K-Instruct has been integrated into the development version (4.41.3) of `transformers`. Until the official version is released through `pip`, ensure that you are doing one of the following: * When loading the model, ensure that `trust_remote_code=True` is passed as an argument of the `from_pretrained()` function.
* Update your local `transformers` to the development version: `pip uninstall -y transformers && pip install git+https://github.com/huggingface/transformers`. The previous command is an alternative to cloning and installing from the source. The current `transformers` version can be verified with: `pip list | grep transformers`. Examples of required packages: ``` flash_attn==2.5.8 torch==2.3.1 accelerate==0.31.0 transformers==4.41.2 ``` Phi-3 Mini-128K-Instruct is also available in [Azure AI Studio](https://aka.ms/try-phi3) ### Tokenizer Phi-3 Mini-128K-Instruct supports a vocabulary size of up to `32064` tokens. The [tokenizer files](https://huggingface.co/microsoft/Phi-3-mini-128k-instruct/blob/main/added_tokens.json) already provide placeholder tokens that can be used for downstream fine-tuning, but they can also be extended up to the model's vocabulary size. ### Chat Format Given the nature of the training data, the Phi-3 Mini-128K-Instruct model is best suited for prompts using the chat format as follows. You can provide the prompt as a question with a generic template as follows: ```markdown <|system|> You are a helpful assistant.<|end|> <|user|> Question?<|end|> <|assistant|> ``` For example: ```markdown <|system|> You are a helpful assistant.<|end|> <|user|> How to explain Internet for a medieval knight?<|end|> <|assistant|> ``` where the model generates the text after `<|assistant|>`. In the case of a few-shot prompt, the prompt can be formatted as follows: ```markdown <|system|> You are a helpful travel assistant.<|end|> <|user|> I am going to Paris, what should I see?<|end|> <|assistant|> Paris, the capital of France, is known for its stunning architecture, art museums, historical landmarks, and romantic atmosphere. Here are some of the top attractions to see in Paris:\n\n1. The Eiffel Tower: The iconic Eiffel Tower is one of the most recognizable landmarks in the world and offers breathtaking views of the city.\n2. The Louvre Museum: The Louvre is one of the world's largest and most famous museums, housing an impressive collection of art and artifacts, including the Mona Lisa.\n3. Notre-Dame Cathedral: This beautiful cathedral is one of the most famous landmarks in Paris and is known for its Gothic architecture and stunning stained glass windows.\n\nThese are just a few of the many attractions that Paris has to offer. With so much to see and do, it's no wonder that Paris is one of the most popular tourist destinations in the world."<|end|> <|user|> What is so great about #1?<|end|> <|assistant|> ``` ### Sample inference code This code snippet shows how to quickly get started with running the model on a GPU: ```python import torch from transformers import AutoModelForCausalLM, AutoTokenizer, pipeline torch.random.manual_seed(0) model = AutoModelForCausalLM.from_pretrained( "microsoft/Phi-3-mini-128k-instruct", device_map="cuda", torch_dtype="auto", trust_remote_code=True, ) tokenizer = AutoTokenizer.from_pretrained("microsoft/Phi-3-mini-128k-instruct") messages = [ {"role": "system", "content": "You are a helpful AI assistant."}, {"role": "user", "content": "Can you provide ways to eat combinations of bananas and dragonfruits?"}, {"role": "assistant", "content": "Sure! Here are some ways to eat bananas and dragonfruits together: 1. Banana and dragonfruit smoothie: Blend bananas and dragonfruits together with some milk and honey. 2.
Banana and dragonfruit salad: Mix sliced bananas and dragonfruits together with some lemon juice and honey."}, {"role": "user", "content": "What about solving an 2x + 3 = 7 equation?"}, ] pipe = pipeline( "text-generation", model=model, tokenizer=tokenizer, ) generation_args = { "max_new_tokens": 500, "return_full_text": False, "temperature": 0.0, "do_sample": False, } output = pipe(messages, **generation_args) print(output[0]['generated_text']) ``` Notes: If you want to use flash attention, call _AutoModelForCausalLM.from_pretrained()_ with _attn_implementation="flash_attention_2"_ ## Responsible AI Considerations Like other language models, the Phi series models can potentially behave in ways that are unfair, unreliable, or offensive. Some of the limiting behaviors to be aware of include: + Quality of Service: the Phi models are trained primarily on English text. Languages other than English will experience worse performance. English language varieties with less representation in the training data might experience worse performance than standard American English. + Representation of Harms & Perpetuation of Stereotypes: These models can over- or under-represent groups of people, erase representation of some groups, or reinforce demeaning or negative stereotypes. Despite safety post-training, these limitations may still be present due to differing levels of representation of different groups or prevalence of examples of negative stereotypes in training data that reflect real-world patterns and societal biases. + Inappropriate or Offensive Content: these models may produce other types of inappropriate or offensive content, which may make it inappropriate to deploy for sensitive contexts without additional mitigations that are specific to the use case. + Information Reliability: Language models can generate nonsensical content or fabricate content that might sound reasonable but is inaccurate or outdated. + Limited Scope for Code: Majority of Phi-3 training data is based in Python and use common packages such as "typing, math, random, collections, datetime, itertools". If the model generates Python scripts that utilize other packages or scripts in other languages, we strongly recommend users manually verify all API uses. Developers should apply responsible AI best practices and are responsible for ensuring that a specific use case complies with relevant laws and regulations (e.g. privacy, trade, etc.). Important areas for consideration include: + Allocation: Models may not be suitable for scenarios that could have consequential impact on legal status or the allocation of resources or life opportunities (ex: housing, employment, credit, etc.) without further assessments and additional debiasing techniques. + High-Risk Scenarios: Developers should assess suitability of using models in high-risk scenarios where unfair, unreliable or offensive outputs might be extremely costly or lead to harm. This includes providing advice in sensitive or expert domains where accuracy and reliability are critical (ex: legal or health advice). Additional safeguards should be implemented at the application level according to the deployment context. + Misinformation: Models may produce inaccurate information. Developers should follow transparency best practices and inform end-users they are interacting with an AI system. 
## Responsible AI Considerations

Like other language models, the Phi series models can potentially behave in ways that are unfair, unreliable, or offensive. Some of the limiting behaviors to be aware of include:

+ Quality of Service: The Phi models are trained primarily on English text. Languages other than English will experience worse performance. English language varieties with less representation in the training data might experience worse performance than standard American English.
+ Representation of Harms & Perpetuation of Stereotypes: These models can over- or under-represent groups of people, erase representation of some groups, or reinforce demeaning or negative stereotypes. Despite safety post-training, these limitations may still be present due to differing levels of representation of different groups or prevalence of examples of negative stereotypes in training data that reflect real-world patterns and societal biases.
+ Inappropriate or Offensive Content: These models may produce other types of inappropriate or offensive content, which may make it inappropriate to deploy for sensitive contexts without additional mitigations that are specific to the use case.
+ Information Reliability: Language models can generate nonsensical content or fabricate content that might sound reasonable but is inaccurate or outdated.
+ Limited Scope for Code: The majority of Phi-3 training data is based on Python and uses common packages such as "typing, math, random, collections, datetime, itertools". If the model generates Python scripts that utilize other packages or scripts in other languages, we strongly recommend users manually verify all API uses.

Developers should apply responsible AI best practices and are responsible for ensuring that a specific use case complies with relevant laws and regulations (e.g. privacy, trade, etc.). Important areas for consideration include:

+ Allocation: Models may not be suitable for scenarios that could have consequential impact on legal status or the allocation of resources or life opportunities (ex: housing, employment, credit, etc.) without further assessments and additional debiasing techniques.
+ High-Risk Scenarios: Developers should assess the suitability of using models in high-risk scenarios where unfair, unreliable or offensive outputs might be extremely costly or lead to harm. This includes providing advice in sensitive or expert domains where accuracy and reliability are critical (ex: legal or health advice). Additional safeguards should be implemented at the application level according to the deployment context.
+ Misinformation: Models may produce inaccurate information. Developers should follow transparency best practices and inform end-users they are interacting with an AI system. At the application level, developers can build feedback mechanisms and pipelines to ground responses in use-case-specific, contextual information, a technique known as Retrieval Augmented Generation (RAG).
+ Generation of Harmful Content: Developers should assess outputs for their context and use available safety classifiers or custom solutions appropriate for their use case.
+ Misuse: Other forms of misuse such as fraud, spam, or malware production may be possible, and developers should ensure that their applications do not violate applicable laws and regulations.

## Training

### Model

* Architecture: Phi-3 Mini-128K-Instruct has 3.8B parameters and is a dense decoder-only Transformer model. The model is fine-tuned with supervised fine-tuning (SFT) and Direct Preference Optimization (DPO) to ensure alignment with human preferences and safety guidelines.
* Inputs: Text. It is best suited for prompts using the chat format.
* Context length: 128K tokens
* GPUs: 512 H100-80G
* Training time: 10 days
* Training data: 4.9T tokens
* Outputs: Generated text in response to the input
* Dates: Our models were trained between May and June 2024
* Status: This is a static model trained on an offline dataset with a cutoff date of October 2023. Future versions of the tuned models may be released as we improve models.
* Release dates: June, 2024.

### Datasets

Our training data includes a wide variety of sources, totaling 4.9 trillion tokens, and is a combination of 1) publicly available documents filtered rigorously for quality, selected high-quality educational data, and code; 2) newly created synthetic, “textbook-like” data for the purpose of teaching math, coding, common sense reasoning, general knowledge of the world (science, daily activities, theory of mind, etc.); 3) high-quality chat-format supervised data covering various topics to reflect human preferences on different aspects such as instruction-following, truthfulness, honesty and helpfulness.

We focus on the quality of data that could potentially improve the reasoning ability of the model, and we filter the publicly available documents to contain the correct level of knowledge. As an example, the result of a game in the Premier League on a particular day might be good training data for frontier models, but we need to remove such information to leave more model capacity for reasoning in small models. More details about data can be found in the [Phi-3 Technical Report](https://aka.ms/phi3-tech-report).

### Fine-tuning

A basic example of multi-GPU supervised fine-tuning (SFT) with TRL and Accelerate modules is provided [here](https://huggingface.co/microsoft/Phi-3-mini-128k-instruct/resolve/main/sample_finetune.py); a condensed sketch is shown below.
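For orientation only, here is a minimal, hedged sketch of what such a TRL-based SFT run can look like. It is not a reproduction of `sample_finetune.py`: the dataset is a placeholder, the hyperparameters are illustrative, and exact `SFTConfig`/`SFTTrainer` argument names vary across `trl` versions, so check the version you have installed.

```python
import torch
from datasets import load_dataset
from transformers import AutoModelForCausalLM, AutoTokenizer
from trl import SFTConfig, SFTTrainer

model_id = "microsoft/Phi-3-mini-128k-instruct"
tokenizer = AutoTokenizer.from_pretrained(model_id)
model = AutoModelForCausalLM.from_pretrained(
    model_id, torch_dtype=torch.bfloat16, trust_remote_code=True
)

# Placeholder dataset: any chat-style dataset with a "messages" column works,
# as long as it is rendered to plain text with the model's chat template.
raw = load_dataset("HuggingFaceH4/ultrachat_200k", split="train_sft[:1%]")

def to_text(example):
    return {"text": tokenizer.apply_chat_template(example["messages"], tokenize=False)}

train_ds = raw.map(to_text, remove_columns=raw.column_names)

args = SFTConfig(
    output_dir="phi3-mini-sft",          # illustrative values throughout
    dataset_text_field="text",
    max_seq_length=2048,
    per_device_train_batch_size=1,
    gradient_accumulation_steps=8,
    learning_rate=2e-5,
    num_train_epochs=1,
    logging_steps=10,
    bf16=True,
)

trainer = SFTTrainer(model=model, args=args, train_dataset=train_ds, tokenizer=tokenizer)
trainer.train()
```

For multi-GPU runs, a script like this is typically launched with Accelerate, e.g. `accelerate config` followed by `accelerate launch sft_phi3.py` (the script name here is arbitrary).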
## Benchmarks

We report the results under the completion format for Phi-3-Mini-128K-Instruct on standard open-source benchmarks measuring the model's reasoning ability (both common sense reasoning and logical reasoning). We compare to Mistral-7b-v0.1, Mixtral-8x7b, Gemma 7B, Llama-3-8B-Instruct, and GPT-3.5.

All the reported numbers are produced with the exact same pipeline to ensure that the numbers are comparable. These numbers might differ from other published numbers due to slightly different choices in the evaluation.

As is now standard, we use few-shot prompts to evaluate the models, at temperature 0. The prompts and number of shots are part of a Microsoft internal tool to evaluate language models, and in particular we did no optimization to the pipeline for Phi-3. More specifically, we do not change prompts, pick different few-shot examples, change the prompt format, or do any other form of optimization for the model. The number of k-shot examples is listed per benchmark.

| Category | Benchmark | Phi-3-Mini-128K-Ins | Gemma-7B | Mistral-7B | Mixtral-8x7B | Llama-3-8B-Ins | GPT3.5-Turbo-1106 |
| :----------| :-----------| :---------------------| :----------| :------------| :--------------| :----------------| :-------------------|
| Popular aggregated benchmark | AGI Eval <br>5-shot| 39.5 | 42.1 | 35.1 | 45.2 | 42 | 48.4 |
| | MMLU <br>5-shot | 69.7 | 63.6 | 61.7 | 70.5 | 66.5 | 71.4 |
| | BigBench Hard <br>3-shot | 72.1 | 59.6 | 57.3 | 69.7 | 51.5 | 68.3 |
| Language Understanding | ANLI <br>7-shot | 52.3 | 48.7 | 47.1 | 55.2 | 57.3 | 58.1 |
| | HellaSwag <br>5-shot | 70.5 | 49.8 | 58.5 | 70.4 | 71.1 | 78.8 |
| Reasoning | ARC Challenge <br>10-shot | 85.5 | 78.3 | 78.6 | 87.3 | 82.8 | 87.4 |
| | BoolQ <br>0-shot | 77.1 | 66 | 72.2 | 76.6 | 80.9 | 79.1 |
| | MedQA <br>2-shot | 56.4 | 49.6 | 50 | 62.2 | 60.5 | 63.4 |
| | OpenBookQA <br>10-shot | 78.8 | 78.6 | 79.8 | 85.8 | 82.6 | 86 |
| | PIQA <br>5-shot | 80.1 | 78.1 | 77.7 | 86 | 75.7 | 86.6 |
| | GPQA <br>0-shot | 29.7 | 2.9 | 15 | 6.9 | 32.4 | 29.9 |
| | Social IQA <br>5-shot | 74.7 | 65.5 | 74.6 | 75.9 | 73.9 | 68.3 |
| | TruthfulQA (MC2) <br>10-shot | 64.8 | 52.1 | 53 | 60.1 | 63.2 | 67.7 |
| | WinoGrande <br>5-shot | 71.0 | 55.6 | 54.2 | 62 | 65 | 68.8 |
| Factual Knowledge | TriviaQA <br>5-shot | 57.8 | 72.3 | 75.2 | 82.2 | 67.7 | 85.8 |
| Math | GSM8K CoT <br>8-shot | 85.3 | 59.8 | 46.4 | 64.7 | 77.4 | 78.1 |
| Code Generation | HumanEval <br>0-shot | 60.4 | 34.1 | 28.0 | 37.8 | 60.4 | 62.2 |
| | MBPP <br>3-shot | 70.0 | 51.5 | 50.8 | 60.2 | 67.7 | 77.8 |
| **Average** | | **66.4** | **56.0** | **56.4** | **64.4** | **65.5** | **70.3** |

**Long Context**: Phi-3 Mini-128K-Instruct supports a 128K context length and is therefore capable of several long-context tasks, including long document/meeting summarization and long-document QA.

| Benchmark | Phi-3 Mini-128K-Instruct | Mistral-7B | Mixtral 8x7B | LLaMA-3-8B-Instruct |
| :---------------| :--------------------------|:------------|:--------------|:---------------------|
| GovReport | 25.3 | 4.9 | 20.3 | 10.3 |
| QMSum | 21.9 | 15.5 | 20.6 | 2.9 |
| Qasper | 41.6 | 23.5 | 26.6 | 8.1 |
| SQuALITY | 24.1 | 14.7 | 16.2 | 25 |
| SummScreenFD | 16.8 | 9.3 | 11.3 | 5.1 |
| **Average** | **25.9** | **13.6** | **19.0** | **10.3** |

We take a closer look at different categories across 100 public benchmark datasets in the table below:

| Category | Phi-3-Mini-128K-Instruct | Gemma-7B | Mistral-7B | Mixtral 8x7B | Llama-3-8B-Instruct | GPT-3.5-Turbo |
|:----------|:--------------------------|:----------|:------------|:--------------|:---------------------|:---------------|
| Popular aggregated benchmark | 60.6 | 59.4 | 56.5 | 66.2 | 59.9 | 67.0 |
| Reasoning | 69.4 | 60.3 | 62.8 | 68.1 | 69.6 | 71.7 |
| Language understanding | 57.5 | 57.6 | 52.5 | 66.1 | 63.2 | 67.7 |
| Code generation | 61.0 | 45.6 | 42.9 | 52.7 | 56.4 | 70.4 |
| Math | 51.6 | 35.8 | 25.4 | 40.3 | 41.1 | 52.8 |
| Factual knowledge | 35.8 | 46.7 | 49.8 | 58.6 | 43.1 | 63.4 |
| Multilingual | 56.4 | 66.5 | 57.4 | 66.7 | 66.6 | 71.0 |
| Robustness | 61.1 | 38.4 | 40.6 | 51.0 | 64.5 | 69.3 |

Overall, the model, with only 3.8B parameters, achieves a similar level of language understanding and reasoning ability as much larger models.
However, it is still fundamentally limited by its size for certain tasks. The model simply does not have the capacity to store too much world knowledge, which can be seen, for example, in its low performance on TriviaQA. We believe this weakness can be mitigated by augmenting Phi-3-Mini with a search engine.

## Cross Platform Support

[ONNX runtime](https://onnxruntime.ai/blogs/accelerating-phi-3) now supports Phi-3 mini models across platforms and hardware. Optimized Phi-3 models are also published here in ONNX format, to run with ONNX Runtime on CPU and GPU across devices, including server platforms, Windows, Linux and Mac desktops, and mobile CPUs, with the precision best suited to each of these targets. DirectML GPU acceleration is supported for Windows desktop GPUs (AMD, Intel, and NVIDIA). Along with DirectML, ONNX Runtime provides cross-platform support for Phi-3 mini across a range of devices (CPU, GPU, and mobile).

Here are some of the optimized configurations we have added:

1. ONNX models for int4 DML: quantized to int4 via AWQ
2. ONNX model for fp16 CUDA
3. ONNX model for int4 CUDA: quantized to int4 via RTN
4. ONNX model for int4 CPU and Mobile: quantized to int4 via RTN

## Software

* [PyTorch](https://github.com/pytorch/pytorch)
* [Transformers](https://github.com/huggingface/transformers)
* [Flash-Attention](https://github.com/HazyResearch/flash-attention)

## Hardware

Note that by default, the Phi-3 Mini-128K-Instruct model uses flash attention, which requires certain types of GPU hardware to run. We have tested on the following GPU types:

* NVIDIA A100
* NVIDIA A6000
* NVIDIA H100

If you want to run the model on:

* NVIDIA V100 or earlier generation GPUs: call `AutoModelForCausalLM.from_pretrained()` with `attn_implementation="eager"` (as illustrated in the note under the sample inference code)
* Optimized inference on GPU, CPU, and Mobile: use the **ONNX** models [128K](https://aka.ms/phi3-mini-128k-instruct-onnx)

## License

The model is licensed under the [MIT license](https://huggingface.co/microsoft/Phi-3-mini-128k/resolve/main/LICENSE).

## Trademarks

This project may contain trademarks or logos for projects, products, or services. Authorized use of Microsoft trademarks or logos is subject to and must follow [Microsoft’s Trademark & Brand Guidelines](https://www.microsoft.com/en-us/legal/intellectualproperty/trademarks). Use of Microsoft trademarks or logos in modified versions of this project must not cause confusion or imply Microsoft sponsorship. Any use of third-party trademarks or logos is subject to those third parties’ policies.
[ "MEDQA" ]
neuralmagic/bge-large-en-v1.5-quant
neuralmagic
feature-extraction
[ "transformers", "onnx", "bert", "feature-extraction", "sparse", "sparsity", "quantized", "embeddings", "int8", "mteb", "deepsparse", "en", "license:mit", "model-index", "text-embeddings-inference", "endpoints_compatible", "region:us" ]
"2023-10-03T15:50:50Z"
2023-11-13T17:05:07+00:00
126,932
21
--- language: - en license: mit tags: - sparse - sparsity - quantized - onnx - embeddings - int8 - mteb - deepsparse model-index: - name: bge-large-en-v1.5-quant results: - task: type: Classification dataset: name: MTEB AmazonCounterfactualClassification (en) type: mteb/amazon_counterfactual config: en split: test revision: e8379541af4e31359cca9fbcf4b00f2671dba205 metrics: - type: accuracy value: 75.53731343283583 - type: ap value: 38.30609312253564 - type: f1 value: 69.42802757893695 - task: type: STS dataset: name: MTEB BIOSSES type: mteb/biosses-sts config: default split: test revision: d3fb88f8f02e40887cd149695127462bbcf29b4a metrics: - type: cos_sim_pearson value: 89.27346145216443 - type: cos_sim_spearman value: 88.36526647458979 - type: euclidean_pearson value: 86.83053354694746 - type: euclidean_spearman value: 87.56223612880584 - type: manhattan_pearson value: 86.59250609226758 - type: manhattan_spearman value: 87.70681773644885 - task: type: STS dataset: name: MTEB SICK-R type: mteb/sickr-sts config: default split: test revision: a6ea5a8cab320b040a23452cc28066d9beae2cee metrics: - type: cos_sim_pearson value: 86.18998669716373 - type: cos_sim_spearman value: 82.06129973984048 - type: euclidean_pearson value: 83.65969509485801 - type: euclidean_spearman value: 81.91666612708826 - type: manhattan_pearson value: 83.6906794731384 - type: manhattan_spearman value: 81.91752705367436 - task: type: STS dataset: name: MTEB STS12 type: mteb/sts12-sts config: default split: test revision: a0d554a64d88156834ff5ae9920b964011b16384 metrics: - type: cos_sim_pearson value: 86.93407086985752 - type: cos_sim_spearman value: 78.82992283957066 - type: euclidean_pearson value: 83.39733473832982 - type: euclidean_spearman value: 78.86999229850214 - type: manhattan_pearson value: 83.39397058098533 - type: manhattan_spearman value: 78.85397971200753 - task: type: STS dataset: name: MTEB STS13 type: mteb/sts13-sts config: default split: test revision: 7e90230a92c190f1bf69ae9002b8cea547a64cca metrics: - type: cos_sim_pearson value: 87.2586009863056 - type: cos_sim_spearman value: 87.99415514558852 - type: euclidean_pearson value: 86.98993652364359 - type: euclidean_spearman value: 87.72725335668807 - type: manhattan_pearson value: 86.897205761048 - type: manhattan_spearman value: 87.65231103509018 - task: type: STS dataset: name: MTEB STS14 type: mteb/sts14-sts config: default split: test revision: 6031580fec1f6af667f0bd2da0a551cf4f0b2375 metrics: - type: cos_sim_pearson value: 85.41417660460755 - type: cos_sim_spearman value: 83.50291886604928 - type: euclidean_pearson value: 84.67758839660924 - type: euclidean_spearman value: 83.4368059512681 - type: manhattan_pearson value: 84.66027228213025 - type: manhattan_spearman value: 83.43472054456252 - task: type: STS dataset: name: MTEB STS15 type: mteb/sts15-sts config: default split: test revision: ae752c7c21bf194d8b67fd573edf7ae58183cbe3 metrics: - type: cos_sim_pearson value: 88.02513262365703 - type: cos_sim_spearman value: 89.00430907638267 - type: euclidean_pearson value: 88.16290361497319 - type: euclidean_spearman value: 88.6645154822661 - type: manhattan_pearson value: 88.15337528825458 - type: manhattan_spearman value: 88.66202950081507 - task: type: STS dataset: name: MTEB STS16 type: mteb/sts16-sts config: default split: test revision: 4d8694f8f0e0100860b497b999b3dbed754a0513 metrics: - type: cos_sim_pearson value: 85.10194022827035 - type: cos_sim_spearman value: 86.45367112223394 - type: euclidean_pearson value: 85.45292931769094 - type: 
euclidean_spearman value: 86.06607589083283 - type: manhattan_pearson value: 85.4111233047049 - type: manhattan_spearman value: 86.04379654118996 - task: type: STS dataset: name: MTEB STS17 (en-en) type: mteb/sts17-crosslingual-sts config: en-en split: test revision: af5e6fb845001ecf41f4c1e033ce921939a2a68d metrics: - type: cos_sim_pearson value: 89.86966589113663 - type: cos_sim_spearman value: 89.5617056243649 - type: euclidean_pearson value: 89.018495917952 - type: euclidean_spearman value: 88.387335721179 - type: manhattan_pearson value: 89.07568042943448 - type: manhattan_spearman value: 88.51733863475219 - task: type: STS dataset: name: MTEB STS22 (en) type: mteb/sts22-crosslingual-sts config: en split: test revision: 6d1ba47164174a496b7fa5d3569dae26a6813b80 metrics: - type: cos_sim_pearson value: 68.38465344518238 - type: cos_sim_spearman value: 68.15219488291783 - type: euclidean_pearson value: 68.99169681132668 - type: euclidean_spearman value: 68.01334641045888 - type: manhattan_pearson value: 68.84952679202642 - type: manhattan_spearman value: 67.85430179655137 - task: type: STS dataset: name: MTEB STSBenchmark type: mteb/stsbenchmark-sts config: default split: test revision: b0fddb56ed78048fa8b90373c8a3cfc37b684831 metrics: - type: cos_sim_pearson value: 86.60574360222778 - type: cos_sim_spearman value: 87.8878986593873 - type: euclidean_pearson value: 87.11557232168404 - type: euclidean_spearman value: 87.40944677043365 - type: manhattan_pearson value: 87.10395398212532 - type: manhattan_spearman value: 87.35977283466168 - task: type: PairClassification dataset: name: MTEB SprintDuplicateQuestions type: mteb/sprintduplicatequestions-pairclassification config: default split: test revision: d66bd1f72af766a5cc4b0ca5e00c162f89e8cc46 metrics: - type: cos_sim_accuracy value: 99.84752475247525 - type: cos_sim_ap value: 96.49316696572335 - type: cos_sim_f1 value: 92.35352532274081 - type: cos_sim_precision value: 91.71597633136095 - type: cos_sim_recall value: 93.0 - type: dot_accuracy value: 99.77326732673268 - type: dot_ap value: 93.5497681978726 - type: dot_f1 value: 88.35582208895552 - type: dot_precision value: 88.31168831168831 - type: dot_recall value: 88.4 - type: euclidean_accuracy value: 99.84653465346534 - type: euclidean_ap value: 96.36378999360083 - type: euclidean_f1 value: 92.33052944087086 - type: euclidean_precision value: 91.38099902056807 - type: euclidean_recall value: 93.30000000000001 - type: manhattan_accuracy value: 99.84455445544555 - type: manhattan_ap value: 96.36035171233175 - type: manhattan_f1 value: 92.13260761999011 - type: manhattan_precision value: 91.1851126346719 - type: manhattan_recall value: 93.10000000000001 - type: max_accuracy value: 99.84752475247525 - type: max_ap value: 96.49316696572335 - type: max_f1 value: 92.35352532274081 - task: type: PairClassification dataset: name: MTEB TwitterSemEval2015 type: mteb/twittersemeval2015-pairclassification config: default split: test revision: 70970daeab8776df92f5ea462b6173c0b46fd2d1 metrics: - type: cos_sim_accuracy value: 87.26828396018358 - type: cos_sim_ap value: 77.79878217023162 - type: cos_sim_f1 value: 71.0425694621463 - type: cos_sim_precision value: 68.71301775147928 - type: cos_sim_recall value: 73.53562005277044 - type: dot_accuracy value: 84.01978899684092 - type: dot_ap value: 66.12134149171163 - type: dot_f1 value: 63.283507097098365 - type: dot_precision value: 60.393191081275475 - type: dot_recall value: 66.46437994722955 - type: euclidean_accuracy value: 87.24444179531503 - type: 
euclidean_ap value: 77.84821131946212 - type: euclidean_f1 value: 71.30456661215247 - type: euclidean_precision value: 68.1413801394566 - type: euclidean_recall value: 74.77572559366754 - type: manhattan_accuracy value: 87.19079692436074 - type: manhattan_ap value: 77.78054941055291 - type: manhattan_f1 value: 71.13002127393318 - type: manhattan_precision value: 67.65055939062128 - type: manhattan_recall value: 74.9868073878628 - type: max_accuracy value: 87.26828396018358 - type: max_ap value: 77.84821131946212 - type: max_f1 value: 71.30456661215247 - task: type: PairClassification dataset: name: MTEB TwitterURLCorpus type: mteb/twitterurlcorpus-pairclassification config: default split: test revision: 8b6510b0b1fa4e4c4f879467980e9be563ec1cdf metrics: - type: cos_sim_accuracy value: 88.91023402025847 - type: cos_sim_ap value: 85.94088151184411 - type: cos_sim_f1 value: 78.25673997223645 - type: cos_sim_precision value: 74.45433059919367 - type: cos_sim_recall value: 82.46843239913767 - type: dot_accuracy value: 87.91865564481701 - type: dot_ap value: 82.75373957440969 - type: dot_f1 value: 75.97383507276201 - type: dot_precision value: 72.67294713160854 - type: dot_recall value: 79.5888512473052 - type: euclidean_accuracy value: 88.8539604921023 - type: euclidean_ap value: 85.71590936389937 - type: euclidean_f1 value: 77.82902261742242 - type: euclidean_precision value: 74.7219270279844 - type: euclidean_recall value: 81.20572836464429 - type: manhattan_accuracy value: 88.78992509799356 - type: manhattan_ap value: 85.70200619366904 - type: manhattan_f1 value: 77.85875848203065 - type: manhattan_precision value: 72.94315506222671 - type: manhattan_recall value: 83.48475515860795 - type: max_accuracy value: 88.91023402025847 - type: max_ap value: 85.94088151184411 - type: max_f1 value: 78.25673997223645 --- # bge-large-en-v1.5-quant <div> <img src="https://huggingface.co/zeroshot/bge-large-en-v1.5-quant/resolve/main/bge-large-latency.png" alt="latency" width="500" style="display:inline-block; margin-right:10px;"/> </div> [DeepSparse](https://github.com/neuralmagic/deepsparse) is able to improve latency performance on a 10 core laptop by 4.8X and up to 3.5X on a 16 core AWS instance. ## Usage This is the quantized (INT8) ONNX variant of the [bge-large-en-v1.5](https://huggingface.co/BAAI/bge-large-en-v1.5) embeddings model accelerated with [Sparsify](https://github.com/neuralmagic/sparsify) for quantization and [DeepSparseSentenceTransformers](https://github.com/neuralmagic/deepsparse/tree/main/src/deepsparse/sentence_transformers) for inference. ```bash pip install -U deepsparse-nightly[sentence_transformers] ``` ```python from deepsparse.sentence_transformers import DeepSparseSentenceTransformer model = DeepSparseSentenceTransformer('neuralmagic/bge-large-en-v1.5-quant', export=False) # Our sentences we like to encode sentences = ['This framework generates embeddings for each input sentence', 'Sentences are passed as a list of string.', 'The quick brown fox jumps over the lazy dog.'] # Sentences are encoded by calling model.encode() embeddings = model.encode(sentences) # Print the embeddings for sentence, embedding in zip(sentences, embeddings): print("Sentence:", sentence) print("Embedding:", embedding.shape) print("") ``` For general questions on these models and sparsification methods, reach out to the engineering team on our [community Slack](https://join.slack.com/t/discuss-neuralmagic/shared_invite/zt-q1a1cnvo-YBoICSIw3L1dmQpjBeDurQ).
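As a quick follow-up to the usage snippet above, the embeddings can be compared with cosine similarity. This is an illustrative sketch only; it assumes `encode` returns NumPy-compatible arrays as in standard Sentence Transformers, which is why the result is wrapped in `np.asarray`.

```python
import numpy as np
from deepsparse.sentence_transformers import DeepSparseSentenceTransformer

model = DeepSparseSentenceTransformer('neuralmagic/bge-large-en-v1.5-quant', export=False)

sentences = [
    "The quick brown fox jumps over the lazy dog.",
    "A fast, dark-colored fox leaps above a sleepy canine.",
    "Quantized ONNX models can run efficiently on CPUs.",
]
embeddings = np.asarray(model.encode(sentences))

# L2-normalize so that the dot product equals cosine similarity.
normed = embeddings / np.linalg.norm(embeddings, axis=1, keepdims=True)
scores = normed[1:] @ normed[0]
for sentence, score in zip(sentences[1:], scores):
    print(f"{score:.3f}  {sentence}")
```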
[ "BIOSSES" ]
shibing624/text2vec-base-multilingual
shibing624
sentence-similarity
[ "sentence-transformers", "pytorch", "onnx", "safetensors", "bert", "feature-extraction", "sentence-similarity", "transformers", "text2vec", "mteb", "zh", "en", "de", "fr", "it", "nl", "pt", "pl", "ru", "dataset:shibing624/nli-zh-all", "license:apache-2.0", "model-index", "autotrain_compatible", "text-embeddings-inference", "endpoints_compatible", "region:us" ]
"2023-06-22T06:28:12Z"
2024-07-31T09:01:11+00:00
125,023
53
--- datasets: - shibing624/nli-zh-all language: - zh - en - de - fr - it - nl - pt - pl - ru library_name: sentence-transformers license: apache-2.0 metrics: - spearmanr pipeline_tag: sentence-similarity tags: - sentence-transformers - feature-extraction - sentence-similarity - transformers - text2vec - mteb model-index: - name: text2vec-base-multilingual results: - task: type: Classification dataset: name: MTEB AmazonCounterfactualClassification (en) type: mteb/amazon_counterfactual config: en split: test revision: e8379541af4e31359cca9fbcf4b00f2671dba205 metrics: - type: accuracy value: 70.97014925373134 - type: ap value: 33.95151328318672 - type: f1 value: 65.14740155705596 - task: type: Classification dataset: name: MTEB AmazonCounterfactualClassification (de) type: mteb/amazon_counterfactual config: de split: test revision: e8379541af4e31359cca9fbcf4b00f2671dba205 metrics: - type: accuracy value: 68.69379014989293 - type: ap value: 79.68277579733802 - type: f1 value: 66.54960052336921 - task: type: Classification dataset: name: MTEB AmazonCounterfactualClassification (en-ext) type: mteb/amazon_counterfactual config: en-ext split: test revision: e8379541af4e31359cca9fbcf4b00f2671dba205 metrics: - type: accuracy value: 70.90704647676162 - type: ap value: 20.747518928580437 - type: f1 value: 58.64365465884924 - task: type: Classification dataset: name: MTEB AmazonCounterfactualClassification (ja) type: mteb/amazon_counterfactual config: ja split: test revision: e8379541af4e31359cca9fbcf4b00f2671dba205 metrics: - type: accuracy value: 61.605995717344754 - type: ap value: 14.135974879487028 - type: f1 value: 49.980224800472136 - task: type: Classification dataset: name: MTEB AmazonPolarityClassification type: mteb/amazon_polarity config: default split: test revision: e2d317d38cd51312af73b3d32a06d1a08b442046 metrics: - type: accuracy value: 66.103375 - type: ap value: 61.10087197664471 - type: f1 value: 65.75198509894145 - task: type: Classification dataset: name: MTEB AmazonReviewsClassification (en) type: mteb/amazon_reviews_multi config: en split: test revision: 1399c76144fd37290681b995c656ef9b2e06e26d metrics: - type: accuracy value: 33.134 - type: f1 value: 32.7905397597083 - task: type: Classification dataset: name: MTEB AmazonReviewsClassification (de) type: mteb/amazon_reviews_multi config: de split: test revision: 1399c76144fd37290681b995c656ef9b2e06e26d metrics: - type: accuracy value: 33.388 - type: f1 value: 33.190561196873084 - task: type: Classification dataset: name: MTEB AmazonReviewsClassification (es) type: mteb/amazon_reviews_multi config: es split: test revision: 1399c76144fd37290681b995c656ef9b2e06e26d metrics: - type: accuracy value: 34.824 - type: f1 value: 34.297290157740726 - task: type: Classification dataset: name: MTEB AmazonReviewsClassification (fr) type: mteb/amazon_reviews_multi config: fr split: test revision: 1399c76144fd37290681b995c656ef9b2e06e26d metrics: - type: accuracy value: 33.449999999999996 - type: f1 value: 33.08017234412433 - task: type: Classification dataset: name: MTEB AmazonReviewsClassification (ja) type: mteb/amazon_reviews_multi config: ja split: test revision: 1399c76144fd37290681b995c656ef9b2e06e26d metrics: - type: accuracy value: 30.046 - type: f1 value: 29.857141661482228 - task: type: Classification dataset: name: MTEB AmazonReviewsClassification (zh) type: mteb/amazon_reviews_multi config: zh split: test revision: 1399c76144fd37290681b995c656ef9b2e06e26d metrics: - type: accuracy value: 32.522 - type: f1 value: 31.854699911472174 - 
task: type: Clustering dataset: name: MTEB ArxivClusteringP2P type: mteb/arxiv-clustering-p2p config: default split: test revision: a122ad7f3f0291bf49cc6f4d32aa80929df69d5d metrics: - type: v_measure value: 32.31918856561886 - task: type: Clustering dataset: name: MTEB ArxivClusteringS2S type: mteb/arxiv-clustering-s2s config: default split: test revision: f910caf1a6075f7329cdf8c1a6135696f37dbd53 metrics: - type: v_measure value: 25.503481615956137 - task: type: Reranking dataset: name: MTEB AskUbuntuDupQuestions type: mteb/askubuntudupquestions-reranking config: default split: test revision: 2000358ca161889fa9c082cb41daa8dcfb161a54 metrics: - type: map value: 57.91471462820568 - type: mrr value: 71.82990370663501 - task: type: STS dataset: name: MTEB BIOSSES type: mteb/biosses-sts config: default split: test revision: d3fb88f8f02e40887cd149695127462bbcf29b4a metrics: - type: cos_sim_pearson value: 68.83853315193127 - type: cos_sim_spearman value: 66.16174850417771 - type: euclidean_pearson value: 56.65313897263153 - type: euclidean_spearman value: 52.69156205876939 - type: manhattan_pearson value: 56.97282154658304 - type: manhattan_spearman value: 53.167476517261015 - task: type: Classification dataset: name: MTEB Banking77Classification type: mteb/banking77 config: default split: test revision: 0fd18e25b25c072e09e0d92ab615fda904d66300 metrics: - type: accuracy value: 78.08441558441558 - type: f1 value: 77.99825264827898 - task: type: Clustering dataset: name: MTEB BiorxivClusteringP2P type: mteb/biorxiv-clustering-p2p config: default split: test revision: 65b79d1d13f80053f67aca9498d9402c2d9f1f40 metrics: - type: v_measure value: 28.98583420521256 - task: type: Clustering dataset: name: MTEB BiorxivClusteringS2S type: mteb/biorxiv-clustering-s2s config: default split: test revision: 258694dd0231531bc1fd9de6ceb52a0853c6d908 metrics: - type: v_measure value: 23.195091778460892 - task: type: Classification dataset: name: MTEB EmotionClassification type: mteb/emotion config: default split: test revision: 4f58c6b202a23cf9a4da393831edf4f9183cad37 metrics: - type: accuracy value: 43.35 - type: f1 value: 38.80269436557695 - task: type: Classification dataset: name: MTEB ImdbClassification type: mteb/imdb config: default split: test revision: 3d86128a09e091d6018b6d26cad27f2739fc2db7 metrics: - type: accuracy value: 59.348 - type: ap value: 55.75065220262251 - type: f1 value: 58.72117519082607 - task: type: Classification dataset: name: MTEB MTOPDomainClassification (en) type: mteb/mtop_domain config: en split: test revision: d80d48c1eb48d3562165c59d59d0034df9fff0bf metrics: - type: accuracy value: 81.04879160966712 - type: f1 value: 80.86889779192701 - task: type: Classification dataset: name: MTEB MTOPDomainClassification (de) type: mteb/mtop_domain config: de split: test revision: d80d48c1eb48d3562165c59d59d0034df9fff0bf metrics: - type: accuracy value: 78.59397013243168 - type: f1 value: 77.09902761555972 - task: type: Classification dataset: name: MTEB MTOPDomainClassification (es) type: mteb/mtop_domain config: es split: test revision: d80d48c1eb48d3562165c59d59d0034df9fff0bf metrics: - type: accuracy value: 79.24282855236824 - type: f1 value: 78.75883867079015 - task: type: Classification dataset: name: MTEB MTOPDomainClassification (fr) type: mteb/mtop_domain config: fr split: test revision: d80d48c1eb48d3562165c59d59d0034df9fff0bf metrics: - type: accuracy value: 76.16661446915127 - type: f1 value: 76.30204722831901 - task: type: Classification dataset: name: MTEB MTOPDomainClassification 
(hi) type: mteb/mtop_domain config: hi split: test revision: d80d48c1eb48d3562165c59d59d0034df9fff0bf metrics: - type: accuracy value: 78.74506991753317 - type: f1 value: 77.50560442779701 - task: type: Classification dataset: name: MTEB MTOPDomainClassification (th) type: mteb/mtop_domain config: th split: test revision: d80d48c1eb48d3562165c59d59d0034df9fff0bf metrics: - type: accuracy value: 77.67088607594937 - type: f1 value: 77.21442956887493 - task: type: Classification dataset: name: MTEB MTOPIntentClassification (en) type: mteb/mtop_intent config: en split: test revision: ae001d0e6b1228650b7bd1c2c65fb50ad11a8aba metrics: - type: accuracy value: 62.786137710898316 - type: f1 value: 46.23474201126368 - task: type: Classification dataset: name: MTEB MTOPIntentClassification (de) type: mteb/mtop_intent config: de split: test revision: ae001d0e6b1228650b7bd1c2c65fb50ad11a8aba metrics: - type: accuracy value: 55.285996055226825 - type: f1 value: 37.98039513682919 - task: type: Classification dataset: name: MTEB MTOPIntentClassification (es) type: mteb/mtop_intent config: es split: test revision: ae001d0e6b1228650b7bd1c2c65fb50ad11a8aba metrics: - type: accuracy value: 58.67911941294196 - type: f1 value: 40.541410807124954 - task: type: Classification dataset: name: MTEB MTOPIntentClassification (fr) type: mteb/mtop_intent config: fr split: test revision: ae001d0e6b1228650b7bd1c2c65fb50ad11a8aba metrics: - type: accuracy value: 53.257124960851854 - type: f1 value: 38.42982319259366 - task: type: Classification dataset: name: MTEB MTOPIntentClassification (hi) type: mteb/mtop_intent config: hi split: test revision: ae001d0e6b1228650b7bd1c2c65fb50ad11a8aba metrics: - type: accuracy value: 59.62352097525995 - type: f1 value: 41.28886486568534 - task: type: Classification dataset: name: MTEB MTOPIntentClassification (th) type: mteb/mtop_intent config: th split: test revision: ae001d0e6b1228650b7bd1c2c65fb50ad11a8aba metrics: - type: accuracy value: 58.799276672694404 - type: f1 value: 43.68379466247341 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (af) type: mteb/amazon_massive_intent config: af split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 45.42030934767989 - type: f1 value: 44.12201543566376 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (am) type: mteb/amazon_massive_intent config: am split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 37.67652992602556 - type: f1 value: 35.422091900843164 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (ar) type: mteb/amazon_massive_intent config: ar split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 45.02353732347007 - type: f1 value: 41.852484084738194 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (az) type: mteb/amazon_massive_intent config: az split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 48.70880968392737 - type: f1 value: 46.904360615435046 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (bn) type: mteb/amazon_massive_intent config: bn split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 43.78950907868191 - type: f1 value: 41.58872353920405 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (cy) type: mteb/amazon_massive_intent config: cy split: 
test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 28.759246805648957 - type: f1 value: 27.41182001374226 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (da) type: mteb/amazon_massive_intent config: da split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 56.74176193678547 - type: f1 value: 53.82727354182497 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (de) type: mteb/amazon_massive_intent config: de split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 51.55682582380632 - type: f1 value: 49.41963627941866 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (el) type: mteb/amazon_massive_intent config: el split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 56.46940147948891 - type: f1 value: 55.28178711367465 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (en) type: mteb/amazon_massive_intent config: en split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 63.83322125084063 - type: f1 value: 61.836172900845554 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (es) type: mteb/amazon_massive_intent config: es split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 58.27505043712172 - type: f1 value: 57.642436374361154 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (fa) type: mteb/amazon_massive_intent config: fa split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 59.05178211163417 - type: f1 value: 56.858998820504056 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (fi) type: mteb/amazon_massive_intent config: fi split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 57.357094821788834 - type: f1 value: 54.79711189260453 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (fr) type: mteb/amazon_massive_intent config: fr split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 58.79959650302623 - type: f1 value: 57.59158671719513 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (he) type: mteb/amazon_massive_intent config: he split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 51.1768661735037 - type: f1 value: 48.886397276270515 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (hi) type: mteb/amazon_massive_intent config: hi split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 57.06455951580362 - type: f1 value: 55.01530952684585 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (hu) type: mteb/amazon_massive_intent config: hu split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 58.3591123066577 - type: f1 value: 55.9277783370191 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (hy) type: mteb/amazon_massive_intent config: hy split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 52.108271687962336 - type: f1 value: 51.195023400664596 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (id) type: 
mteb/amazon_massive_intent config: id split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 58.26832548755883 - type: f1 value: 56.60774065423401 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (is) type: mteb/amazon_massive_intent config: is split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 35.806993947545394 - type: f1 value: 34.290418953173294 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (it) type: mteb/amazon_massive_intent config: it split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 58.27841291190315 - type: f1 value: 56.9438998642419 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (ja) type: mteb/amazon_massive_intent config: ja split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 60.78009414929389 - type: f1 value: 59.15780842483667 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (jv) type: mteb/amazon_massive_intent config: jv split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 31.153328850033624 - type: f1 value: 30.11004596099605 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (ka) type: mteb/amazon_massive_intent config: ka split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 44.50235373234701 - type: f1 value: 44.040585262624745 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (km) type: mteb/amazon_massive_intent config: km split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 40.99193006052455 - type: f1 value: 39.505480119272484 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (kn) type: mteb/amazon_massive_intent config: kn split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 46.95696032279758 - type: f1 value: 43.093638940785326 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (ko) type: mteb/amazon_massive_intent config: ko split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 54.73100201748486 - type: f1 value: 52.79750744404114 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (lv) type: mteb/amazon_massive_intent config: lv split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 54.865501008742434 - type: f1 value: 53.64798408964839 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (ml) type: mteb/amazon_massive_intent config: ml split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 47.891728312037664 - type: f1 value: 45.261229414636055 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (mn) type: mteb/amazon_massive_intent config: mn split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 52.2259583053127 - type: f1 value: 50.5903419246987 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (ms) type: mteb/amazon_massive_intent config: ms split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 54.277067921990586 - type: f1 value: 52.472042479965886 - task: type: Classification dataset: name: 
MTEB MassiveIntentClassification (my) type: mteb/amazon_massive_intent config: my split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 51.95696032279757 - type: f1 value: 49.79330411854258 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (nb) type: mteb/amazon_massive_intent config: nb split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 54.63685272360457 - type: f1 value: 52.81267480650003 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (nl) type: mteb/amazon_massive_intent config: nl split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 59.451916610625425 - type: f1 value: 57.34790386645091 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (pl) type: mteb/amazon_massive_intent config: pl split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 58.91055817081372 - type: f1 value: 56.39195048528157 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (pt) type: mteb/amazon_massive_intent config: pt split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 59.84196368527236 - type: f1 value: 58.72244763127063 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (ro) type: mteb/amazon_massive_intent config: ro split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 57.04102219233354 - type: f1 value: 55.67040186148946 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (ru) type: mteb/amazon_massive_intent config: ru split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 58.01613987895091 - type: f1 value: 57.203949825484855 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (sl) type: mteb/amazon_massive_intent config: sl split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 56.35843981170141 - type: f1 value: 54.18656338999773 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (sq) type: mteb/amazon_massive_intent config: sq split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 56.47948890383322 - type: f1 value: 54.772224557130954 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (sv) type: mteb/amazon_massive_intent config: sv split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 58.43981170141224 - type: f1 value: 56.09260971364242 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (sw) type: mteb/amazon_massive_intent config: sw split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 33.9609952925353 - type: f1 value: 33.18853392353405 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (ta) type: mteb/amazon_massive_intent config: ta split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 44.29388029589778 - type: f1 value: 41.51986533284474 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (te) type: mteb/amazon_massive_intent config: te split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 47.13517148621385 - type: f1 value: 43.94784138379624 - task: 
type: Classification dataset: name: MTEB MassiveIntentClassification (th) type: mteb/amazon_massive_intent config: th split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 56.856086079354405 - type: f1 value: 56.618177384748456 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (tl) type: mteb/amazon_massive_intent config: tl split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 35.35978480161398 - type: f1 value: 34.060680080365046 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (tr) type: mteb/amazon_massive_intent config: tr split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 59.630127774041696 - type: f1 value: 57.46288652988266 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (ur) type: mteb/amazon_massive_intent config: ur split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 52.7908540685945 - type: f1 value: 51.46934239116157 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (vi) type: mteb/amazon_massive_intent config: vi split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 54.6469401479489 - type: f1 value: 53.9903066185816 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (zh-CN) type: mteb/amazon_massive_intent config: zh-CN split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 60.85743106926698 - type: f1 value: 59.31579548450755 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (zh-TW) type: mteb/amazon_massive_intent config: zh-TW split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 57.46805648957633 - type: f1 value: 57.48469733657326 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (af) type: mteb/amazon_massive_scenario config: af split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 50.86415601882985 - type: f1 value: 49.41696672602645 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (am) type: mteb/amazon_massive_scenario config: am split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 41.183591123066584 - type: f1 value: 40.04563865770774 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (ar) type: mteb/amazon_massive_scenario config: ar split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 50.08069939475455 - type: f1 value: 50.724800165846126 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (az) type: mteb/amazon_massive_scenario config: az split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 51.287827841291204 - type: f1 value: 50.72873776739851 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (bn) type: mteb/amazon_massive_scenario config: bn split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 46.53328850033624 - type: f1 value: 45.93317866639667 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (cy) type: mteb/amazon_massive_scenario config: cy split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: 
accuracy value: 34.347679892400805 - type: f1 value: 31.941581141280828 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (da) type: mteb/amazon_massive_scenario config: da split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 63.073301950235376 - type: f1 value: 62.228728940111054 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (de) type: mteb/amazon_massive_scenario config: de split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 56.398789509078675 - type: f1 value: 54.80778341609032 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (el) type: mteb/amazon_massive_scenario config: el split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 61.79892400806993 - type: f1 value: 60.69430756982446 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (en) type: mteb/amazon_massive_scenario config: en split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 66.96368527236046 - type: f1 value: 66.5893927997656 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (es) type: mteb/amazon_massive_scenario config: es split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 62.21250840618695 - type: f1 value: 62.347177794128925 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (fa) type: mteb/amazon_massive_scenario config: fa split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 62.43779421654339 - type: f1 value: 61.307701312085605 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (fi) type: mteb/amazon_massive_scenario config: fi split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 61.09952925353059 - type: f1 value: 60.313907927386914 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (fr) type: mteb/amazon_massive_scenario config: fr split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 63.38601210490922 - type: f1 value: 63.05968938353488 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (he) type: mteb/amazon_massive_scenario config: he split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 56.2878278412912 - type: f1 value: 55.92927644838597 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (hi) type: mteb/amazon_massive_scenario config: hi split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 60.62878278412912 - type: f1 value: 60.25299253652635 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (hu) type: mteb/amazon_massive_scenario config: hu split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 63.28850033624748 - type: f1 value: 62.77053246337031 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (hy) type: mteb/amazon_massive_scenario config: hy split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 54.875588433086754 - type: f1 value: 54.30717357279134 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (id) type: 
mteb/amazon_massive_scenario config: id split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 61.99394754539341 - type: f1 value: 61.73085530883037 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (is) type: mteb/amazon_massive_scenario config: is split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 38.581035642232685 - type: f1 value: 36.96287269695893 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (it) type: mteb/amazon_massive_scenario config: it split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 62.350369872225976 - type: f1 value: 61.807327324823966 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (ja) type: mteb/amazon_massive_scenario config: ja split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 65.17148621385338 - type: f1 value: 65.29620144656751 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (jv) type: mteb/amazon_massive_scenario config: jv split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 36.12642905178212 - type: f1 value: 35.334393048479484 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (ka) type: mteb/amazon_massive_scenario config: ka split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 50.26899798251513 - type: f1 value: 49.041065960139434 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (km) type: mteb/amazon_massive_scenario config: km split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 44.24344317417619 - type: f1 value: 42.42177854872125 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (kn) type: mteb/amazon_massive_scenario config: kn split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 47.370544720914594 - type: f1 value: 46.589722581465324 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (ko) type: mteb/amazon_massive_scenario config: ko split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 58.89038332212508 - type: f1 value: 57.753607921990394 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (lv) type: mteb/amazon_massive_scenario config: lv split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 56.506388702084756 - type: f1 value: 56.0485860423295 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (ml) type: mteb/amazon_massive_scenario config: ml split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 50.06388702084734 - type: f1 value: 50.109364641824584 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (mn) type: mteb/amazon_massive_scenario config: mn split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 55.053799596503026 - type: f1 value: 54.490665705666686 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (ms) type: mteb/amazon_massive_scenario config: ms split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 59.77135171486213 - type: f1 value: 
58.2808650158803 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (my) type: mteb/amazon_massive_scenario config: my split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 55.71620712844654 - type: f1 value: 53.863034882475304 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (nb) type: mteb/amazon_massive_scenario config: nb split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 60.26227303295225 - type: f1 value: 59.86604657147016 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (nl) type: mteb/amazon_massive_scenario config: nl split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 63.3759246805649 - type: f1 value: 62.45257339288533 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (pl) type: mteb/amazon_massive_scenario config: pl split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 62.552118359112306 - type: f1 value: 61.354449605776765 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (pt) type: mteb/amazon_massive_scenario config: pt split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 62.40753194351043 - type: f1 value: 61.98779889528889 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (ro) type: mteb/amazon_massive_scenario config: ro split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 60.68258238063214 - type: f1 value: 60.59973978976571 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (ru) type: mteb/amazon_massive_scenario config: ru split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 62.31002017484868 - type: f1 value: 62.412312268503655 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (sl) type: mteb/amazon_massive_scenario config: sl split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 61.429051782111635 - type: f1 value: 61.60095590401424 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (sq) type: mteb/amazon_massive_scenario config: sq split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 62.229320780094156 - type: f1 value: 61.02251426747547 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (sv) type: mteb/amazon_massive_scenario config: sv split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 64.42501681237391 - type: f1 value: 63.461494430605235 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (sw) type: mteb/amazon_massive_scenario config: sw split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 38.51714862138534 - type: f1 value: 37.12466722986362 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (ta) type: mteb/amazon_massive_scenario config: ta split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 46.99731002017485 - type: f1 value: 45.859147049984834 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (te) type: mteb/amazon_massive_scenario config: te split: test revision: 
7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 51.01882985877605 - type: f1 value: 49.01040173136056 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (th) type: mteb/amazon_massive_scenario config: th split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 63.234700739744454 - type: f1 value: 62.732294595214746 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (tl) type: mteb/amazon_massive_scenario config: tl split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 38.72225958305312 - type: f1 value: 36.603231928120906 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (tr) type: mteb/amazon_massive_scenario config: tr split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 64.48554135843982 - type: f1 value: 63.97380562022752 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (ur) type: mteb/amazon_massive_scenario config: ur split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 56.7955615332885 - type: f1 value: 55.95308241204802 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (vi) type: mteb/amazon_massive_scenario config: vi split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 57.06455951580362 - type: f1 value: 56.95570494066693 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (zh-CN) type: mteb/amazon_massive_scenario config: zh-CN split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 65.8338937457969 - type: f1 value: 65.6778746906008 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (zh-TW) type: mteb/amazon_massive_scenario config: zh-TW split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 63.369199731002034 - type: f1 value: 63.527650116059945 - task: type: Clustering dataset: name: MTEB MedrxivClusteringP2P type: mteb/medrxiv-clustering-p2p config: default split: test revision: e7a26af6f3ae46b30dde8737f02c07b1505bcc73 metrics: - type: v_measure value: 29.442504112215538 - task: type: Clustering dataset: name: MTEB MedrxivClusteringS2S type: mteb/medrxiv-clustering-s2s config: default split: test revision: 35191c8c0dca72d8ff3efcd72aa802307d469663 metrics: - type: v_measure value: 26.16062814161053 - task: type: Retrieval dataset: name: MTEB QuoraRetrieval type: quora config: default split: test revision: None metrics: - type: map_at_1 value: 65.319 - type: map_at_10 value: 78.72 - type: map_at_100 value: 79.44600000000001 - type: map_at_1000 value: 79.469 - type: map_at_3 value: 75.693 - type: map_at_5 value: 77.537 - type: mrr_at_1 value: 75.24 - type: mrr_at_10 value: 82.304 - type: mrr_at_100 value: 82.485 - type: mrr_at_1000 value: 82.489 - type: mrr_at_3 value: 81.002 - type: mrr_at_5 value: 81.817 - type: ndcg_at_1 value: 75.26 - type: ndcg_at_10 value: 83.07 - type: ndcg_at_100 value: 84.829 - type: ndcg_at_1000 value: 85.087 - type: ndcg_at_3 value: 79.67699999999999 - type: ndcg_at_5 value: 81.42 - type: precision_at_1 value: 75.26 - type: precision_at_10 value: 12.697 - type: precision_at_100 value: 1.4829999999999999 - type: precision_at_1000 value: 0.154 - type: precision_at_3 value: 34.849999999999994 - type: precision_at_5 value: 23.054 - type: 
recall_at_1 value: 65.319 - type: recall_at_10 value: 91.551 - type: recall_at_100 value: 98.053 - type: recall_at_1000 value: 99.516 - type: recall_at_3 value: 81.819 - type: recall_at_5 value: 86.66199999999999 - task: type: Clustering dataset: name: MTEB RedditClustering type: mteb/reddit-clustering config: default split: test revision: 24640382cdbf8abc73003fb0fa6d111a705499eb metrics: - type: v_measure value: 31.249791587189996 - task: type: Clustering dataset: name: MTEB RedditClusteringP2P type: mteb/reddit-clustering-p2p config: default split: test revision: 282350215ef01743dc01b456c7f5241fa8937f16 metrics: - type: v_measure value: 43.302922383029816 - task: type: STS dataset: name: MTEB SICK-R type: mteb/sickr-sts config: default split: test revision: a6ea5a8cab320b040a23452cc28066d9beae2cee metrics: - type: cos_sim_pearson value: 84.80670811345861 - type: cos_sim_spearman value: 79.97373018384307 - type: euclidean_pearson value: 83.40205934125837 - type: euclidean_spearman value: 79.73331008251854 - type: manhattan_pearson value: 83.3320983393412 - type: manhattan_spearman value: 79.677919746045 - task: type: STS dataset: name: MTEB STS12 type: mteb/sts12-sts config: default split: test revision: a0d554a64d88156834ff5ae9920b964011b16384 metrics: - type: cos_sim_pearson value: 86.3816087627948 - type: cos_sim_spearman value: 80.91314664846955 - type: euclidean_pearson value: 85.10603071031096 - type: euclidean_spearman value: 79.42663939501841 - type: manhattan_pearson value: 85.16096376014066 - type: manhattan_spearman value: 79.51936545543191 - task: type: STS dataset: name: MTEB STS13 type: mteb/sts13-sts config: default split: test revision: 7e90230a92c190f1bf69ae9002b8cea547a64cca metrics: - type: cos_sim_pearson value: 80.44665329940209 - type: cos_sim_spearman value: 82.86479010707745 - type: euclidean_pearson value: 84.06719627734672 - type: euclidean_spearman value: 84.9356099976297 - type: manhattan_pearson value: 84.10370009572624 - type: manhattan_spearman value: 84.96828040546536 - task: type: STS dataset: name: MTEB STS14 type: mteb/sts14-sts config: default split: test revision: 6031580fec1f6af667f0bd2da0a551cf4f0b2375 metrics: - type: cos_sim_pearson value: 86.05704260568437 - type: cos_sim_spearman value: 87.36399473803172 - type: euclidean_pearson value: 86.8895170159388 - type: euclidean_spearman value: 87.16246440866921 - type: manhattan_pearson value: 86.80814774538997 - type: manhattan_spearman value: 87.09320142699522 - task: type: STS dataset: name: MTEB STS15 type: mteb/sts15-sts config: default split: test revision: ae752c7c21bf194d8b67fd573edf7ae58183cbe3 metrics: - type: cos_sim_pearson value: 85.97825118945852 - type: cos_sim_spearman value: 88.31438033558268 - type: euclidean_pearson value: 87.05174694758092 - type: euclidean_spearman value: 87.80659468392355 - type: manhattan_pearson value: 86.98831322198717 - type: manhattan_spearman value: 87.72820615049285 - task: type: STS dataset: name: MTEB STS16 type: mteb/sts16-sts config: default split: test revision: 4d8694f8f0e0100860b497b999b3dbed754a0513 metrics: - type: cos_sim_pearson value: 78.68745420126719 - type: cos_sim_spearman value: 81.6058424699445 - type: euclidean_pearson value: 81.16540133861879 - type: euclidean_spearman value: 81.86377535458067 - type: manhattan_pearson value: 81.13813317937021 - type: manhattan_spearman value: 81.87079962857256 - task: type: STS dataset: name: MTEB STS17 (ko-ko) type: mteb/sts17-crosslingual-sts config: ko-ko split: test revision: 
af5e6fb845001ecf41f4c1e033ce921939a2a68d metrics: - type: cos_sim_pearson value: 68.06192660936868 - type: cos_sim_spearman value: 68.2376353514075 - type: euclidean_pearson value: 60.68326946956215 - type: euclidean_spearman value: 59.19352349785952 - type: manhattan_pearson value: 60.6592944683418 - type: manhattan_spearman value: 59.167534419270865 - task: type: STS dataset: name: MTEB STS17 (ar-ar) type: mteb/sts17-crosslingual-sts config: ar-ar split: test revision: af5e6fb845001ecf41f4c1e033ce921939a2a68d metrics: - type: cos_sim_pearson value: 76.78098264855684 - type: cos_sim_spearman value: 78.02670452969812 - type: euclidean_pearson value: 77.26694463661255 - type: euclidean_spearman value: 77.47007626009587 - type: manhattan_pearson value: 77.25070088632027 - type: manhattan_spearman value: 77.36368265830724 - task: type: STS dataset: name: MTEB STS17 (en-ar) type: mteb/sts17-crosslingual-sts config: en-ar split: test revision: af5e6fb845001ecf41f4c1e033ce921939a2a68d metrics: - type: cos_sim_pearson value: 78.45418506379532 - type: cos_sim_spearman value: 78.60412019902428 - type: euclidean_pearson value: 79.90303710850512 - type: euclidean_spearman value: 78.67123625004957 - type: manhattan_pearson value: 80.09189580897753 - type: manhattan_spearman value: 79.02484481441483 - task: type: STS dataset: name: MTEB STS17 (en-de) type: mteb/sts17-crosslingual-sts config: en-de split: test revision: af5e6fb845001ecf41f4c1e033ce921939a2a68d metrics: - type: cos_sim_pearson value: 82.35556731232779 - type: cos_sim_spearman value: 81.48249735354844 - type: euclidean_pearson value: 81.66748026636621 - type: euclidean_spearman value: 80.35571574338547 - type: manhattan_pearson value: 81.38214732806365 - type: manhattan_spearman value: 79.9018202958774 - task: type: STS dataset: name: MTEB STS17 (en-en) type: mteb/sts17-crosslingual-sts config: en-en split: test revision: af5e6fb845001ecf41f4c1e033ce921939a2a68d metrics: - type: cos_sim_pearson value: 86.4527703176897 - type: cos_sim_spearman value: 85.81084095829584 - type: euclidean_pearson value: 86.43489162324457 - type: euclidean_spearman value: 85.27110976093296 - type: manhattan_pearson value: 86.43674259444512 - type: manhattan_spearman value: 85.05719308026032 - task: type: STS dataset: name: MTEB STS17 (en-tr) type: mteb/sts17-crosslingual-sts config: en-tr split: test revision: af5e6fb845001ecf41f4c1e033ce921939a2a68d metrics: - type: cos_sim_pearson value: 76.00411240034492 - type: cos_sim_spearman value: 76.33887356560854 - type: euclidean_pearson value: 76.81730660019446 - type: euclidean_spearman value: 75.04432185451306 - type: manhattan_pearson value: 77.22298813168995 - type: manhattan_spearman value: 75.56420330256725 - task: type: STS dataset: name: MTEB STS17 (es-en) type: mteb/sts17-crosslingual-sts config: es-en split: test revision: af5e6fb845001ecf41f4c1e033ce921939a2a68d metrics: - type: cos_sim_pearson value: 79.1447136836213 - type: cos_sim_spearman value: 81.80823850788917 - type: euclidean_pearson value: 80.84505734814422 - type: euclidean_spearman value: 81.714168092736 - type: manhattan_pearson value: 80.84713816174187 - type: manhattan_spearman value: 81.61267814749516 - task: type: STS dataset: name: MTEB STS17 (es-es) type: mteb/sts17-crosslingual-sts config: es-es split: test revision: af5e6fb845001ecf41f4c1e033ce921939a2a68d metrics: - type: cos_sim_pearson value: 87.01257457052873 - type: cos_sim_spearman value: 87.91146458004216 - type: euclidean_pearson value: 88.36771859717994 - type: 
euclidean_spearman value: 87.73182474597515 - type: manhattan_pearson value: 88.26551451003671 - type: manhattan_spearman value: 87.71675151388992 - task: type: STS dataset: name: MTEB STS17 (fr-en) type: mteb/sts17-crosslingual-sts config: fr-en split: test revision: af5e6fb845001ecf41f4c1e033ce921939a2a68d metrics: - type: cos_sim_pearson value: 79.20121618382373 - type: cos_sim_spearman value: 78.05794691968603 - type: euclidean_pearson value: 79.93819925682054 - type: euclidean_spearman value: 78.00586118701553 - type: manhattan_pearson value: 80.05598625820885 - type: manhattan_spearman value: 78.04802948866832 - task: type: STS dataset: name: MTEB STS17 (it-en) type: mteb/sts17-crosslingual-sts config: it-en split: test revision: af5e6fb845001ecf41f4c1e033ce921939a2a68d metrics: - type: cos_sim_pearson value: 81.51743373871778 - type: cos_sim_spearman value: 80.98266651818703 - type: euclidean_pearson value: 81.11875722505269 - type: euclidean_spearman value: 79.45188413284538 - type: manhattan_pearson value: 80.7988457619225 - type: manhattan_spearman value: 79.49643569311485 - task: type: STS dataset: name: MTEB STS17 (nl-en) type: mteb/sts17-crosslingual-sts config: nl-en split: test revision: af5e6fb845001ecf41f4c1e033ce921939a2a68d metrics: - type: cos_sim_pearson value: 81.78679924046351 - type: cos_sim_spearman value: 80.9986574147117 - type: euclidean_pearson value: 82.09130079135713 - type: euclidean_spearman value: 80.66215667390159 - type: manhattan_pearson value: 82.0328610549654 - type: manhattan_spearman value: 80.31047226932408 - task: type: STS dataset: name: MTEB STS22 (en) type: mteb/sts22-crosslingual-sts config: en split: test revision: 6d1ba47164174a496b7fa5d3569dae26a6813b80 metrics: - type: cos_sim_pearson value: 58.08082172994642 - type: cos_sim_spearman value: 62.9940530222459 - type: euclidean_pearson value: 58.47927303460365 - type: euclidean_spearman value: 60.8440317609258 - type: manhattan_pearson value: 58.32438211697841 - type: manhattan_spearman value: 60.69642636776064 - task: type: STS dataset: name: MTEB STS22 (de) type: mteb/sts22-crosslingual-sts config: de split: test revision: 6d1ba47164174a496b7fa5d3569dae26a6813b80 metrics: - type: cos_sim_pearson value: 33.83985707464123 - type: cos_sim_spearman value: 46.89093209603036 - type: euclidean_pearson value: 34.63602187576556 - type: euclidean_spearman value: 46.31087228200712 - type: manhattan_pearson value: 34.66899391543166 - type: manhattan_spearman value: 46.33049538425276 - task: type: STS dataset: name: MTEB STS22 (es) type: mteb/sts22-crosslingual-sts config: es split: test revision: 6d1ba47164174a496b7fa5d3569dae26a6813b80 metrics: - type: cos_sim_pearson value: 51.61315965767736 - type: cos_sim_spearman value: 58.9434266730386 - type: euclidean_pearson value: 50.35885602217862 - type: euclidean_spearman value: 58.238679883286025 - type: manhattan_pearson value: 53.01732044381151 - type: manhattan_spearman value: 58.10482351761412 - task: type: STS dataset: name: MTEB STS22 (pl) type: mteb/sts22-crosslingual-sts config: pl split: test revision: 6d1ba47164174a496b7fa5d3569dae26a6813b80 metrics: - type: cos_sim_pearson value: 26.771738440430177 - type: cos_sim_spearman value: 34.807259227816054 - type: euclidean_pearson value: 17.82657835823811 - type: euclidean_spearman value: 34.27912898498941 - type: manhattan_pearson value: 19.121527758886312 - type: manhattan_spearman value: 34.4940050226265 - task: type: STS dataset: name: MTEB STS22 (tr) type: mteb/sts22-crosslingual-sts config: tr 
split: test revision: 6d1ba47164174a496b7fa5d3569dae26a6813b80 metrics: - type: cos_sim_pearson value: 52.8354704676683 - type: cos_sim_spearman value: 57.28629534815841 - type: euclidean_pearson value: 54.10329332004385 - type: euclidean_spearman value: 58.15030615859976 - type: manhattan_pearson value: 55.42372087433115 - type: manhattan_spearman value: 57.52270736584036 - task: type: STS dataset: name: MTEB STS22 (ar) type: mteb/sts22-crosslingual-sts config: ar split: test revision: 6d1ba47164174a496b7fa5d3569dae26a6813b80 metrics: - type: cos_sim_pearson value: 31.01976557986924 - type: cos_sim_spearman value: 54.506959483927616 - type: euclidean_pearson value: 36.917863022119086 - type: euclidean_spearman value: 53.750194241538566 - type: manhattan_pearson value: 37.200177833241085 - type: manhattan_spearman value: 53.507659188082535 - task: type: STS dataset: name: MTEB STS22 (ru) type: mteb/sts22-crosslingual-sts config: ru split: test revision: 6d1ba47164174a496b7fa5d3569dae26a6813b80 metrics: - type: cos_sim_pearson value: 46.38635647225934 - type: cos_sim_spearman value: 54.50892732637536 - type: euclidean_pearson value: 40.8331015184763 - type: euclidean_spearman value: 53.142903182230924 - type: manhattan_pearson value: 43.07655692906317 - type: manhattan_spearman value: 53.5833474125901 - task: type: STS dataset: name: MTEB STS22 (zh) type: mteb/sts22-crosslingual-sts config: zh split: test revision: 6d1ba47164174a496b7fa5d3569dae26a6813b80 metrics: - type: cos_sim_pearson value: 60.52525456662916 - type: cos_sim_spearman value: 63.23975489531082 - type: euclidean_pearson value: 58.989191722317514 - type: euclidean_spearman value: 62.536326639863894 - type: manhattan_pearson value: 61.32982866201855 - type: manhattan_spearman value: 63.068262822520516 - task: type: STS dataset: name: MTEB STS22 (fr) type: mteb/sts22-crosslingual-sts config: fr split: test revision: 6d1ba47164174a496b7fa5d3569dae26a6813b80 metrics: - type: cos_sim_pearson value: 59.63798684577696 - type: cos_sim_spearman value: 74.09937723367189 - type: euclidean_pearson value: 63.77494904383906 - type: euclidean_spearman value: 71.15932571292481 - type: manhattan_pearson value: 63.69646122775205 - type: manhattan_spearman value: 70.54960698541632 - task: type: STS dataset: name: MTEB STS22 (de-en) type: mteb/sts22-crosslingual-sts config: de-en split: test revision: 6d1ba47164174a496b7fa5d3569dae26a6813b80 metrics: - type: cos_sim_pearson value: 36.50262468726711 - type: cos_sim_spearman value: 45.00322499674274 - type: euclidean_pearson value: 32.58759216581778 - type: euclidean_spearman value: 40.13720951315429 - type: manhattan_pearson value: 34.88422299605277 - type: manhattan_spearman value: 40.63516862200963 - task: type: STS dataset: name: MTEB STS22 (es-en) type: mteb/sts22-crosslingual-sts config: es-en split: test revision: 6d1ba47164174a496b7fa5d3569dae26a6813b80 metrics: - type: cos_sim_pearson value: 56.498552617040275 - type: cos_sim_spearman value: 67.71358426124443 - type: euclidean_pearson value: 57.16474781778287 - type: euclidean_spearman value: 65.721515493531 - type: manhattan_pearson value: 59.25227610738926 - type: manhattan_spearman value: 65.89743680340739 - task: type: STS dataset: name: MTEB STS22 (it) type: mteb/sts22-crosslingual-sts config: it split: test revision: 6d1ba47164174a496b7fa5d3569dae26a6813b80 metrics: - type: cos_sim_pearson value: 55.97978814727984 - type: cos_sim_spearman value: 65.85821395092104 - type: euclidean_pearson value: 59.11117270978519 - type: 
euclidean_spearman value: 64.50062069934965 - type: manhattan_pearson value: 59.4436213778161 - type: manhattan_spearman value: 64.4003273074382 - task: type: STS dataset: name: MTEB STS22 (pl-en) type: mteb/sts22-crosslingual-sts config: pl-en split: test revision: 6d1ba47164174a496b7fa5d3569dae26a6813b80 metrics: - type: cos_sim_pearson value: 58.00873192515712 - type: cos_sim_spearman value: 60.167708809138745 - type: euclidean_pearson value: 56.91950637760252 - type: euclidean_spearman value: 58.50593399441014 - type: manhattan_pearson value: 58.683747352584994 - type: manhattan_spearman value: 59.38110066799761 - task: type: STS dataset: name: MTEB STS22 (zh-en) type: mteb/sts22-crosslingual-sts config: zh-en split: test revision: 6d1ba47164174a496b7fa5d3569dae26a6813b80 metrics: - type: cos_sim_pearson value: 54.26020658151187 - type: cos_sim_spearman value: 61.29236187204147 - type: euclidean_pearson value: 55.993896804147056 - type: euclidean_spearman value: 58.654928232615354 - type: manhattan_pearson value: 56.612492816099426 - type: manhattan_spearman value: 58.65144067094258 - task: type: STS dataset: name: MTEB STS22 (es-it) type: mteb/sts22-crosslingual-sts config: es-it split: test revision: 6d1ba47164174a496b7fa5d3569dae26a6813b80 metrics: - type: cos_sim_pearson value: 49.13817835368122 - type: cos_sim_spearman value: 50.78524216975442 - type: euclidean_pearson value: 46.56046454501862 - type: euclidean_spearman value: 50.3935060082369 - type: manhattan_pearson value: 48.0232348418531 - type: manhattan_spearman value: 50.79528358464199 - task: type: STS dataset: name: MTEB STS22 (de-fr) type: mteb/sts22-crosslingual-sts config: de-fr split: test revision: 6d1ba47164174a496b7fa5d3569dae26a6813b80 metrics: - type: cos_sim_pearson value: 44.274388638585286 - type: cos_sim_spearman value: 49.43124017389838 - type: euclidean_pearson value: 42.45909582681174 - type: euclidean_spearman value: 49.661383797129055 - type: manhattan_pearson value: 42.5771970142383 - type: manhattan_spearman value: 50.14423414390715 - task: type: STS dataset: name: MTEB STS22 (de-pl) type: mteb/sts22-crosslingual-sts config: de-pl split: test revision: 6d1ba47164174a496b7fa5d3569dae26a6813b80 metrics: - type: cos_sim_pearson value: 26.119500839749776 - type: cos_sim_spearman value: 39.324070169024424 - type: euclidean_pearson value: 35.83247077201831 - type: euclidean_spearman value: 42.61903924348457 - type: manhattan_pearson value: 35.50415034487894 - type: manhattan_spearman value: 41.87998075949351 - task: type: STS dataset: name: MTEB STS22 (fr-pl) type: mteb/sts22-crosslingual-sts config: fr-pl split: test revision: 6d1ba47164174a496b7fa5d3569dae26a6813b80 metrics: - type: cos_sim_pearson value: 72.62575835691209 - type: cos_sim_spearman value: 73.24670207647144 - type: euclidean_pearson value: 78.07793323914657 - type: euclidean_spearman value: 73.24670207647144 - type: manhattan_pearson value: 77.51429306378206 - type: manhattan_spearman value: 73.24670207647144 - task: type: STS dataset: name: MTEB STSBenchmark type: mteb/stsbenchmark-sts config: default split: test revision: b0fddb56ed78048fa8b90373c8a3cfc37b684831 metrics: - type: cos_sim_pearson value: 84.09375596849891 - type: cos_sim_spearman value: 86.44881302053585 - type: euclidean_pearson value: 84.71259163967213 - type: euclidean_spearman value: 85.63661992344069 - type: manhattan_pearson value: 84.64466537502614 - type: manhattan_spearman value: 85.53769949940238 - task: type: Reranking dataset: name: MTEB SciDocsRR type: 
mteb/scidocs-reranking config: default split: test revision: d3c5e1fc0b855ab6097bf1cda04dd73947d7caab metrics: - type: map value: 70.2056154684549 - type: mrr value: 89.52703161036494 - task: type: PairClassification dataset: name: MTEB SprintDuplicateQuestions type: mteb/sprintduplicatequestions-pairclassification config: default split: test revision: d66bd1f72af766a5cc4b0ca5e00c162f89e8cc46 metrics: - type: cos_sim_accuracy value: 99.57623762376238 - type: cos_sim_ap value: 83.53051588811371 - type: cos_sim_f1 value: 77.72704211060375 - type: cos_sim_precision value: 78.88774459320288 - type: cos_sim_recall value: 76.6 - type: dot_accuracy value: 99.06435643564356 - type: dot_ap value: 27.003124923857463 - type: dot_f1 value: 34.125269978401725 - type: dot_precision value: 37.08920187793427 - type: dot_recall value: 31.6 - type: euclidean_accuracy value: 99.61485148514852 - type: euclidean_ap value: 85.47332647001774 - type: euclidean_f1 value: 80.0808897876643 - type: euclidean_precision value: 80.98159509202453 - type: euclidean_recall value: 79.2 - type: manhattan_accuracy value: 99.61683168316831 - type: manhattan_ap value: 85.41969859598552 - type: manhattan_f1 value: 79.77755308392315 - type: manhattan_precision value: 80.67484662576688 - type: manhattan_recall value: 78.9 - type: max_accuracy value: 99.61683168316831 - type: max_ap value: 85.47332647001774 - type: max_f1 value: 80.0808897876643 - task: type: Clustering dataset: name: MTEB StackExchangeClustering type: mteb/stackexchange-clustering config: default split: test revision: 6cbc1f7b2bc0622f2e39d2c77fa502909748c259 metrics: - type: v_measure value: 34.35688940053467 - task: type: Clustering dataset: name: MTEB StackExchangeClusteringP2P type: mteb/stackexchange-clustering-p2p config: default split: test revision: 815ca46b2622cec33ccafc3735d572c266efdb44 metrics: - type: v_measure value: 30.64427069276576 - task: type: Reranking dataset: name: MTEB StackOverflowDupQuestions type: mteb/stackoverflowdupquestions-reranking config: default split: test revision: e185fbe320c72810689fc5848eb6114e1ef5ec69 metrics: - type: map value: 44.89500754900078 - type: mrr value: 45.33215558950853 - task: type: Summarization dataset: name: MTEB SummEval type: mteb/summeval config: default split: test revision: cda12ad7615edc362dbf25a00fdd61d3b1eaf93c metrics: - type: cos_sim_pearson value: 30.653069624224084 - type: cos_sim_spearman value: 30.10187112430319 - type: dot_pearson value: 28.966278202103666 - type: dot_spearman value: 28.342234095507767 - task: type: Classification dataset: name: MTEB ToxicConversationsClassification type: mteb/toxic_conversations_50k config: default split: test revision: d7c0de2777da35d6aae2200a62c6e0e5af397c4c metrics: - type: accuracy value: 65.96839999999999 - type: ap value: 11.846327590186444 - type: f1 value: 50.518102944693574 - task: type: Classification dataset: name: MTEB TweetSentimentExtractionClassification type: mteb/tweet_sentiment_extraction config: default split: test revision: d604517c81ca91fe16a244d1248fc021f9ecee7a metrics: - type: accuracy value: 55.220713073005086 - type: f1 value: 55.47856175692088 - task: type: Clustering dataset: name: MTEB TwentyNewsgroupsClustering type: mteb/twentynewsgroups-clustering config: default split: test revision: 6125ec4e24fa026cec8a478383ee943acfbd5449 metrics: - type: v_measure value: 31.581473892235877 - task: type: PairClassification dataset: name: MTEB TwitterSemEval2015 type: mteb/twittersemeval2015-pairclassification config: default split: test 
revision: 70970daeab8776df92f5ea462b6173c0b46fd2d1 metrics: - type: cos_sim_accuracy value: 82.94093103653812 - type: cos_sim_ap value: 62.48963249213361 - type: cos_sim_f1 value: 58.9541137429912 - type: cos_sim_precision value: 52.05091937765205 - type: cos_sim_recall value: 67.96833773087072 - type: dot_accuracy value: 78.24998509864696 - type: dot_ap value: 40.82371294480071 - type: dot_f1 value: 44.711163153786096 - type: dot_precision value: 35.475379374419326 - type: dot_recall value: 60.4485488126649 - type: euclidean_accuracy value: 83.13166835548668 - type: euclidean_ap value: 63.459878609769774 - type: euclidean_f1 value: 60.337199569532466 - type: euclidean_precision value: 55.171659741963694 - type: euclidean_recall value: 66.56992084432719 - type: manhattan_accuracy value: 83.00649698992669 - type: manhattan_ap value: 63.263161177904905 - type: manhattan_f1 value: 60.17122874713614 - type: manhattan_precision value: 55.40750610703975 - type: manhattan_recall value: 65.8311345646438 - type: max_accuracy value: 83.13166835548668 - type: max_ap value: 63.459878609769774 - type: max_f1 value: 60.337199569532466 - task: type: PairClassification dataset: name: MTEB TwitterURLCorpus type: mteb/twitterurlcorpus-pairclassification config: default split: test revision: 8b6510b0b1fa4e4c4f879467980e9be563ec1cdf metrics: - type: cos_sim_accuracy value: 87.80416812201653 - type: cos_sim_ap value: 83.45540469219863 - type: cos_sim_f1 value: 75.58836427422892 - type: cos_sim_precision value: 71.93934335002783 - type: cos_sim_recall value: 79.62734832152756 - type: dot_accuracy value: 83.04226336011176 - type: dot_ap value: 70.63007268018524 - type: dot_f1 value: 65.35980325765405 - type: dot_precision value: 60.84677151768532 - type: dot_recall value: 70.59593470896212 - type: euclidean_accuracy value: 87.60430007373773 - type: euclidean_ap value: 83.10068502536592 - type: euclidean_f1 value: 75.02510506936439 - type: euclidean_precision value: 72.56637168141593 - type: euclidean_recall value: 77.65629812134279 - type: manhattan_accuracy value: 87.60041914076145 - type: manhattan_ap value: 83.05480769911229 - type: manhattan_f1 value: 74.98522895125554 - type: manhattan_precision value: 72.04797047970479 - type: manhattan_recall value: 78.17215891592238 - type: max_accuracy value: 87.80416812201653 - type: max_ap value: 83.45540469219863 - type: max_f1 value: 75.58836427422892 --- # shibing624/text2vec-base-multilingual This is a CoSENT(Cosine Sentence) model: shibing624/text2vec-base-multilingual. It maps sentences to a 384 dimensional dense vector space and can be used for tasks like sentence embeddings, text matching or semantic search. 
- training dataset: https://huggingface.co/datasets/shibing624/nli-zh-all/tree/main/text2vec-base-multilingual-dataset - base model: sentence-transformers/paraphrase-multilingual-MiniLM-L12-v2 - max_seq_length: 256 - best epoch: 4 - sentence embedding dim: 384 ## Evaluation For an automated evaluation of this model, see the *Evaluation Benchmark*: [text2vec](https://github.com/shibing624/text2vec) ## Languages Available languages are: de, en, es, fr, it, nl, pl, pt, ru, zh ### Release Models - 本项目release模型的中文匹配评测结果: | Arch | BaseModel | Model | ATEC | BQ | LCQMC | PAWSX | STS-B | SOHU-dd | SOHU-dc | Avg | QPS | |:-----------|:-------------------------------------------------------------|:--------------------------------------------------------------------------------------------------------------------------------------------------|:-----:|:-----:|:-----:|:-----:|:-----:|:-------:|:-------:|:---------:|:-----:| | Word2Vec | word2vec | [w2v-light-tencent-chinese](https://ai.tencent.com/ailab/nlp/en/download.html) | 20.00 | 31.49 | 59.46 | 2.57 | 55.78 | 55.04 | 20.70 | 35.03 | 23769 | | SBERT | xlm-roberta-base | [sentence-transformers/paraphrase-multilingual-MiniLM-L12-v2](https://huggingface.co/sentence-transformers/paraphrase-multilingual-MiniLM-L12-v2) | 18.42 | 38.52 | 63.96 | 10.14 | 78.90 | 63.01 | 52.28 | 46.46 | 3138 | | Instructor | hfl/chinese-roberta-wwm-ext | [moka-ai/m3e-base](https://huggingface.co/moka-ai/m3e-base) | 41.27 | 63.81 | 74.87 | 12.20 | 76.96 | 75.83 | 60.55 | 57.93 | 2980 | | CoSENT | hfl/chinese-macbert-base | [shibing624/text2vec-base-chinese](https://huggingface.co/shibing624/text2vec-base-chinese) | 31.93 | 42.67 | 70.16 | 17.21 | 79.30 | 70.27 | 50.42 | 51.61 | 3008 | | CoSENT | hfl/chinese-lert-large | [GanymedeNil/text2vec-large-chinese](https://huggingface.co/GanymedeNil/text2vec-large-chinese) | 32.61 | 44.59 | 69.30 | 14.51 | 79.44 | 73.01 | 59.04 | 53.12 | 2092 | | CoSENT | nghuyong/ernie-3.0-base-zh | [shibing624/text2vec-base-chinese-sentence](https://huggingface.co/shibing624/text2vec-base-chinese-sentence) | 43.37 | 61.43 | 73.48 | 38.90 | 78.25 | 70.60 | 53.08 | 59.87 | 3089 | | CoSENT | nghuyong/ernie-3.0-base-zh | [shibing624/text2vec-base-chinese-paraphrase](https://huggingface.co/shibing624/text2vec-base-chinese-paraphrase) | 44.89 | 63.58 | 74.24 | 40.90 | 78.93 | 76.70 | 63.30 | **63.08** | 3066 | | CoSENT | sentence-transformers/paraphrase-multilingual-MiniLM-L12-v2 | [shibing624/text2vec-base-multilingual](https://huggingface.co/shibing624/text2vec-base-multilingual) | 32.39 | 50.33 | 65.64 | 32.56 | 74.45 | 68.88 | 51.17 | 53.67 | 4004 | 说明: - 结果评测指标:spearman系数 - `shibing624/text2vec-base-chinese`模型,是用CoSENT方法训练,基于`hfl/chinese-macbert-base`在中文STS-B数据训练得到,并在中文STS-B测试集评估达到较好效果,运行[examples/training_sup_text_matching_model.py](https://github.com/shibing624/text2vec/blob/master/examples/training_sup_text_matching_model.py)代码可训练模型,模型文件已经上传HF model hub,中文通用语义匹配任务推荐使用 - `shibing624/text2vec-base-chinese-sentence`模型,是用CoSENT方法训练,基于`nghuyong/ernie-3.0-base-zh`用人工挑选后的中文STS数据集[shibing624/nli-zh-all/text2vec-base-chinese-sentence-dataset](https://huggingface.co/datasets/shibing624/nli-zh-all/tree/main/text2vec-base-chinese-sentence-dataset)训练得到,并在中文各NLI测试集评估达到较好效果,运行[examples/training_sup_text_matching_model_jsonl_data.py](https://github.com/shibing624/text2vec/blob/master/examples/training_sup_text_matching_model_jsonl_data.py)代码可训练模型,模型文件已经上传HF model hub,中文s2s(句子vs句子)语义匹配任务推荐使用 - 
`shibing624/text2vec-base-chinese-paraphrase`模型,是用CoSENT方法训练,基于`nghuyong/ernie-3.0-base-zh`用人工挑选后的中文STS数据集[shibing624/nli-zh-all/text2vec-base-chinese-paraphrase-dataset](https://huggingface.co/datasets/shibing624/nli-zh-all/tree/main/text2vec-base-chinese-paraphrase-dataset),数据集相对于[shibing624/nli-zh-all/text2vec-base-chinese-sentence-dataset](https://huggingface.co/datasets/shibing624/nli-zh-all/tree/main/text2vec-base-chinese-sentence-dataset)加入了s2p(sentence to paraphrase)数据,强化了其长文本的表征能力,并在中文各NLI测试集评估达到SOTA,运行[examples/training_sup_text_matching_model_jsonl_data.py](https://github.com/shibing624/text2vec/blob/master/examples/training_sup_text_matching_model_jsonl_data.py)代码可训练模型,模型文件已经上传HF model hub,中文s2p(句子vs段落)语义匹配任务推荐使用 - `shibing624/text2vec-base-multilingual`模型,是用CoSENT方法训练,基于`sentence-transformers/paraphrase-multilingual-MiniLM-L12-v2`用人工挑选后的多语言STS数据集[shibing624/nli-zh-all/text2vec-base-multilingual-dataset](https://huggingface.co/datasets/shibing624/nli-zh-all/tree/main/text2vec-base-multilingual-dataset)训练得到,并在中英文测试集评估相对于原模型效果有提升,运行[examples/training_sup_text_matching_model_jsonl_data.py](https://github.com/shibing624/text2vec/blob/master/examples/training_sup_text_matching_model_jsonl_data.py)代码可训练模型,模型文件已经上传HF model hub,多语言语义匹配任务推荐使用 - `w2v-light-tencent-chinese`是腾讯词向量的Word2Vec模型,CPU加载使用,适用于中文字面匹配任务和缺少数据的冷启动情况 - QPS的GPU测试环境是Tesla V100,显存32GB 模型训练实验报告:[实验报告](https://github.com/shibing624/text2vec/blob/master/docs/model_report.md) ## Usage (text2vec) Using this model becomes easy when you have [text2vec](https://github.com/shibing624/text2vec) installed: ``` pip install -U text2vec ``` Then you can use the model like this: ```python from text2vec import SentenceModel sentences = ['如何更换花呗绑定银行卡', 'How to replace the Huabei bundled bank card'] model = SentenceModel('shibing624/text2vec-base-multilingual') embeddings = model.encode(sentences) print(embeddings) ``` ## Usage (HuggingFace Transformers) Without [text2vec](https://github.com/shibing624/text2vec), you can use the model like this: First, you pass your input through the transformer model, then you have to apply the right pooling-operation on-top of the contextualized word embeddings. Install transformers: ``` pip install transformers ``` Then load model and predict: ```python from transformers import AutoTokenizer, AutoModel import torch # Mean Pooling - Take attention mask into account for correct averaging def mean_pooling(model_output, attention_mask): token_embeddings = model_output[0] # First element of model_output contains all token embeddings input_mask_expanded = attention_mask.unsqueeze(-1).expand(token_embeddings.size()).float() return torch.sum(token_embeddings * input_mask_expanded, 1) / torch.clamp(input_mask_expanded.sum(1), min=1e-9) # Load model from HuggingFace Hub tokenizer = AutoTokenizer.from_pretrained('shibing624/text2vec-base-multilingual') model = AutoModel.from_pretrained('shibing624/text2vec-base-multilingual') sentences = ['如何更换花呗绑定银行卡', 'How to replace the Huabei bundled bank card'] # Tokenize sentences encoded_input = tokenizer(sentences, padding=True, truncation=True, return_tensors='pt') # Compute token embeddings with torch.no_grad(): model_output = model(**encoded_input) # Perform pooling. In this case, mean pooling. 
sentence_embeddings = mean_pooling(model_output, encoded_input['attention_mask']) print("Sentence embeddings:") print(sentence_embeddings) ``` ## Usage (sentence-transformers) [sentence-transformers](https://github.com/UKPLab/sentence-transformers) is a popular library for computing dense vector representations of sentences. Install sentence-transformers: ``` pip install -U sentence-transformers ``` Then load the model and predict: ```python from sentence_transformers import SentenceTransformer m = SentenceTransformer("shibing624/text2vec-base-multilingual") sentences = ['如何更换花呗绑定银行卡', 'How to replace the Huabei bundled bank card'] sentence_embeddings = m.encode(sentences) print("Sentence embeddings:") print(sentence_embeddings) ``` ## Full Model Architecture ``` CoSENT( (0): Transformer({'max_seq_length': 256, 'do_lower_case': False}) with Transformer model: BertModel (1): Pooling({'word_embedding_dimension': 384, 'pooling_mode_mean_tokens': True}) ) ``` ## Intended uses Our model is intended to be used as a sentence and short-paragraph encoder. Given an input text, it outputs a vector that captures its semantic information. The sentence vector may be used for information retrieval, clustering, or sentence-similarity tasks. By default, input text longer than 256 word pieces is truncated. ## Training procedure ### Pre-training We use the pretrained [`sentence-transformers/paraphrase-multilingual-MiniLM-L12-v2`](https://huggingface.co/sentence-transformers/paraphrase-multilingual-MiniLM-L12-v2) model. Please refer to its model card for more detailed information about the pre-training procedure. ### Fine-tuning We fine-tune the model using a contrastive objective. Formally, we compute the cosine similarity for every possible sentence pair in the batch, then apply a rank loss that compares the scores of true pairs against those of false pairs. ## Citing & Authors This model was trained by [text2vec](https://github.com/shibing624/text2vec). If you find this model helpful, feel free to cite: ```bibtex @software{text2vec, author = {Ming Xu}, title = {text2vec: A Tool for Text to Vector}, year = {2023}, url = {https://github.com/shibing624/text2vec}, } ```
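The contrastive fine-tuning described in the Training procedure section above is a CoSENT-style rank loss. A minimal PyTorch sketch of that objective is given below; the scale factor of 20 and the exact reduction are assumptions, so see the text2vec repository for the authoritative implementation.

```python
import torch

def cosent_loss(cos_sim: torch.Tensor, labels: torch.Tensor, scale: float = 20.0) -> torch.Tensor:
    """Rank loss over a batch of sentence pairs.

    cos_sim: (batch,) cosine similarity of each sentence pair
    labels:  (batch,) 1 for true (similar) pairs, 0 for false pairs
    """
    scores = cos_sim * scale
    # scores[j] - scores[i] for every ordered combination (i, j)
    diff = scores[None, :] - scores[:, None]
    # keep only combinations where pair i is true and pair j is false:
    # any false pair scored close to (or above) a true pair is penalised
    diff = diff[labels[:, None] > labels[None, :]]
    # log(1 + sum(exp(diff))), written with a prepended zero so the loss is >= 0
    diff = torch.cat([torch.zeros(1, dtype=diff.dtype, device=diff.device), diff])
    return torch.logsumexp(diff, dim=0)

# toy usage: two true pairs and two false pairs
loss = cosent_loss(torch.tensor([0.9, 0.7, 0.4, 0.2]), torch.tensor([1, 1, 0, 0]))
print(loss)
```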
[ "BIOSSES" ]
microsoft/BiomedNLP-BiomedBERT-base-uncased-abstract-fulltext
microsoft
fill-mask
[ "transformers", "pytorch", "jax", "bert", "fill-mask", "exbert", "en", "arxiv:2007.15779", "license:mit", "autotrain_compatible", "endpoints_compatible", "region:us" ]
"2022-03-02T23:29:05Z"
2023-11-06T18:03:43+00:00
124,839
227
--- language: en license: mit tags: - exbert widget: - text: '[MASK] is a tumor suppressor gene.' --- ## MSR BiomedBERT (abstracts + full text) <div style="border: 2px solid orange; border-radius:10px; padding:0px 10px; width: fit-content;"> * This model was previously named **"PubMedBERT (abstracts + full text)"**. * You can either adopt the new model name "microsoft/BiomedNLP-BiomedBERT-base-uncased-abstract-fulltext" or update your `transformers` library to version 4.22+ if you need to refer to the old name. </div> Pretraining large neural language models, such as BERT, has led to impressive gains on many natural language processing (NLP) tasks. However, most pretraining efforts focus on general domain corpora, such as newswire and Web. A prevailing assumption is that even domain-specific pretraining can benefit by starting from general-domain language models. [Recent work](https://arxiv.org/abs/2007.15779) shows that for domains with abundant unlabeled text, such as biomedicine, pretraining language models from scratch results in substantial gains over continual pretraining of general-domain language models. BiomedBERT is pretrained from scratch using _abstracts_ from [PubMed](https://pubmed.ncbi.nlm.nih.gov/) and _full-text_ articles from [PubMedCentral](https://www.ncbi.nlm.nih.gov/pmc/). This model achieves state-of-the-art performance on many biomedical NLP tasks, and currently holds the top score on the [Biomedical Language Understanding and Reasoning Benchmark](https://aka.ms/BLURB). ## Citation If you find BiomedBERT useful in your research, please cite the following paper: ```latex @misc{pubmedbert, author = {Yu Gu and Robert Tinn and Hao Cheng and Michael Lucas and Naoto Usuyama and Xiaodong Liu and Tristan Naumann and Jianfeng Gao and Hoifung Poon}, title = {Domain-Specific Language Model Pretraining for Biomedical Natural Language Processing}, year = {2020}, eprint = {arXiv:2007.15779}, } ``` <a href="https://huggingface.co/exbert/?model=microsoft/BiomedNLP-PubMedBERT-base-uncased-abstract-fulltext&modelKind=bidirectional&sentence=Gefitinib%20is%20an%20EGFR%20tyrosine%20kinase%20inhibitor,%20which%20is%20often%20used%20for%20breast%20cancer%20and%20NSCLC%20treatment.&layer=3&heads=..0,1,2,3,4,5,6,7,8,9,10,11&threshold=0.7&tokenInd=17&tokenSide=right&maskInds=..&hideClsSep=true"> <img width="300px" src="https://cdn-media.huggingface.co/exbert/button.png"> </a>
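The card above does not include a usage snippet; as a minimal, hypothetical illustration (the prompt reuses the widget text from the metadata), the checkpoint can be queried with the standard `transformers` fill-mask pipeline:

```python
from transformers import pipeline

# Load the checkpoint through the generic fill-mask pipeline
fill_mask = pipeline(
    "fill-mask",
    model="microsoft/BiomedNLP-BiomedBERT-base-uncased-abstract-fulltext",
)

# Ask the model to complete a masked biomedical statement
for prediction in fill_mask("[MASK] is a tumor suppressor gene."):
    print(prediction["token_str"], round(prediction["score"], 4))
```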
[ "BLURB" ]
BAAI/bge-base-zh-v1.5
BAAI
feature-extraction
[ "sentence-transformers", "pytorch", "bert", "feature-extraction", "sentence-similarity", "transformers", "zh", "arxiv:2310.07554", "arxiv:2309.07597", "license:mit", "autotrain_compatible", "text-embeddings-inference", "endpoints_compatible", "region:us" ]
"2023-09-12T05:21:53Z"
2023-10-12T03:35:51+00:00
122,857
78
--- language: - zh license: mit tags: - sentence-transformers - feature-extraction - sentence-similarity - transformers --- <h1 align="center">FlagEmbedding</h1> <h4 align="center"> <p> <a href=#model-list>Model List</a> | <a href=#frequently-asked-questions>FAQ</a> | <a href=#usage>Usage</a> | <a href="#evaluation">Evaluation</a> | <a href="#train">Train</a> | <a href="#contact">Contact</a> | <a href="#citation">Citation</a> | <a href="#license">License</a> <p> </h4> For more details, please refer to our GitHub repository: [FlagEmbedding](https://github.com/FlagOpen/FlagEmbedding). [English](README.md) | [中文](https://github.com/FlagOpen/FlagEmbedding/blob/master/README_zh.md) FlagEmbedding can map any text to a low-dimensional dense vector, which can be used for tasks like retrieval, classification, clustering, or semantic search. It can also be used in vector databases for LLMs. ************* 🌟**Updates**🌟 ************* - 10/12/2023: Release [LLM-Embedder](./FlagEmbedding/llm_embedder/README.md), a unified embedding model to support diverse retrieval augmentation needs for LLMs. [Paper](https://arxiv.org/pdf/2310.07554.pdf) :fire: - 09/15/2023: The [technical report](https://arxiv.org/pdf/2309.07597.pdf) of BGE has been released - 09/15/2023: The [massive training data](https://data.baai.ac.cn/details/BAAI-MTP) of BGE has been released - 09/12/2023: New models: - **New reranker models**: release the cross-encoder models `BAAI/bge-reranker-base` and `BAAI/bge-reranker-large`, which are more powerful than the embedding models. We recommend using/fine-tuning them to re-rank the top-k documents returned by embedding models. - **Updated embedding models**: release the `bge-*-v1.5` embedding models to alleviate the issue of the similarity distribution and enhance retrieval ability without instruction. <details> <summary>More</summary> <!-- ### More --> - 09/07/2023: Update [fine-tune code](https://github.com/FlagOpen/FlagEmbedding/blob/master/FlagEmbedding/baai_general_embedding/README.md): add a script to mine hard negatives and support adding an instruction during fine-tuning. - 08/09/2023: BGE models are integrated into **Langchain**; you can use them like [this](#using-langchain). The C-MTEB **leaderboard** is [available](https://huggingface.co/spaces/mteb/leaderboard). - 08/05/2023: Release base-scale and small-scale models, **best performance among the models of the same size 🤗** - 08/02/2023: Release `bge-large-*` (short for BAAI General Embedding) models, **rank 1st on the MTEB and C-MTEB benchmarks!** :tada: :tada: - 08/01/2023: We release the [Chinese Massive Text Embedding Benchmark](https://github.com/FlagOpen/FlagEmbedding/blob/master/C_MTEB) (**C-MTEB**), consisting of 31 test datasets. </details> ## Model List `bge` is short for `BAAI general embedding`.
| Model | Language | | Description | query instruction for retrieval [1] | |:-------------------------------|:--------:| :--------:| :--------:|:--------:| | [BAAI/llm-embedder](https://huggingface.co/BAAI/llm-embedder) | English | [Inference](./FlagEmbedding/llm_embedder/README.md) [Fine-tune](./FlagEmbedding/llm_embedder/README.md) | a unified embedding model to support diverse retrieval augmentation needs for LLMs | See [README](./FlagEmbedding/llm_embedder/README.md) | | [BAAI/bge-reranker-large](https://huggingface.co/BAAI/bge-reranker-large) | Chinese and English | [Inference](#usage-for-reranker) [Fine-tune](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/reranker) | a cross-encoder model which is more accurate but less efficient [2] | | | [BAAI/bge-reranker-base](https://huggingface.co/BAAI/bge-reranker-base) | Chinese and English | [Inference](#usage-for-reranker) [Fine-tune](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/reranker) | a cross-encoder model which is more accurate but less efficient [2] | | | [BAAI/bge-large-en-v1.5](https://huggingface.co/BAAI/bge-large-en-v1.5) | English | [Inference](#usage-for-embedding-model) [Fine-tune](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/finetune) | version 1.5 with more reasonable similarity distribution | `Represent this sentence for searching relevant passages: ` | | [BAAI/bge-base-en-v1.5](https://huggingface.co/BAAI/bge-base-en-v1.5) | English | [Inference](#usage-for-embedding-model) [Fine-tune](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/finetune) | version 1.5 with more reasonable similarity distribution | `Represent this sentence for searching relevant passages: ` | | [BAAI/bge-small-en-v1.5](https://huggingface.co/BAAI/bge-small-en-v1.5) | English | [Inference](#usage-for-embedding-model) [Fine-tune](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/finetune) | version 1.5 with more reasonable similarity distribution | `Represent this sentence for searching relevant passages: ` | | [BAAI/bge-large-zh-v1.5](https://huggingface.co/BAAI/bge-large-zh-v1.5) | Chinese | [Inference](#usage-for-embedding-model) [Fine-tune](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/finetune) | version 1.5 with more reasonable similarity distribution | `为这个句子生成表示以用于检索相关文章:` | | [BAAI/bge-base-zh-v1.5](https://huggingface.co/BAAI/bge-base-zh-v1.5) | Chinese | [Inference](#usage-for-embedding-model) [Fine-tune](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/finetune) | version 1.5 with more reasonable similarity distribution | `为这个句子生成表示以用于检索相关文章:` | | [BAAI/bge-small-zh-v1.5](https://huggingface.co/BAAI/bge-small-zh-v1.5) | Chinese | [Inference](#usage-for-embedding-model) [Fine-tune](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/finetune) | version 1.5 with more reasonable similarity distribution | `为这个句子生成表示以用于检索相关文章:` | | [BAAI/bge-large-en](https://huggingface.co/BAAI/bge-large-en) | English | [Inference](#usage-for-embedding-model) [Fine-tune](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/finetune) | :trophy: rank **1st** in [MTEB](https://huggingface.co/spaces/mteb/leaderboard) leaderboard | `Represent this sentence for searching relevant passages: ` | | [BAAI/bge-base-en](https://huggingface.co/BAAI/bge-base-en) | English | [Inference](#usage-for-embedding-model) [Fine-tune](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/finetune) | a base-scale model but with similar ability to 
`bge-large-en` | `Represent this sentence for searching relevant passages: ` | | [BAAI/bge-small-en](https://huggingface.co/BAAI/bge-small-en) | English | [Inference](#usage-for-embedding-model) [Fine-tune](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/finetune) |a small-scale model but with competitive performance | `Represent this sentence for searching relevant passages: ` | | [BAAI/bge-large-zh](https://huggingface.co/BAAI/bge-large-zh) | Chinese | [Inference](#usage-for-embedding-model) [Fine-tune](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/finetune) | :trophy: rank **1st** in [C-MTEB](https://github.com/FlagOpen/FlagEmbedding/tree/master/C_MTEB) benchmark | `为这个句子生成表示以用于检索相关文章:` | | [BAAI/bge-base-zh](https://huggingface.co/BAAI/bge-base-zh) | Chinese | [Inference](#usage-for-embedding-model) [Fine-tune](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/finetune) | a base-scale model but with similar ability to `bge-large-zh` | `为这个句子生成表示以用于检索相关文章:` | | [BAAI/bge-small-zh](https://huggingface.co/BAAI/bge-small-zh) | Chinese | [Inference](#usage-for-embedding-model) [Fine-tune](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/finetune) | a small-scale model but with competitive performance | `为这个句子生成表示以用于检索相关文章:` | [1\]: If you need to search the relevant passages to a query, we suggest to add the instruction to the query; in other cases, no instruction is needed, just use the original query directly. In all cases, **no instruction** needs to be added to passages. [2\]: Different from embedding model, reranker uses question and document as input and directly output similarity instead of embedding. To balance the accuracy and time cost, cross-encoder is widely used to re-rank top-k documents retrieved by other simple models. For examples, use bge embedding model to retrieve top 100 relevant documents, and then use bge reranker to re-rank the top 100 document to get the final top-3 results. All models have been uploaded to Huggingface Hub, and you can see them at https://huggingface.co/BAAI. If you cannot open the Huggingface Hub, you also can download the models at https://model.baai.ac.cn/models . ## Frequently asked questions <details> <summary>1. How to fine-tune bge embedding model?</summary> <!-- ### How to fine-tune bge embedding model? --> Following this [example](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/finetune) to prepare data and fine-tune your model. Some suggestions: - Mine hard negatives following this [example](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/finetune#hard-negatives), which can improve the retrieval performance. - If you pre-train bge on your data, the pre-trained model cannot be directly used to calculate similarity, and it must be fine-tuned with contrastive learning before computing similarity. - If the accuracy of the fine-tuned model is still not high, it is recommended to use/fine-tune the cross-encoder model (bge-reranker) to re-rank top-k results. Hard negatives also are needed to fine-tune reranker. </details> <details> <summary>2. 
The similarity score between two dissimilar sentences is higher than 0.5</summary> <!-- ### The similarity score between two dissimilar sentences is higher than 0.5 --> **Suggest to use bge v1.5, which alleviates the issue of the similarity distribution.** Since we finetune the models by contrastive learning with a temperature of 0.01, the similarity distribution of the current BGE model is about in the interval \[0.6, 1\]. So a similarity score greater than 0.5 does not indicate that the two sentences are similar. For downstream tasks, such as passage retrieval or semantic similarity, **what matters is the relative order of the scores, not the absolute value.** If you need to filter similar sentences based on a similarity threshold, please select an appropriate similarity threshold based on the similarity distribution on your data (such as 0.8, 0.85, or even 0.9). </details> <details> <summary>3. When does the query instruction need to be used</summary> <!-- ### When does the query instruction need to be used --> For the `bge-*-v1.5`, we improve its retrieval ability when not using instruction. No instruction only has a slight degradation in retrieval performance compared with using instruction. So you can generate embedding without instruction in all cases for convenience. For a retrieval task that uses short queries to find long related documents, it is recommended to add instructions for these short queries. **The best method to decide whether to add instructions for queries is choosing the setting that achieves better performance on your task.** In all cases, the documents/passages do not need to add the instruction. </details> ## Usage ### Usage for Embedding Model Here are some examples for using `bge` models with [FlagEmbedding](#using-flagembedding), [Sentence-Transformers](#using-sentence-transformers), [Langchain](#using-langchain), or [Huggingface Transformers](#using-huggingface-transformers). #### Using FlagEmbedding ``` pip install -U FlagEmbedding ``` If it doesn't work for you, you can see [FlagEmbedding](https://github.com/FlagOpen/FlagEmbedding/blob/master/FlagEmbedding/baai_general_embedding/README.md) for more methods to install FlagEmbedding. ```python from FlagEmbedding import FlagModel sentences_1 = ["样例数据-1", "样例数据-2"] sentences_2 = ["样例数据-3", "样例数据-4"] model = FlagModel('BAAI/bge-large-zh-v1.5', query_instruction_for_retrieval="为这个句子生成表示以用于检索相关文章:", use_fp16=True) # Setting use_fp16 to True speeds up computation with a slight performance degradation embeddings_1 = model.encode(sentences_1) embeddings_2 = model.encode(sentences_2) similarity = embeddings_1 @ embeddings_2.T print(similarity) # for s2p(short query to long passage) retrieval task, suggest to use encode_queries() which will automatically add the instruction to each query # corpus in retrieval task can still use encode() or encode_corpus(), since they don't need instruction queries = ['query_1', 'query_2'] passages = ["样例文档-1", "样例文档-2"] q_embeddings = model.encode_queries(queries) p_embeddings = model.encode(passages) scores = q_embeddings @ p_embeddings.T ``` For the value of the argument `query_instruction_for_retrieval`, see [Model List](https://github.com/FlagOpen/FlagEmbedding/tree/master#model-list). By default, FlagModel will use all available GPUs when encoding. Please set `os.environ["CUDA_VISIBLE_DEVICES"]` to select specific GPUs. You also can set `os.environ["CUDA_VISIBLE_DEVICES"]=""` to make all GPUs unavailable. 
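The similarity matrices produced by the snippet above are plain NumPy arrays; if you need hard similar/dissimilar decisions, the FAQ above recommends picking a threshold from your own score distribution. A minimal sketch follows, in which the matrix values and the 0.85 threshold are placeholders:

```python
import numpy as np

# stand-in for embeddings_1 @ embeddings_2.T from the snippet above (values are illustrative only)
similarity = np.array([[0.92, 0.61],
                       [0.58, 0.88]])

threshold = 0.85  # pick this from the score distribution on your own data (e.g. 0.8-0.9)
similar_pairs = np.argwhere(similarity >= threshold)
print(similar_pairs)  # row/column index pairs treated as "similar"
```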
#### Using Sentence-Transformers You can also use the `bge` models with [sentence-transformers](https://www.SBERT.net): ``` pip install -U sentence-transformers ``` ```python from sentence_transformers import SentenceTransformer sentences_1 = ["样例数据-1", "样例数据-2"] sentences_2 = ["样例数据-3", "样例数据-4"] model = SentenceTransformer('BAAI/bge-large-zh-v1.5') embeddings_1 = model.encode(sentences_1, normalize_embeddings=True) embeddings_2 = model.encode(sentences_2, normalize_embeddings=True) similarity = embeddings_1 @ embeddings_2.T print(similarity) ``` For s2p(short query to long passage) retrieval task, each short query should start with an instruction (instructions see [Model List](https://github.com/FlagOpen/FlagEmbedding/tree/master#model-list)). But the instruction is not needed for passages. ```python from sentence_transformers import SentenceTransformer queries = ['query_1', 'query_2'] passages = ["样例文档-1", "样例文档-2"] instruction = "为这个句子生成表示以用于检索相关文章:" model = SentenceTransformer('BAAI/bge-large-zh-v1.5') q_embeddings = model.encode([instruction+q for q in queries], normalize_embeddings=True) p_embeddings = model.encode(passages, normalize_embeddings=True) scores = q_embeddings @ p_embeddings.T ``` #### Using Langchain You can use `bge` in langchain like this: ```python from langchain.embeddings import HuggingFaceBgeEmbeddings model_name = "BAAI/bge-large-en-v1.5" model_kwargs = {'device': 'cuda'} encode_kwargs = {'normalize_embeddings': True} # set True to compute cosine similarity model = HuggingFaceBgeEmbeddings( model_name=model_name, model_kwargs=model_kwargs, encode_kwargs=encode_kwargs, query_instruction="为这个句子生成表示以用于检索相关文章:" ) model.query_instruction = "为这个句子生成表示以用于检索相关文章:" ``` #### Using HuggingFace Transformers With the transformers package, you can use the model like this: First, you pass your input through the transformer model, then you select the last hidden state of the first token (i.e., [CLS]) as the sentence embedding. ```python from transformers import AutoTokenizer, AutoModel import torch # Sentences we want sentence embeddings for sentences = ["样例数据-1", "样例数据-2"] # Load model from HuggingFace Hub tokenizer = AutoTokenizer.from_pretrained('BAAI/bge-large-zh-v1.5') model = AutoModel.from_pretrained('BAAI/bge-large-zh-v1.5') model.eval() # Tokenize sentences encoded_input = tokenizer(sentences, padding=True, truncation=True, return_tensors='pt') # for s2p(short query to long passage) retrieval task, add an instruction to query (not add instruction for passages) # encoded_input = tokenizer([instruction + q for q in queries], padding=True, truncation=True, return_tensors='pt') # Compute token embeddings with torch.no_grad(): model_output = model(**encoded_input) # Perform pooling. In this case, cls pooling. sentence_embeddings = model_output[0][:, 0] # normalize embeddings sentence_embeddings = torch.nn.functional.normalize(sentence_embeddings, p=2, dim=1) print("Sentence embeddings:", sentence_embeddings) ``` ### Usage for Reranker Different from embedding model, reranker uses question and document as input and directly output similarity instead of embedding. You can get a relevance score by inputting query and passage to the reranker. The reranker is optimized based cross-entropy loss, so the relevance score is not bounded to a specific range. 
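Because the reranker is trained with a cross-entropy objective over a single logit, the raw score is unbounded. If a probability-like value in (0, 1) is more convenient, a common convention (not part of the original card) is to pass the score through a sigmoid:

```python
import torch

# raw reranker logits, e.g. as returned by the snippets below (values are for illustration only)
raw_scores = torch.tensor([-3.2, 1.7, 5.4])
probabilities = torch.sigmoid(raw_scores)  # squashes unbounded logits into (0, 1)
print(probabilities)
```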
#### Using FlagEmbedding ``` pip install -U FlagEmbedding ``` Get relevance scores (higher scores indicate more relevance): ```python from FlagEmbedding import FlagReranker reranker = FlagReranker('BAAI/bge-reranker-large', use_fp16=True) # Setting use_fp16 to True speeds up computation with a slight performance degradation score = reranker.compute_score(['query', 'passage']) print(score) scores = reranker.compute_score([['what is panda?', 'hi'], ['what is panda?', 'The giant panda (Ailuropoda melanoleuca), sometimes called a panda bear or simply panda, is a bear species endemic to China.']]) print(scores) ``` #### Using Huggingface transformers ```python import torch from transformers import AutoModelForSequenceClassification, AutoTokenizer tokenizer = AutoTokenizer.from_pretrained('BAAI/bge-reranker-large') model = AutoModelForSequenceClassification.from_pretrained('BAAI/bge-reranker-large') model.eval() pairs = [['what is panda?', 'hi'], ['what is panda?', 'The giant panda (Ailuropoda melanoleuca), sometimes called a panda bear or simply panda, is a bear species endemic to China.']] with torch.no_grad(): inputs = tokenizer(pairs, padding=True, truncation=True, return_tensors='pt', max_length=512) scores = model(**inputs, return_dict=True).logits.view(-1, ).float() print(scores) ``` ## Evaluation `baai-general-embedding` models achieve **state-of-the-art performance on both MTEB and C-MTEB leaderboard!** For more details and evaluation tools see our [scripts](https://github.com/FlagOpen/FlagEmbedding/blob/master/C_MTEB/README.md). - **MTEB**: | Model Name | Dimension | Sequence Length | Average (56) | Retrieval (15) |Clustering (11) | Pair Classification (3) | Reranking (4) | STS (10) | Summarization (1) | Classification (12) | |:----:|:---:|:---:|:---:|:---:|:---:|:---:|:---:|:---:|:---:|:---:| | [BAAI/bge-large-en-v1.5](https://huggingface.co/BAAI/bge-large-en-v1.5) | 1024 | 512 | **64.23** | **54.29** | 46.08 | 87.12 | 60.03 | 83.11 | 31.61 | 75.97 | | [BAAI/bge-base-en-v1.5](https://huggingface.co/BAAI/bge-base-en-v1.5) | 768 | 512 | 63.55 | 53.25 | 45.77 | 86.55 | 58.86 | 82.4 | 31.07 | 75.53 | | [BAAI/bge-small-en-v1.5](https://huggingface.co/BAAI/bge-small-en-v1.5) | 384 | 512 | 62.17 |51.68 | 43.82 | 84.92 | 58.36 | 81.59 | 30.12 | 74.14 | | [bge-large-en](https://huggingface.co/BAAI/bge-large-en) | 1024 | 512 | 63.98 | 53.9 | 46.98 | 85.8 | 59.48 | 81.56 | 32.06 | 76.21 | | [bge-base-en](https://huggingface.co/BAAI/bge-base-en) | 768 | 512 | 63.36 | 53.0 | 46.32 | 85.86 | 58.7 | 81.84 | 29.27 | 75.27 | | [gte-large](https://huggingface.co/thenlper/gte-large) | 1024 | 512 | 63.13 | 52.22 | 46.84 | 85.00 | 59.13 | 83.35 | 31.66 | 73.33 | | [gte-base](https://huggingface.co/thenlper/gte-base) | 768 | 512 | 62.39 | 51.14 | 46.2 | 84.57 | 58.61 | 82.3 | 31.17 | 73.01 | | [e5-large-v2](https://huggingface.co/intfloat/e5-large-v2) | 1024| 512 | 62.25 | 50.56 | 44.49 | 86.03 | 56.61 | 82.05 | 30.19 | 75.24 | | [bge-small-en](https://huggingface.co/BAAI/bge-small-en) | 384 | 512 | 62.11 | 51.82 | 44.31 | 83.78 | 57.97 | 80.72 | 30.53 | 74.37 | | [instructor-xl](https://huggingface.co/hkunlp/instructor-xl) | 768 | 512 | 61.79 | 49.26 | 44.74 | 86.62 | 57.29 | 83.06 | 32.32 | 61.79 | | [e5-base-v2](https://huggingface.co/intfloat/e5-base-v2) | 768 | 512 | 61.5 | 50.29 | 43.80 | 85.73 | 55.91 | 81.05 | 30.28 | 73.84 | | [gte-small](https://huggingface.co/thenlper/gte-small) | 384 | 512 | 61.36 | 49.46 | 44.89 | 83.54 | 57.7 | 82.07 | 30.42 | 72.31 | | 
[text-embedding-ada-002](https://platform.openai.com/docs/guides/embeddings) | 1536 | 8192 | 60.99 | 49.25 | 45.9 | 84.89 | 56.32 | 80.97 | 30.8 | 70.93 | | [e5-small-v2](https://huggingface.co/intfloat/e5-base-v2) | 384 | 512 | 59.93 | 49.04 | 39.92 | 84.67 | 54.32 | 80.39 | 31.16 | 72.94 | | [sentence-t5-xxl](https://huggingface.co/sentence-transformers/sentence-t5-xxl) | 768 | 512 | 59.51 | 42.24 | 43.72 | 85.06 | 56.42 | 82.63 | 30.08 | 73.42 | | [all-mpnet-base-v2](https://huggingface.co/sentence-transformers/all-mpnet-base-v2) | 768 | 514 | 57.78 | 43.81 | 43.69 | 83.04 | 59.36 | 80.28 | 27.49 | 65.07 | | [sgpt-bloom-7b1-msmarco](https://huggingface.co/bigscience/sgpt-bloom-7b1-msmarco) | 4096 | 2048 | 57.59 | 48.22 | 38.93 | 81.9 | 55.65 | 77.74 | 33.6 | 66.19 | - **C-MTEB**: We create the benchmark C-MTEB for Chinese text embedding which consists of 31 datasets from 6 tasks. Please refer to [C_MTEB](https://github.com/FlagOpen/FlagEmbedding/blob/master/C_MTEB/README.md) for a detailed introduction. | Model | Embedding dimension | Avg | Retrieval | STS | PairClassification | Classification | Reranking | Clustering | |:-------------------------------|:--------:|:--------:|:--------:|:--------:|:--------:|:--------:|:--------:|:--------:| | [**BAAI/bge-large-zh-v1.5**](https://huggingface.co/BAAI/bge-large-zh-v1.5) | 1024 | **64.53** | 70.46 | 56.25 | 81.6 | 69.13 | 65.84 | 48.99 | | [BAAI/bge-base-zh-v1.5](https://huggingface.co/BAAI/bge-base-zh-v1.5) | 768 | 63.13 | 69.49 | 53.72 | 79.75 | 68.07 | 65.39 | 47.53 | | [BAAI/bge-small-zh-v1.5](https://huggingface.co/BAAI/bge-small-zh-v1.5) | 512 | 57.82 | 61.77 | 49.11 | 70.41 | 63.96 | 60.92 | 44.18 | | [BAAI/bge-large-zh](https://huggingface.co/BAAI/bge-large-zh) | 1024 | 64.20 | 71.53 | 54.98 | 78.94 | 68.32 | 65.11 | 48.39 | | [bge-large-zh-noinstruct](https://huggingface.co/BAAI/bge-large-zh-noinstruct) | 1024 | 63.53 | 70.55 | 53 | 76.77 | 68.58 | 64.91 | 50.01 | | [BAAI/bge-base-zh](https://huggingface.co/BAAI/bge-base-zh) | 768 | 62.96 | 69.53 | 54.12 | 77.5 | 67.07 | 64.91 | 47.63 | | [multilingual-e5-large](https://huggingface.co/intfloat/multilingual-e5-large) | 1024 | 58.79 | 63.66 | 48.44 | 69.89 | 67.34 | 56.00 | 48.23 | | [BAAI/bge-small-zh](https://huggingface.co/BAAI/bge-small-zh) | 512 | 58.27 | 63.07 | 49.45 | 70.35 | 63.64 | 61.48 | 45.09 | | [m3e-base](https://huggingface.co/moka-ai/m3e-base) | 768 | 57.10 | 56.91 | 50.47 | 63.99 | 67.52 | 59.34 | 47.68 | | [m3e-large](https://huggingface.co/moka-ai/m3e-large) | 1024 | 57.05 | 54.75 | 50.42 | 64.3 | 68.2 | 59.66 | 48.88 | | [multilingual-e5-base](https://huggingface.co/intfloat/multilingual-e5-base) | 768 | 55.48 | 61.63 | 46.49 | 67.07 | 65.35 | 54.35 | 40.68 | | [multilingual-e5-small](https://huggingface.co/intfloat/multilingual-e5-small) | 384 | 55.38 | 59.95 | 45.27 | 66.45 | 65.85 | 53.86 | 45.26 | | [text-embedding-ada-002(OpenAI)](https://platform.openai.com/docs/guides/embeddings/what-are-embeddings) | 1536 | 53.02 | 52.0 | 43.35 | 69.56 | 64.31 | 54.28 | 45.68 | | [luotuo](https://huggingface.co/silk-road/luotuo-bert-medium) | 1024 | 49.37 | 44.4 | 42.78 | 66.62 | 61 | 49.25 | 44.39 | | [text2vec-base](https://huggingface.co/shibing624/text2vec-base-chinese) | 768 | 47.63 | 38.79 | 43.41 | 67.41 | 62.19 | 49.45 | 37.66 | | [text2vec-large](https://huggingface.co/GanymedeNil/text2vec-large-chinese) | 1024 | 47.36 | 41.94 | 44.97 | 70.86 | 60.66 | 49.16 | 30.02 | - **Reranking**: See [C_MTEB](https://github.com/FlagOpen/FlagEmbedding/blob/master/C_MTEB/) for 
evaluation script. | Model | T2Reranking | T2RerankingZh2En\* | T2RerankingEn2Zh\* | MMarcoReranking | CMedQAv1 | CMedQAv2 | Avg | |:-------------------------------|:--------:|:--------:|:--------:|:--------:|:--------:|:--------:|:--------:| | text2vec-base-multilingual | 64.66 | 62.94 | 62.51 | 14.37 | 48.46 | 48.6 | 50.26 | | multilingual-e5-small | 65.62 | 60.94 | 56.41 | 29.91 | 67.26 | 66.54 | 57.78 | | multilingual-e5-large | 64.55 | 61.61 | 54.28 | 28.6 | 67.42 | 67.92 | 57.4 | | multilingual-e5-base | 64.21 | 62.13 | 54.68 | 29.5 | 66.23 | 66.98 | 57.29 | | m3e-base | 66.03 | 62.74 | 56.07 | 17.51 | 77.05 | 76.76 | 59.36 | | m3e-large | 66.13 | 62.72 | 56.1 | 16.46 | 77.76 | 78.27 | 59.57 | | bge-base-zh-v1.5 | 66.49 | 63.25 | 57.02 | 29.74 | 80.47 | 84.88 | 63.64 | | bge-large-zh-v1.5 | 65.74 | 63.39 | 57.03 | 28.74 | 83.45 | 85.44 | 63.97 | | [BAAI/bge-reranker-base](https://huggingface.co/BAAI/bge-reranker-base) | 67.28 | 63.95 | 60.45 | 35.46 | 81.26 | 84.1 | 65.42 | | [BAAI/bge-reranker-large](https://huggingface.co/BAAI/bge-reranker-large) | 67.6 | 64.03 | 61.44 | 37.16 | 82.15 | 84.18 | 66.09 | \* : T2RerankingZh2En and T2RerankingEn2Zh are cross-language retrieval tasks ## Train ### BAAI Embedding We pre-train the models using [retromae](https://github.com/staoxiao/RetroMAE) and then train them on large-scale paired data using contrastive learning. **You can fine-tune the embedding model on your data following our [examples](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/finetune).** We also provide a [pre-train example](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/pretrain). Note that the goal of pre-training is to reconstruct the text; the pre-trained model cannot be used for similarity calculation directly and needs to be fine-tuned first. For more training details on bge, see [baai_general_embedding](https://github.com/FlagOpen/FlagEmbedding/blob/master/FlagEmbedding/baai_general_embedding/README.md). ### BGE Reranker A cross-encoder performs full attention over the input pair, which makes it more accurate than an embedding model (i.e., a bi-encoder) but also more time-consuming. It can therefore be used to re-rank the top-k documents returned by the embedding model. We train the cross-encoder on multilingual paired data; the data format is the same as for the embedding model, so you can fine-tune it easily following our [example](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/reranker). For more details, please refer to [./FlagEmbedding/reranker/README.md](https://github.com/FlagOpen/FlagEmbedding/tree/master/FlagEmbedding/reranker) ## Contact If you have any questions or suggestions related to this project, feel free to open an issue or pull request. You can also email Shitao Xiao([email protected]) and Zheng Liu([email protected]). ## Citation If you find this repository useful, please consider giving it a star :star: and a citation ``` @misc{bge_embedding, title={C-Pack: Packaged Resources To Advance General Chinese Embedding}, author={Shitao Xiao and Zheng Liu and Peitian Zhang and Niklas Muennighoff}, year={2023}, eprint={2309.07597}, archivePrefix={arXiv}, primaryClass={cs.CL} } ``` ## License FlagEmbedding is licensed under the [MIT License](https://github.com/FlagOpen/FlagEmbedding/blob/master/LICENSE). The released models can be used for commercial purposes free of charge.
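To make the retrieve-then-rerank workflow described above concrete, here is a minimal sketch (not part of the original card) using `sentence-transformers`; the model names come from the tables above, while the corpus, query, and top-k value are illustrative assumptions.

```python
# Minimal retrieve-then-rerank sketch; corpus, query, and top-k are illustrative assumptions.
from sentence_transformers import SentenceTransformer, CrossEncoder

corpus = [
    "BGE embedding models map sentences to dense vectors.",
    "A cross-encoder scores a query-document pair jointly.",
    "Paris is the capital of France.",
]
query = "How does a reranker score documents?"

# Stage 1: bi-encoder retrieval (cheap, scales to large corpora).
# Pick the bge embedding model that matches your language (names as in the tables above).
embedder = SentenceTransformer("BAAI/bge-base-zh-v1.5")
doc_emb = embedder.encode(corpus, normalize_embeddings=True)
query_emb = embedder.encode(query, normalize_embeddings=True)
top_k = (doc_emb @ query_emb).argsort()[::-1][:2]  # indices of the best candidates

# Stage 2: cross-encoder re-ranking of the retrieved candidates (more accurate, slower).
reranker = CrossEncoder("BAAI/bge-reranker-base")
scores = reranker.predict([(query, corpus[i]) for i in top_k])
for i, s in sorted(zip(top_k, scores), key=lambda x: -x[1]):
    print(round(float(s), 3), corpus[i])
```

In practice the bi-encoder runs over the full corpus (or a vector index) while the cross-encoder only scores the handful of retrieved candidates, which keeps the extra accuracy affordable.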
[ "BEAR" ]
ali-vilab/In-Context-LoRA
ali-vilab
text-to-image
[ "diffusers", "text-to-image", "lora", "template:diffusion-lora", "arxiv:2410.23775", "arxiv:2410.15027", "base_model:black-forest-labs/FLUX.1-dev", "base_model:adapter:black-forest-labs/FLUX.1-dev", "license:mit", "region:us" ]
"2024-11-07T05:47:16Z"
2024-12-17T06:13:20+00:00
116,222
569
--- base_model: black-forest-labs/FLUX.1-dev license: mit tags: - text-to-image - lora - diffusers - template:diffusion-lora --- 📢 [[Project Page](https://ali-vilab.github.io/In-Context-LoRA-Page/)] [[Github Repo](https://github.com/ali-vilab/In-Context-LoRA)] [[Paper](https://arxiv.org/abs/2410.23775)] # 🔥 Latest News - **[2024-12-17]** 🚀 We are excited to release **[IDEA-Bench](https://ali-vilab.github.io/IDEA-Bench-Page/)**, a comprehensive benchmark designed to assess the zero-shot task generalization abilities of generative models. The benchmark includes **100** real-world design tasks across **275** unique cases. Despite its general-purpose focus, the top-performing model, EMU2, achieves a score of only **6.81** out of 100, highlighting the current challenges in this domain. Explore the benchmark and challenge the limits of model performance! - **[2024-11-16]** 🌟 The community continues to innovate with IC-LoRA! Exciting projects include models, ComfyUI nodes and workflows for **Virtual Try-on, Product Design, Object Migration, Role Play**, and more. Explore their creations in **[Community Creations Using IC-LoRA](#community-creations-using-ic-lora)**. Huge thanks to all contributors for their incredible efforts! ## Community Creations Using IC-LoRA We are thrilled to showcase the community's innovative projects leveraging In-Context LoRA (IC-LoRA). If you have additional recommendations or projects to share, **please don't hesitate to send a [Pull Request](https://github.com/ali-vilab/In-Context-LoRA/pulls)!** | Project Name | Type | Supported Tasks | Sample Results | |--------------|----------------------|---------------------------------------------------------------------------------|----------------| | 1. [Comfyui_Object_Migration](https://github.com/TTPlanetPig/Comfyui_Object_Migration) | ComfyUI Node & Workflow & LoRA Model | Clothing Migration, Cartoon Clothing to Realism, and More | ![Sample Result](./images/386534865-9612cf8a-858d-4684-819e-7b97981d993c.png) | | 2. [Flux Simple Try On - In Context Lora](https://civitai.com/models/950111/flux-simple-try-on-in-context-lora) | LoRA Model & ComfyUI Workflow | Virtual Try-on | ![Sample Result](./images/ComfyUI_temp_ditfb_00016_.jpeg) | | 3. [Flux In Context - visual identity Lora in Comfy](https://civitai.com/articles/8779) | ComfyUI Workflow | Visual Identity Transfer | ![Sample Result](./images/ComfyUI_00026_.jpeg) | | 4. [Workflows Flux In Context Lora For Product Design](https://civitai.com/models/933018/workflows-flux-in-context-lora-for-product-design) | ComfyUI Workflow | Product Design, Role Play, and More | ![Sample Result](./images/ComfyUI_temp_opjou_00016_.jpeg) | | 5. [Flux Product Design - In Context Lora](https://civitai.com/models/933026/flux-product-design-in-context-lora) | LoRA Model & ComfyUI Workflow | Product Design | ![Sample Result](./images/2024-11-10-002611_0.jpeg) | | 6. [In Context lora + Character story generator + flux+ shichen](https://civitai.com/models/951357/in-context-lora-character-story-generator-flux-shichen) | ComfyUI Workflow | Character Movie Story Generator | ![Sample Result](./images/role2story.jpeg) | | 7. [In- Context-Lora|Cute 4koma 可爱四格漫画](https://civitai.com/models/947702/in-context-loracute-4koma) | LoRA Model & ComfyUI Workflow | Comic Strip Generation | ![Sample Result](./images/ComfyUI_00098_.jpeg) | | 8. 
[Creative Effects & Design LoRA Pack (In-Context LORA)](https://civitai.com/models/929592/creative-effects-and-design-lora-pack-in-context-lora) | LoRA Model & ComfyUI Workflow | Movie-Shot Generation and More | ![Sample Result](./images/film-storyboard-1.jpeg) | We extend our heartfelt thanks to all contributors for their exceptional work in advancing the IC-LoRA ecosystem. ## Model Summary In-Context LoRA fine-tunes text-to-image models (*e.g.,* [FLUX](https://huggingface.co/black-forest-labs/FLUX.1-dev)) to generate image sets with customizable intrinsic relationships, optionally conditioned on another set using SDEdit. It can be adapted to a wide range of tasks. This model hub includes In-Context LoRA models across 10 tasks. [MODEL ZOO](#model-zoo) details these models and their recommended settings. For more details on how these models are trained, please refer to our [paper](https://arxiv.org/abs/2410.23775). ## Key Idea The core concept of IC-LoRA is to **concatenate** both condition and target images into a single composite image while using **Natural Language** to define the task. This approach enables seamless adaptation to a wide range of applications. ## Features - **Task-Agnostic Framework**: IC-LoRA serves as a general framework, but it requires task-specific fine-tuning for diverse applications. - **Customizable Image-Set Generation**: You can fine-tune text-to-image models to **generate image sets** with customizable intrinsic relationships. - **Condition on Image-Set**: You can also **condition the generation of a set of images on another set of images**, enabling a wide range of controllable generation applications. For more detailed information and examples, please read our [Paper](https://arxiv.org/abs/2410.23775) or visit our [Project Page](https://ali-vilab.github.io/In-Context-LoRA-Page/). ## MODEL ZOO The table below lists 10 In-Context LoRA models and their recommended settings. | Task | Model | Recommended Settings | Example Prompt | |---------------|-------------------|---------------------|---------------------------| | **1. Couple Profile Design** | [`couple-profile.safetensors`](https://huggingface.co/ali-vilab/In-Context-LoRA/blob/main/couple-profile.safetensors) | `width: 2048, height: 1024` | `This two-part image portrays a couple of cartoon cats in detective attire; [LEFT] a black cat in a trench coat and fedora holds a magnifying glass and peers to the right, while [RIGHT] a white cat with a bow tie and matching hat raises an eyebrow in curiosity, creating a fun, noir-inspired scene against a dimly lit background.` | | **2. Film Storyboard** | [`film-storyboard.safetensors`](https://huggingface.co/ali-vilab/In-Context-LoRA/blob/main/storyboard.safetensors) | `width: 1024, height: 1536` | `[MOVIE-SHOTS] In a vibrant festival, [SCENE-1] we find <Leo>, a shy boy, standing at the edge of a bustling carnival, eyes wide with awe at the colorful rides and laughter, [SCENE-2] transitioning to him reluctantly trying a daring game, his friends cheering him on, [SCENE-3] culminating in a triumphant moment as he wins a giant stuffed bear, his face beaming with pride as he holds it up for all to see.` | | **3. Font Design** | [`font-design.safetensors`](https://huggingface.co/ali-vilab/In-Context-LoRA/blob/main/font-design.safetensors) | `width: 1792, height: 1216` | `The four-panel image showcases a playful bubble font in a vibrant pop-art style. 
[TOP-LEFT] displays "Pop Candy" in bright pink with a polka dot background; [TOP-RIGHT] shows "Sweet Treat" in purple, surrounded by candy illustrations; [BOTTOM-LEFT] has "Yum!" in a mix of bright colors; [BOTTOM-RIGHT] shows "Delicious" against a striped background, perfect for fun, kid-friendly products.` | | **4. Home Decoration** | [`home-decoration.safetensors`](https://huggingface.co/ali-vilab/In-Context-LoRA/blob/main/home-decoration.safetensors) | `width: 1344, height: 1728` | `This four-panel image showcases a rustic living room with warm wood tones and cozy decor elements; [TOP-LEFT] features a large stone fireplace with wooden shelves filled with books and candles; [TOP-RIGHT] shows a vintage leather sofa draped in plaid blankets, complemented by a mix of textured cushions; [BOTTOM-LEFT] displays a corner with a wooden armchair beside a side table holding a steaming mug and a classic book; [BOTTOM-RIGHT] captures a cozy reading nook with a window seat, a soft fur throw, and decorative logs stacked neatly.` | | **5. Portrait Illustration** | [`portrait-illustration.safetensors`](https://huggingface.co/ali-vilab/In-Context-LoRA/blob/main/portrait-illustration.safetensors) | `width: 1152, height: 1088` | `This two-panel image presents a transformation from a realistic portrait to a playful illustration, capturing both detail and artistic flair; [LEFT] the photograph shows a woman standing in a bustling marketplace, wearing a wide-brimmed hat, a flowing bohemian dress, and a leather crossbody bag; [RIGHT] the illustration panel exaggerates her accessories and features, with the bohemian dress depicted in vibrant patterns and bold colors, while the background is simplified into abstract market stalls, giving the scene an animated and lively feel.` | | **6. Portrait Photography** | [`portrait-photography.safetensors`](https://huggingface.co/ali-vilab/In-Context-LoRA/blob/main/portrait-photography.safetensors) | `width: 1344, height: 1728` | `This [FOUR-PANEL] image illustrates a young artist's creative process in a bright and inspiring studio; [TOP-LEFT] she stands before a large canvas, brush in hand, adding vibrant colors to a partially completed painting, [TOP-RIGHT] she sits at a cluttered wooden table, sketching ideas in a notebook with various art supplies scattered around, [BOTTOM-LEFT] she takes a moment to step back and observe her work, adjusting her glasses thoughtfully, and [BOTTOM-RIGHT] she experiments with different textures by mixing paints directly on the palette, her focused expression showcasing her dedication to her craft.` | | **7. PPT Template** | [`ppt-templates.safetensors`](https://huggingface.co/ali-vilab/In-Context-LoRA/blob/main/ppt-templates.safetensors) | `width: 1984, height: 1152` | `This four-panel image showcases a rustic-themed PowerPoint template for a culinary workshop; [TOP-LEFT] introduces "Farm to Table Cooking" in warm, earthy tones; [TOP-RIGHT] organizes workshop sections like "Ingredients," "Preparation," and "Serving"; [BOTTOM-LEFT] displays ingredient lists for seasonal produce; [BOTTOM-RIGHT] includes chef profiles with short bios.` | | **8. 
Sandstorm Visual Effect** | [`sandstorm-visual-effect.safetensors`](https://huggingface.co/ali-vilab/In-Context-LoRA/blob/main/sandstorm-visual-effect.safetensors) | `width: 1408, height: 1600` | `[SANDSTORM-PSA] This two-part image showcases the transformation of a cyclist through a sandstorm visual effect; [TOP] the upper panel features a cyclist in vibrant gear pedaling steadily on a clear, open road with a serene sky in the background, highlighting focus and determination, [BOTTOM] the lower panel transforms the scene as the cyclist becomes enveloped in a fierce sandstorm, with sand particles swirling intensely around the bike and rider against a stormy, darkened backdrop, emphasizing chaos and power.` | | **9. Sparklers Visual Effect** | [`sparklers-visual-effect.safetensors`](https://huggingface.co/ali-vilab/In-Context-LoRA/blob/main/sparklers-visual-effect.safetensors) | `width: 960, height: 1088` | `[REAL-SPARKLERS-OVERLAYS] The two-part image vividly illustrates a woodland proposal transformed by sparkler overlays; [TOP] the first panel depicts a man kneeling on one knee with an engagement ring before his partner in a forest clearing at dusk, with warm, natural lighting, [BOTTOM] while the second panel introduces glowing sparklers that form a heart shape around the couple, amplifying the romance and joy of the moment.` | | **10. Visual Identity Design** | [`visual-identity-design.safetensors`](https://huggingface.co/ali-vilab/In-Context-LoRA/blob/main/visual-identity-design.safetensors) | `width: 1472, height: 1024` | `The two-panel image showcases the joyful identity of a produce brand, with the left panel showing a smiling pineapple graphic and the brand name “Fresh Tropic” in a fun, casual font on a light aqua background; [LEFT] while the right panel translates the design onto a reusable shopping tote with the pineapple logo in black, held by a person in a market setting, emphasizing the brand’s approachable and eco-friendly vibe.` | ## LICENSE This model hub uses FLUX as the base model. Users must comply with FLUX's license when using this code. Please refer to [FLUX's License](https://github.com/black-forest-labs/flux/tree/main/model_licenses) for more details. ## Citation If you find this work useful in your research, please consider citing: ```bibtex @article{lhhuang2024iclora, title={In-Context LoRA for Diffusion Transformers}, author={Huang, Lianghua and Wang, Wei and Wu, Zhi-Fan and Shi, Yupeng and Dou, Huanzhang and Liang, Chen and Feng, Yutong and Liu, Yu and Zhou, Jingren}, journal={arXiv preprint arxiv:2410.23775}, year={2024} } ``` ```bibtex @article{lhhuang2024iclora, title={Group Diffusion Transformers are Unsupervised Multitask Learners}, author={Huang, Lianghua and Wang, Wei and Wu, Zhi-Fan and Dou, Huanzhang and Shi, Yupeng and Feng, Yutong and Liang, Chen and Liu, Yu and Zhou, Jingren}, journal={arXiv preprint arxiv:2410.15027}, year={2024} } ``` ## Download model Weights for these models are available in Safetensors format. [Download](/ali-vilab/In-Context-LoRA/tree/main) them in the Files & versions tab.
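For readers who prefer to load the weights programmatically, the following is a minimal sketch (not from the original card) using the `diffusers` FluxPipeline. The repository and file name follow the MODEL ZOO table above; the guidance scale, step count, seed, and the final panel crop are illustrative assumptions.

```python
# Sketch: load an In-Context LoRA into FLUX.1-dev with diffusers.
# Repo/file names follow the MODEL ZOO table; sampler settings and the crop are assumptions.
import torch
from diffusers import FluxPipeline

pipe = FluxPipeline.from_pretrained(
    "black-forest-labs/FLUX.1-dev", torch_dtype=torch.bfloat16
).to("cuda")  # alternatively, pipe.enable_model_cpu_offload() on smaller GPUs
pipe.load_lora_weights("ali-vilab/In-Context-LoRA", weight_name="couple-profile.safetensors")

prompt = (
    "This two-part image portrays a couple of cartoon cats in detective attire; "
    "[LEFT] a black cat in a trench coat and fedora holds a magnifying glass, "
    "while [RIGHT] a white cat with a bow tie raises an eyebrow in curiosity."
)

# Recommended resolution for this LoRA (see MODEL ZOO table): width 2048, height 1024.
image = pipe(
    prompt,
    width=2048,
    height=1024,
    guidance_scale=3.5,      # assumption; tune to taste
    num_inference_steps=28,  # assumption
    generator=torch.Generator("cuda").manual_seed(0),
).images[0]

# The output is a single composite image containing both panels; split it if needed.
left = image.crop((0, 0, image.width // 2, image.height))
right = image.crop((image.width // 2, 0, image.width, image.height))
image.save("couple-profile.png")
```

The same pattern applies to the other nine LoRAs: swap `weight_name` and use the width/height recommended for that task in the table above.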
[ "BEAR", "CRAFT" ]
avsolatorio/GIST-large-Embedding-v0
avsolatorio
sentence-similarity
[ "sentence-transformers", "safetensors", "bert", "feature-extraction", "mteb", "sentence-similarity", "en", "arxiv:2402.16829", "arxiv:2212.09741", "license:mit", "model-index", "autotrain_compatible", "text-embeddings-inference", "endpoints_compatible", "region:us" ]
"2024-02-14T18:26:25Z"
2024-02-28T00:34:23+00:00
112,672
14
--- language: - en library_name: sentence-transformers license: mit pipeline_tag: sentence-similarity tags: - feature-extraction - mteb - sentence-similarity - sentence-transformers model-index: - name: GIST-large-Embedding-v0 results: - task: type: Classification dataset: name: MTEB AmazonCounterfactualClassification (en) type: mteb/amazon_counterfactual config: en split: test revision: e8379541af4e31359cca9fbcf4b00f2671dba205 metrics: - type: accuracy value: 75.5820895522388 - type: ap value: 38.32190121241783 - type: f1 value: 69.44777155231054 - task: type: Classification dataset: name: MTEB AmazonPolarityClassification type: mteb/amazon_polarity config: default split: test revision: e2d317d38cd51312af73b3d32a06d1a08b442046 metrics: - type: accuracy value: 93.40514999999998 - type: ap value: 90.2011565132406 - type: f1 value: 93.39486246843605 - task: type: Classification dataset: name: MTEB AmazonReviewsClassification (en) type: mteb/amazon_reviews_multi config: en split: test revision: 1399c76144fd37290681b995c656ef9b2e06e26d metrics: - type: accuracy value: 49.05999999999999 - type: f1 value: 48.58702718571088 - task: type: Retrieval dataset: name: MTEB ArguAna type: arguana config: default split: test revision: None metrics: - type: map_at_1 value: 38.407000000000004 - type: map_at_10 value: 54.822 - type: map_at_100 value: 55.387 - type: map_at_1000 value: 55.388999999999996 - type: map_at_3 value: 50.308 - type: map_at_5 value: 53.199 - type: mrr_at_1 value: 39.900000000000006 - type: mrr_at_10 value: 55.385 - type: mrr_at_100 value: 55.936 - type: mrr_at_1000 value: 55.93900000000001 - type: mrr_at_3 value: 50.853 - type: mrr_at_5 value: 53.738 - type: ndcg_at_1 value: 38.407000000000004 - type: ndcg_at_10 value: 63.38 - type: ndcg_at_100 value: 65.52900000000001 - type: ndcg_at_1000 value: 65.58800000000001 - type: ndcg_at_3 value: 54.26 - type: ndcg_at_5 value: 59.488 - type: precision_at_1 value: 38.407000000000004 - type: precision_at_10 value: 9.04 - type: precision_at_100 value: 0.992 - type: precision_at_1000 value: 0.1 - type: precision_at_3 value: 21.906 - type: precision_at_5 value: 15.690000000000001 - type: recall_at_1 value: 38.407000000000004 - type: recall_at_10 value: 90.398 - type: recall_at_100 value: 99.21799999999999 - type: recall_at_1000 value: 99.644 - type: recall_at_3 value: 65.718 - type: recall_at_5 value: 78.45 - task: type: Clustering dataset: name: MTEB ArxivClusteringP2P type: mteb/arxiv-clustering-p2p config: default split: test revision: a122ad7f3f0291bf49cc6f4d32aa80929df69d5d metrics: - type: v_measure value: 48.49766333679089 - task: type: Clustering dataset: name: MTEB ArxivClusteringS2S type: mteb/arxiv-clustering-s2s config: default split: test revision: f910caf1a6075f7329cdf8c1a6135696f37dbd53 metrics: - type: v_measure value: 42.57731111438094 - task: type: Reranking dataset: name: MTEB AskUbuntuDupQuestions type: mteb/askubuntudupquestions-reranking config: default split: test revision: 2000358ca161889fa9c082cb41daa8dcfb161a54 metrics: - type: map value: 64.70120072857361 - type: mrr value: 77.86714593501297 - task: type: STS dataset: name: MTEB BIOSSES type: mteb/biosses-sts config: default split: test revision: d3fb88f8f02e40887cd149695127462bbcf29b4a metrics: - type: cos_sim_pearson value: 90.73821860690765 - type: cos_sim_spearman value: 89.17070651383446 - type: euclidean_pearson value: 88.28303958293029 - type: euclidean_spearman value: 88.81889126856979 - type: manhattan_pearson value: 88.09080621828731 - type: manhattan_spearman 
value: 88.55924679817751 - task: type: Classification dataset: name: MTEB Banking77Classification type: mteb/banking77 config: default split: test revision: 0fd18e25b25c072e09e0d92ab615fda904d66300 metrics: - type: accuracy value: 88.10064935064933 - type: f1 value: 88.08460758973867 - task: type: Clustering dataset: name: MTEB BiorxivClusteringP2P type: mteb/biorxiv-clustering-p2p config: default split: test revision: 65b79d1d13f80053f67aca9498d9402c2d9f1f40 metrics: - type: v_measure value: 39.338228337929976 - task: type: Clustering dataset: name: MTEB BiorxivClusteringS2S type: mteb/biorxiv-clustering-s2s config: default split: test revision: 258694dd0231531bc1fd9de6ceb52a0853c6d908 metrics: - type: v_measure value: 36.179156232378226 - task: type: Retrieval dataset: name: MTEB CQADupstackAndroidRetrieval type: BeIR/cqadupstack config: default split: test revision: None metrics: - type: map_at_1 value: 33.440999999999995 - type: map_at_10 value: 45.495000000000005 - type: map_at_100 value: 47.132000000000005 - type: map_at_1000 value: 47.253 - type: map_at_3 value: 41.766 - type: map_at_5 value: 43.873 - type: mrr_at_1 value: 40.772999999999996 - type: mrr_at_10 value: 51.627 - type: mrr_at_100 value: 52.364 - type: mrr_at_1000 value: 52.397000000000006 - type: mrr_at_3 value: 48.951 - type: mrr_at_5 value: 50.746 - type: ndcg_at_1 value: 40.772999999999996 - type: ndcg_at_10 value: 52.306 - type: ndcg_at_100 value: 57.753 - type: ndcg_at_1000 value: 59.36900000000001 - type: ndcg_at_3 value: 47.177 - type: ndcg_at_5 value: 49.71 - type: precision_at_1 value: 40.772999999999996 - type: precision_at_10 value: 10.129000000000001 - type: precision_at_100 value: 1.617 - type: precision_at_1000 value: 0.208 - type: precision_at_3 value: 22.985 - type: precision_at_5 value: 16.652 - type: recall_at_1 value: 33.440999999999995 - type: recall_at_10 value: 65.121 - type: recall_at_100 value: 87.55199999999999 - type: recall_at_1000 value: 97.41300000000001 - type: recall_at_3 value: 49.958999999999996 - type: recall_at_5 value: 57.14900000000001 - type: map_at_1 value: 32.126 - type: map_at_10 value: 42.856 - type: map_at_100 value: 44.134 - type: map_at_1000 value: 44.274 - type: map_at_3 value: 39.594 - type: map_at_5 value: 41.504999999999995 - type: mrr_at_1 value: 40.127 - type: mrr_at_10 value: 48.736000000000004 - type: mrr_at_100 value: 49.303999999999995 - type: mrr_at_1000 value: 49.356 - type: mrr_at_3 value: 46.263 - type: mrr_at_5 value: 47.878 - type: ndcg_at_1 value: 40.127 - type: ndcg_at_10 value: 48.695 - type: ndcg_at_100 value: 52.846000000000004 - type: ndcg_at_1000 value: 54.964 - type: ndcg_at_3 value: 44.275 - type: ndcg_at_5 value: 46.54 - type: precision_at_1 value: 40.127 - type: precision_at_10 value: 9.229 - type: precision_at_100 value: 1.473 - type: precision_at_1000 value: 0.19499999999999998 - type: precision_at_3 value: 21.444 - type: precision_at_5 value: 15.389 - type: recall_at_1 value: 32.126 - type: recall_at_10 value: 58.971 - type: recall_at_100 value: 76.115 - type: recall_at_1000 value: 89.556 - type: recall_at_3 value: 45.891 - type: recall_at_5 value: 52.242 - type: map_at_1 value: 41.312 - type: map_at_10 value: 54.510000000000005 - type: map_at_100 value: 55.544000000000004 - type: map_at_1000 value: 55.593 - type: map_at_3 value: 50.859 - type: map_at_5 value: 52.839999999999996 - type: mrr_at_1 value: 47.147 - type: mrr_at_10 value: 57.678 - type: mrr_at_100 value: 58.287 - type: mrr_at_1000 value: 58.312 - type: mrr_at_3 value: 
55.025999999999996 - type: mrr_at_5 value: 56.55 - type: ndcg_at_1 value: 47.147 - type: ndcg_at_10 value: 60.672000000000004 - type: ndcg_at_100 value: 64.411 - type: ndcg_at_1000 value: 65.35499999999999 - type: ndcg_at_3 value: 54.643 - type: ndcg_at_5 value: 57.461 - type: precision_at_1 value: 47.147 - type: precision_at_10 value: 9.881 - type: precision_at_100 value: 1.27 - type: precision_at_1000 value: 0.13799999999999998 - type: precision_at_3 value: 24.556 - type: precision_at_5 value: 16.814999999999998 - type: recall_at_1 value: 41.312 - type: recall_at_10 value: 75.62299999999999 - type: recall_at_100 value: 91.388 - type: recall_at_1000 value: 98.08 - type: recall_at_3 value: 59.40299999999999 - type: recall_at_5 value: 66.43900000000001 - type: map_at_1 value: 27.609 - type: map_at_10 value: 37.614 - type: map_at_100 value: 38.584 - type: map_at_1000 value: 38.652 - type: map_at_3 value: 34.731 - type: map_at_5 value: 36.308 - type: mrr_at_1 value: 29.944 - type: mrr_at_10 value: 39.829 - type: mrr_at_100 value: 40.659 - type: mrr_at_1000 value: 40.709 - type: mrr_at_3 value: 37.269000000000005 - type: mrr_at_5 value: 38.625 - type: ndcg_at_1 value: 29.944 - type: ndcg_at_10 value: 43.082 - type: ndcg_at_100 value: 47.857 - type: ndcg_at_1000 value: 49.612 - type: ndcg_at_3 value: 37.578 - type: ndcg_at_5 value: 40.135 - type: precision_at_1 value: 29.944 - type: precision_at_10 value: 6.678000000000001 - type: precision_at_100 value: 0.951 - type: precision_at_1000 value: 0.11399999999999999 - type: precision_at_3 value: 16.045 - type: precision_at_5 value: 11.073 - type: recall_at_1 value: 27.609 - type: recall_at_10 value: 57.718 - type: recall_at_100 value: 79.768 - type: recall_at_1000 value: 92.868 - type: recall_at_3 value: 42.876 - type: recall_at_5 value: 49.104 - type: map_at_1 value: 18.071 - type: map_at_10 value: 27.471 - type: map_at_100 value: 28.71 - type: map_at_1000 value: 28.833 - type: map_at_3 value: 24.698 - type: map_at_5 value: 26.461000000000002 - type: mrr_at_1 value: 22.387999999999998 - type: mrr_at_10 value: 32.522 - type: mrr_at_100 value: 33.393 - type: mrr_at_1000 value: 33.455 - type: mrr_at_3 value: 29.830000000000002 - type: mrr_at_5 value: 31.472 - type: ndcg_at_1 value: 22.387999999999998 - type: ndcg_at_10 value: 33.278999999999996 - type: ndcg_at_100 value: 39.043 - type: ndcg_at_1000 value: 41.763 - type: ndcg_at_3 value: 28.310999999999996 - type: ndcg_at_5 value: 31.007 - type: precision_at_1 value: 22.387999999999998 - type: precision_at_10 value: 6.157 - type: precision_at_100 value: 1.042 - type: precision_at_1000 value: 0.14200000000000002 - type: precision_at_3 value: 13.972000000000001 - type: precision_at_5 value: 10.274 - type: recall_at_1 value: 18.071 - type: recall_at_10 value: 46.025 - type: recall_at_100 value: 71.153 - type: recall_at_1000 value: 90.232 - type: recall_at_3 value: 32.311 - type: recall_at_5 value: 39.296 - type: map_at_1 value: 30.813000000000002 - type: map_at_10 value: 42.594 - type: map_at_100 value: 43.949 - type: map_at_1000 value: 44.052 - type: map_at_3 value: 39.1 - type: map_at_5 value: 41.111 - type: mrr_at_1 value: 37.824999999999996 - type: mrr_at_10 value: 48.06 - type: mrr_at_100 value: 48.91 - type: mrr_at_1000 value: 48.946 - type: mrr_at_3 value: 45.509 - type: mrr_at_5 value: 47.073 - type: ndcg_at_1 value: 37.824999999999996 - type: ndcg_at_10 value: 48.882 - type: ndcg_at_100 value: 54.330999999999996 - type: ndcg_at_1000 value: 56.120999999999995 - type: ndcg_at_3 value: 43.529 - 
type: ndcg_at_5 value: 46.217999999999996 - type: precision_at_1 value: 37.824999999999996 - type: precision_at_10 value: 8.845 - type: precision_at_100 value: 1.34 - type: precision_at_1000 value: 0.168 - type: precision_at_3 value: 20.757 - type: precision_at_5 value: 14.802999999999999 - type: recall_at_1 value: 30.813000000000002 - type: recall_at_10 value: 61.895999999999994 - type: recall_at_100 value: 84.513 - type: recall_at_1000 value: 95.817 - type: recall_at_3 value: 47.099000000000004 - type: recall_at_5 value: 54.031 - type: map_at_1 value: 25.735999999999997 - type: map_at_10 value: 36.799 - type: map_at_100 value: 38.246 - type: map_at_1000 value: 38.353 - type: map_at_3 value: 33.133 - type: map_at_5 value: 34.954 - type: mrr_at_1 value: 31.849 - type: mrr_at_10 value: 41.928 - type: mrr_at_100 value: 42.846000000000004 - type: mrr_at_1000 value: 42.894 - type: mrr_at_3 value: 39.117000000000004 - type: mrr_at_5 value: 40.521 - type: ndcg_at_1 value: 31.849 - type: ndcg_at_10 value: 43.143 - type: ndcg_at_100 value: 48.963 - type: ndcg_at_1000 value: 51.041000000000004 - type: ndcg_at_3 value: 37.218 - type: ndcg_at_5 value: 39.542 - type: precision_at_1 value: 31.849 - type: precision_at_10 value: 8.231 - type: precision_at_100 value: 1.277 - type: precision_at_1000 value: 0.164 - type: precision_at_3 value: 18.037 - type: precision_at_5 value: 12.945 - type: recall_at_1 value: 25.735999999999997 - type: recall_at_10 value: 56.735 - type: recall_at_100 value: 81.04 - type: recall_at_1000 value: 94.845 - type: recall_at_3 value: 40.239999999999995 - type: recall_at_5 value: 46.378 - type: map_at_1 value: 27.580333333333336 - type: map_at_10 value: 37.70558333333334 - type: map_at_100 value: 38.94941666666667 - type: map_at_1000 value: 39.062083333333334 - type: map_at_3 value: 34.63333333333334 - type: map_at_5 value: 36.35241666666666 - type: mrr_at_1 value: 32.64866666666667 - type: mrr_at_10 value: 42.018499999999996 - type: mrr_at_100 value: 42.83391666666666 - type: mrr_at_1000 value: 42.884166666666665 - type: mrr_at_3 value: 39.476499999999994 - type: mrr_at_5 value: 40.96983333333334 - type: ndcg_at_1 value: 32.64866666666667 - type: ndcg_at_10 value: 43.43866666666667 - type: ndcg_at_100 value: 48.569833333333335 - type: ndcg_at_1000 value: 50.6495 - type: ndcg_at_3 value: 38.327166666666656 - type: ndcg_at_5 value: 40.76941666666667 - type: precision_at_1 value: 32.64866666666667 - type: precision_at_10 value: 7.652333333333332 - type: precision_at_100 value: 1.2066666666666666 - type: precision_at_1000 value: 0.15841666666666668 - type: precision_at_3 value: 17.75108333333333 - type: precision_at_5 value: 12.641916666666669 - type: recall_at_1 value: 27.580333333333336 - type: recall_at_10 value: 56.02591666666667 - type: recall_at_100 value: 78.317 - type: recall_at_1000 value: 92.52608333333332 - type: recall_at_3 value: 41.84283333333333 - type: recall_at_5 value: 48.105666666666664 - type: map_at_1 value: 27.876 - type: map_at_10 value: 34.521 - type: map_at_100 value: 35.581 - type: map_at_1000 value: 35.674 - type: map_at_3 value: 32.501000000000005 - type: map_at_5 value: 33.602 - type: mrr_at_1 value: 31.441999999999997 - type: mrr_at_10 value: 37.669999999999995 - type: mrr_at_100 value: 38.523 - type: mrr_at_1000 value: 38.59 - type: mrr_at_3 value: 35.762 - type: mrr_at_5 value: 36.812 - type: ndcg_at_1 value: 31.441999999999997 - type: ndcg_at_10 value: 38.46 - type: ndcg_at_100 value: 43.479 - type: ndcg_at_1000 value: 45.858 - type: ndcg_at_3 value: 
34.668 - type: ndcg_at_5 value: 36.416 - type: precision_at_1 value: 31.441999999999997 - type: precision_at_10 value: 5.782 - type: precision_at_100 value: 0.91 - type: precision_at_1000 value: 0.11900000000000001 - type: precision_at_3 value: 14.417 - type: precision_at_5 value: 9.876999999999999 - type: recall_at_1 value: 27.876 - type: recall_at_10 value: 47.556 - type: recall_at_100 value: 70.39699999999999 - type: recall_at_1000 value: 87.969 - type: recall_at_3 value: 37.226 - type: recall_at_5 value: 41.43 - type: map_at_1 value: 18.854000000000003 - type: map_at_10 value: 26.632 - type: map_at_100 value: 27.849 - type: map_at_1000 value: 27.977 - type: map_at_3 value: 24.089 - type: map_at_5 value: 25.477 - type: mrr_at_1 value: 22.987 - type: mrr_at_10 value: 30.781999999999996 - type: mrr_at_100 value: 31.746000000000002 - type: mrr_at_1000 value: 31.818 - type: mrr_at_3 value: 28.43 - type: mrr_at_5 value: 29.791 - type: ndcg_at_1 value: 22.987 - type: ndcg_at_10 value: 31.585 - type: ndcg_at_100 value: 37.32 - type: ndcg_at_1000 value: 40.072 - type: ndcg_at_3 value: 27.058 - type: ndcg_at_5 value: 29.137999999999998 - type: precision_at_1 value: 22.987 - type: precision_at_10 value: 5.76 - type: precision_at_100 value: 1.018 - type: precision_at_1000 value: 0.14400000000000002 - type: precision_at_3 value: 12.767000000000001 - type: precision_at_5 value: 9.257 - type: recall_at_1 value: 18.854000000000003 - type: recall_at_10 value: 42.349 - type: recall_at_100 value: 68.15299999999999 - type: recall_at_1000 value: 87.44 - type: recall_at_3 value: 29.715999999999998 - type: recall_at_5 value: 35.085 - type: map_at_1 value: 28.094 - type: map_at_10 value: 38.22 - type: map_at_100 value: 39.352 - type: map_at_1000 value: 39.452 - type: map_at_3 value: 35.339 - type: map_at_5 value: 36.78 - type: mrr_at_1 value: 33.022 - type: mrr_at_10 value: 42.466 - type: mrr_at_100 value: 43.3 - type: mrr_at_1000 value: 43.356 - type: mrr_at_3 value: 40.159 - type: mrr_at_5 value: 41.272999999999996 - type: ndcg_at_1 value: 33.022 - type: ndcg_at_10 value: 43.976 - type: ndcg_at_100 value: 49.008 - type: ndcg_at_1000 value: 51.154999999999994 - type: ndcg_at_3 value: 38.891 - type: ndcg_at_5 value: 40.897 - type: precision_at_1 value: 33.022 - type: precision_at_10 value: 7.396999999999999 - type: precision_at_100 value: 1.1199999999999999 - type: precision_at_1000 value: 0.14200000000000002 - type: precision_at_3 value: 17.724 - type: precision_at_5 value: 12.239 - type: recall_at_1 value: 28.094 - type: recall_at_10 value: 57.162 - type: recall_at_100 value: 78.636 - type: recall_at_1000 value: 93.376 - type: recall_at_3 value: 43.328 - type: recall_at_5 value: 48.252 - type: map_at_1 value: 24.937 - type: map_at_10 value: 34.82 - type: map_at_100 value: 36.405 - type: map_at_1000 value: 36.626 - type: map_at_3 value: 31.548 - type: map_at_5 value: 33.355000000000004 - type: mrr_at_1 value: 30.435000000000002 - type: mrr_at_10 value: 39.946 - type: mrr_at_100 value: 40.873 - type: mrr_at_1000 value: 40.910000000000004 - type: mrr_at_3 value: 37.088 - type: mrr_at_5 value: 38.808 - type: ndcg_at_1 value: 30.435000000000002 - type: ndcg_at_10 value: 41.25 - type: ndcg_at_100 value: 47.229 - type: ndcg_at_1000 value: 49.395 - type: ndcg_at_3 value: 35.801 - type: ndcg_at_5 value: 38.457 - type: precision_at_1 value: 30.435000000000002 - type: precision_at_10 value: 8.083 - type: precision_at_100 value: 1.601 - type: precision_at_1000 value: 0.247 - type: precision_at_3 value: 
17.061999999999998 - type: precision_at_5 value: 12.767000000000001 - type: recall_at_1 value: 24.937 - type: recall_at_10 value: 53.905 - type: recall_at_100 value: 80.607 - type: recall_at_1000 value: 93.728 - type: recall_at_3 value: 38.446000000000005 - type: recall_at_5 value: 45.188 - type: map_at_1 value: 22.095000000000002 - type: map_at_10 value: 30.935000000000002 - type: map_at_100 value: 31.907000000000004 - type: map_at_1000 value: 32.006 - type: map_at_3 value: 28.242 - type: map_at_5 value: 29.963 - type: mrr_at_1 value: 23.845 - type: mrr_at_10 value: 32.978 - type: mrr_at_100 value: 33.802 - type: mrr_at_1000 value: 33.867000000000004 - type: mrr_at_3 value: 30.314000000000004 - type: mrr_at_5 value: 32.089 - type: ndcg_at_1 value: 23.845 - type: ndcg_at_10 value: 35.934 - type: ndcg_at_100 value: 40.598 - type: ndcg_at_1000 value: 43.089 - type: ndcg_at_3 value: 30.776999999999997 - type: ndcg_at_5 value: 33.711999999999996 - type: precision_at_1 value: 23.845 - type: precision_at_10 value: 5.656 - type: precision_at_100 value: 0.861 - type: precision_at_1000 value: 0.12 - type: precision_at_3 value: 13.247 - type: precision_at_5 value: 9.612 - type: recall_at_1 value: 22.095000000000002 - type: recall_at_10 value: 49.25 - type: recall_at_100 value: 70.482 - type: recall_at_1000 value: 88.98899999999999 - type: recall_at_3 value: 35.619 - type: recall_at_5 value: 42.674 - task: type: Retrieval dataset: name: MTEB ClimateFEVER type: climate-fever config: default split: test revision: None metrics: - type: map_at_1 value: 14.154 - type: map_at_10 value: 24.654999999999998 - type: map_at_100 value: 26.723999999999997 - type: map_at_1000 value: 26.912000000000003 - type: map_at_3 value: 20.4 - type: map_at_5 value: 22.477 - type: mrr_at_1 value: 32.117000000000004 - type: mrr_at_10 value: 44.590999999999994 - type: mrr_at_100 value: 45.425 - type: mrr_at_1000 value: 45.456 - type: mrr_at_3 value: 41.281 - type: mrr_at_5 value: 43.219 - type: ndcg_at_1 value: 32.117000000000004 - type: ndcg_at_10 value: 33.994 - type: ndcg_at_100 value: 41.438 - type: ndcg_at_1000 value: 44.611000000000004 - type: ndcg_at_3 value: 27.816000000000003 - type: ndcg_at_5 value: 29.816 - type: precision_at_1 value: 32.117000000000004 - type: precision_at_10 value: 10.756 - type: precision_at_100 value: 1.8679999999999999 - type: precision_at_1000 value: 0.246 - type: precision_at_3 value: 20.803 - type: precision_at_5 value: 15.987000000000002 - type: recall_at_1 value: 14.154 - type: recall_at_10 value: 40.489999999999995 - type: recall_at_100 value: 65.635 - type: recall_at_1000 value: 83.276 - type: recall_at_3 value: 25.241000000000003 - type: recall_at_5 value: 31.211 - task: type: Retrieval dataset: name: MTEB DBPedia type: dbpedia-entity config: default split: test revision: None metrics: - type: map_at_1 value: 9.332 - type: map_at_10 value: 20.462 - type: map_at_100 value: 29.473 - type: map_at_1000 value: 31.215 - type: map_at_3 value: 14.466999999999999 - type: map_at_5 value: 16.922 - type: mrr_at_1 value: 69.5 - type: mrr_at_10 value: 77.039 - type: mrr_at_100 value: 77.265 - type: mrr_at_1000 value: 77.271 - type: mrr_at_3 value: 75.5 - type: mrr_at_5 value: 76.4 - type: ndcg_at_1 value: 57.125 - type: ndcg_at_10 value: 42.958 - type: ndcg_at_100 value: 48.396 - type: ndcg_at_1000 value: 55.897 - type: ndcg_at_3 value: 47.188 - type: ndcg_at_5 value: 44.376 - type: precision_at_1 value: 69.5 - type: precision_at_10 value: 34.5 - type: precision_at_100 value: 11.18 - type: 
precision_at_1000 value: 2.13 - type: precision_at_3 value: 51.083 - type: precision_at_5 value: 43.1 - type: recall_at_1 value: 9.332 - type: recall_at_10 value: 26.422 - type: recall_at_100 value: 56.098000000000006 - type: recall_at_1000 value: 79.66 - type: recall_at_3 value: 15.703 - type: recall_at_5 value: 19.644000000000002 - task: type: Classification dataset: name: MTEB EmotionClassification type: mteb/emotion config: default split: test revision: 4f58c6b202a23cf9a4da393831edf4f9183cad37 metrics: - type: accuracy value: 54.72 - type: f1 value: 49.67819606587526 - task: type: Retrieval dataset: name: MTEB FEVER type: fever config: default split: test revision: None metrics: - type: map_at_1 value: 74.97 - type: map_at_10 value: 82.956 - type: map_at_100 value: 83.193 - type: map_at_1000 value: 83.208 - type: map_at_3 value: 81.837 - type: map_at_5 value: 82.57 - type: mrr_at_1 value: 80.783 - type: mrr_at_10 value: 87.546 - type: mrr_at_100 value: 87.627 - type: mrr_at_1000 value: 87.63 - type: mrr_at_3 value: 86.79400000000001 - type: mrr_at_5 value: 87.32799999999999 - type: ndcg_at_1 value: 80.783 - type: ndcg_at_10 value: 86.54899999999999 - type: ndcg_at_100 value: 87.355 - type: ndcg_at_1000 value: 87.629 - type: ndcg_at_3 value: 84.82 - type: ndcg_at_5 value: 85.83800000000001 - type: precision_at_1 value: 80.783 - type: precision_at_10 value: 10.327 - type: precision_at_100 value: 1.094 - type: precision_at_1000 value: 0.11299999999999999 - type: precision_at_3 value: 32.218 - type: precision_at_5 value: 20.012 - type: recall_at_1 value: 74.97 - type: recall_at_10 value: 93.072 - type: recall_at_100 value: 96.218 - type: recall_at_1000 value: 97.991 - type: recall_at_3 value: 88.357 - type: recall_at_5 value: 90.983 - task: type: Retrieval dataset: name: MTEB FiQA2018 type: fiqa config: default split: test revision: None metrics: - type: map_at_1 value: 21.12 - type: map_at_10 value: 35.908 - type: map_at_100 value: 37.895 - type: map_at_1000 value: 38.068000000000005 - type: map_at_3 value: 31.189 - type: map_at_5 value: 33.908 - type: mrr_at_1 value: 42.901 - type: mrr_at_10 value: 52.578 - type: mrr_at_100 value: 53.308 - type: mrr_at_1000 value: 53.342 - type: mrr_at_3 value: 50.385999999999996 - type: mrr_at_5 value: 51.62799999999999 - type: ndcg_at_1 value: 42.901 - type: ndcg_at_10 value: 44.302 - type: ndcg_at_100 value: 51.132999999999996 - type: ndcg_at_1000 value: 53.848 - type: ndcg_at_3 value: 40.464 - type: ndcg_at_5 value: 41.743 - type: precision_at_1 value: 42.901 - type: precision_at_10 value: 12.423 - type: precision_at_100 value: 1.968 - type: precision_at_1000 value: 0.246 - type: precision_at_3 value: 27.622999999999998 - type: precision_at_5 value: 20.278 - type: recall_at_1 value: 21.12 - type: recall_at_10 value: 52.091 - type: recall_at_100 value: 77.062 - type: recall_at_1000 value: 93.082 - type: recall_at_3 value: 37.223 - type: recall_at_5 value: 43.826 - task: type: Retrieval dataset: name: MTEB HotpotQA type: hotpotqa config: default split: test revision: None metrics: - type: map_at_1 value: 38.940000000000005 - type: map_at_10 value: 62.239999999999995 - type: map_at_100 value: 63.141000000000005 - type: map_at_1000 value: 63.205999999999996 - type: map_at_3 value: 58.738 - type: map_at_5 value: 60.924 - type: mrr_at_1 value: 77.88000000000001 - type: mrr_at_10 value: 83.7 - type: mrr_at_100 value: 83.882 - type: mrr_at_1000 value: 83.889 - type: mrr_at_3 value: 82.748 - type: mrr_at_5 value: 83.381 - type: ndcg_at_1 value: 
77.88000000000001 - type: ndcg_at_10 value: 70.462 - type: ndcg_at_100 value: 73.564 - type: ndcg_at_1000 value: 74.78099999999999 - type: ndcg_at_3 value: 65.524 - type: ndcg_at_5 value: 68.282 - type: precision_at_1 value: 77.88000000000001 - type: precision_at_10 value: 14.81 - type: precision_at_100 value: 1.7229999999999999 - type: precision_at_1000 value: 0.188 - type: precision_at_3 value: 42.083999999999996 - type: precision_at_5 value: 27.43 - type: recall_at_1 value: 38.940000000000005 - type: recall_at_10 value: 74.051 - type: recall_at_100 value: 86.158 - type: recall_at_1000 value: 94.146 - type: recall_at_3 value: 63.126000000000005 - type: recall_at_5 value: 68.575 - task: type: Classification dataset: name: MTEB ImdbClassification type: mteb/imdb config: default split: test revision: 3d86128a09e091d6018b6d26cad27f2739fc2db7 metrics: - type: accuracy value: 91.23440000000001 - type: ap value: 87.33490392265892 - type: f1 value: 91.21374626021836 - task: type: Retrieval dataset: name: MTEB MSMARCO type: msmarco config: default split: dev revision: None metrics: - type: map_at_1 value: 22.137999999999998 - type: map_at_10 value: 34.471000000000004 - type: map_at_100 value: 35.634 - type: map_at_1000 value: 35.685 - type: map_at_3 value: 30.587999999999997 - type: map_at_5 value: 32.812999999999995 - type: mrr_at_1 value: 22.736 - type: mrr_at_10 value: 35.092 - type: mrr_at_100 value: 36.193999999999996 - type: mrr_at_1000 value: 36.238 - type: mrr_at_3 value: 31.28 - type: mrr_at_5 value: 33.498 - type: ndcg_at_1 value: 22.736 - type: ndcg_at_10 value: 41.388999999999996 - type: ndcg_at_100 value: 46.967999999999996 - type: ndcg_at_1000 value: 48.178 - type: ndcg_at_3 value: 33.503 - type: ndcg_at_5 value: 37.484 - type: precision_at_1 value: 22.736 - type: precision_at_10 value: 6.54 - type: precision_at_100 value: 0.9339999999999999 - type: precision_at_1000 value: 0.104 - type: precision_at_3 value: 14.249999999999998 - type: precision_at_5 value: 10.562000000000001 - type: recall_at_1 value: 22.137999999999998 - type: recall_at_10 value: 62.629999999999995 - type: recall_at_100 value: 88.375 - type: recall_at_1000 value: 97.529 - type: recall_at_3 value: 41.245 - type: recall_at_5 value: 50.808 - task: type: Classification dataset: name: MTEB MTOPDomainClassification (en) type: mteb/mtop_domain config: en split: test revision: d80d48c1eb48d3562165c59d59d0034df9fff0bf metrics: - type: accuracy value: 95.25079799361606 - type: f1 value: 95.00726023695032 - task: type: Classification dataset: name: MTEB MTOPIntentClassification (en) type: mteb/mtop_intent config: en split: test revision: ae001d0e6b1228650b7bd1c2c65fb50ad11a8aba metrics: - type: accuracy value: 78.23757409940721 - type: f1 value: 58.534958803195714 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (en) type: mteb/amazon_massive_intent config: en split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 76.20040349697378 - type: f1 value: 74.31261149784696 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (en) type: mteb/amazon_massive_scenario config: en split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 79.35104236718227 - type: f1 value: 79.7373049864316 - task: type: Clustering dataset: name: MTEB MedrxivClusteringP2P type: mteb/medrxiv-clustering-p2p config: default split: test revision: e7a26af6f3ae46b30dde8737f02c07b1505bcc73 metrics: - type: v_measure value: 
34.478828180753126 - task: type: Clustering dataset: name: MTEB MedrxivClusteringS2S type: mteb/medrxiv-clustering-s2s config: default split: test revision: 35191c8c0dca72d8ff3efcd72aa802307d469663 metrics: - type: v_measure value: 32.25696147904426 - task: type: Reranking dataset: name: MTEB MindSmallReranking type: mteb/mind_small config: default split: test revision: 3bdac13927fdc888b903db93b2ffdbd90b295a69 metrics: - type: map value: 32.82488548405117 - type: mrr value: 34.066706809031096 - task: type: Retrieval dataset: name: MTEB NFCorpus type: nfcorpus config: default split: test revision: None metrics: - type: map_at_1 value: 6.557 - type: map_at_10 value: 15.055 - type: map_at_100 value: 19.575 - type: map_at_1000 value: 21.267 - type: map_at_3 value: 10.86 - type: map_at_5 value: 12.83 - type: mrr_at_1 value: 50.464 - type: mrr_at_10 value: 59.050999999999995 - type: mrr_at_100 value: 59.436 - type: mrr_at_1000 value: 59.476 - type: mrr_at_3 value: 56.811 - type: mrr_at_5 value: 58.08 - type: ndcg_at_1 value: 47.988 - type: ndcg_at_10 value: 38.645 - type: ndcg_at_100 value: 36.339 - type: ndcg_at_1000 value: 45.279 - type: ndcg_at_3 value: 43.35 - type: ndcg_at_5 value: 41.564 - type: precision_at_1 value: 49.845 - type: precision_at_10 value: 28.544999999999998 - type: precision_at_100 value: 9.322 - type: precision_at_1000 value: 2.258 - type: precision_at_3 value: 40.144000000000005 - type: precision_at_5 value: 35.913000000000004 - type: recall_at_1 value: 6.557 - type: recall_at_10 value: 19.5 - type: recall_at_100 value: 37.153999999999996 - type: recall_at_1000 value: 69.581 - type: recall_at_3 value: 12.133 - type: recall_at_5 value: 15.43 - task: type: Retrieval dataset: name: MTEB NQ type: nq config: default split: test revision: None metrics: - type: map_at_1 value: 31.740000000000002 - type: map_at_10 value: 48.150999999999996 - type: map_at_100 value: 49.125 - type: map_at_1000 value: 49.149 - type: map_at_3 value: 43.645 - type: map_at_5 value: 46.417 - type: mrr_at_1 value: 35.892 - type: mrr_at_10 value: 50.524 - type: mrr_at_100 value: 51.232 - type: mrr_at_1000 value: 51.24999999999999 - type: mrr_at_3 value: 46.852 - type: mrr_at_5 value: 49.146 - type: ndcg_at_1 value: 35.892 - type: ndcg_at_10 value: 56.08800000000001 - type: ndcg_at_100 value: 60.077000000000005 - type: ndcg_at_1000 value: 60.632 - type: ndcg_at_3 value: 47.765 - type: ndcg_at_5 value: 52.322 - type: precision_at_1 value: 35.892 - type: precision_at_10 value: 9.296 - type: precision_at_100 value: 1.154 - type: precision_at_1000 value: 0.12 - type: precision_at_3 value: 21.92 - type: precision_at_5 value: 15.781999999999998 - type: recall_at_1 value: 31.740000000000002 - type: recall_at_10 value: 77.725 - type: recall_at_100 value: 94.841 - type: recall_at_1000 value: 99.003 - type: recall_at_3 value: 56.407 - type: recall_at_5 value: 66.848 - task: type: Retrieval dataset: name: MTEB QuoraRetrieval type: quora config: default split: test revision: None metrics: - type: map_at_1 value: 71.429 - type: map_at_10 value: 85.42699999999999 - type: map_at_100 value: 86.063 - type: map_at_1000 value: 86.077 - type: map_at_3 value: 82.573 - type: map_at_5 value: 84.371 - type: mrr_at_1 value: 82.34 - type: mrr_at_10 value: 88.247 - type: mrr_at_100 value: 88.357 - type: mrr_at_1000 value: 88.357 - type: mrr_at_3 value: 87.38 - type: mrr_at_5 value: 87.981 - type: ndcg_at_1 value: 82.34 - type: ndcg_at_10 value: 88.979 - type: ndcg_at_100 value: 90.18599999999999 - type: ndcg_at_1000 value: 90.254 - 
type: ndcg_at_3 value: 86.378 - type: ndcg_at_5 value: 87.821 - type: precision_at_1 value: 82.34 - type: precision_at_10 value: 13.482 - type: precision_at_100 value: 1.537 - type: precision_at_1000 value: 0.157 - type: precision_at_3 value: 37.852999999999994 - type: precision_at_5 value: 24.798000000000002 - type: recall_at_1 value: 71.429 - type: recall_at_10 value: 95.64099999999999 - type: recall_at_100 value: 99.723 - type: recall_at_1000 value: 99.98 - type: recall_at_3 value: 88.011 - type: recall_at_5 value: 92.246 - task: type: Clustering dataset: name: MTEB RedditClustering type: mteb/reddit-clustering config: default split: test revision: 24640382cdbf8abc73003fb0fa6d111a705499eb metrics: - type: v_measure value: 60.62148584103299 - task: type: Clustering dataset: name: MTEB RedditClusteringP2P type: mteb/reddit-clustering-p2p config: default split: test revision: 282350215ef01743dc01b456c7f5241fa8937f16 metrics: - type: v_measure value: 63.2923987272903 - task: type: Retrieval dataset: name: MTEB SCIDOCS type: scidocs config: default split: test revision: None metrics: - type: map_at_1 value: 5.128 - type: map_at_10 value: 14.63 - type: map_at_100 value: 17.285 - type: map_at_1000 value: 17.676 - type: map_at_3 value: 9.993 - type: map_at_5 value: 12.286999999999999 - type: mrr_at_1 value: 25.4 - type: mrr_at_10 value: 38.423 - type: mrr_at_100 value: 39.497 - type: mrr_at_1000 value: 39.531 - type: mrr_at_3 value: 34.9 - type: mrr_at_5 value: 37.01 - type: ndcg_at_1 value: 25.4 - type: ndcg_at_10 value: 24.062 - type: ndcg_at_100 value: 33.823 - type: ndcg_at_1000 value: 39.663 - type: ndcg_at_3 value: 22.246 - type: ndcg_at_5 value: 19.761 - type: precision_at_1 value: 25.4 - type: precision_at_10 value: 12.85 - type: precision_at_100 value: 2.71 - type: precision_at_1000 value: 0.41000000000000003 - type: precision_at_3 value: 21.4 - type: precision_at_5 value: 17.86 - type: recall_at_1 value: 5.128 - type: recall_at_10 value: 26.06 - type: recall_at_100 value: 54.993 - type: recall_at_1000 value: 83.165 - type: recall_at_3 value: 13.003 - type: recall_at_5 value: 18.117 - task: type: STS dataset: name: MTEB SICK-R type: mteb/sickr-sts config: default split: test revision: a6ea5a8cab320b040a23452cc28066d9beae2cee metrics: - type: cos_sim_pearson value: 87.5466779326323 - type: cos_sim_spearman value: 82.79782085421951 - type: euclidean_pearson value: 84.76929982677339 - type: euclidean_spearman value: 82.51802536005597 - type: manhattan_pearson value: 84.76736312526177 - type: manhattan_spearman value: 82.50799656335593 - task: type: STS dataset: name: MTEB STS12 type: mteb/sts12-sts config: default split: test revision: a0d554a64d88156834ff5ae9920b964011b16384 metrics: - type: cos_sim_pearson value: 86.40486308108694 - type: cos_sim_spearman value: 77.12670500926937 - type: euclidean_pearson value: 85.23836845503847 - type: euclidean_spearman value: 78.41475117006176 - type: manhattan_pearson value: 85.24302039610805 - type: manhattan_spearman value: 78.4053162562707 - task: type: STS dataset: name: MTEB STS13 type: mteb/sts13-sts config: default split: test revision: 7e90230a92c190f1bf69ae9002b8cea547a64cca metrics: - type: cos_sim_pearson value: 88.83570289087565 - type: cos_sim_spearman value: 89.28563503553643 - type: euclidean_pearson value: 87.77516003996445 - type: euclidean_spearman value: 88.8656149534085 - type: manhattan_pearson value: 87.75568872417946 - type: manhattan_spearman value: 88.80445489340585 - task: type: STS dataset: name: MTEB STS14 type: 
mteb/sts14-sts config: default split: test revision: 6031580fec1f6af667f0bd2da0a551cf4f0b2375 metrics: - type: cos_sim_pearson value: 86.776406555485 - type: cos_sim_spearman value: 83.8288465070091 - type: euclidean_pearson value: 85.37827999808123 - type: euclidean_spearman value: 84.11079529992739 - type: manhattan_pearson value: 85.35336495689121 - type: manhattan_spearman value: 84.08618492649347 - task: type: STS dataset: name: MTEB STS15 type: mteb/sts15-sts config: default split: test revision: ae752c7c21bf194d8b67fd573edf7ae58183cbe3 metrics: - type: cos_sim_pearson value: 88.57644404820684 - type: cos_sim_spearman value: 89.69728364350713 - type: euclidean_pearson value: 88.28202320389443 - type: euclidean_spearman value: 88.9560567319321 - type: manhattan_pearson value: 88.29461100044172 - type: manhattan_spearman value: 88.96030920678558 - task: type: STS dataset: name: MTEB STS16 type: mteb/sts16-sts config: default split: test revision: 4d8694f8f0e0100860b497b999b3dbed754a0513 metrics: - type: cos_sim_pearson value: 85.05211938460621 - type: cos_sim_spearman value: 86.43413865667489 - type: euclidean_pearson value: 85.62760689259562 - type: euclidean_spearman value: 86.28867831982394 - type: manhattan_pearson value: 85.60828879163458 - type: manhattan_spearman value: 86.27823731462473 - task: type: STS dataset: name: MTEB STS17 (en-en) type: mteb/sts17-crosslingual-sts config: en-en split: test revision: af5e6fb845001ecf41f4c1e033ce921939a2a68d metrics: - type: cos_sim_pearson value: 90.00254140466377 - type: cos_sim_spearman value: 89.66118745178284 - type: euclidean_pearson value: 89.46985446236553 - type: euclidean_spearman value: 88.92649032371526 - type: manhattan_pearson value: 89.49600028180247 - type: manhattan_spearman value: 88.86948431519099 - task: type: STS dataset: name: MTEB STS22 (en) type: mteb/sts22-crosslingual-sts config: en split: test revision: 6d1ba47164174a496b7fa5d3569dae26a6813b80 metrics: - type: cos_sim_pearson value: 68.93578321067938 - type: cos_sim_spearman value: 69.60639595839257 - type: euclidean_pearson value: 70.33485090574897 - type: euclidean_spearman value: 69.03380379185452 - type: manhattan_pearson value: 70.42097254943839 - type: manhattan_spearman value: 69.25296348304255 - task: type: STS dataset: name: MTEB STSBenchmark type: mteb/stsbenchmark-sts config: default split: test revision: b0fddb56ed78048fa8b90373c8a3cfc37b684831 metrics: - type: cos_sim_pearson value: 87.29588700755069 - type: cos_sim_spearman value: 88.30389489193672 - type: euclidean_pearson value: 87.60349838180346 - type: euclidean_spearman value: 87.91041868311692 - type: manhattan_pearson value: 87.59373630607907 - type: manhattan_spearman value: 87.88690174001724 - task: type: Reranking dataset: name: MTEB SciDocsRR type: mteb/scidocs-reranking config: default split: test revision: d3c5e1fc0b855ab6097bf1cda04dd73947d7caab metrics: - type: map value: 87.8030655700857 - type: mrr value: 96.3950637234951 - task: type: Retrieval dataset: name: MTEB SciFact type: scifact config: default split: test revision: None metrics: - type: map_at_1 value: 60.028000000000006 - type: map_at_10 value: 69.855 - type: map_at_100 value: 70.257 - type: map_at_1000 value: 70.283 - type: map_at_3 value: 66.769 - type: map_at_5 value: 68.679 - type: mrr_at_1 value: 62.666999999999994 - type: mrr_at_10 value: 70.717 - type: mrr_at_100 value: 71.00800000000001 - type: mrr_at_1000 value: 71.033 - type: mrr_at_3 value: 68.389 - type: mrr_at_5 value: 69.939 - type: ndcg_at_1 value: 
62.666999999999994 - type: ndcg_at_10 value: 74.715 - type: ndcg_at_100 value: 76.364 - type: ndcg_at_1000 value: 76.89399999999999 - type: ndcg_at_3 value: 69.383 - type: ndcg_at_5 value: 72.322 - type: precision_at_1 value: 62.666999999999994 - type: precision_at_10 value: 10.067 - type: precision_at_100 value: 1.09 - type: precision_at_1000 value: 0.11299999999999999 - type: precision_at_3 value: 27.111 - type: precision_at_5 value: 18.267 - type: recall_at_1 value: 60.028000000000006 - type: recall_at_10 value: 88.822 - type: recall_at_100 value: 96.167 - type: recall_at_1000 value: 100.0 - type: recall_at_3 value: 74.367 - type: recall_at_5 value: 81.661 - task: type: PairClassification dataset: name: MTEB SprintDuplicateQuestions type: mteb/sprintduplicatequestions-pairclassification config: default split: test revision: d66bd1f72af766a5cc4b0ca5e00c162f89e8cc46 metrics: - type: cos_sim_accuracy value: 99.84554455445544 - type: cos_sim_ap value: 96.54482863244152 - type: cos_sim_f1 value: 92.13709677419355 - type: cos_sim_precision value: 92.88617886178862 - type: cos_sim_recall value: 91.4 - type: dot_accuracy value: 99.76039603960396 - type: dot_ap value: 93.20115278887057 - type: dot_f1 value: 87.92079207920793 - type: dot_precision value: 87.05882352941177 - type: dot_recall value: 88.8 - type: euclidean_accuracy value: 99.84950495049505 - type: euclidean_ap value: 96.53268343961348 - type: euclidean_f1 value: 92.23697650663942 - type: euclidean_precision value: 94.258872651357 - type: euclidean_recall value: 90.3 - type: manhattan_accuracy value: 99.85346534653465 - type: manhattan_ap value: 96.54495433438355 - type: manhattan_f1 value: 92.51012145748987 - type: manhattan_precision value: 93.64754098360656 - type: manhattan_recall value: 91.4 - type: max_accuracy value: 99.85346534653465 - type: max_ap value: 96.54495433438355 - type: max_f1 value: 92.51012145748987 - task: type: Clustering dataset: name: MTEB StackExchangeClustering type: mteb/stackexchange-clustering config: default split: test revision: 6cbc1f7b2bc0622f2e39d2c77fa502909748c259 metrics: - type: v_measure value: 66.46940443952006 - task: type: Clustering dataset: name: MTEB StackExchangeClusteringP2P type: mteb/stackexchange-clustering-p2p config: default split: test revision: 815ca46b2622cec33ccafc3735d572c266efdb44 metrics: - type: v_measure value: 36.396194493841584 - task: type: Reranking dataset: name: MTEB StackOverflowDupQuestions type: mteb/stackoverflowdupquestions-reranking config: default split: test revision: e185fbe320c72810689fc5848eb6114e1ef5ec69 metrics: - type: map value: 54.881717673695555 - type: mrr value: 55.73439224174519 - task: type: Summarization dataset: name: MTEB SummEval type: mteb/summeval config: default split: test revision: cda12ad7615edc362dbf25a00fdd61d3b1eaf93c metrics: - type: cos_sim_pearson value: 31.438177268254087 - type: cos_sim_spearman value: 30.96177698848688 - type: dot_pearson value: 30.513850376431435 - type: dot_spearman value: 29.932421046509706 - task: type: Retrieval dataset: name: MTEB TRECCOVID type: trec-covid config: default split: test revision: None metrics: - type: map_at_1 value: 0.21 - type: map_at_10 value: 1.727 - type: map_at_100 value: 9.881 - type: map_at_1000 value: 24.245 - type: map_at_3 value: 0.615 - type: map_at_5 value: 0.966 - type: mrr_at_1 value: 78.0 - type: mrr_at_10 value: 87.333 - type: mrr_at_100 value: 87.333 - type: mrr_at_1000 value: 87.333 - type: mrr_at_3 value: 86.333 - type: mrr_at_5 value: 87.333 - type: ndcg_at_1 value: 
74.0 - type: ndcg_at_10 value: 69.12700000000001 - type: ndcg_at_100 value: 53.893 - type: ndcg_at_1000 value: 49.639 - type: ndcg_at_3 value: 74.654 - type: ndcg_at_5 value: 73.232 - type: precision_at_1 value: 78.0 - type: precision_at_10 value: 72.8 - type: precision_at_100 value: 55.42 - type: precision_at_1000 value: 21.73 - type: precision_at_3 value: 79.333 - type: precision_at_5 value: 77.2 - type: recall_at_1 value: 0.21 - type: recall_at_10 value: 1.9709999999999999 - type: recall_at_100 value: 13.555 - type: recall_at_1000 value: 46.961999999999996 - type: recall_at_3 value: 0.66 - type: recall_at_5 value: 1.052 - task: type: Retrieval dataset: name: MTEB Touche2020 type: webis-touche2020 config: default split: test revision: None metrics: - type: map_at_1 value: 2.456 - type: map_at_10 value: 9.426 - type: map_at_100 value: 16.066 - type: map_at_1000 value: 17.652 - type: map_at_3 value: 5.2459999999999996 - type: map_at_5 value: 6.5360000000000005 - type: mrr_at_1 value: 34.694 - type: mrr_at_10 value: 47.666 - type: mrr_at_100 value: 48.681999999999995 - type: mrr_at_1000 value: 48.681999999999995 - type: mrr_at_3 value: 43.878 - type: mrr_at_5 value: 46.224 - type: ndcg_at_1 value: 31.633 - type: ndcg_at_10 value: 23.454 - type: ndcg_at_100 value: 36.616 - type: ndcg_at_1000 value: 48.596000000000004 - type: ndcg_at_3 value: 28.267999999999997 - type: ndcg_at_5 value: 25.630999999999997 - type: precision_at_1 value: 34.694 - type: precision_at_10 value: 20.204 - type: precision_at_100 value: 7.754999999999999 - type: precision_at_1000 value: 1.5709999999999997 - type: precision_at_3 value: 29.252 - type: precision_at_5 value: 24.898 - type: recall_at_1 value: 2.456 - type: recall_at_10 value: 14.951 - type: recall_at_100 value: 48.399 - type: recall_at_1000 value: 85.077 - type: recall_at_3 value: 6.1370000000000005 - type: recall_at_5 value: 8.671 - task: type: Classification dataset: name: MTEB ToxicConversationsClassification type: mteb/toxic_conversations_50k config: default split: test revision: d7c0de2777da35d6aae2200a62c6e0e5af397c4c metrics: - type: accuracy value: 71.86240000000001 - type: ap value: 14.678570078747494 - type: f1 value: 55.295967793934445 - task: type: Classification dataset: name: MTEB TweetSentimentExtractionClassification type: mteb/tweet_sentiment_extraction config: default split: test revision: d604517c81ca91fe16a244d1248fc021f9ecee7a metrics: - type: accuracy value: 59.17374080362195 - type: f1 value: 59.54410874861454 - task: type: Clustering dataset: name: MTEB TwentyNewsgroupsClustering type: mteb/twentynewsgroups-clustering config: default split: test revision: 6125ec4e24fa026cec8a478383ee943acfbd5449 metrics: - type: v_measure value: 51.91227822485289 - task: type: PairClassification dataset: name: MTEB TwitterSemEval2015 type: mteb/twittersemeval2015-pairclassification config: default split: test revision: 70970daeab8776df92f5ea462b6173c0b46fd2d1 metrics: - type: cos_sim_accuracy value: 87.12523097097217 - type: cos_sim_ap value: 77.59606075943269 - type: cos_sim_f1 value: 71.11395646606915 - type: cos_sim_precision value: 69.07960199004975 - type: cos_sim_recall value: 73.27176781002639 - type: dot_accuracy value: 84.68736961316088 - type: dot_ap value: 68.47167450741459 - type: dot_f1 value: 64.42152354914874 - type: dot_precision value: 60.887949260042284 - type: dot_recall value: 68.3905013192612 - type: euclidean_accuracy value: 86.88084878106932 - type: euclidean_ap value: 77.27351204978599 - type: euclidean_f1 value: 
70.99179716629381 - type: euclidean_precision value: 67.10526315789474 - type: euclidean_recall value: 75.35620052770449 - type: manhattan_accuracy value: 86.83316445133218 - type: manhattan_ap value: 77.21835357308716 - type: manhattan_f1 value: 71.05587004676349 - type: manhattan_precision value: 66.58210332103322 - type: manhattan_recall value: 76.17414248021109 - type: max_accuracy value: 87.12523097097217 - type: max_ap value: 77.59606075943269 - type: max_f1 value: 71.11395646606915 - task: type: PairClassification dataset: name: MTEB TwitterURLCorpus type: mteb/twitterurlcorpus-pairclassification config: default split: test revision: 8b6510b0b1fa4e4c4f879467980e9be563ec1cdf metrics: - type: cos_sim_accuracy value: 88.97232894787906 - type: cos_sim_ap value: 85.9613736469497 - type: cos_sim_f1 value: 78.40216655382532 - type: cos_sim_precision value: 72.97512437810946 - type: cos_sim_recall value: 84.70126270403449 - type: dot_accuracy value: 88.04866689952264 - type: dot_ap value: 83.15465089499936 - type: dot_f1 value: 76.32698287879329 - type: dot_precision value: 71.23223697378077 - type: dot_recall value: 82.20665229442562 - type: euclidean_accuracy value: 88.67543757519307 - type: euclidean_ap value: 85.4524355531532 - type: euclidean_f1 value: 77.78729106950081 - type: euclidean_precision value: 75.3009009009009 - type: euclidean_recall value: 80.44348629504158 - type: manhattan_accuracy value: 88.65991384328792 - type: manhattan_ap value: 85.43109069046837 - type: manhattan_f1 value: 77.72639551396425 - type: manhattan_precision value: 73.73402417962004 - type: manhattan_recall value: 82.17585463504774 - type: max_accuracy value: 88.97232894787906 - type: max_ap value: 85.9613736469497 - type: max_f1 value: 78.40216655382532 --- <h1 align="center">GIST Large Embedding v0</h1> *GISTEmbed: Guided In-sample Selection of Training Negatives for Text Embedding Fine-tuning* The model is fine-tuned on top of the [BAAI/bge-large-en-v1.5](https://huggingface.co/BAAI/bge-large-en-v1.5) using the [MEDI dataset](https://github.com/xlang-ai/instructor-embedding.git) augmented with mined triplets from the [MTEB Classification](https://huggingface.co/mteb) training dataset (excluding data from the Amazon Polarity Classification task). The model does not require any instruction for generating embeddings. This means that queries for retrieval tasks can be directly encoded without crafting instructions. Technical paper: [GISTEmbed: Guided In-sample Selection of Training Negatives for Text Embedding Fine-tuning](https://arxiv.org/abs/2402.16829) # Data The dataset used is a compilation of the MEDI and MTEB Classification training datasets. Third-party datasets may be subject to additional terms and conditions under their associated licenses. A HuggingFace Dataset version of the compiled dataset, and the specific revision used to train the model, is available: - Dataset: [avsolatorio/medi-data-mteb_avs_triplets](https://huggingface.co/datasets/avsolatorio/medi-data-mteb_avs_triplets) - Revision: 238a0499b6e6b690cc64ea56fde8461daa8341bb The dataset contains a `task_type` key, which can be used to select only the mteb classification tasks (prefixed with `mteb_`). The **MEDI Dataset** is published in the following paper: [One Embedder, Any Task: Instruction-Finetuned Text Embeddings](https://arxiv.org/abs/2212.09741). 
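To make the `task_type` filter concrete, below is a minimal sketch of selecting only the MTEB classification triplets with the Hugging Face `datasets` library; the `train` split name and any columns other than `task_type` are assumptions here, so verify them against the dataset card before relying on this.

```Python
# Minimal sketch (not part of the official training code): load the compiled
# MEDI + MTEB dataset at the training revision and keep only the MTEB
# classification triplets, whose `task_type` values are prefixed with `mteb_`.
from datasets import load_dataset

dataset = load_dataset(
    "avsolatorio/medi-data-mteb_avs_triplets",
    revision="238a0499b6e6b690cc64ea56fde8461daa8341bb",  # revision used to train the model
    split="train",  # assumed split name
)

# Keep only the MTEB classification tasks.
mteb_subset = dataset.filter(lambda row: row["task_type"].startswith("mteb_"))
print(len(dataset), len(mteb_subset))
```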
The MTEB benchmark results of the GIST embedding model, compared with the base model, suggest that the fine-tuning dataset has perturbed the model considerably, resulting in significant improvements on certain tasks while degrading performance on others. The retrieval performance on the TRECCOVID task is of particular note. The fine-tuning dataset does not contain significant knowledge about COVID-19, which could explain the observed performance degradation. We found some evidence, detailed in the paper, that the thematic coverage of the fine-tuning data can affect downstream performance.

# Usage

The model can be easily loaded using the Sentence Transformers library.

```Python
import torch.nn.functional as F
from sentence_transformers import SentenceTransformer

revision = None  # Replace with the specific revision to ensure reproducibility if the model is updated.

model = SentenceTransformer("avsolatorio/GIST-large-Embedding-v0", revision=revision)

texts = [
    "Illustration of the REaLTabFormer model. The left block shows the non-relational tabular data model using GPT-2 with a causal LM head. In contrast, the right block shows how a relational dataset's child table is modeled using a sequence-to-sequence (Seq2Seq) model. The Seq2Seq model uses the observations in the parent table to condition the generation of the observations in the child table. The trained GPT-2 model on the parent table, with weights frozen, is also used as the encoder in the Seq2Seq model.",
    "Predicting human mobility holds significant practical value, with applications ranging from enhancing disaster risk planning to simulating epidemic spread. In this paper, we present the GeoFormer, a decoder-only transformer model adapted from the GPT architecture to forecast human mobility.",
    "As the economies of Southeast Asia continue adopting digital technologies, policy makers increasingly ask how to prepare the workforce for emerging labor demands. However, little is known about the skills that workers need to adapt to these changes"
]

# Compute embeddings
embeddings = model.encode(texts, convert_to_tensor=True)

# Compute cosine-similarity for each pair of sentences
scores = F.cosine_similarity(embeddings.unsqueeze(1), embeddings.unsqueeze(0), dim=-1)

print(scores.cpu().numpy())
```

# Training Parameters

Below are the training parameters used to fine-tune the model:

```
Epochs = 40
Warmup ratio = 0.1
Learning rate = 5e-6
Batch size = 16
Checkpoint step = 171000
Contrastive loss temperature = 0.01
```

# Evaluation

The model was evaluated using the [MTEB Evaluation](https://huggingface.co/mteb) suite; a minimal evaluation sketch appears after the acknowledgements below.

# Citation

Please cite our work if you use GISTEmbed or the datasets we published in your projects or research. 🤗

```
@article{solatorio2024gistembed,
    title={GISTEmbed: Guided In-sample Selection of Training Negatives for Text Embedding Fine-tuning},
    author={Aivin V. Solatorio},
    journal={arXiv preprint arXiv:2402.16829},
    year={2024},
    url={https://arxiv.org/abs/2402.16829},
    eprint={2402.16829},
    archivePrefix={arXiv},
    primaryClass={cs.LG}
}
```

# Acknowledgements

This work is supported by the "KCP IV - Exploring Data Use in the Development Economics Literature using Large Language Models (AI and LLMs)" project funded by the [Knowledge for Change Program (KCP)](https://www.worldbank.org/en/programs/knowledge-for-change) of the World Bank - RA-P503405-RESE-TF0C3444.

The findings, interpretations, and conclusions expressed in this material are entirely those of the authors.
They do not necessarily represent the views of the International Bank for Reconstruction and Development/World Bank and its affiliated organizations, or those of the Executive Directors of the World Bank or the governments they represent.
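As referenced in the Evaluation section above, below is a minimal sketch of reproducing a small part of the MTEB evaluation for this model; it assumes the classic `mteb` Python API in which tasks are selected by name, and the two task names are only illustrative examples.

```Python
# Minimal sketch (assumed API): evaluate the model on a couple of MTEB tasks.
from mteb import MTEB
from sentence_transformers import SentenceTransformer

model = SentenceTransformer("avsolatorio/GIST-large-Embedding-v0")

# Task names are illustrative; swap in the tasks you care about.
evaluation = MTEB(tasks=["STSBenchmark", "Banking77Classification"])
results = evaluation.run(model, output_folder="results/GIST-large-Embedding-v0")
print(results)
```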
[ "BIOSSES", "SCIFACT" ]
Alibaba-NLP/gte-Qwen2-7B-instruct
Alibaba-NLP
sentence-similarity
[ "sentence-transformers", "safetensors", "qwen2", "text-generation", "mteb", "transformers", "Qwen2", "sentence-similarity", "custom_code", "arxiv:2308.03281", "license:apache-2.0", "model-index", "autotrain_compatible", "text-generation-inference", "text-embeddings-inference", "endpoints_compatible", "region:us" ]
"2024-06-15T11:24:21Z"
2025-01-11T08:10:51+00:00
110,385
348
--- license: apache-2.0 tags: - mteb - sentence-transformers - transformers - Qwen2 - sentence-similarity model-index: - name: gte-qwen2-7B-instruct results: - task: type: Classification dataset: name: MTEB AmazonCounterfactualClassification (en) type: mteb/amazon_counterfactual config: en split: test revision: e8379541af4e31359cca9fbcf4b00f2671dba205 metrics: - type: accuracy value: 91.31343283582089 - type: ap value: 67.64251402604096 - type: f1 value: 87.53372530755692 - task: type: Classification dataset: name: MTEB AmazonPolarityClassification type: mteb/amazon_polarity config: default split: test revision: e2d317d38cd51312af73b3d32a06d1a08b442046 metrics: - type: accuracy value: 97.497825 - type: ap value: 96.30329547047529 - type: f1 value: 97.49769793778039 - task: type: Classification dataset: name: MTEB AmazonReviewsClassification (en) type: mteb/amazon_reviews_multi config: en split: test revision: 1399c76144fd37290681b995c656ef9b2e06e26d metrics: - type: accuracy value: 62.564 - type: f1 value: 60.975777935041066 - task: type: Retrieval dataset: name: MTEB ArguAna type: mteb/arguana config: default split: test revision: c22ab2a51041ffd869aaddef7af8d8215647e41a metrics: - type: map_at_1 value: 36.486000000000004 - type: map_at_10 value: 54.842 - type: map_at_100 value: 55.206999999999994 - type: map_at_1000 value: 55.206999999999994 - type: map_at_3 value: 49.893 - type: map_at_5 value: 53.105000000000004 - type: mrr_at_1 value: 37.34 - type: mrr_at_10 value: 55.143 - type: mrr_at_100 value: 55.509 - type: mrr_at_1000 value: 55.509 - type: mrr_at_3 value: 50.212999999999994 - type: mrr_at_5 value: 53.432 - type: ndcg_at_1 value: 36.486000000000004 - type: ndcg_at_10 value: 64.273 - type: ndcg_at_100 value: 65.66199999999999 - type: ndcg_at_1000 value: 65.66199999999999 - type: ndcg_at_3 value: 54.352999999999994 - type: ndcg_at_5 value: 60.131 - type: precision_at_1 value: 36.486000000000004 - type: precision_at_10 value: 9.395000000000001 - type: precision_at_100 value: 0.996 - type: precision_at_1000 value: 0.1 - type: precision_at_3 value: 22.428 - type: precision_at_5 value: 16.259 - type: recall_at_1 value: 36.486000000000004 - type: recall_at_10 value: 93.95400000000001 - type: recall_at_100 value: 99.644 - type: recall_at_1000 value: 99.644 - type: recall_at_3 value: 67.283 - type: recall_at_5 value: 81.294 - task: type: Clustering dataset: name: MTEB ArxivClusteringP2P type: mteb/arxiv-clustering-p2p config: default split: test revision: a122ad7f3f0291bf49cc6f4d32aa80929df69d5d metrics: - type: v_measure value: 56.461169803700564 - task: type: Clustering dataset: name: MTEB ArxivClusteringS2S type: mteb/arxiv-clustering-s2s config: default split: test revision: f910caf1a6075f7329cdf8c1a6135696f37dbd53 metrics: - type: v_measure value: 51.73600434466286 - task: type: Reranking dataset: name: MTEB AskUbuntuDupQuestions type: mteb/askubuntudupquestions-reranking config: default split: test revision: 2000358ca161889fa9c082cb41daa8dcfb161a54 metrics: - type: map value: 67.57827065898053 - type: mrr value: 79.08136569493911 - task: type: STS dataset: name: MTEB BIOSSES type: mteb/biosses-sts config: default split: test revision: d3fb88f8f02e40887cd149695127462bbcf29b4a metrics: - type: cos_sim_pearson value: 83.53324575999243 - type: cos_sim_spearman value: 81.37173362822374 - type: euclidean_pearson value: 82.19243335103444 - type: euclidean_spearman value: 81.33679307304334 - type: manhattan_pearson value: 82.38752665975699 - type: manhattan_spearman value: 81.31510583189689 
- task: type: Classification dataset: name: MTEB Banking77Classification type: mteb/banking77 config: default split: test revision: 0fd18e25b25c072e09e0d92ab615fda904d66300 metrics: - type: accuracy value: 87.56818181818181 - type: f1 value: 87.25826722019875 - task: type: Clustering dataset: name: MTEB BiorxivClusteringP2P type: mteb/biorxiv-clustering-p2p config: default split: test revision: 65b79d1d13f80053f67aca9498d9402c2d9f1f40 metrics: - type: v_measure value: 50.09239610327673 - task: type: Clustering dataset: name: MTEB BiorxivClusteringS2S type: mteb/biorxiv-clustering-s2s config: default split: test revision: 258694dd0231531bc1fd9de6ceb52a0853c6d908 metrics: - type: v_measure value: 46.64733054606282 - task: type: Retrieval dataset: name: MTEB CQADupstackAndroidRetrieval type: BeIR/cqadupstack config: default split: test revision: f46a197baaae43b4f621051089b82a364682dfeb metrics: - type: map_at_1 value: 33.997 - type: map_at_10 value: 48.176 - type: map_at_100 value: 49.82 - type: map_at_1000 value: 49.924 - type: map_at_3 value: 43.626 - type: map_at_5 value: 46.275 - type: mrr_at_1 value: 42.059999999999995 - type: mrr_at_10 value: 53.726 - type: mrr_at_100 value: 54.398 - type: mrr_at_1000 value: 54.416 - type: mrr_at_3 value: 50.714999999999996 - type: mrr_at_5 value: 52.639 - type: ndcg_at_1 value: 42.059999999999995 - type: ndcg_at_10 value: 55.574999999999996 - type: ndcg_at_100 value: 60.744 - type: ndcg_at_1000 value: 61.85699999999999 - type: ndcg_at_3 value: 49.363 - type: ndcg_at_5 value: 52.44 - type: precision_at_1 value: 42.059999999999995 - type: precision_at_10 value: 11.101999999999999 - type: precision_at_100 value: 1.73 - type: precision_at_1000 value: 0.218 - type: precision_at_3 value: 24.464 - type: precision_at_5 value: 18.026 - type: recall_at_1 value: 33.997 - type: recall_at_10 value: 70.35900000000001 - type: recall_at_100 value: 91.642 - type: recall_at_1000 value: 97.977 - type: recall_at_3 value: 52.76 - type: recall_at_5 value: 61.148 - task: type: Retrieval dataset: name: MTEB CQADupstackEnglishRetrieval type: BeIR/cqadupstack config: default split: test revision: ad9991cb51e31e31e430383c75ffb2885547b5f0 metrics: - type: map_at_1 value: 35.884 - type: map_at_10 value: 48.14 - type: map_at_100 value: 49.5 - type: map_at_1000 value: 49.63 - type: map_at_3 value: 44.646 - type: map_at_5 value: 46.617999999999995 - type: mrr_at_1 value: 44.458999999999996 - type: mrr_at_10 value: 53.751000000000005 - type: mrr_at_100 value: 54.37800000000001 - type: mrr_at_1000 value: 54.415 - type: mrr_at_3 value: 51.815 - type: mrr_at_5 value: 52.882 - type: ndcg_at_1 value: 44.458999999999996 - type: ndcg_at_10 value: 54.157 - type: ndcg_at_100 value: 58.362 - type: ndcg_at_1000 value: 60.178 - type: ndcg_at_3 value: 49.661 - type: ndcg_at_5 value: 51.74999999999999 - type: precision_at_1 value: 44.458999999999996 - type: precision_at_10 value: 10.248 - type: precision_at_100 value: 1.5890000000000002 - type: precision_at_1000 value: 0.207 - type: precision_at_3 value: 23.928 - type: precision_at_5 value: 16.878999999999998 - type: recall_at_1 value: 35.884 - type: recall_at_10 value: 64.798 - type: recall_at_100 value: 82.345 - type: recall_at_1000 value: 93.267 - type: recall_at_3 value: 51.847 - type: recall_at_5 value: 57.601 - task: type: Retrieval dataset: name: MTEB CQADupstackGamingRetrieval type: BeIR/cqadupstack config: default split: test revision: 4885aa143210c98657558c04aaf3dc47cfb54340 metrics: - type: map_at_1 value: 39.383 - type: map_at_10 value: 
53.714 - type: map_at_100 value: 54.838 - type: map_at_1000 value: 54.87800000000001 - type: map_at_3 value: 50.114999999999995 - type: map_at_5 value: 52.153000000000006 - type: mrr_at_1 value: 45.016 - type: mrr_at_10 value: 56.732000000000006 - type: mrr_at_100 value: 57.411 - type: mrr_at_1000 value: 57.431 - type: mrr_at_3 value: 54.044000000000004 - type: mrr_at_5 value: 55.639 - type: ndcg_at_1 value: 45.016 - type: ndcg_at_10 value: 60.228 - type: ndcg_at_100 value: 64.277 - type: ndcg_at_1000 value: 65.07 - type: ndcg_at_3 value: 54.124 - type: ndcg_at_5 value: 57.147000000000006 - type: precision_at_1 value: 45.016 - type: precision_at_10 value: 9.937 - type: precision_at_100 value: 1.288 - type: precision_at_1000 value: 0.13899999999999998 - type: precision_at_3 value: 24.471999999999998 - type: precision_at_5 value: 16.991 - type: recall_at_1 value: 39.383 - type: recall_at_10 value: 76.175 - type: recall_at_100 value: 93.02 - type: recall_at_1000 value: 98.60900000000001 - type: recall_at_3 value: 60.265 - type: recall_at_5 value: 67.46600000000001 - task: type: Retrieval dataset: name: MTEB CQADupstackGisRetrieval type: BeIR/cqadupstack config: default split: test revision: 5003b3064772da1887988e05400cf3806fe491f2 metrics: - type: map_at_1 value: 27.426000000000002 - type: map_at_10 value: 37.397000000000006 - type: map_at_100 value: 38.61 - type: map_at_1000 value: 38.678000000000004 - type: map_at_3 value: 34.150999999999996 - type: map_at_5 value: 36.137 - type: mrr_at_1 value: 29.944 - type: mrr_at_10 value: 39.654 - type: mrr_at_100 value: 40.638000000000005 - type: mrr_at_1000 value: 40.691 - type: mrr_at_3 value: 36.817 - type: mrr_at_5 value: 38.524 - type: ndcg_at_1 value: 29.944 - type: ndcg_at_10 value: 43.094 - type: ndcg_at_100 value: 48.789 - type: ndcg_at_1000 value: 50.339999999999996 - type: ndcg_at_3 value: 36.984 - type: ndcg_at_5 value: 40.248 - type: precision_at_1 value: 29.944 - type: precision_at_10 value: 6.78 - type: precision_at_100 value: 1.024 - type: precision_at_1000 value: 0.11800000000000001 - type: precision_at_3 value: 15.895000000000001 - type: precision_at_5 value: 11.39 - type: recall_at_1 value: 27.426000000000002 - type: recall_at_10 value: 58.464000000000006 - type: recall_at_100 value: 84.193 - type: recall_at_1000 value: 95.52000000000001 - type: recall_at_3 value: 42.172 - type: recall_at_5 value: 50.101 - task: type: Retrieval dataset: name: MTEB CQADupstackMathematicaRetrieval type: BeIR/cqadupstack config: default split: test revision: 90fceea13679c63fe563ded68f3b6f06e50061de metrics: - type: map_at_1 value: 19.721 - type: map_at_10 value: 31.604 - type: map_at_100 value: 32.972 - type: map_at_1000 value: 33.077 - type: map_at_3 value: 27.218999999999998 - type: map_at_5 value: 29.53 - type: mrr_at_1 value: 25.0 - type: mrr_at_10 value: 35.843 - type: mrr_at_100 value: 36.785000000000004 - type: mrr_at_1000 value: 36.842000000000006 - type: mrr_at_3 value: 32.193 - type: mrr_at_5 value: 34.264 - type: ndcg_at_1 value: 25.0 - type: ndcg_at_10 value: 38.606 - type: ndcg_at_100 value: 44.272 - type: ndcg_at_1000 value: 46.527 - type: ndcg_at_3 value: 30.985000000000003 - type: ndcg_at_5 value: 34.43 - type: precision_at_1 value: 25.0 - type: precision_at_10 value: 7.811 - type: precision_at_100 value: 1.203 - type: precision_at_1000 value: 0.15 - type: precision_at_3 value: 15.423 - type: precision_at_5 value: 11.791 - type: recall_at_1 value: 19.721 - type: recall_at_10 value: 55.625 - type: recall_at_100 value: 79.34400000000001 - 
type: recall_at_1000 value: 95.208 - type: recall_at_3 value: 35.19 - type: recall_at_5 value: 43.626 - task: type: Retrieval dataset: name: MTEB CQADupstackPhysicsRetrieval type: BeIR/cqadupstack config: default split: test revision: 79531abbd1fb92d06c6d6315a0cbbbf5bb247ea4 metrics: - type: map_at_1 value: 33.784 - type: map_at_10 value: 47.522 - type: map_at_100 value: 48.949999999999996 - type: map_at_1000 value: 49.038 - type: map_at_3 value: 43.284 - type: map_at_5 value: 45.629 - type: mrr_at_1 value: 41.482 - type: mrr_at_10 value: 52.830999999999996 - type: mrr_at_100 value: 53.559999999999995 - type: mrr_at_1000 value: 53.588 - type: mrr_at_3 value: 50.016000000000005 - type: mrr_at_5 value: 51.614000000000004 - type: ndcg_at_1 value: 41.482 - type: ndcg_at_10 value: 54.569 - type: ndcg_at_100 value: 59.675999999999995 - type: ndcg_at_1000 value: 60.989000000000004 - type: ndcg_at_3 value: 48.187000000000005 - type: ndcg_at_5 value: 51.183 - type: precision_at_1 value: 41.482 - type: precision_at_10 value: 10.221 - type: precision_at_100 value: 1.486 - type: precision_at_1000 value: 0.17500000000000002 - type: precision_at_3 value: 23.548 - type: precision_at_5 value: 16.805 - type: recall_at_1 value: 33.784 - type: recall_at_10 value: 69.798 - type: recall_at_100 value: 90.098 - type: recall_at_1000 value: 98.176 - type: recall_at_3 value: 52.127 - type: recall_at_5 value: 59.861 - task: type: Retrieval dataset: name: MTEB CQADupstackProgrammersRetrieval type: BeIR/cqadupstack config: default split: test revision: 6184bc1440d2dbc7612be22b50686b8826d22b32 metrics: - type: map_at_1 value: 28.038999999999998 - type: map_at_10 value: 41.904 - type: map_at_100 value: 43.36 - type: map_at_1000 value: 43.453 - type: map_at_3 value: 37.785999999999994 - type: map_at_5 value: 40.105000000000004 - type: mrr_at_1 value: 35.046 - type: mrr_at_10 value: 46.926 - type: mrr_at_100 value: 47.815000000000005 - type: mrr_at_1000 value: 47.849000000000004 - type: mrr_at_3 value: 44.273 - type: mrr_at_5 value: 45.774 - type: ndcg_at_1 value: 35.046 - type: ndcg_at_10 value: 48.937000000000005 - type: ndcg_at_100 value: 54.544000000000004 - type: ndcg_at_1000 value: 56.069 - type: ndcg_at_3 value: 42.858000000000004 - type: ndcg_at_5 value: 45.644 - type: precision_at_1 value: 35.046 - type: precision_at_10 value: 9.452 - type: precision_at_100 value: 1.429 - type: precision_at_1000 value: 0.173 - type: precision_at_3 value: 21.346999999999998 - type: precision_at_5 value: 15.342 - type: recall_at_1 value: 28.038999999999998 - type: recall_at_10 value: 64.59700000000001 - type: recall_at_100 value: 87.735 - type: recall_at_1000 value: 97.41300000000001 - type: recall_at_3 value: 47.368 - type: recall_at_5 value: 54.93900000000001 - task: type: Retrieval dataset: name: MTEB CQADupstackRetrieval type: BeIR/cqadupstack config: default split: test revision: 4ffe81d471b1924886b33c7567bfb200e9eec5c4 metrics: - type: map_at_1 value: 28.17291666666667 - type: map_at_10 value: 40.025749999999995 - type: map_at_100 value: 41.39208333333333 - type: map_at_1000 value: 41.499249999999996 - type: map_at_3 value: 36.347 - type: map_at_5 value: 38.41391666666667 - type: mrr_at_1 value: 33.65925 - type: mrr_at_10 value: 44.085499999999996 - type: mrr_at_100 value: 44.94116666666667 - type: mrr_at_1000 value: 44.9855 - type: mrr_at_3 value: 41.2815 - type: mrr_at_5 value: 42.91491666666666 - type: ndcg_at_1 value: 33.65925 - type: ndcg_at_10 value: 46.430833333333325 - type: ndcg_at_100 value: 51.761 - type: 
ndcg_at_1000 value: 53.50899999999999 - type: ndcg_at_3 value: 40.45133333333333 - type: ndcg_at_5 value: 43.31483333333334 - type: precision_at_1 value: 33.65925 - type: precision_at_10 value: 8.4995 - type: precision_at_100 value: 1.3210000000000004 - type: precision_at_1000 value: 0.16591666666666666 - type: precision_at_3 value: 19.165083333333335 - type: precision_at_5 value: 13.81816666666667 - type: recall_at_1 value: 28.17291666666667 - type: recall_at_10 value: 61.12624999999999 - type: recall_at_100 value: 83.97266666666667 - type: recall_at_1000 value: 95.66550000000001 - type: recall_at_3 value: 44.661249999999995 - type: recall_at_5 value: 51.983333333333334 - type: map_at_1 value: 17.936 - type: map_at_10 value: 27.399 - type: map_at_100 value: 28.632 - type: map_at_1000 value: 28.738000000000003 - type: map_at_3 value: 24.456 - type: map_at_5 value: 26.06 - type: mrr_at_1 value: 19.224 - type: mrr_at_10 value: 28.998 - type: mrr_at_100 value: 30.11 - type: mrr_at_1000 value: 30.177 - type: mrr_at_3 value: 26.247999999999998 - type: mrr_at_5 value: 27.708 - type: ndcg_at_1 value: 19.224 - type: ndcg_at_10 value: 32.911 - type: ndcg_at_100 value: 38.873999999999995 - type: ndcg_at_1000 value: 41.277 - type: ndcg_at_3 value: 27.142 - type: ndcg_at_5 value: 29.755 - type: precision_at_1 value: 19.224 - type: precision_at_10 value: 5.6930000000000005 - type: precision_at_100 value: 0.9259999999999999 - type: precision_at_1000 value: 0.126 - type: precision_at_3 value: 12.138 - type: precision_at_5 value: 8.909 - type: recall_at_1 value: 17.936 - type: recall_at_10 value: 48.096 - type: recall_at_100 value: 75.389 - type: recall_at_1000 value: 92.803 - type: recall_at_3 value: 32.812999999999995 - type: recall_at_5 value: 38.851 - task: type: Retrieval dataset: name: MTEB CQADupstackStatsRetrieval type: BeIR/cqadupstack config: default split: test revision: 65ac3a16b8e91f9cee4c9828cc7c335575432a2a metrics: - type: map_at_1 value: 24.681 - type: map_at_10 value: 34.892 - type: map_at_100 value: 35.996 - type: map_at_1000 value: 36.083 - type: map_at_3 value: 31.491999999999997 - type: map_at_5 value: 33.632 - type: mrr_at_1 value: 28.528 - type: mrr_at_10 value: 37.694 - type: mrr_at_100 value: 38.613 - type: mrr_at_1000 value: 38.668 - type: mrr_at_3 value: 34.714 - type: mrr_at_5 value: 36.616 - type: ndcg_at_1 value: 28.528 - type: ndcg_at_10 value: 40.703 - type: ndcg_at_100 value: 45.993 - type: ndcg_at_1000 value: 47.847 - type: ndcg_at_3 value: 34.622 - type: ndcg_at_5 value: 38.035999999999994 - type: precision_at_1 value: 28.528 - type: precision_at_10 value: 6.902 - type: precision_at_100 value: 1.0370000000000001 - type: precision_at_1000 value: 0.126 - type: precision_at_3 value: 15.798000000000002 - type: precision_at_5 value: 11.655999999999999 - type: recall_at_1 value: 24.681 - type: recall_at_10 value: 55.81 - type: recall_at_100 value: 79.785 - type: recall_at_1000 value: 92.959 - type: recall_at_3 value: 39.074 - type: recall_at_5 value: 47.568 - task: type: Retrieval dataset: name: MTEB CQADupstackTexRetrieval type: BeIR/cqadupstack config: default split: test revision: 46989137a86843e03a6195de44b09deda022eec7 metrics: - type: map_at_1 value: 18.627 - type: map_at_10 value: 27.872000000000003 - type: map_at_100 value: 29.237999999999996 - type: map_at_1000 value: 29.363 - type: map_at_3 value: 24.751 - type: map_at_5 value: 26.521 - type: mrr_at_1 value: 23.021 - type: mrr_at_10 value: 31.924000000000003 - type: mrr_at_100 value: 32.922000000000004 - type: 
mrr_at_1000 value: 32.988 - type: mrr_at_3 value: 29.192 - type: mrr_at_5 value: 30.798 - type: ndcg_at_1 value: 23.021 - type: ndcg_at_10 value: 33.535 - type: ndcg_at_100 value: 39.732 - type: ndcg_at_1000 value: 42.201 - type: ndcg_at_3 value: 28.153 - type: ndcg_at_5 value: 30.746000000000002 - type: precision_at_1 value: 23.021 - type: precision_at_10 value: 6.459 - type: precision_at_100 value: 1.1320000000000001 - type: precision_at_1000 value: 0.153 - type: precision_at_3 value: 13.719000000000001 - type: precision_at_5 value: 10.193000000000001 - type: recall_at_1 value: 18.627 - type: recall_at_10 value: 46.463 - type: recall_at_100 value: 74.226 - type: recall_at_1000 value: 91.28500000000001 - type: recall_at_3 value: 31.357000000000003 - type: recall_at_5 value: 38.067 - task: type: Retrieval dataset: name: MTEB CQADupstackUnixRetrieval type: BeIR/cqadupstack config: default split: test revision: 6c6430d3a6d36f8d2a829195bc5dc94d7e063e53 metrics: - type: map_at_1 value: 31.457 - type: map_at_10 value: 42.888 - type: map_at_100 value: 44.24 - type: map_at_1000 value: 44.327 - type: map_at_3 value: 39.588 - type: map_at_5 value: 41.423 - type: mrr_at_1 value: 37.126999999999995 - type: mrr_at_10 value: 47.083000000000006 - type: mrr_at_100 value: 47.997 - type: mrr_at_1000 value: 48.044 - type: mrr_at_3 value: 44.574000000000005 - type: mrr_at_5 value: 46.202 - type: ndcg_at_1 value: 37.126999999999995 - type: ndcg_at_10 value: 48.833 - type: ndcg_at_100 value: 54.327000000000005 - type: ndcg_at_1000 value: 56.011 - type: ndcg_at_3 value: 43.541999999999994 - type: ndcg_at_5 value: 46.127 - type: precision_at_1 value: 37.126999999999995 - type: precision_at_10 value: 8.376999999999999 - type: precision_at_100 value: 1.2309999999999999 - type: precision_at_1000 value: 0.146 - type: precision_at_3 value: 20.211000000000002 - type: precision_at_5 value: 14.16 - type: recall_at_1 value: 31.457 - type: recall_at_10 value: 62.369 - type: recall_at_100 value: 85.444 - type: recall_at_1000 value: 96.65599999999999 - type: recall_at_3 value: 47.961 - type: recall_at_5 value: 54.676 - task: type: Retrieval dataset: name: MTEB CQADupstackWebmastersRetrieval type: BeIR/cqadupstack config: default split: test revision: 160c094312a0e1facb97e55eeddb698c0abe3571 metrics: - type: map_at_1 value: 27.139999999999997 - type: map_at_10 value: 38.801 - type: map_at_100 value: 40.549 - type: map_at_1000 value: 40.802 - type: map_at_3 value: 35.05 - type: map_at_5 value: 36.884 - type: mrr_at_1 value: 33.004 - type: mrr_at_10 value: 43.864 - type: mrr_at_100 value: 44.667 - type: mrr_at_1000 value: 44.717 - type: mrr_at_3 value: 40.777 - type: mrr_at_5 value: 42.319 - type: ndcg_at_1 value: 33.004 - type: ndcg_at_10 value: 46.022 - type: ndcg_at_100 value: 51.542 - type: ndcg_at_1000 value: 53.742000000000004 - type: ndcg_at_3 value: 39.795 - type: ndcg_at_5 value: 42.272 - type: precision_at_1 value: 33.004 - type: precision_at_10 value: 9.012 - type: precision_at_100 value: 1.7770000000000001 - type: precision_at_1000 value: 0.26 - type: precision_at_3 value: 19.038 - type: precision_at_5 value: 13.675999999999998 - type: recall_at_1 value: 27.139999999999997 - type: recall_at_10 value: 60.961 - type: recall_at_100 value: 84.451 - type: recall_at_1000 value: 98.113 - type: recall_at_3 value: 43.001 - type: recall_at_5 value: 49.896 - task: type: Retrieval dataset: name: MTEB ClimateFEVER type: mteb/climate-fever config: default split: test revision: 47f2ac6acb640fc46020b02a5b59fdda04d39380 metrics: - 
type: map_at_1 value: 22.076999999999998 - type: map_at_10 value: 35.44 - type: map_at_100 value: 37.651 - type: map_at_1000 value: 37.824999999999996 - type: map_at_3 value: 30.764999999999997 - type: map_at_5 value: 33.26 - type: mrr_at_1 value: 50.163000000000004 - type: mrr_at_10 value: 61.207 - type: mrr_at_100 value: 61.675000000000004 - type: mrr_at_1000 value: 61.692 - type: mrr_at_3 value: 58.60999999999999 - type: mrr_at_5 value: 60.307 - type: ndcg_at_1 value: 50.163000000000004 - type: ndcg_at_10 value: 45.882 - type: ndcg_at_100 value: 53.239999999999995 - type: ndcg_at_1000 value: 55.852000000000004 - type: ndcg_at_3 value: 40.514 - type: ndcg_at_5 value: 42.038 - type: precision_at_1 value: 50.163000000000004 - type: precision_at_10 value: 13.466000000000001 - type: precision_at_100 value: 2.164 - type: precision_at_1000 value: 0.266 - type: precision_at_3 value: 29.707 - type: precision_at_5 value: 21.694 - type: recall_at_1 value: 22.076999999999998 - type: recall_at_10 value: 50.193 - type: recall_at_100 value: 74.993 - type: recall_at_1000 value: 89.131 - type: recall_at_3 value: 35.472 - type: recall_at_5 value: 41.814 - task: type: Retrieval dataset: name: MTEB DBPedia type: mteb/dbpedia config: default split: test revision: c0f706b76e590d620bd6618b3ca8efdd34e2d659 metrics: - type: map_at_1 value: 9.953 - type: map_at_10 value: 24.515 - type: map_at_100 value: 36.173 - type: map_at_1000 value: 38.351 - type: map_at_3 value: 16.592000000000002 - type: map_at_5 value: 20.036 - type: mrr_at_1 value: 74.25 - type: mrr_at_10 value: 81.813 - type: mrr_at_100 value: 82.006 - type: mrr_at_1000 value: 82.011 - type: mrr_at_3 value: 80.875 - type: mrr_at_5 value: 81.362 - type: ndcg_at_1 value: 62.5 - type: ndcg_at_10 value: 52.42 - type: ndcg_at_100 value: 56.808 - type: ndcg_at_1000 value: 63.532999999999994 - type: ndcg_at_3 value: 56.654 - type: ndcg_at_5 value: 54.18300000000001 - type: precision_at_1 value: 74.25 - type: precision_at_10 value: 42.699999999999996 - type: precision_at_100 value: 13.675 - type: precision_at_1000 value: 2.664 - type: precision_at_3 value: 60.5 - type: precision_at_5 value: 52.800000000000004 - type: recall_at_1 value: 9.953 - type: recall_at_10 value: 30.253999999999998 - type: recall_at_100 value: 62.516000000000005 - type: recall_at_1000 value: 84.163 - type: recall_at_3 value: 18.13 - type: recall_at_5 value: 22.771 - task: type: Classification dataset: name: MTEB EmotionClassification type: mteb/emotion config: default split: test revision: 4f58c6b202a23cf9a4da393831edf4f9183cad37 metrics: - type: accuracy value: 79.455 - type: f1 value: 74.16798697647569 - task: type: Retrieval dataset: name: MTEB FEVER type: mteb/fever config: default split: test revision: bea83ef9e8fb933d90a2f1d5515737465d613e12 metrics: - type: map_at_1 value: 87.531 - type: map_at_10 value: 93.16799999999999 - type: map_at_100 value: 93.341 - type: map_at_1000 value: 93.349 - type: map_at_3 value: 92.444 - type: map_at_5 value: 92.865 - type: mrr_at_1 value: 94.014 - type: mrr_at_10 value: 96.761 - type: mrr_at_100 value: 96.762 - type: mrr_at_1000 value: 96.762 - type: mrr_at_3 value: 96.672 - type: mrr_at_5 value: 96.736 - type: ndcg_at_1 value: 94.014 - type: ndcg_at_10 value: 95.112 - type: ndcg_at_100 value: 95.578 - type: ndcg_at_1000 value: 95.68900000000001 - type: ndcg_at_3 value: 94.392 - type: ndcg_at_5 value: 94.72500000000001 - type: precision_at_1 value: 94.014 - type: precision_at_10 value: 11.065 - type: precision_at_100 value: 1.157 - type: 
precision_at_1000 value: 0.11800000000000001 - type: precision_at_3 value: 35.259 - type: precision_at_5 value: 21.599 - type: recall_at_1 value: 87.531 - type: recall_at_10 value: 97.356 - type: recall_at_100 value: 98.965 - type: recall_at_1000 value: 99.607 - type: recall_at_3 value: 95.312 - type: recall_at_5 value: 96.295 - task: type: Retrieval dataset: name: MTEB FiQA2018 type: mteb/fiqa config: default split: test revision: 27a168819829fe9bcd655c2df245fb19452e8e06 metrics: - type: map_at_1 value: 32.055 - type: map_at_10 value: 53.114 - type: map_at_100 value: 55.235 - type: map_at_1000 value: 55.345 - type: map_at_3 value: 45.854 - type: map_at_5 value: 50.025 - type: mrr_at_1 value: 60.34 - type: mrr_at_10 value: 68.804 - type: mrr_at_100 value: 69.309 - type: mrr_at_1000 value: 69.32199999999999 - type: mrr_at_3 value: 66.40899999999999 - type: mrr_at_5 value: 67.976 - type: ndcg_at_1 value: 60.34 - type: ndcg_at_10 value: 62.031000000000006 - type: ndcg_at_100 value: 68.00500000000001 - type: ndcg_at_1000 value: 69.286 - type: ndcg_at_3 value: 56.355999999999995 - type: ndcg_at_5 value: 58.687 - type: precision_at_1 value: 60.34 - type: precision_at_10 value: 17.176 - type: precision_at_100 value: 2.36 - type: precision_at_1000 value: 0.259 - type: precision_at_3 value: 37.14 - type: precision_at_5 value: 27.809 - type: recall_at_1 value: 32.055 - type: recall_at_10 value: 70.91 - type: recall_at_100 value: 91.83 - type: recall_at_1000 value: 98.871 - type: recall_at_3 value: 51.202999999999996 - type: recall_at_5 value: 60.563 - task: type: Retrieval dataset: name: MTEB HotpotQA type: mteb/hotpotqa config: default split: test revision: ab518f4d6fcca38d87c25209f94beba119d02014 metrics: - type: map_at_1 value: 43.68 - type: map_at_10 value: 64.389 - type: map_at_100 value: 65.24 - type: map_at_1000 value: 65.303 - type: map_at_3 value: 61.309000000000005 - type: map_at_5 value: 63.275999999999996 - type: mrr_at_1 value: 87.36 - type: mrr_at_10 value: 91.12 - type: mrr_at_100 value: 91.227 - type: mrr_at_1000 value: 91.229 - type: mrr_at_3 value: 90.57600000000001 - type: mrr_at_5 value: 90.912 - type: ndcg_at_1 value: 87.36 - type: ndcg_at_10 value: 73.076 - type: ndcg_at_100 value: 75.895 - type: ndcg_at_1000 value: 77.049 - type: ndcg_at_3 value: 68.929 - type: ndcg_at_5 value: 71.28 - type: precision_at_1 value: 87.36 - type: precision_at_10 value: 14.741000000000001 - type: precision_at_100 value: 1.694 - type: precision_at_1000 value: 0.185 - type: precision_at_3 value: 43.043 - type: precision_at_5 value: 27.681 - type: recall_at_1 value: 43.68 - type: recall_at_10 value: 73.707 - type: recall_at_100 value: 84.7 - type: recall_at_1000 value: 92.309 - type: recall_at_3 value: 64.564 - type: recall_at_5 value: 69.203 - task: type: Classification dataset: name: MTEB ImdbClassification type: mteb/imdb config: default split: test revision: 3d86128a09e091d6018b6d26cad27f2739fc2db7 metrics: - type: accuracy value: 96.75399999999999 - type: ap value: 95.29389839242187 - type: f1 value: 96.75348377433475 - task: type: Retrieval dataset: name: MTEB MSMARCO type: mteb/msmarco config: default split: dev revision: c5a29a104738b98a9e76336939199e264163d4a0 metrics: - type: map_at_1 value: 25.176 - type: map_at_10 value: 38.598 - type: map_at_100 value: 39.707 - type: map_at_1000 value: 39.744 - type: map_at_3 value: 34.566 - type: map_at_5 value: 36.863 - type: mrr_at_1 value: 25.874000000000002 - type: mrr_at_10 value: 39.214 - type: mrr_at_100 value: 40.251 - type: mrr_at_1000 value: 
40.281 - type: mrr_at_3 value: 35.291 - type: mrr_at_5 value: 37.545 - type: ndcg_at_1 value: 25.874000000000002 - type: ndcg_at_10 value: 45.98 - type: ndcg_at_100 value: 51.197 - type: ndcg_at_1000 value: 52.073 - type: ndcg_at_3 value: 37.785999999999994 - type: ndcg_at_5 value: 41.870000000000005 - type: precision_at_1 value: 25.874000000000002 - type: precision_at_10 value: 7.181 - type: precision_at_100 value: 0.979 - type: precision_at_1000 value: 0.106 - type: precision_at_3 value: 16.051000000000002 - type: precision_at_5 value: 11.713 - type: recall_at_1 value: 25.176 - type: recall_at_10 value: 68.67699999999999 - type: recall_at_100 value: 92.55 - type: recall_at_1000 value: 99.164 - type: recall_at_3 value: 46.372 - type: recall_at_5 value: 56.16 - task: type: Classification dataset: name: MTEB MTOPDomainClassification (en) type: mteb/mtop_domain config: en split: test revision: d80d48c1eb48d3562165c59d59d0034df9fff0bf metrics: - type: accuracy value: 99.03784769721841 - type: f1 value: 98.97791641821495 - task: type: Classification dataset: name: MTEB MTOPIntentClassification (en) type: mteb/mtop_intent config: en split: test revision: ae001d0e6b1228650b7bd1c2c65fb50ad11a8aba metrics: - type: accuracy value: 91.88326493388054 - type: f1 value: 73.74809928034335 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (en) type: mteb/amazon_massive_intent config: en split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 85.41358439811701 - type: f1 value: 83.503679460639 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (en) type: mteb/amazon_massive_scenario config: en split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 89.77135171486215 - type: f1 value: 88.89843747468366 - task: type: Clustering dataset: name: MTEB MedrxivClusteringP2P type: mteb/medrxiv-clustering-p2p config: default split: test revision: e7a26af6f3ae46b30dde8737f02c07b1505bcc73 metrics: - type: v_measure value: 46.22695362087359 - task: type: Clustering dataset: name: MTEB MedrxivClusteringS2S type: mteb/medrxiv-clustering-s2s config: default split: test revision: 35191c8c0dca72d8ff3efcd72aa802307d469663 metrics: - type: v_measure value: 44.132372165849425 - task: type: Reranking dataset: name: MTEB MindSmallReranking type: mteb/mind_small config: default split: test revision: 3bdac13927fdc888b903db93b2ffdbd90b295a69 metrics: - type: map value: 33.35680810650402 - type: mrr value: 34.72625715637218 - task: type: Retrieval dataset: name: MTEB NFCorpus type: mteb/nfcorpus config: default split: test revision: ec0fa4fe99da2ff19ca1214b7966684033a58814 metrics: - type: map_at_1 value: 7.165000000000001 - type: map_at_10 value: 15.424 - type: map_at_100 value: 20.28 - type: map_at_1000 value: 22.065 - type: map_at_3 value: 11.236 - type: map_at_5 value: 13.025999999999998 - type: mrr_at_1 value: 51.702999999999996 - type: mrr_at_10 value: 59.965 - type: mrr_at_100 value: 60.667 - type: mrr_at_1000 value: 60.702999999999996 - type: mrr_at_3 value: 58.772000000000006 - type: mrr_at_5 value: 59.267 - type: ndcg_at_1 value: 49.536 - type: ndcg_at_10 value: 40.6 - type: ndcg_at_100 value: 37.848 - type: ndcg_at_1000 value: 46.657 - type: ndcg_at_3 value: 46.117999999999995 - type: ndcg_at_5 value: 43.619 - type: precision_at_1 value: 51.393 - type: precision_at_10 value: 30.31 - type: precision_at_100 value: 9.972 - type: precision_at_1000 value: 2.329 - type: precision_at_3 value: 
43.137 - type: precision_at_5 value: 37.585 - type: recall_at_1 value: 7.165000000000001 - type: recall_at_10 value: 19.689999999999998 - type: recall_at_100 value: 39.237 - type: recall_at_1000 value: 71.417 - type: recall_at_3 value: 12.247 - type: recall_at_5 value: 14.902999999999999 - task: type: Retrieval dataset: name: MTEB NQ type: mteb/nq config: default split: test revision: b774495ed302d8c44a3a7ea25c90dbce03968f31 metrics: - type: map_at_1 value: 42.653999999999996 - type: map_at_10 value: 59.611999999999995 - type: map_at_100 value: 60.32300000000001 - type: map_at_1000 value: 60.336 - type: map_at_3 value: 55.584999999999994 - type: map_at_5 value: 58.19 - type: mrr_at_1 value: 47.683 - type: mrr_at_10 value: 62.06700000000001 - type: mrr_at_100 value: 62.537 - type: mrr_at_1000 value: 62.544999999999995 - type: mrr_at_3 value: 59.178 - type: mrr_at_5 value: 61.034 - type: ndcg_at_1 value: 47.654 - type: ndcg_at_10 value: 67.001 - type: ndcg_at_100 value: 69.73899999999999 - type: ndcg_at_1000 value: 69.986 - type: ndcg_at_3 value: 59.95700000000001 - type: ndcg_at_5 value: 64.025 - type: precision_at_1 value: 47.654 - type: precision_at_10 value: 10.367999999999999 - type: precision_at_100 value: 1.192 - type: precision_at_1000 value: 0.121 - type: precision_at_3 value: 26.651000000000003 - type: precision_at_5 value: 18.459 - type: recall_at_1 value: 42.653999999999996 - type: recall_at_10 value: 86.619 - type: recall_at_100 value: 98.04899999999999 - type: recall_at_1000 value: 99.812 - type: recall_at_3 value: 68.987 - type: recall_at_5 value: 78.158 - task: type: Retrieval dataset: name: MTEB QuoraRetrieval type: mteb/quora config: default split: test revision: None metrics: - type: map_at_1 value: 72.538 - type: map_at_10 value: 86.702 - type: map_at_100 value: 87.31 - type: map_at_1000 value: 87.323 - type: map_at_3 value: 83.87 - type: map_at_5 value: 85.682 - type: mrr_at_1 value: 83.31 - type: mrr_at_10 value: 89.225 - type: mrr_at_100 value: 89.30399999999999 - type: mrr_at_1000 value: 89.30399999999999 - type: mrr_at_3 value: 88.44300000000001 - type: mrr_at_5 value: 89.005 - type: ndcg_at_1 value: 83.32000000000001 - type: ndcg_at_10 value: 90.095 - type: ndcg_at_100 value: 91.12 - type: ndcg_at_1000 value: 91.179 - type: ndcg_at_3 value: 87.606 - type: ndcg_at_5 value: 89.031 - type: precision_at_1 value: 83.32000000000001 - type: precision_at_10 value: 13.641 - type: precision_at_100 value: 1.541 - type: precision_at_1000 value: 0.157 - type: precision_at_3 value: 38.377 - type: precision_at_5 value: 25.162000000000003 - type: recall_at_1 value: 72.538 - type: recall_at_10 value: 96.47200000000001 - type: recall_at_100 value: 99.785 - type: recall_at_1000 value: 99.99900000000001 - type: recall_at_3 value: 89.278 - type: recall_at_5 value: 93.367 - task: type: Clustering dataset: name: MTEB RedditClustering type: mteb/reddit-clustering config: default split: test revision: 24640382cdbf8abc73003fb0fa6d111a705499eb metrics: - type: v_measure value: 73.55219145406065 - task: type: Clustering dataset: name: MTEB RedditClusteringP2P type: mteb/reddit-clustering-p2p config: default split: test revision: 282350215ef01743dc01b456c7f5241fa8937f16 metrics: - type: v_measure value: 74.13437105242755 - task: type: Retrieval dataset: name: MTEB SCIDOCS type: mteb/scidocs config: default split: test revision: None metrics: - type: map_at_1 value: 6.873 - type: map_at_10 value: 17.944 - type: map_at_100 value: 21.171 - type: map_at_1000 value: 21.528 - type: map_at_3 value: 
12.415 - type: map_at_5 value: 15.187999999999999 - type: mrr_at_1 value: 33.800000000000004 - type: mrr_at_10 value: 46.455 - type: mrr_at_100 value: 47.378 - type: mrr_at_1000 value: 47.394999999999996 - type: mrr_at_3 value: 42.367 - type: mrr_at_5 value: 44.972 - type: ndcg_at_1 value: 33.800000000000004 - type: ndcg_at_10 value: 28.907 - type: ndcg_at_100 value: 39.695 - type: ndcg_at_1000 value: 44.582 - type: ndcg_at_3 value: 26.949 - type: ndcg_at_5 value: 23.988 - type: precision_at_1 value: 33.800000000000004 - type: precision_at_10 value: 15.079999999999998 - type: precision_at_100 value: 3.056 - type: precision_at_1000 value: 0.42100000000000004 - type: precision_at_3 value: 25.167 - type: precision_at_5 value: 21.26 - type: recall_at_1 value: 6.873 - type: recall_at_10 value: 30.568 - type: recall_at_100 value: 62.062 - type: recall_at_1000 value: 85.37700000000001 - type: recall_at_3 value: 15.312999999999999 - type: recall_at_5 value: 21.575 - task: type: STS dataset: name: MTEB SICK-R type: mteb/sickr-sts config: default split: test revision: a6ea5a8cab320b040a23452cc28066d9beae2cee metrics: - type: cos_sim_pearson value: 82.37009118256057 - type: cos_sim_spearman value: 79.27986395671529 - type: euclidean_pearson value: 79.18037715442115 - type: euclidean_spearman value: 79.28004791561621 - type: manhattan_pearson value: 79.34062972800541 - type: manhattan_spearman value: 79.43106695543402 - task: type: STS dataset: name: MTEB STS12 type: mteb/sts12-sts config: default split: test revision: a0d554a64d88156834ff5ae9920b964011b16384 metrics: - type: cos_sim_pearson value: 87.48474767383833 - type: cos_sim_spearman value: 79.54505388752513 - type: euclidean_pearson value: 83.43282704179565 - type: euclidean_spearman value: 79.54579919925405 - type: manhattan_pearson value: 83.77564492427952 - type: manhattan_spearman value: 79.84558396989286 - task: type: STS dataset: name: MTEB STS13 type: mteb/sts13-sts config: default split: test revision: 7e90230a92c190f1bf69ae9002b8cea547a64cca metrics: - type: cos_sim_pearson value: 88.803698035802 - type: cos_sim_spearman value: 88.83451367754881 - type: euclidean_pearson value: 88.28939285711628 - type: euclidean_spearman value: 88.83528996073112 - type: manhattan_pearson value: 88.28017412671795 - type: manhattan_spearman value: 88.9228828016344 - task: type: STS dataset: name: MTEB STS14 type: mteb/sts14-sts config: default split: test revision: 6031580fec1f6af667f0bd2da0a551cf4f0b2375 metrics: - type: cos_sim_pearson value: 85.27469288153428 - type: cos_sim_spearman value: 83.87477064876288 - type: euclidean_pearson value: 84.2601737035379 - type: euclidean_spearman value: 83.87431082479074 - type: manhattan_pearson value: 84.3621547772745 - type: manhattan_spearman value: 84.12094375000423 - task: type: STS dataset: name: MTEB STS15 type: mteb/sts15-sts config: default split: test revision: ae752c7c21bf194d8b67fd573edf7ae58183cbe3 metrics: - type: cos_sim_pearson value: 88.12749863201587 - type: cos_sim_spearman value: 88.54287568368565 - type: euclidean_pearson value: 87.90429700607999 - type: euclidean_spearman value: 88.5437689576261 - type: manhattan_pearson value: 88.19276653356833 - type: manhattan_spearman value: 88.99995393814679 - task: type: STS dataset: name: MTEB STS16 type: mteb/sts16-sts config: default split: test revision: 4d8694f8f0e0100860b497b999b3dbed754a0513 metrics: - type: cos_sim_pearson value: 85.68398747560902 - type: cos_sim_spearman value: 86.48815303460574 - type: euclidean_pearson value: 
85.52356631237954 - type: euclidean_spearman value: 86.486391949551 - type: manhattan_pearson value: 85.67267981761788 - type: manhattan_spearman value: 86.7073696332485 - task: type: STS dataset: name: MTEB STS17 (en-en) type: mteb/sts17-crosslingual-sts config: en-en split: test revision: af5e6fb845001ecf41f4c1e033ce921939a2a68d metrics: - type: cos_sim_pearson value: 88.9057107443124 - type: cos_sim_spearman value: 88.7312168757697 - type: euclidean_pearson value: 88.72810439714794 - type: euclidean_spearman value: 88.71976185854771 - type: manhattan_pearson value: 88.50433745949111 - type: manhattan_spearman value: 88.51726175544195 - task: type: STS dataset: name: MTEB STS22 (en) type: mteb/sts22-crosslingual-sts config: en split: test revision: eea2b4fe26a775864c896887d910b76a8098ad3f metrics: - type: cos_sim_pearson value: 67.59391795109886 - type: cos_sim_spearman value: 66.87613008631367 - type: euclidean_pearson value: 69.23198488262217 - type: euclidean_spearman value: 66.85427723013692 - type: manhattan_pearson value: 69.50730124841084 - type: manhattan_spearman value: 67.10404669820792 - task: type: STS dataset: name: MTEB STSBenchmark type: mteb/stsbenchmark-sts config: default split: test revision: b0fddb56ed78048fa8b90373c8a3cfc37b684831 metrics: - type: cos_sim_pearson value: 87.0820605344619 - type: cos_sim_spearman value: 86.8518089863434 - type: euclidean_pearson value: 86.31087134689284 - type: euclidean_spearman value: 86.8518520517941 - type: manhattan_pearson value: 86.47203796160612 - type: manhattan_spearman value: 87.1080149734421 - task: type: Reranking dataset: name: MTEB SciDocsRR type: mteb/scidocs-reranking config: default split: test revision: d3c5e1fc0b855ab6097bf1cda04dd73947d7caab metrics: - type: map value: 89.09255369305481 - type: mrr value: 97.10323445617563 - task: type: Retrieval dataset: name: MTEB SciFact type: mteb/scifact config: default split: test revision: 0228b52cf27578f30900b9e5271d331663a030d7 metrics: - type: map_at_1 value: 61.260999999999996 - type: map_at_10 value: 74.043 - type: map_at_100 value: 74.37700000000001 - type: map_at_1000 value: 74.384 - type: map_at_3 value: 71.222 - type: map_at_5 value: 72.875 - type: mrr_at_1 value: 64.333 - type: mrr_at_10 value: 74.984 - type: mrr_at_100 value: 75.247 - type: mrr_at_1000 value: 75.25500000000001 - type: mrr_at_3 value: 73.167 - type: mrr_at_5 value: 74.35000000000001 - type: ndcg_at_1 value: 64.333 - type: ndcg_at_10 value: 79.06 - type: ndcg_at_100 value: 80.416 - type: ndcg_at_1000 value: 80.55600000000001 - type: ndcg_at_3 value: 74.753 - type: ndcg_at_5 value: 76.97500000000001 - type: precision_at_1 value: 64.333 - type: precision_at_10 value: 10.567 - type: precision_at_100 value: 1.1199999999999999 - type: precision_at_1000 value: 0.11299999999999999 - type: precision_at_3 value: 29.889 - type: precision_at_5 value: 19.533 - type: recall_at_1 value: 61.260999999999996 - type: recall_at_10 value: 93.167 - type: recall_at_100 value: 99.0 - type: recall_at_1000 value: 100.0 - type: recall_at_3 value: 81.667 - type: recall_at_5 value: 87.394 - task: type: PairClassification dataset: name: MTEB SprintDuplicateQuestions type: mteb/sprintduplicatequestions-pairclassification config: default split: test revision: d66bd1f72af766a5cc4b0ca5e00c162f89e8cc46 metrics: - type: cos_sim_accuracy value: 99.71980198019801 - type: cos_sim_ap value: 92.81616007802704 - type: cos_sim_f1 value: 85.17548454688318 - type: cos_sim_precision value: 89.43894389438944 - type: cos_sim_recall value: 81.3 - 
type: dot_accuracy value: 99.71980198019801 - type: dot_ap value: 92.81398760591358 - type: dot_f1 value: 85.17548454688318 - type: dot_precision value: 89.43894389438944 - type: dot_recall value: 81.3 - type: euclidean_accuracy value: 99.71980198019801 - type: euclidean_ap value: 92.81560637245072 - type: euclidean_f1 value: 85.17548454688318 - type: euclidean_precision value: 89.43894389438944 - type: euclidean_recall value: 81.3 - type: manhattan_accuracy value: 99.73069306930694 - type: manhattan_ap value: 93.14005487480794 - type: manhattan_f1 value: 85.56263269639068 - type: manhattan_precision value: 91.17647058823529 - type: manhattan_recall value: 80.60000000000001 - type: max_accuracy value: 99.73069306930694 - type: max_ap value: 93.14005487480794 - type: max_f1 value: 85.56263269639068 - task: type: Clustering dataset: name: MTEB StackExchangeClustering type: mteb/stackexchange-clustering config: default split: test revision: 6cbc1f7b2bc0622f2e39d2c77fa502909748c259 metrics: - type: v_measure value: 79.86443362395185 - task: type: Clustering dataset: name: MTEB StackExchangeClusteringP2P type: mteb/stackexchange-clustering-p2p config: default split: test revision: 815ca46b2622cec33ccafc3735d572c266efdb44 metrics: - type: v_measure value: 49.40897096662564 - task: type: Reranking dataset: name: MTEB StackOverflowDupQuestions type: mteb/stackoverflowdupquestions-reranking config: default split: test revision: e185fbe320c72810689fc5848eb6114e1ef5ec69 metrics: - type: map value: 55.66040806627947 - type: mrr value: 56.58670475766064 - task: type: Summarization dataset: name: MTEB SummEval type: mteb/summeval config: default split: test revision: cda12ad7615edc362dbf25a00fdd61d3b1eaf93c metrics: - type: cos_sim_pearson value: 31.51015090598575 - type: cos_sim_spearman value: 31.35016454939226 - type: dot_pearson value: 31.5150068731 - type: dot_spearman value: 31.34790869023487 - task: type: Retrieval dataset: name: MTEB TRECCOVID type: mteb/trec-covid config: default split: test revision: None metrics: - type: map_at_1 value: 0.254 - type: map_at_10 value: 2.064 - type: map_at_100 value: 12.909 - type: map_at_1000 value: 31.761 - type: map_at_3 value: 0.738 - type: map_at_5 value: 1.155 - type: mrr_at_1 value: 96.0 - type: mrr_at_10 value: 98.0 - type: mrr_at_100 value: 98.0 - type: mrr_at_1000 value: 98.0 - type: mrr_at_3 value: 98.0 - type: mrr_at_5 value: 98.0 - type: ndcg_at_1 value: 93.0 - type: ndcg_at_10 value: 82.258 - type: ndcg_at_100 value: 64.34 - type: ndcg_at_1000 value: 57.912 - type: ndcg_at_3 value: 90.827 - type: ndcg_at_5 value: 86.79 - type: precision_at_1 value: 96.0 - type: precision_at_10 value: 84.8 - type: precision_at_100 value: 66.0 - type: precision_at_1000 value: 25.356 - type: precision_at_3 value: 94.667 - type: precision_at_5 value: 90.4 - type: recall_at_1 value: 0.254 - type: recall_at_10 value: 2.1950000000000003 - type: recall_at_100 value: 16.088 - type: recall_at_1000 value: 54.559000000000005 - type: recall_at_3 value: 0.75 - type: recall_at_5 value: 1.191 - task: type: Retrieval dataset: name: MTEB Touche2020 type: mteb/touche2020 config: default split: test revision: a34f9a33db75fa0cbb21bb5cfc3dae8dc8bec93f metrics: - type: map_at_1 value: 2.976 - type: map_at_10 value: 11.389000000000001 - type: map_at_100 value: 18.429000000000002 - type: map_at_1000 value: 20.113 - type: map_at_3 value: 6.483 - type: map_at_5 value: 8.770999999999999 - type: mrr_at_1 value: 40.816 - type: mrr_at_10 value: 58.118 - type: mrr_at_100 value: 
58.489999999999995 - type: mrr_at_1000 value: 58.489999999999995 - type: mrr_at_3 value: 53.061 - type: mrr_at_5 value: 57.041 - type: ndcg_at_1 value: 40.816 - type: ndcg_at_10 value: 30.567 - type: ndcg_at_100 value: 42.44 - type: ndcg_at_1000 value: 53.480000000000004 - type: ndcg_at_3 value: 36.016 - type: ndcg_at_5 value: 34.257 - type: precision_at_1 value: 42.857 - type: precision_at_10 value: 25.714 - type: precision_at_100 value: 8.429 - type: precision_at_1000 value: 1.5939999999999999 - type: precision_at_3 value: 36.735 - type: precision_at_5 value: 33.878 - type: recall_at_1 value: 2.976 - type: recall_at_10 value: 17.854999999999997 - type: recall_at_100 value: 51.833 - type: recall_at_1000 value: 86.223 - type: recall_at_3 value: 7.887 - type: recall_at_5 value: 12.026 - task: type: Classification dataset: name: MTEB ToxicConversationsClassification type: mteb/toxic_conversations_50k config: default split: test revision: d7c0de2777da35d6aae2200a62c6e0e5af397c4c metrics: - type: accuracy value: 85.1174 - type: ap value: 30.169441069345748 - type: f1 value: 69.79254701873245 - task: type: Classification dataset: name: MTEB TweetSentimentExtractionClassification type: mteb/tweet_sentiment_extraction config: default split: test revision: d604517c81ca91fe16a244d1248fc021f9ecee7a metrics: - type: accuracy value: 72.58347481607245 - type: f1 value: 72.74877295564937 - task: type: Clustering dataset: name: MTEB TwentyNewsgroupsClustering type: mteb/twentynewsgroups-clustering config: default split: test revision: 6125ec4e24fa026cec8a478383ee943acfbd5449 metrics: - type: v_measure value: 53.90586138221305 - task: type: PairClassification dataset: name: MTEB TwitterSemEval2015 type: mteb/twittersemeval2015-pairclassification config: default split: test revision: 70970daeab8776df92f5ea462b6173c0b46fd2d1 metrics: - type: cos_sim_accuracy value: 87.35769207844072 - type: cos_sim_ap value: 77.9645072410354 - type: cos_sim_f1 value: 71.32352941176471 - type: cos_sim_precision value: 66.5903890160183 - type: cos_sim_recall value: 76.78100263852242 - type: dot_accuracy value: 87.37557370209214 - type: dot_ap value: 77.96250046429908 - type: dot_f1 value: 71.28932757557064 - type: dot_precision value: 66.95249130938586 - type: dot_recall value: 76.22691292875989 - type: euclidean_accuracy value: 87.35173153722357 - type: euclidean_ap value: 77.96520460741593 - type: euclidean_f1 value: 71.32470733210104 - type: euclidean_precision value: 66.91329479768785 - type: euclidean_recall value: 76.35883905013192 - type: manhattan_accuracy value: 87.25636287774931 - type: manhattan_ap value: 77.77752485611796 - type: manhattan_f1 value: 71.18148599269183 - type: manhattan_precision value: 66.10859728506787 - type: manhattan_recall value: 77.0976253298153 - type: max_accuracy value: 87.37557370209214 - type: max_ap value: 77.96520460741593 - type: max_f1 value: 71.32470733210104 - task: type: PairClassification dataset: name: MTEB TwitterURLCorpus type: mteb/twitterurlcorpus-pairclassification config: default split: test revision: 8b6510b0b1fa4e4c4f879467980e9be563ec1cdf metrics: - type: cos_sim_accuracy value: 89.38176737687739 - type: cos_sim_ap value: 86.58811861657401 - type: cos_sim_f1 value: 79.09430644097604 - type: cos_sim_precision value: 75.45085977911366 - type: cos_sim_recall value: 83.10748383122882 - type: dot_accuracy value: 89.38370784336554 - type: dot_ap value: 86.58840606004333 - type: dot_f1 value: 79.10179860068133 - type: dot_precision value: 75.44546153308643 - type: dot_recall 
value: 83.13058207576223 - type: euclidean_accuracy value: 89.38564830985369 - type: euclidean_ap value: 86.58820721061164 - type: euclidean_f1 value: 79.09070942235888 - type: euclidean_precision value: 75.38729937194697 - type: euclidean_recall value: 83.17677856482906 - type: manhattan_accuracy value: 89.40699344122326 - type: manhattan_ap value: 86.60631843011362 - type: manhattan_f1 value: 79.14949970570925 - type: manhattan_precision value: 75.78191039729502 - type: manhattan_recall value: 82.83030489682784 - type: max_accuracy value: 89.40699344122326 - type: max_ap value: 86.60631843011362 - type: max_f1 value: 79.14949970570925 - task: type: STS dataset: name: MTEB AFQMC type: C-MTEB/AFQMC config: default split: validation revision: b44c3b011063adb25877c13823db83bb193913c4 metrics: - type: cos_sim_pearson value: 65.58442135663871 - type: cos_sim_spearman value: 72.2538631361313 - type: euclidean_pearson value: 70.97255486607429 - type: euclidean_spearman value: 72.25374250228647 - type: manhattan_pearson value: 70.83250199989911 - type: manhattan_spearman value: 72.14819496536272 - task: type: STS dataset: name: MTEB ATEC type: C-MTEB/ATEC config: default split: test revision: 0f319b1142f28d00e055a6770f3f726ae9b7d865 metrics: - type: cos_sim_pearson value: 59.99478404929932 - type: cos_sim_spearman value: 62.61836216999812 - type: euclidean_pearson value: 66.86429811933593 - type: euclidean_spearman value: 62.6183520374191 - type: manhattan_pearson value: 66.8063778911633 - type: manhattan_spearman value: 62.569607573241115 - task: type: Classification dataset: name: MTEB AmazonReviewsClassification (zh) type: mteb/amazon_reviews_multi config: zh split: test revision: 1399c76144fd37290681b995c656ef9b2e06e26d metrics: - type: accuracy value: 53.98400000000001 - type: f1 value: 51.21447361350723 - task: type: STS dataset: name: MTEB BQ type: C-MTEB/BQ config: default split: test revision: e3dda5e115e487b39ec7e618c0c6a29137052a55 metrics: - type: cos_sim_pearson value: 79.11941660686553 - type: cos_sim_spearman value: 81.25029594540435 - type: euclidean_pearson value: 82.06973504238826 - type: euclidean_spearman value: 81.2501989488524 - type: manhattan_pearson value: 82.10094630392753 - type: manhattan_spearman value: 81.27987244392389 - task: type: Clustering dataset: name: MTEB CLSClusteringP2P type: C-MTEB/CLSClusteringP2P config: default split: test revision: 4b6227591c6c1a73bc76b1055f3b7f3588e72476 metrics: - type: v_measure value: 47.07270168705156 - task: type: Clustering dataset: name: MTEB CLSClusteringS2S type: C-MTEB/CLSClusteringS2S config: default split: test revision: e458b3f5414b62b7f9f83499ac1f5497ae2e869f metrics: - type: v_measure value: 45.98511703185043 - task: type: Reranking dataset: name: MTEB CMedQAv1 type: C-MTEB/CMedQAv1-reranking config: default split: test revision: 8d7f1e942507dac42dc58017c1a001c3717da7df metrics: - type: map value: 88.19895157194931 - type: mrr value: 90.21424603174603 - task: type: Reranking dataset: name: MTEB CMedQAv2 type: C-MTEB/CMedQAv2-reranking config: default split: test revision: 23d186750531a14a0357ca22cd92d712fd512ea0 metrics: - type: map value: 88.03317320980119 - type: mrr value: 89.9461507936508 - task: type: Retrieval dataset: name: MTEB CmedqaRetrieval type: C-MTEB/CmedqaRetrieval config: default split: dev revision: cd540c506dae1cf9e9a59c3e06f42030d54e7301 metrics: - type: map_at_1 value: 29.037000000000003 - type: map_at_10 value: 42.001 - type: map_at_100 value: 43.773 - type: map_at_1000 value: 43.878 - type: 
map_at_3 value: 37.637 - type: map_at_5 value: 40.034 - type: mrr_at_1 value: 43.136 - type: mrr_at_10 value: 51.158 - type: mrr_at_100 value: 52.083 - type: mrr_at_1000 value: 52.12 - type: mrr_at_3 value: 48.733 - type: mrr_at_5 value: 50.025 - type: ndcg_at_1 value: 43.136 - type: ndcg_at_10 value: 48.685 - type: ndcg_at_100 value: 55.513 - type: ndcg_at_1000 value: 57.242000000000004 - type: ndcg_at_3 value: 43.329 - type: ndcg_at_5 value: 45.438 - type: precision_at_1 value: 43.136 - type: precision_at_10 value: 10.56 - type: precision_at_100 value: 1.6129999999999998 - type: precision_at_1000 value: 0.184 - type: precision_at_3 value: 24.064 - type: precision_at_5 value: 17.269000000000002 - type: recall_at_1 value: 29.037000000000003 - type: recall_at_10 value: 59.245000000000005 - type: recall_at_100 value: 87.355 - type: recall_at_1000 value: 98.74000000000001 - type: recall_at_3 value: 42.99 - type: recall_at_5 value: 49.681999999999995 - task: type: PairClassification dataset: name: MTEB Cmnli type: C-MTEB/CMNLI config: default split: validation revision: 41bc36f332156f7adc9e38f53777c959b2ae9766 metrics: - type: cos_sim_accuracy value: 82.68190018039687 - type: cos_sim_ap value: 90.18017125327886 - type: cos_sim_f1 value: 83.64080906868193 - type: cos_sim_precision value: 79.7076890489303 - type: cos_sim_recall value: 87.98223053542202 - type: dot_accuracy value: 82.68190018039687 - type: dot_ap value: 90.18782350103646 - type: dot_f1 value: 83.64242087729039 - type: dot_precision value: 79.65313028764805 - type: dot_recall value: 88.05237315875614 - type: euclidean_accuracy value: 82.68190018039687 - type: euclidean_ap value: 90.1801957900632 - type: euclidean_f1 value: 83.63636363636364 - type: euclidean_precision value: 79.52772506852203 - type: euclidean_recall value: 88.19265840542437 - type: manhattan_accuracy value: 82.14070956103427 - type: manhattan_ap value: 89.96178420101427 - type: manhattan_f1 value: 83.21087838578791 - type: manhattan_precision value: 78.35605121850475 - type: manhattan_recall value: 88.70703764320785 - type: max_accuracy value: 82.68190018039687 - type: max_ap value: 90.18782350103646 - type: max_f1 value: 83.64242087729039 - task: type: Retrieval dataset: name: MTEB CovidRetrieval type: C-MTEB/CovidRetrieval config: default split: dev revision: 1271c7809071a13532e05f25fb53511ffce77117 metrics: - type: map_at_1 value: 72.234 - type: map_at_10 value: 80.10000000000001 - type: map_at_100 value: 80.36 - type: map_at_1000 value: 80.363 - type: map_at_3 value: 78.315 - type: map_at_5 value: 79.607 - type: mrr_at_1 value: 72.392 - type: mrr_at_10 value: 80.117 - type: mrr_at_100 value: 80.36999999999999 - type: mrr_at_1000 value: 80.373 - type: mrr_at_3 value: 78.469 - type: mrr_at_5 value: 79.633 - type: ndcg_at_1 value: 72.392 - type: ndcg_at_10 value: 83.651 - type: ndcg_at_100 value: 84.749 - type: ndcg_at_1000 value: 84.83000000000001 - type: ndcg_at_3 value: 80.253 - type: ndcg_at_5 value: 82.485 - type: precision_at_1 value: 72.392 - type: precision_at_10 value: 9.557 - type: precision_at_100 value: 1.004 - type: precision_at_1000 value: 0.101 - type: precision_at_3 value: 28.732000000000003 - type: precision_at_5 value: 18.377 - type: recall_at_1 value: 72.234 - type: recall_at_10 value: 94.573 - type: recall_at_100 value: 99.368 - type: recall_at_1000 value: 100.0 - type: recall_at_3 value: 85.669 - type: recall_at_5 value: 91.01700000000001 - task: type: Retrieval dataset: name: MTEB DuRetrieval type: C-MTEB/DuRetrieval config: default split: 
dev revision: a1a333e290fe30b10f3f56498e3a0d911a693ced metrics: - type: map_at_1 value: 26.173999999999996 - type: map_at_10 value: 80.04 - type: map_at_100 value: 82.94500000000001 - type: map_at_1000 value: 82.98100000000001 - type: map_at_3 value: 55.562999999999995 - type: map_at_5 value: 69.89800000000001 - type: mrr_at_1 value: 89.5 - type: mrr_at_10 value: 92.996 - type: mrr_at_100 value: 93.06400000000001 - type: mrr_at_1000 value: 93.065 - type: mrr_at_3 value: 92.658 - type: mrr_at_5 value: 92.84599999999999 - type: ndcg_at_1 value: 89.5 - type: ndcg_at_10 value: 87.443 - type: ndcg_at_100 value: 90.253 - type: ndcg_at_1000 value: 90.549 - type: ndcg_at_3 value: 85.874 - type: ndcg_at_5 value: 84.842 - type: precision_at_1 value: 89.5 - type: precision_at_10 value: 41.805 - type: precision_at_100 value: 4.827 - type: precision_at_1000 value: 0.49 - type: precision_at_3 value: 76.85 - type: precision_at_5 value: 64.8 - type: recall_at_1 value: 26.173999999999996 - type: recall_at_10 value: 89.101 - type: recall_at_100 value: 98.08099999999999 - type: recall_at_1000 value: 99.529 - type: recall_at_3 value: 57.902 - type: recall_at_5 value: 74.602 - task: type: Retrieval dataset: name: MTEB EcomRetrieval type: C-MTEB/EcomRetrieval config: default split: dev revision: 687de13dc7294d6fd9be10c6945f9e8fec8166b9 metrics: - type: map_at_1 value: 56.10000000000001 - type: map_at_10 value: 66.15299999999999 - type: map_at_100 value: 66.625 - type: map_at_1000 value: 66.636 - type: map_at_3 value: 63.632999999999996 - type: map_at_5 value: 65.293 - type: mrr_at_1 value: 56.10000000000001 - type: mrr_at_10 value: 66.15299999999999 - type: mrr_at_100 value: 66.625 - type: mrr_at_1000 value: 66.636 - type: mrr_at_3 value: 63.632999999999996 - type: mrr_at_5 value: 65.293 - type: ndcg_at_1 value: 56.10000000000001 - type: ndcg_at_10 value: 71.146 - type: ndcg_at_100 value: 73.27799999999999 - type: ndcg_at_1000 value: 73.529 - type: ndcg_at_3 value: 66.09 - type: ndcg_at_5 value: 69.08999999999999 - type: precision_at_1 value: 56.10000000000001 - type: precision_at_10 value: 8.68 - type: precision_at_100 value: 0.964 - type: precision_at_1000 value: 0.098 - type: precision_at_3 value: 24.4 - type: precision_at_5 value: 16.1 - type: recall_at_1 value: 56.10000000000001 - type: recall_at_10 value: 86.8 - type: recall_at_100 value: 96.39999999999999 - type: recall_at_1000 value: 98.3 - type: recall_at_3 value: 73.2 - type: recall_at_5 value: 80.5 - task: type: Classification dataset: name: MTEB IFlyTek type: C-MTEB/IFlyTek-classification config: default split: validation revision: 421605374b29664c5fc098418fe20ada9bd55f8a metrics: - type: accuracy value: 54.52096960369373 - type: f1 value: 40.930845295808695 - task: type: Classification dataset: name: MTEB JDReview type: C-MTEB/JDReview-classification config: default split: test revision: b7c64bd89eb87f8ded463478346f76731f07bf8b metrics: - type: accuracy value: 86.51031894934334 - type: ap value: 55.9516014323483 - type: f1 value: 81.54813679326381 - task: type: STS dataset: name: MTEB LCQMC type: C-MTEB/LCQMC config: default split: test revision: 17f9b096f80380fce5ed12a9be8be7784b337daf metrics: - type: cos_sim_pearson value: 69.67437838574276 - type: cos_sim_spearman value: 73.81314174653045 - type: euclidean_pearson value: 72.63430276680275 - type: euclidean_spearman value: 73.81358736777001 - type: manhattan_pearson value: 72.58743833842829 - type: manhattan_spearman value: 73.7590419009179 - task: type: Reranking dataset: name: MTEB 
MMarcoReranking type: C-MTEB/Mmarco-reranking config: default split: dev revision: None metrics: - type: map value: 31.648613483640254 - type: mrr value: 30.37420634920635 - task: type: Retrieval dataset: name: MTEB MMarcoRetrieval type: C-MTEB/MMarcoRetrieval config: default split: dev revision: 539bbde593d947e2a124ba72651aafc09eb33fc2 metrics: - type: map_at_1 value: 73.28099999999999 - type: map_at_10 value: 81.977 - type: map_at_100 value: 82.222 - type: map_at_1000 value: 82.22699999999999 - type: map_at_3 value: 80.441 - type: map_at_5 value: 81.46600000000001 - type: mrr_at_1 value: 75.673 - type: mrr_at_10 value: 82.41000000000001 - type: mrr_at_100 value: 82.616 - type: mrr_at_1000 value: 82.621 - type: mrr_at_3 value: 81.094 - type: mrr_at_5 value: 81.962 - type: ndcg_at_1 value: 75.673 - type: ndcg_at_10 value: 85.15599999999999 - type: ndcg_at_100 value: 86.151 - type: ndcg_at_1000 value: 86.26899999999999 - type: ndcg_at_3 value: 82.304 - type: ndcg_at_5 value: 84.009 - type: precision_at_1 value: 75.673 - type: precision_at_10 value: 10.042 - type: precision_at_100 value: 1.052 - type: precision_at_1000 value: 0.106 - type: precision_at_3 value: 30.673000000000002 - type: precision_at_5 value: 19.326999999999998 - type: recall_at_1 value: 73.28099999999999 - type: recall_at_10 value: 94.446 - type: recall_at_100 value: 98.737 - type: recall_at_1000 value: 99.649 - type: recall_at_3 value: 86.984 - type: recall_at_5 value: 91.024 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (zh-CN) type: mteb/amazon_massive_intent config: zh-CN split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 81.08607935440484 - type: f1 value: 78.24879986066307 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (zh-CN) type: mteb/amazon_massive_scenario config: zh-CN split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 86.05917955615332 - type: f1 value: 85.05279279434997 - task: type: Retrieval dataset: name: MTEB MedicalRetrieval type: C-MTEB/MedicalRetrieval config: default split: dev revision: 2039188fb5800a9803ba5048df7b76e6fb151fc6 metrics: - type: map_at_1 value: 56.2 - type: map_at_10 value: 62.57899999999999 - type: map_at_100 value: 63.154999999999994 - type: map_at_1000 value: 63.193 - type: map_at_3 value: 61.217 - type: map_at_5 value: 62.012 - type: mrr_at_1 value: 56.3 - type: mrr_at_10 value: 62.629000000000005 - type: mrr_at_100 value: 63.205999999999996 - type: mrr_at_1000 value: 63.244 - type: mrr_at_3 value: 61.267 - type: mrr_at_5 value: 62.062 - type: ndcg_at_1 value: 56.2 - type: ndcg_at_10 value: 65.592 - type: ndcg_at_100 value: 68.657 - type: ndcg_at_1000 value: 69.671 - type: ndcg_at_3 value: 62.808 - type: ndcg_at_5 value: 64.24499999999999 - type: precision_at_1 value: 56.2 - type: precision_at_10 value: 7.5 - type: precision_at_100 value: 0.899 - type: precision_at_1000 value: 0.098 - type: precision_at_3 value: 22.467000000000002 - type: precision_at_5 value: 14.180000000000001 - type: recall_at_1 value: 56.2 - type: recall_at_10 value: 75.0 - type: recall_at_100 value: 89.9 - type: recall_at_1000 value: 97.89999999999999 - type: recall_at_3 value: 67.4 - type: recall_at_5 value: 70.89999999999999 - task: type: Classification dataset: name: MTEB MultilingualSentiment type: C-MTEB/MultilingualSentiment-classification config: default split: validation revision: 46958b007a63fdbf239b7672c25d0bea67b5ea1a metrics: - type: accuracy value: 
76.87666666666667 - type: f1 value: 76.7317686219665 - task: type: PairClassification dataset: name: MTEB Ocnli type: C-MTEB/OCNLI config: default split: validation revision: 66e76a618a34d6d565d5538088562851e6daa7ec metrics: - type: cos_sim_accuracy value: 79.64266377910124 - type: cos_sim_ap value: 84.78274442344829 - type: cos_sim_f1 value: 81.16947472745292 - type: cos_sim_precision value: 76.47058823529412 - type: cos_sim_recall value: 86.48363252375924 - type: dot_accuracy value: 79.64266377910124 - type: dot_ap value: 84.7851404063692 - type: dot_f1 value: 81.16947472745292 - type: dot_precision value: 76.47058823529412 - type: dot_recall value: 86.48363252375924 - type: euclidean_accuracy value: 79.64266377910124 - type: euclidean_ap value: 84.78068373762378 - type: euclidean_f1 value: 81.14794656110837 - type: euclidean_precision value: 76.35009310986965 - type: euclidean_recall value: 86.58922914466737 - type: manhattan_accuracy value: 79.48023822414727 - type: manhattan_ap value: 84.72928897427576 - type: manhattan_f1 value: 81.32084770823064 - type: manhattan_precision value: 76.24768946395564 - type: manhattan_recall value: 87.11721224920802 - type: max_accuracy value: 79.64266377910124 - type: max_ap value: 84.7851404063692 - type: max_f1 value: 81.32084770823064 - task: type: Classification dataset: name: MTEB OnlineShopping type: C-MTEB/OnlineShopping-classification config: default split: test revision: e610f2ebd179a8fda30ae534c3878750a96db120 metrics: - type: accuracy value: 94.3 - type: ap value: 92.8664032274438 - type: f1 value: 94.29311102997727 - task: type: STS dataset: name: MTEB PAWSX type: C-MTEB/PAWSX config: default split: test revision: 9c6a90e430ac22b5779fb019a23e820b11a8b5e1 metrics: - type: cos_sim_pearson value: 48.51392279882909 - type: cos_sim_spearman value: 54.06338895994974 - type: euclidean_pearson value: 52.58480559573412 - type: euclidean_spearman value: 54.06417276612201 - type: manhattan_pearson value: 52.69525121721343 - type: manhattan_spearman value: 54.048147455389675 - task: type: STS dataset: name: MTEB QBQTC type: C-MTEB/QBQTC config: default split: test revision: 790b0510dc52b1553e8c49f3d2afb48c0e5c48b7 metrics: - type: cos_sim_pearson value: 29.728387290757325 - type: cos_sim_spearman value: 31.366121633635284 - type: euclidean_pearson value: 29.14588368552961 - type: euclidean_spearman value: 31.36764411112844 - type: manhattan_pearson value: 29.63517350523121 - type: manhattan_spearman value: 31.94157020583762 - task: type: STS dataset: name: MTEB STS22 (zh) type: mteb/sts22-crosslingual-sts config: zh split: test revision: eea2b4fe26a775864c896887d910b76a8098ad3f metrics: - type: cos_sim_pearson value: 63.64868296271406 - type: cos_sim_spearman value: 66.12800618164744 - type: euclidean_pearson value: 63.21405767340238 - type: euclidean_spearman value: 66.12786567790748 - type: manhattan_pearson value: 64.04300276525848 - type: manhattan_spearman value: 66.5066857145652 - task: type: STS dataset: name: MTEB STSB type: C-MTEB/STSB config: default split: test revision: 0cde68302b3541bb8b3c340dc0644b0b745b3dc0 metrics: - type: cos_sim_pearson value: 81.2302623912794 - type: cos_sim_spearman value: 81.16833673266562 - type: euclidean_pearson value: 79.47647843876024 - type: euclidean_spearman value: 81.16944349524972 - type: manhattan_pearson value: 79.84947238492208 - type: manhattan_spearman value: 81.64626599410026 - task: type: Reranking dataset: name: MTEB T2Reranking type: C-MTEB/T2Reranking config: default split: dev revision: 
76631901a18387f85eaa53e5450019b87ad58ef9 metrics: - type: map value: 67.80129586475687 - type: mrr value: 77.77402311635554 - task: type: Retrieval dataset: name: MTEB T2Retrieval type: C-MTEB/T2Retrieval config: default split: dev revision: 8731a845f1bf500a4f111cf1070785c793d10e64 metrics: - type: map_at_1 value: 28.666999999999998 - type: map_at_10 value: 81.063 - type: map_at_100 value: 84.504 - type: map_at_1000 value: 84.552 - type: map_at_3 value: 56.897 - type: map_at_5 value: 70.073 - type: mrr_at_1 value: 92.087 - type: mrr_at_10 value: 94.132 - type: mrr_at_100 value: 94.19800000000001 - type: mrr_at_1000 value: 94.19999999999999 - type: mrr_at_3 value: 93.78999999999999 - type: mrr_at_5 value: 94.002 - type: ndcg_at_1 value: 92.087 - type: ndcg_at_10 value: 87.734 - type: ndcg_at_100 value: 90.736 - type: ndcg_at_1000 value: 91.184 - type: ndcg_at_3 value: 88.78 - type: ndcg_at_5 value: 87.676 - type: precision_at_1 value: 92.087 - type: precision_at_10 value: 43.46 - type: precision_at_100 value: 5.07 - type: precision_at_1000 value: 0.518 - type: precision_at_3 value: 77.49000000000001 - type: precision_at_5 value: 65.194 - type: recall_at_1 value: 28.666999999999998 - type: recall_at_10 value: 86.632 - type: recall_at_100 value: 96.646 - type: recall_at_1000 value: 98.917 - type: recall_at_3 value: 58.333999999999996 - type: recall_at_5 value: 72.974 - task: type: Classification dataset: name: MTEB TNews type: C-MTEB/TNews-classification config: default split: validation revision: 317f262bf1e6126357bbe89e875451e4b0938fe4 metrics: - type: accuracy value: 52.971999999999994 - type: f1 value: 50.2898280984929 - task: type: Clustering dataset: name: MTEB ThuNewsClusteringP2P type: C-MTEB/ThuNewsClusteringP2P config: default split: test revision: 5798586b105c0434e4f0fe5e767abe619442cf93 metrics: - type: v_measure value: 86.0797948663824 - task: type: Clustering dataset: name: MTEB ThuNewsClusteringS2S type: C-MTEB/ThuNewsClusteringS2S config: default split: test revision: 8a8b2caeda43f39e13c4bc5bea0f8a667896e10d metrics: - type: v_measure value: 85.10759092255017 - task: type: Retrieval dataset: name: MTEB VideoRetrieval type: C-MTEB/VideoRetrieval config: default split: dev revision: 58c2597a5943a2ba48f4668c3b90d796283c5639 metrics: - type: map_at_1 value: 65.60000000000001 - type: map_at_10 value: 74.773 - type: map_at_100 value: 75.128 - type: map_at_1000 value: 75.136 - type: map_at_3 value: 73.05 - type: map_at_5 value: 74.13499999999999 - type: mrr_at_1 value: 65.60000000000001 - type: mrr_at_10 value: 74.773 - type: mrr_at_100 value: 75.128 - type: mrr_at_1000 value: 75.136 - type: mrr_at_3 value: 73.05 - type: mrr_at_5 value: 74.13499999999999 - type: ndcg_at_1 value: 65.60000000000001 - type: ndcg_at_10 value: 78.84299999999999 - type: ndcg_at_100 value: 80.40899999999999 - type: ndcg_at_1000 value: 80.57 - type: ndcg_at_3 value: 75.40599999999999 - type: ndcg_at_5 value: 77.351 - type: precision_at_1 value: 65.60000000000001 - type: precision_at_10 value: 9.139999999999999 - type: precision_at_100 value: 0.984 - type: precision_at_1000 value: 0.1 - type: precision_at_3 value: 27.400000000000002 - type: precision_at_5 value: 17.380000000000003 - type: recall_at_1 value: 65.60000000000001 - type: recall_at_10 value: 91.4 - type: recall_at_100 value: 98.4 - type: recall_at_1000 value: 99.6 - type: recall_at_3 value: 82.19999999999999 - type: recall_at_5 value: 86.9 - task: type: Classification dataset: name: MTEB Waimai type: C-MTEB/waimai-classification config: default 
split: test revision: 339287def212450dcaa9df8c22bf93e9980c7023 metrics: - type: accuracy value: 89.47 - type: ap value: 75.59561751845389 - type: f1 value: 87.95207751382563 - task: type: Clustering dataset: name: MTEB AlloProfClusteringP2P type: lyon-nlp/alloprof config: default split: test revision: 392ba3f5bcc8c51f578786c1fc3dae648662cb9b metrics: - type: v_measure value: 76.05592323841036 - type: v_measure value: 64.51718058866508 - task: type: Reranking dataset: name: MTEB AlloprofReranking type: lyon-nlp/mteb-fr-reranking-alloprof-s2p config: default split: test revision: 666fdacebe0291776e86f29345663dfaf80a0db9 metrics: - type: map value: 73.08278490943373 - type: mrr value: 74.66561454570449 - task: type: Retrieval dataset: name: MTEB AlloprofRetrieval type: lyon-nlp/alloprof config: default split: test revision: 392ba3f5bcc8c51f578786c1fc3dae648662cb9b metrics: - type: map_at_1 value: 38.912 - type: map_at_10 value: 52.437999999999995 - type: map_at_100 value: 53.38 - type: map_at_1000 value: 53.427 - type: map_at_3 value: 48.879 - type: map_at_5 value: 50.934000000000005 - type: mrr_at_1 value: 44.085 - type: mrr_at_10 value: 55.337 - type: mrr_at_100 value: 56.016999999999996 - type: mrr_at_1000 value: 56.043 - type: mrr_at_3 value: 52.55499999999999 - type: mrr_at_5 value: 54.20399999999999 - type: ndcg_at_1 value: 44.085 - type: ndcg_at_10 value: 58.876 - type: ndcg_at_100 value: 62.714000000000006 - type: ndcg_at_1000 value: 63.721000000000004 - type: ndcg_at_3 value: 52.444 - type: ndcg_at_5 value: 55.692 - type: precision_at_1 value: 44.085 - type: precision_at_10 value: 9.21 - type: precision_at_100 value: 1.164 - type: precision_at_1000 value: 0.128 - type: precision_at_3 value: 23.043 - type: precision_at_5 value: 15.898000000000001 - type: recall_at_1 value: 38.912 - type: recall_at_10 value: 75.577 - type: recall_at_100 value: 92.038 - type: recall_at_1000 value: 99.325 - type: recall_at_3 value: 58.592 - type: recall_at_5 value: 66.235 - task: type: Classification dataset: name: MTEB AmazonReviewsClassification (fr) type: mteb/amazon_reviews_multi config: fr split: test revision: 1399c76144fd37290681b995c656ef9b2e06e26d metrics: - type: accuracy value: 55.532000000000004 - type: f1 value: 52.5783943471605 - task: type: Retrieval dataset: name: MTEB BSARDRetrieval type: maastrichtlawtech/bsard config: default split: test revision: 5effa1b9b5fa3b0f9e12523e6e43e5f86a6e6d59 metrics: - type: map_at_1 value: 8.108 - type: map_at_10 value: 14.710999999999999 - type: map_at_100 value: 15.891 - type: map_at_1000 value: 15.983 - type: map_at_3 value: 12.237 - type: map_at_5 value: 13.679 - type: mrr_at_1 value: 8.108 - type: mrr_at_10 value: 14.710999999999999 - type: mrr_at_100 value: 15.891 - type: mrr_at_1000 value: 15.983 - type: mrr_at_3 value: 12.237 - type: mrr_at_5 value: 13.679 - type: ndcg_at_1 value: 8.108 - type: ndcg_at_10 value: 18.796 - type: ndcg_at_100 value: 25.098 - type: ndcg_at_1000 value: 27.951999999999998 - type: ndcg_at_3 value: 13.712 - type: ndcg_at_5 value: 16.309 - type: precision_at_1 value: 8.108 - type: precision_at_10 value: 3.198 - type: precision_at_100 value: 0.626 - type: precision_at_1000 value: 0.086 - type: precision_at_3 value: 6.006 - type: precision_at_5 value: 4.865 - type: recall_at_1 value: 8.108 - type: recall_at_10 value: 31.982 - type: recall_at_100 value: 62.613 - type: recall_at_1000 value: 86.036 - type: recall_at_3 value: 18.018 - type: recall_at_5 value: 24.324 - task: type: Clustering dataset: name: MTEB HALClusteringS2S 
type: lyon-nlp/clustering-hal-s2s config: default split: test revision: e06ebbbb123f8144bef1a5d18796f3dec9ae2915 metrics: - type: v_measure value: 30.833269778867116 - task: type: Clustering dataset: name: MTEB MLSUMClusteringP2P type: mlsum config: default split: test revision: b5d54f8f3b61ae17845046286940f03c6bc79bc7 metrics: - type: v_measure value: 50.0281928004713 - type: v_measure value: 43.699961510636534 - task: type: Classification dataset: name: MTEB MTOPDomainClassification (fr) type: mteb/mtop_domain config: fr split: test revision: d80d48c1eb48d3562165c59d59d0034df9fff0bf metrics: - type: accuracy value: 96.68963357344191 - type: f1 value: 96.45175170820961 - task: type: Classification dataset: name: MTEB MTOPIntentClassification (fr) type: mteb/mtop_intent config: fr split: test revision: ae001d0e6b1228650b7bd1c2c65fb50ad11a8aba metrics: - type: accuracy value: 87.46946445349202 - type: f1 value: 65.79860440988624 - task: type: Classification dataset: name: MTEB MasakhaNEWSClassification (fra) type: masakhane/masakhanews config: fra split: test revision: 8ccc72e69e65f40c70e117d8b3c08306bb788b60 metrics: - type: accuracy value: 82.60663507109005 - type: f1 value: 77.20462646604777 - task: type: Clustering dataset: name: MTEB MasakhaNEWSClusteringP2P (fra) type: masakhane/masakhanews config: fra split: test revision: 8ccc72e69e65f40c70e117d8b3c08306bb788b60 metrics: - type: v_measure value: 60.19311264967803 - type: v_measure value: 63.6235764409785 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (fr) type: mteb/amazon_massive_intent config: fr split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 81.65097511768661 - type: f1 value: 78.77796091490924 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (fr) type: mteb/amazon_massive_scenario config: fr split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 86.64425016812373 - type: f1 value: 85.4912728670017 - task: type: Retrieval dataset: name: MTEB MintakaRetrieval (fr) type: jinaai/mintakaqa config: fr split: test revision: efa78cc2f74bbcd21eff2261f9e13aebe40b814e metrics: - type: map_at_1 value: 35.913000000000004 - type: map_at_10 value: 48.147 - type: map_at_100 value: 48.91 - type: map_at_1000 value: 48.949 - type: map_at_3 value: 45.269999999999996 - type: map_at_5 value: 47.115 - type: mrr_at_1 value: 35.913000000000004 - type: mrr_at_10 value: 48.147 - type: mrr_at_100 value: 48.91 - type: mrr_at_1000 value: 48.949 - type: mrr_at_3 value: 45.269999999999996 - type: mrr_at_5 value: 47.115 - type: ndcg_at_1 value: 35.913000000000004 - type: ndcg_at_10 value: 54.03 - type: ndcg_at_100 value: 57.839 - type: ndcg_at_1000 value: 58.925000000000004 - type: ndcg_at_3 value: 48.217999999999996 - type: ndcg_at_5 value: 51.56699999999999 - type: precision_at_1 value: 35.913000000000004 - type: precision_at_10 value: 7.244000000000001 - type: precision_at_100 value: 0.9039999999999999 - type: precision_at_1000 value: 0.099 - type: precision_at_3 value: 18.905 - type: precision_at_5 value: 12.981000000000002 - type: recall_at_1 value: 35.913000000000004 - type: recall_at_10 value: 72.441 - type: recall_at_100 value: 90.41799999999999 - type: recall_at_1000 value: 99.099 - type: recall_at_3 value: 56.716 - type: recall_at_5 value: 64.90599999999999 - task: type: PairClassification dataset: name: MTEB OpusparcusPC (fr) type: GEM/opusparcus config: fr split: test revision: 
9e9b1f8ef51616073f47f306f7f47dd91663f86a metrics: - type: cos_sim_accuracy value: 99.90069513406156 - type: cos_sim_ap value: 100.0 - type: cos_sim_f1 value: 99.95032290114257 - type: cos_sim_precision value: 100.0 - type: cos_sim_recall value: 99.90069513406156 - type: dot_accuracy value: 99.90069513406156 - type: dot_ap value: 100.0 - type: dot_f1 value: 99.95032290114257 - type: dot_precision value: 100.0 - type: dot_recall value: 99.90069513406156 - type: euclidean_accuracy value: 99.90069513406156 - type: euclidean_ap value: 100.0 - type: euclidean_f1 value: 99.95032290114257 - type: euclidean_precision value: 100.0 - type: euclidean_recall value: 99.90069513406156 - type: manhattan_accuracy value: 99.90069513406156 - type: manhattan_ap value: 100.0 - type: manhattan_f1 value: 99.95032290114257 - type: manhattan_precision value: 100.0 - type: manhattan_recall value: 99.90069513406156 - type: max_accuracy value: 99.90069513406156 - type: max_ap value: 100.0 - type: max_f1 value: 99.95032290114257 - task: type: PairClassification dataset: name: MTEB PawsX (fr) type: paws-x config: fr split: test revision: 8a04d940a42cd40658986fdd8e3da561533a3646 metrics: - type: cos_sim_accuracy value: 75.25 - type: cos_sim_ap value: 80.86376001270014 - type: cos_sim_f1 value: 73.65945437441204 - type: cos_sim_precision value: 64.02289452166802 - type: cos_sim_recall value: 86.71096345514951 - type: dot_accuracy value: 75.25 - type: dot_ap value: 80.93686107633002 - type: dot_f1 value: 73.65945437441204 - type: dot_precision value: 64.02289452166802 - type: dot_recall value: 86.71096345514951 - type: euclidean_accuracy value: 75.25 - type: euclidean_ap value: 80.86379136218862 - type: euclidean_f1 value: 73.65945437441204 - type: euclidean_precision value: 64.02289452166802 - type: euclidean_recall value: 86.71096345514951 - type: manhattan_accuracy value: 75.3 - type: manhattan_ap value: 80.87826606097734 - type: manhattan_f1 value: 73.68421052631581 - type: manhattan_precision value: 64.0 - type: manhattan_recall value: 86.82170542635659 - type: max_accuracy value: 75.3 - type: max_ap value: 80.93686107633002 - type: max_f1 value: 73.68421052631581 - task: type: STS dataset: name: MTEB SICKFr type: Lajavaness/SICK-fr config: default split: test revision: e077ab4cf4774a1e36d86d593b150422fafd8e8a metrics: - type: cos_sim_pearson value: 81.42349425981143 - type: cos_sim_spearman value: 78.90454327031226 - type: euclidean_pearson value: 78.39086497435166 - type: euclidean_spearman value: 78.9046133980509 - type: manhattan_pearson value: 78.63743094286502 - type: manhattan_spearman value: 79.12136348449269 - task: type: STS dataset: name: MTEB STS22 (fr) type: mteb/sts22-crosslingual-sts config: fr split: test revision: eea2b4fe26a775864c896887d910b76a8098ad3f metrics: - type: cos_sim_pearson value: 81.452697919749 - type: cos_sim_spearman value: 82.58116836039301 - type: euclidean_pearson value: 81.04038478932786 - type: euclidean_spearman value: 82.58116836039301 - type: manhattan_pearson value: 81.37075396187771 - type: manhattan_spearman value: 82.73678231355368 - task: type: STS dataset: name: MTEB STSBenchmarkMultilingualSTS (fr) type: stsb_multi_mt config: fr split: test revision: 93d57ef91790589e3ce9c365164337a8a78b7632 metrics: - type: cos_sim_pearson value: 85.7419764013806 - type: cos_sim_spearman value: 85.46085808849622 - type: euclidean_pearson value: 83.70449639870063 - type: euclidean_spearman value: 85.46159013076233 - type: manhattan_pearson value: 83.95259510313929 - type: 
manhattan_spearman value: 85.8029724659458 - task: type: Summarization dataset: name: MTEB SummEvalFr type: lyon-nlp/summarization-summeval-fr-p2p config: default split: test revision: b385812de6a9577b6f4d0f88c6a6e35395a94054 metrics: - type: cos_sim_pearson value: 32.61063271753325 - type: cos_sim_spearman value: 31.454589417353603 - type: dot_pearson value: 32.6106288643431 - type: dot_spearman value: 31.454589417353603 - task: type: Reranking dataset: name: MTEB SyntecReranking type: lyon-nlp/mteb-fr-reranking-syntec-s2p config: default split: test revision: b205c5084a0934ce8af14338bf03feb19499c84d metrics: - type: map value: 84.31666666666666 - type: mrr value: 84.31666666666666 - task: type: Retrieval dataset: name: MTEB SyntecRetrieval type: lyon-nlp/mteb-fr-retrieval-syntec-s2p config: default split: test revision: 77f7e271bf4a92b24fce5119f3486b583ca016ff metrics: - type: map_at_1 value: 63.0 - type: map_at_10 value: 73.471 - type: map_at_100 value: 73.87 - type: map_at_1000 value: 73.87 - type: map_at_3 value: 70.5 - type: map_at_5 value: 73.05 - type: mrr_at_1 value: 63.0 - type: mrr_at_10 value: 73.471 - type: mrr_at_100 value: 73.87 - type: mrr_at_1000 value: 73.87 - type: mrr_at_3 value: 70.5 - type: mrr_at_5 value: 73.05 - type: ndcg_at_1 value: 63.0 - type: ndcg_at_10 value: 78.255 - type: ndcg_at_100 value: 79.88 - type: ndcg_at_1000 value: 79.88 - type: ndcg_at_3 value: 72.702 - type: ndcg_at_5 value: 77.264 - type: precision_at_1 value: 63.0 - type: precision_at_10 value: 9.3 - type: precision_at_100 value: 1.0 - type: precision_at_1000 value: 0.1 - type: precision_at_3 value: 26.333000000000002 - type: precision_at_5 value: 18.0 - type: recall_at_1 value: 63.0 - type: recall_at_10 value: 93.0 - type: recall_at_100 value: 100.0 - type: recall_at_1000 value: 100.0 - type: recall_at_3 value: 79.0 - type: recall_at_5 value: 90.0 - task: type: Retrieval dataset: name: MTEB XPQARetrieval (fr) type: jinaai/xpqa config: fr split: test revision: c99d599f0a6ab9b85b065da6f9d94f9cf731679f metrics: - type: map_at_1 value: 40.338 - type: map_at_10 value: 61.927 - type: map_at_100 value: 63.361999999999995 - type: map_at_1000 value: 63.405 - type: map_at_3 value: 55.479 - type: map_at_5 value: 59.732 - type: mrr_at_1 value: 63.551 - type: mrr_at_10 value: 71.006 - type: mrr_at_100 value: 71.501 - type: mrr_at_1000 value: 71.509 - type: mrr_at_3 value: 69.07 - type: mrr_at_5 value: 70.165 - type: ndcg_at_1 value: 63.551 - type: ndcg_at_10 value: 68.297 - type: ndcg_at_100 value: 73.13199999999999 - type: ndcg_at_1000 value: 73.751 - type: ndcg_at_3 value: 62.999 - type: ndcg_at_5 value: 64.89 - type: precision_at_1 value: 63.551 - type: precision_at_10 value: 15.661 - type: precision_at_100 value: 1.9789999999999999 - type: precision_at_1000 value: 0.207 - type: precision_at_3 value: 38.273 - type: precision_at_5 value: 27.61 - type: recall_at_1 value: 40.338 - type: recall_at_10 value: 77.267 - type: recall_at_100 value: 95.892 - type: recall_at_1000 value: 99.75500000000001 - type: recall_at_3 value: 60.36 - type: recall_at_5 value: 68.825 - task: type: Clustering dataset: name: MTEB 8TagsClustering type: PL-MTEB/8tags-clustering config: default split: test revision: None metrics: - type: v_measure value: 51.36126303874126 - task: type: Classification dataset: name: MTEB AllegroReviews type: PL-MTEB/allegro-reviews config: default split: test revision: None metrics: - type: accuracy value: 67.13717693836979 - type: f1 value: 57.27609848003782 - task: type: Retrieval dataset: name: MTEB 
ArguAna-PL type: clarin-knext/arguana-pl config: default split: test revision: 63fc86750af76253e8c760fc9e534bbf24d260a2 metrics: - type: map_at_1 value: 35.276999999999994 - type: map_at_10 value: 51.086 - type: map_at_100 value: 51.788000000000004 - type: map_at_1000 value: 51.791 - type: map_at_3 value: 46.147 - type: map_at_5 value: 49.078 - type: mrr_at_1 value: 35.917 - type: mrr_at_10 value: 51.315999999999995 - type: mrr_at_100 value: 52.018 - type: mrr_at_1000 value: 52.022 - type: mrr_at_3 value: 46.349000000000004 - type: mrr_at_5 value: 49.297000000000004 - type: ndcg_at_1 value: 35.276999999999994 - type: ndcg_at_10 value: 59.870999999999995 - type: ndcg_at_100 value: 62.590999999999994 - type: ndcg_at_1000 value: 62.661 - type: ndcg_at_3 value: 49.745 - type: ndcg_at_5 value: 55.067 - type: precision_at_1 value: 35.276999999999994 - type: precision_at_10 value: 8.791 - type: precision_at_100 value: 0.991 - type: precision_at_1000 value: 0.1 - type: precision_at_3 value: 20.057 - type: precision_at_5 value: 14.637 - type: recall_at_1 value: 35.276999999999994 - type: recall_at_10 value: 87.909 - type: recall_at_100 value: 99.14699999999999 - type: recall_at_1000 value: 99.644 - type: recall_at_3 value: 60.171 - type: recall_at_5 value: 73.18599999999999 - task: type: Classification dataset: name: MTEB CBD type: PL-MTEB/cbd config: default split: test revision: None metrics: - type: accuracy value: 78.03000000000002 - type: ap value: 29.12548553897622 - type: f1 value: 66.54857118886073 - task: type: PairClassification dataset: name: MTEB CDSC-E type: PL-MTEB/cdsce-pairclassification config: default split: test revision: None metrics: - type: cos_sim_accuracy value: 89.0 - type: cos_sim_ap value: 76.75437826834582 - type: cos_sim_f1 value: 66.4850136239782 - type: cos_sim_precision value: 68.92655367231639 - type: cos_sim_recall value: 64.21052631578948 - type: dot_accuracy value: 89.0 - type: dot_ap value: 76.75437826834582 - type: dot_f1 value: 66.4850136239782 - type: dot_precision value: 68.92655367231639 - type: dot_recall value: 64.21052631578948 - type: euclidean_accuracy value: 89.0 - type: euclidean_ap value: 76.75437826834582 - type: euclidean_f1 value: 66.4850136239782 - type: euclidean_precision value: 68.92655367231639 - type: euclidean_recall value: 64.21052631578948 - type: manhattan_accuracy value: 89.0 - type: manhattan_ap value: 76.66074220647083 - type: manhattan_f1 value: 66.47058823529412 - type: manhattan_precision value: 75.33333333333333 - type: manhattan_recall value: 59.473684210526315 - type: max_accuracy value: 89.0 - type: max_ap value: 76.75437826834582 - type: max_f1 value: 66.4850136239782 - task: type: STS dataset: name: MTEB CDSC-R type: PL-MTEB/cdscr-sts config: default split: test revision: None metrics: - type: cos_sim_pearson value: 93.12903172428328 - type: cos_sim_spearman value: 92.66381487060741 - type: euclidean_pearson value: 90.37278396708922 - type: euclidean_spearman value: 92.66381487060741 - type: manhattan_pearson value: 90.32503296540962 - type: manhattan_spearman value: 92.6902938354313 - task: type: Retrieval dataset: name: MTEB DBPedia-PL type: clarin-knext/dbpedia-pl config: default split: test revision: 76afe41d9af165cc40999fcaa92312b8b012064a metrics: - type: map_at_1 value: 8.83 - type: map_at_10 value: 18.326 - type: map_at_100 value: 26.496 - type: map_at_1000 value: 28.455000000000002 - type: map_at_3 value: 12.933 - type: map_at_5 value: 15.168000000000001 - type: mrr_at_1 value: 66.0 - type: mrr_at_10 value: 
72.76700000000001 - type: mrr_at_100 value: 73.203 - type: mrr_at_1000 value: 73.219 - type: mrr_at_3 value: 71.458 - type: mrr_at_5 value: 72.246 - type: ndcg_at_1 value: 55.375 - type: ndcg_at_10 value: 41.3 - type: ndcg_at_100 value: 45.891 - type: ndcg_at_1000 value: 52.905 - type: ndcg_at_3 value: 46.472 - type: ndcg_at_5 value: 43.734 - type: precision_at_1 value: 66.0 - type: precision_at_10 value: 33.074999999999996 - type: precision_at_100 value: 11.094999999999999 - type: precision_at_1000 value: 2.374 - type: precision_at_3 value: 48.583 - type: precision_at_5 value: 42.0 - type: recall_at_1 value: 8.83 - type: recall_at_10 value: 22.587 - type: recall_at_100 value: 50.61600000000001 - type: recall_at_1000 value: 73.559 - type: recall_at_3 value: 13.688 - type: recall_at_5 value: 16.855 - task: type: Retrieval dataset: name: MTEB FiQA-PL type: clarin-knext/fiqa-pl config: default split: test revision: 2e535829717f8bf9dc829b7f911cc5bbd4e6608e metrics: - type: map_at_1 value: 20.587 - type: map_at_10 value: 33.095 - type: map_at_100 value: 35.24 - type: map_at_1000 value: 35.429 - type: map_at_3 value: 28.626 - type: map_at_5 value: 31.136999999999997 - type: mrr_at_1 value: 40.586 - type: mrr_at_10 value: 49.033 - type: mrr_at_100 value: 49.952999999999996 - type: mrr_at_1000 value: 49.992 - type: mrr_at_3 value: 46.553 - type: mrr_at_5 value: 48.035 - type: ndcg_at_1 value: 40.586 - type: ndcg_at_10 value: 41.046 - type: ndcg_at_100 value: 48.586 - type: ndcg_at_1000 value: 51.634 - type: ndcg_at_3 value: 36.773 - type: ndcg_at_5 value: 38.389 - type: precision_at_1 value: 40.586 - type: precision_at_10 value: 11.466 - type: precision_at_100 value: 1.909 - type: precision_at_1000 value: 0.245 - type: precision_at_3 value: 24.434 - type: precision_at_5 value: 18.426000000000002 - type: recall_at_1 value: 20.587 - type: recall_at_10 value: 47.986000000000004 - type: recall_at_100 value: 75.761 - type: recall_at_1000 value: 94.065 - type: recall_at_3 value: 33.339 - type: recall_at_5 value: 39.765 - task: type: Retrieval dataset: name: MTEB HotpotQA-PL type: clarin-knext/hotpotqa-pl config: default split: test revision: a0bd479ac97b4ccb5bd6ce320c415d0bb4beb907 metrics: - type: map_at_1 value: 40.878 - type: map_at_10 value: 58.775999999999996 - type: map_at_100 value: 59.632 - type: map_at_1000 value: 59.707 - type: map_at_3 value: 56.074 - type: map_at_5 value: 57.629 - type: mrr_at_1 value: 81.756 - type: mrr_at_10 value: 86.117 - type: mrr_at_100 value: 86.299 - type: mrr_at_1000 value: 86.30600000000001 - type: mrr_at_3 value: 85.345 - type: mrr_at_5 value: 85.832 - type: ndcg_at_1 value: 81.756 - type: ndcg_at_10 value: 67.608 - type: ndcg_at_100 value: 70.575 - type: ndcg_at_1000 value: 71.99600000000001 - type: ndcg_at_3 value: 63.723 - type: ndcg_at_5 value: 65.70700000000001 - type: precision_at_1 value: 81.756 - type: precision_at_10 value: 13.619 - type: precision_at_100 value: 1.5939999999999999 - type: precision_at_1000 value: 0.178 - type: precision_at_3 value: 39.604 - type: precision_at_5 value: 25.332 - type: recall_at_1 value: 40.878 - type: recall_at_10 value: 68.096 - type: recall_at_100 value: 79.696 - type: recall_at_1000 value: 89.082 - type: recall_at_3 value: 59.406000000000006 - type: recall_at_5 value: 63.329 - task: type: Retrieval dataset: name: MTEB MSMARCO-PL type: clarin-knext/msmarco-pl config: default split: test revision: 8634c07806d5cce3a6138e260e59b81760a0a640 metrics: - type: map_at_1 value: 2.1839999999999997 - type: map_at_10 value: 11.346 - 
type: map_at_100 value: 30.325000000000003 - type: map_at_1000 value: 37.806 - type: map_at_3 value: 4.842 - type: map_at_5 value: 6.891 - type: mrr_at_1 value: 86.047 - type: mrr_at_10 value: 89.14699999999999 - type: mrr_at_100 value: 89.46600000000001 - type: mrr_at_1000 value: 89.46600000000001 - type: mrr_at_3 value: 89.14699999999999 - type: mrr_at_5 value: 89.14699999999999 - type: ndcg_at_1 value: 67.829 - type: ndcg_at_10 value: 62.222 - type: ndcg_at_100 value: 55.337 - type: ndcg_at_1000 value: 64.076 - type: ndcg_at_3 value: 68.12700000000001 - type: ndcg_at_5 value: 64.987 - type: precision_at_1 value: 86.047 - type: precision_at_10 value: 69.535 - type: precision_at_100 value: 32.93 - type: precision_at_1000 value: 6.6049999999999995 - type: precision_at_3 value: 79.845 - type: precision_at_5 value: 75.349 - type: recall_at_1 value: 2.1839999999999997 - type: recall_at_10 value: 12.866 - type: recall_at_100 value: 43.505 - type: recall_at_1000 value: 72.366 - type: recall_at_3 value: 4.947 - type: recall_at_5 value: 7.192 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (pl) type: mteb/amazon_massive_intent config: pl split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 80.75319435104238 - type: f1 value: 77.58961444860606 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (pl) type: mteb/amazon_massive_scenario config: pl split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 85.54472091459313 - type: f1 value: 84.29498563572106 - task: type: Retrieval dataset: name: MTEB NFCorpus-PL type: clarin-knext/nfcorpus-pl config: default split: test revision: 9a6f9567fda928260afed2de480d79c98bf0bec0 metrics: - type: map_at_1 value: 4.367 - type: map_at_10 value: 10.38 - type: map_at_100 value: 13.516 - type: map_at_1000 value: 14.982000000000001 - type: map_at_3 value: 7.367 - type: map_at_5 value: 8.59 - type: mrr_at_1 value: 41.486000000000004 - type: mrr_at_10 value: 48.886 - type: mrr_at_100 value: 49.657000000000004 - type: mrr_at_1000 value: 49.713 - type: mrr_at_3 value: 46.904 - type: mrr_at_5 value: 48.065000000000005 - type: ndcg_at_1 value: 40.402 - type: ndcg_at_10 value: 30.885 - type: ndcg_at_100 value: 28.393 - type: ndcg_at_1000 value: 37.428 - type: ndcg_at_3 value: 35.394999999999996 - type: ndcg_at_5 value: 33.391999999999996 - type: precision_at_1 value: 41.486000000000004 - type: precision_at_10 value: 23.437 - type: precision_at_100 value: 7.638 - type: precision_at_1000 value: 2.0389999999999997 - type: precision_at_3 value: 32.817 - type: precision_at_5 value: 28.915999999999997 - type: recall_at_1 value: 4.367 - type: recall_at_10 value: 14.655000000000001 - type: recall_at_100 value: 29.665999999999997 - type: recall_at_1000 value: 62.073 - type: recall_at_3 value: 8.51 - type: recall_at_5 value: 10.689 - task: type: Retrieval dataset: name: MTEB NQ-PL type: clarin-knext/nq-pl config: default split: test revision: f171245712cf85dd4700b06bef18001578d0ca8d metrics: - type: map_at_1 value: 28.616000000000003 - type: map_at_10 value: 41.626000000000005 - type: map_at_100 value: 42.689 - type: map_at_1000 value: 42.733 - type: map_at_3 value: 37.729 - type: map_at_5 value: 39.879999999999995 - type: mrr_at_1 value: 32.068000000000005 - type: mrr_at_10 value: 44.029 - type: mrr_at_100 value: 44.87 - type: mrr_at_1000 value: 44.901 - type: mrr_at_3 value: 40.687 - type: mrr_at_5 value: 42.625 - type: ndcg_at_1 value: 
32.068000000000005 - type: ndcg_at_10 value: 48.449999999999996 - type: ndcg_at_100 value: 53.13 - type: ndcg_at_1000 value: 54.186 - type: ndcg_at_3 value: 40.983999999999995 - type: ndcg_at_5 value: 44.628 - type: precision_at_1 value: 32.068000000000005 - type: precision_at_10 value: 7.9750000000000005 - type: precision_at_100 value: 1.061 - type: precision_at_1000 value: 0.116 - type: precision_at_3 value: 18.404999999999998 - type: precision_at_5 value: 13.111 - type: recall_at_1 value: 28.616000000000003 - type: recall_at_10 value: 66.956 - type: recall_at_100 value: 87.657 - type: recall_at_1000 value: 95.548 - type: recall_at_3 value: 47.453 - type: recall_at_5 value: 55.87800000000001 - task: type: Classification dataset: name: MTEB PAC type: laugustyniak/abusive-clauses-pl config: default split: test revision: None metrics: - type: accuracy value: 69.04141326382856 - type: ap value: 77.47589122111044 - type: f1 value: 66.6332277374775 - task: type: PairClassification dataset: name: MTEB PPC type: PL-MTEB/ppc-pairclassification config: default split: test revision: None metrics: - type: cos_sim_accuracy value: 86.4 - type: cos_sim_ap value: 94.1044939667201 - type: cos_sim_f1 value: 88.78048780487805 - type: cos_sim_precision value: 87.22044728434504 - type: cos_sim_recall value: 90.39735099337747 - type: dot_accuracy value: 86.4 - type: dot_ap value: 94.1044939667201 - type: dot_f1 value: 88.78048780487805 - type: dot_precision value: 87.22044728434504 - type: dot_recall value: 90.39735099337747 - type: euclidean_accuracy value: 86.4 - type: euclidean_ap value: 94.1044939667201 - type: euclidean_f1 value: 88.78048780487805 - type: euclidean_precision value: 87.22044728434504 - type: euclidean_recall value: 90.39735099337747 - type: manhattan_accuracy value: 86.4 - type: manhattan_ap value: 94.11438365697387 - type: manhattan_f1 value: 88.77968877968877 - type: manhattan_precision value: 87.84440842787681 - type: manhattan_recall value: 89.73509933774835 - type: max_accuracy value: 86.4 - type: max_ap value: 94.11438365697387 - type: max_f1 value: 88.78048780487805 - task: type: PairClassification dataset: name: MTEB PSC type: PL-MTEB/psc-pairclassification config: default split: test revision: None metrics: - type: cos_sim_accuracy value: 97.86641929499072 - type: cos_sim_ap value: 99.36904211868182 - type: cos_sim_f1 value: 96.56203288490283 - type: cos_sim_precision value: 94.72140762463343 - type: cos_sim_recall value: 98.47560975609755 - type: dot_accuracy value: 97.86641929499072 - type: dot_ap value: 99.36904211868183 - type: dot_f1 value: 96.56203288490283 - type: dot_precision value: 94.72140762463343 - type: dot_recall value: 98.47560975609755 - type: euclidean_accuracy value: 97.86641929499072 - type: euclidean_ap value: 99.36904211868183 - type: euclidean_f1 value: 96.56203288490283 - type: euclidean_precision value: 94.72140762463343 - type: euclidean_recall value: 98.47560975609755 - type: manhattan_accuracy value: 98.14471243042672 - type: manhattan_ap value: 99.43359540492416 - type: manhattan_f1 value: 96.98795180722892 - type: manhattan_precision value: 95.83333333333334 - type: manhattan_recall value: 98.17073170731707 - type: max_accuracy value: 98.14471243042672 - type: max_ap value: 99.43359540492416 - type: max_f1 value: 96.98795180722892 - task: type: Classification dataset: name: MTEB PolEmo2.0-IN type: PL-MTEB/polemo2_in config: default split: test revision: None metrics: - type: accuracy value: 89.39058171745152 - type: f1 value: 86.8552093529568 - task: 
type: Classification dataset: name: MTEB PolEmo2.0-OUT type: PL-MTEB/polemo2_out config: default split: test revision: None metrics: - type: accuracy value: 74.97975708502024 - type: f1 value: 58.73081628832407 - task: type: Retrieval dataset: name: MTEB Quora-PL type: clarin-knext/quora-pl config: default split: test revision: 0be27e93455051e531182b85e85e425aba12e9d4 metrics: - type: map_at_1 value: 64.917 - type: map_at_10 value: 78.74600000000001 - type: map_at_100 value: 79.501 - type: map_at_1000 value: 79.524 - type: map_at_3 value: 75.549 - type: map_at_5 value: 77.495 - type: mrr_at_1 value: 74.9 - type: mrr_at_10 value: 82.112 - type: mrr_at_100 value: 82.314 - type: mrr_at_1000 value: 82.317 - type: mrr_at_3 value: 80.745 - type: mrr_at_5 value: 81.607 - type: ndcg_at_1 value: 74.83999999999999 - type: ndcg_at_10 value: 83.214 - type: ndcg_at_100 value: 84.997 - type: ndcg_at_1000 value: 85.207 - type: ndcg_at_3 value: 79.547 - type: ndcg_at_5 value: 81.46600000000001 - type: precision_at_1 value: 74.83999999999999 - type: precision_at_10 value: 12.822 - type: precision_at_100 value: 1.506 - type: precision_at_1000 value: 0.156 - type: precision_at_3 value: 34.903 - type: precision_at_5 value: 23.16 - type: recall_at_1 value: 64.917 - type: recall_at_10 value: 92.27199999999999 - type: recall_at_100 value: 98.715 - type: recall_at_1000 value: 99.854 - type: recall_at_3 value: 82.04599999999999 - type: recall_at_5 value: 87.2 - task: type: Retrieval dataset: name: MTEB SCIDOCS-PL type: clarin-knext/scidocs-pl config: default split: test revision: 45452b03f05560207ef19149545f168e596c9337 metrics: - type: map_at_1 value: 3.51 - type: map_at_10 value: 9.046999999999999 - type: map_at_100 value: 10.823 - type: map_at_1000 value: 11.144 - type: map_at_3 value: 6.257 - type: map_at_5 value: 7.648000000000001 - type: mrr_at_1 value: 17.299999999999997 - type: mrr_at_10 value: 27.419 - type: mrr_at_100 value: 28.618 - type: mrr_at_1000 value: 28.685 - type: mrr_at_3 value: 23.817 - type: mrr_at_5 value: 25.927 - type: ndcg_at_1 value: 17.299999999999997 - type: ndcg_at_10 value: 16.084 - type: ndcg_at_100 value: 23.729 - type: ndcg_at_1000 value: 29.476999999999997 - type: ndcg_at_3 value: 14.327000000000002 - type: ndcg_at_5 value: 13.017999999999999 - type: precision_at_1 value: 17.299999999999997 - type: precision_at_10 value: 8.63 - type: precision_at_100 value: 1.981 - type: precision_at_1000 value: 0.336 - type: precision_at_3 value: 13.4 - type: precision_at_5 value: 11.700000000000001 - type: recall_at_1 value: 3.51 - type: recall_at_10 value: 17.518 - type: recall_at_100 value: 40.275 - type: recall_at_1000 value: 68.203 - type: recall_at_3 value: 8.155 - type: recall_at_5 value: 11.875 - task: type: PairClassification dataset: name: MTEB SICK-E-PL type: PL-MTEB/sicke-pl-pairclassification config: default split: test revision: None metrics: - type: cos_sim_accuracy value: 86.30248675091724 - type: cos_sim_ap value: 83.6756734006714 - type: cos_sim_f1 value: 74.97367497367497 - type: cos_sim_precision value: 73.91003460207612 - type: cos_sim_recall value: 76.06837606837607 - type: dot_accuracy value: 86.30248675091724 - type: dot_ap value: 83.6756734006714 - type: dot_f1 value: 74.97367497367497 - type: dot_precision value: 73.91003460207612 - type: dot_recall value: 76.06837606837607 - type: euclidean_accuracy value: 86.30248675091724 - type: euclidean_ap value: 83.67566984333091 - type: euclidean_f1 value: 74.97367497367497 - type: euclidean_precision value: 73.91003460207612 - 
type: euclidean_recall value: 76.06837606837607 - type: manhattan_accuracy value: 86.28210354667753 - type: manhattan_ap value: 83.64216119130171 - type: manhattan_f1 value: 74.92152075340078 - type: manhattan_precision value: 73.4107997265892 - type: manhattan_recall value: 76.49572649572649 - type: max_accuracy value: 86.30248675091724 - type: max_ap value: 83.6756734006714 - type: max_f1 value: 74.97367497367497 - task: type: STS dataset: name: MTEB SICK-R-PL type: PL-MTEB/sickr-pl-sts config: default split: test revision: None metrics: - type: cos_sim_pearson value: 82.23295940859121 - type: cos_sim_spearman value: 78.89329160768719 - type: euclidean_pearson value: 79.56019107076818 - type: euclidean_spearman value: 78.89330209904084 - type: manhattan_pearson value: 79.76098513973719 - type: manhattan_spearman value: 79.05490162570123 - task: type: STS dataset: name: MTEB STS22 (pl) type: mteb/sts22-crosslingual-sts config: pl split: test revision: eea2b4fe26a775864c896887d910b76a8098ad3f metrics: - type: cos_sim_pearson value: 37.732606308062486 - type: cos_sim_spearman value: 41.01645667030284 - type: euclidean_pearson value: 26.61722556367085 - type: euclidean_spearman value: 41.01645667030284 - type: manhattan_pearson value: 26.60917378970807 - type: manhattan_spearman value: 41.51335727617614 - task: type: Retrieval dataset: name: MTEB SciFact-PL type: clarin-knext/scifact-pl config: default split: test revision: 47932a35f045ef8ed01ba82bf9ff67f6e109207e metrics: - type: map_at_1 value: 54.31700000000001 - type: map_at_10 value: 65.564 - type: map_at_100 value: 66.062 - type: map_at_1000 value: 66.08699999999999 - type: map_at_3 value: 62.592999999999996 - type: map_at_5 value: 63.888 - type: mrr_at_1 value: 56.99999999999999 - type: mrr_at_10 value: 66.412 - type: mrr_at_100 value: 66.85900000000001 - type: mrr_at_1000 value: 66.88 - type: mrr_at_3 value: 64.22200000000001 - type: mrr_at_5 value: 65.206 - type: ndcg_at_1 value: 56.99999999999999 - type: ndcg_at_10 value: 70.577 - type: ndcg_at_100 value: 72.879 - type: ndcg_at_1000 value: 73.45 - type: ndcg_at_3 value: 65.5 - type: ndcg_at_5 value: 67.278 - type: precision_at_1 value: 56.99999999999999 - type: precision_at_10 value: 9.667 - type: precision_at_100 value: 1.083 - type: precision_at_1000 value: 0.11299999999999999 - type: precision_at_3 value: 26.0 - type: precision_at_5 value: 16.933 - type: recall_at_1 value: 54.31700000000001 - type: recall_at_10 value: 85.056 - type: recall_at_100 value: 95.667 - type: recall_at_1000 value: 100.0 - type: recall_at_3 value: 71.0 - type: recall_at_5 value: 75.672 - task: type: Retrieval dataset: name: MTEB TRECCOVID-PL type: clarin-knext/trec-covid-pl config: default split: test revision: 81bcb408f33366c2a20ac54adafad1ae7e877fdd metrics: - type: map_at_1 value: 0.245 - type: map_at_10 value: 2.051 - type: map_at_100 value: 12.009 - type: map_at_1000 value: 27.448 - type: map_at_3 value: 0.721 - type: map_at_5 value: 1.13 - type: mrr_at_1 value: 88.0 - type: mrr_at_10 value: 93.0 - type: mrr_at_100 value: 93.0 - type: mrr_at_1000 value: 93.0 - type: mrr_at_3 value: 93.0 - type: mrr_at_5 value: 93.0 - type: ndcg_at_1 value: 85.0 - type: ndcg_at_10 value: 80.303 - type: ndcg_at_100 value: 61.23499999999999 - type: ndcg_at_1000 value: 52.978 - type: ndcg_at_3 value: 84.419 - type: ndcg_at_5 value: 82.976 - type: precision_at_1 value: 88.0 - type: precision_at_10 value: 83.39999999999999 - type: precision_at_100 value: 61.96 - type: precision_at_1000 value: 22.648 - type: precision_at_3 
value: 89.333 - type: precision_at_5 value: 87.2 - type: recall_at_1 value: 0.245 - type: recall_at_10 value: 2.193 - type: recall_at_100 value: 14.938 - type: recall_at_1000 value: 48.563 - type: recall_at_3 value: 0.738 - type: recall_at_5 value: 1.173
---

## gte-Qwen2-7B-instruct

**gte-Qwen2-7B-instruct** is the latest model in the gte (General Text Embedding) model family and ranks **No.1** in both the English and Chinese evaluations of the [Massive Text Embedding Benchmark (MTEB)](https://huggingface.co/spaces/mteb/leaderboard) (as of June 16, 2024).

Recently, the [**Qwen team**](https://huggingface.co/Qwen) released the Qwen2 series models, and we have trained the **gte-Qwen2-7B-instruct** model based on the [Qwen2-7B](https://huggingface.co/Qwen/Qwen2-7B) LLM. Compared to the [gte-Qwen1.5-7B-instruct](https://huggingface.co/Alibaba-NLP/gte-Qwen1.5-7B-instruct) model, **gte-Qwen2-7B-instruct** uses the same training data and training strategies during the finetuning stage, the only difference being the upgrade of the base model to Qwen2-7B. Given the improvements of the Qwen2 series over the Qwen1.5 series, we can also expect consistent performance gains in the embedding models.

The model incorporates several key advancements:

- Integration of bidirectional attention mechanisms, enriching its contextual understanding.
- Instruction tuning, applied solely on the query side for streamlined efficiency.
- Comprehensive training across a vast, multilingual text corpus spanning diverse domains and scenarios. This training leverages both weakly supervised and supervised data, ensuring the model's applicability across numerous languages and a wide array of downstream tasks.

## Model Information

- Model Size: 7B
- Embedding Dimension: 3584
- Max Input Tokens: 32k

## Requirements

```
transformers>=4.39.2
flash_attn>=2.5.6
```

## Usage

### Sentence Transformers

```python
from sentence_transformers import SentenceTransformer

model = SentenceTransformer("Alibaba-NLP/gte-Qwen2-7B-instruct", trust_remote_code=True)
# In case you want to reduce the maximum length:
model.max_seq_length = 8192

queries = [
    "how much protein should a female eat",
    "summit define",
]
documents = [
    "As a general guideline, the CDC's average requirement of protein for women ages 19 to 70 is 46 grams per day. But, as you can see from this chart, you'll need to increase that if you're expecting or training for a marathon. Check out the chart below to see how much protein you should be eating each day.",
    "Definition of summit for English Language Learners. : 1 the highest point of a mountain : the top of a mountain. : 2 the highest level. : 3 a meeting or series of meetings between the leaders of two or more governments.",
]

query_embeddings = model.encode(queries, prompt_name="query")
document_embeddings = model.encode(documents)

scores = (query_embeddings @ document_embeddings.T) * 100
print(scores.tolist())
```

See [config_sentence_transformers.json](config_sentence_transformers.json) for all pre-built prompt names. Otherwise, you can use `model.encode(queries, prompt="Instruct: ...\nQuery: ")` to use a custom prompt of your choice.
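For example, a custom instruction can be passed directly at encode time. This is a minimal sketch; the task description below is only illustrative (it reuses the one from the Transformers snippet further down), and any one-sentence instruction describing your task works:

```python
from sentence_transformers import SentenceTransformer

model = SentenceTransformer("Alibaba-NLP/gte-Qwen2-7B-instruct", trust_remote_code=True)

# Illustrative one-sentence task description.
task = "Given a web search query, retrieve relevant passages that answer the query"

queries = ["how much protein should a female eat", "summit define"]

# Pass the full instruction prefix instead of the pre-built "query" prompt name.
query_embeddings = model.encode(queries, prompt=f"Instruct: {task}\nQuery: ")
print(query_embeddings.shape)
```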
### Transformers

```python
import torch
import torch.nn.functional as F
from torch import Tensor
from transformers import AutoTokenizer, AutoModel


def last_token_pool(last_hidden_states: Tensor, attention_mask: Tensor) -> Tensor:
    # Pool the hidden state of each sequence's last non-padding token.
    left_padding = (attention_mask[:, -1].sum() == attention_mask.shape[0])
    if left_padding:
        return last_hidden_states[:, -1]
    else:
        sequence_lengths = attention_mask.sum(dim=1) - 1
        batch_size = last_hidden_states.shape[0]
        return last_hidden_states[torch.arange(batch_size, device=last_hidden_states.device), sequence_lengths]


def get_detailed_instruct(task_description: str, query: str) -> str:
    return f'Instruct: {task_description}\nQuery: {query}'


# Each query must come with a one-sentence instruction that describes the task
task = 'Given a web search query, retrieve relevant passages that answer the query'
queries = [
    get_detailed_instruct(task, 'how much protein should a female eat'),
    get_detailed_instruct(task, 'summit define')
]
# No need to add instruction for retrieval documents
documents = [
    "As a general guideline, the CDC's average requirement of protein for women ages 19 to 70 is 46 grams per day. But, as you can see from this chart, you'll need to increase that if you're expecting or training for a marathon. Check out the chart below to see how much protein you should be eating each day.",
    "Definition of summit for English Language Learners. : 1 the highest point of a mountain : the top of a mountain. : 2 the highest level. : 3 a meeting or series of meetings between the leaders of two or more governments."
]
input_texts = queries + documents

tokenizer = AutoTokenizer.from_pretrained('Alibaba-NLP/gte-Qwen2-7B-instruct', trust_remote_code=True)
model = AutoModel.from_pretrained('Alibaba-NLP/gte-Qwen2-7B-instruct', trust_remote_code=True)

max_length = 8192

# Tokenize the input texts
batch_dict = tokenizer(input_texts, max_length=max_length, padding=True, truncation=True, return_tensors='pt')
outputs = model(**batch_dict)
embeddings = last_token_pool(outputs.last_hidden_state, batch_dict['attention_mask'])

# normalize embeddings
embeddings = F.normalize(embeddings, p=2, dim=1)
scores = (embeddings[:2] @ embeddings[2:].T) * 100
print(scores.tolist())
```

## Infinity_emb

Usage via [infinity](https://github.com/michaelfeil/infinity), an MIT-licensed inference server.
```
# requires ~16-32 GB VRAM and NVIDIA Compute Capability >= 8.0
docker run \
  -v $PWD/data:/app/.cache --gpus "0" -p "7997":"7997" \
  michaelf34/infinity:0.0.68-trt-onnx \
  v2 --model-id Alibaba-NLP/gte-Qwen2-7B-instruct --revision "refs/pr/38" --dtype bfloat16 --batch-size 8 --device cuda --engine torch --port 7997 --no-bettertransformer
```

## Evaluation

### MTEB & C-MTEB

You can use [scripts/eval_mteb.py](https://huggingface.co/Alibaba-NLP/gte-Qwen2-7B-instruct/blob/main/scripts/eval_mteb.py) to reproduce the following results of **gte-Qwen2-7B-instruct** on MTEB (English) / C-MTEB (Chinese) (a generic sketch using the open-source `mteb` package is also included at the end of this card):

| Model Name | MTEB(56) | C-MTEB(35) | MTEB-fr(26) | MTEB-pl(26) |
|:----:|:---------:|:----------:|:----------:|:----------:|
| [bge-base-en-1.5](https://huggingface.co/BAAI/bge-base-en-v1.5) | 64.23 | - | - | - |
| [bge-large-en-1.5](https://huggingface.co/BAAI/bge-large-en-v1.5) | 63.55 | - | - | - |
| [gte-large-en-v1.5](https://huggingface.co/Alibaba-NLP/gte-large-en-v1.5) | 65.39 | - | - | - |
| [gte-base-en-v1.5](https://huggingface.co/Alibaba-NLP/gte-base-en-v1.5) | 64.11 | - | - | - |
| [mxbai-embed-large-v1](https://huggingface.co/mixedbread-ai/mxbai-embed-large-v1) | 64.68 | - | - | - |
| [acge_text_embedding](https://huggingface.co/aspire/acge_text_embedding) | - | 69.07 | - | - |
| [stella-mrl-large-zh-v3.5-1792d](https://huggingface.co/infgrad/stella-mrl-large-zh-v3.5-1792d) | - | 68.55 | - | - |
| [gte-large-zh](https://huggingface.co/thenlper/gte-large-zh) | - | 66.72 | - | - |
| [multilingual-e5-base](https://huggingface.co/intfloat/multilingual-e5-base) | 59.45 | 56.21 | - | - |
| [multilingual-e5-large](https://huggingface.co/intfloat/multilingual-e5-large) | 61.50 | 58.81 | - | - |
| [e5-mistral-7b-instruct](https://huggingface.co/intfloat/e5-mistral-7b-instruct) | 66.63 | 60.81 | - | - |
| [gte-Qwen1.5-7B-instruct](https://huggingface.co/Alibaba-NLP/gte-Qwen1.5-7B-instruct) | 67.34 | 69.52 | - | - |
| [NV-Embed-v1](https://huggingface.co/nvidia/NV-Embed-v1) | 69.32 | - | - | - |
| [**gte-Qwen2-7B-instruct**](https://huggingface.co/Alibaba-NLP/gte-Qwen2-7B-instruct) | **70.24** | **72.05** | **68.25** | **67.86** |
| [gte-Qwen2-1.5B-instruct](https://huggingface.co/Alibaba-NLP/gte-Qwen2-1.5B-instruct) | 67.16 | 67.65 | 66.60 | 64.04 |

### GTE Models

The gte series has consistently released two types of models: encoder-only models (based on the BERT architecture) and decoder-only models (based on the LLM architecture).
| Models | Language | Max Sequence Length | Dimension | Model Size (Memory Usage, fp32) |
|:-------------------------------------------------------------------------------------:|:--------:|:-----:|:---------:|:-------------------------------:|
| [GTE-large-zh](https://huggingface.co/thenlper/gte-large-zh) | Chinese | 512 | 1024 | 1.25GB |
| [GTE-base-zh](https://huggingface.co/thenlper/gte-base-zh) | Chinese | 512 | 512 | 0.41GB |
| [GTE-small-zh](https://huggingface.co/thenlper/gte-small-zh) | Chinese | 512 | 512 | 0.12GB |
| [GTE-large](https://huggingface.co/thenlper/gte-large) | English | 512 | 1024 | 1.25GB |
| [GTE-base](https://huggingface.co/thenlper/gte-base) | English | 512 | 512 | 0.21GB |
| [GTE-small](https://huggingface.co/thenlper/gte-small) | English | 512 | 384 | 0.10GB |
| [GTE-large-en-v1.5](https://huggingface.co/Alibaba-NLP/gte-large-en-v1.5) | English | 8192 | 1024 | 1.74GB |
| [GTE-base-en-v1.5](https://huggingface.co/Alibaba-NLP/gte-base-en-v1.5) | English | 8192 | 768 | 0.51GB |
| [GTE-Qwen1.5-7B-instruct](https://huggingface.co/Alibaba-NLP/gte-Qwen1.5-7B-instruct) | Multilingual | 32000 | 4096 | 26.45GB |
| [GTE-Qwen2-7B-instruct](https://huggingface.co/Alibaba-NLP/gte-Qwen2-7B-instruct) | Multilingual | 32000 | 3584 | 26.45GB |
| [GTE-Qwen2-1.5B-instruct](https://huggingface.co/Alibaba-NLP/gte-Qwen2-1.5B-instruct) | Multilingual | 32000 | 1536 | 6.62GB |

## Cloud API Services

In addition to the open-source [GTE](https://huggingface.co/collections/Alibaba-NLP/gte-models-6680f0b13f885cb431e6d469) series models, the GTE models are also available as commercial API services on Alibaba Cloud.

- [Embedding Models](https://help.aliyun.com/zh/model-studio/developer-reference/general-text-embedding/): Three versions of the text embedding models are available: text-embedding-v1/v2/v3, with v3 being the latest API service.
- [ReRank Models](https://help.aliyun.com/zh/model-studio/developer-reference/general-text-sorting-model/): The gte-rerank model service is available.

Note that the models behind the commercial APIs are not entirely identical to the open-source models.

## Community support

### Fine-tuning

GTE models can be fine-tuned with the third-party framework SWIFT.

```shell
pip install ms-swift -U
```

```shell
# check: https://swift.readthedocs.io/en/latest/BestPractices/Embedding.html
nproc_per_node=8
NPROC_PER_NODE=$nproc_per_node \
USE_HF=1 \
swift sft \
    --model Alibaba-NLP/gte-Qwen2-7B-instruct \
    --train_type lora \
    --dataset 'sentence-transformers/stsb' \
    --torch_dtype bfloat16 \
    --num_train_epochs 10 \
    --per_device_train_batch_size 2 \
    --per_device_eval_batch_size 1 \
    --gradient_accumulation_steps $(expr 64 / $nproc_per_node) \
    --eval_steps 100 \
    --save_steps 100 \
    --eval_strategy steps \
    --use_chat_template false \
    --save_total_limit 5 \
    --logging_steps 5 \
    --output_dir output \
    --warmup_ratio 0.05 \
    --learning_rate 5e-6 \
    --deepspeed zero3 \
    --dataloader_num_workers 4 \
    --task_type embedding \
    --loss_type cosine_similarity \
    --dataloader_drop_last true
```

## Citation

If you find our paper or models helpful, please consider citing:

```
@article{li2023towards,
  title={Towards general text embeddings with multi-stage contrastive learning},
  author={Li, Zehan and Zhang, Xin and Zhang, Yanzhao and Long, Dingkun and Xie, Pengjun and Zhang, Meishan},
  journal={arXiv preprint arXiv:2308.03281},
  year={2023}
}
```
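## Evaluating with the `mteb` Package

As referenced in the Evaluation section, a single MTEB task can also be run generically with the open-source [`mteb`](https://github.com/embeddings-benchmark/mteb) package. This is a minimal sketch: the task name is only an example, the exact API may vary between `mteb` versions, and reproducing the reported numbers exactly still requires the task-specific instructions used in `scripts/eval_mteb.py`.

```python
from mteb import MTEB  # pip install mteb
from sentence_transformers import SentenceTransformer

model = SentenceTransformer("Alibaba-NLP/gte-Qwen2-7B-instruct", trust_remote_code=True)

# Run a single example task; extend the task list to cover the full benchmark.
evaluation = MTEB(tasks=["Banking77Classification"])
results = evaluation.run(model, output_folder="results/gte-Qwen2-7B-instruct")
print(results)
```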
[ "BIOSSES", "SCIFACT" ]
TaylorAI/bge-micro-v2
TaylorAI
sentence-similarity
[ "sentence-transformers", "pytorch", "onnx", "safetensors", "bert", "feature-extraction", "sentence-similarity", "transformers", "mteb", "license:mit", "model-index", "autotrain_compatible", "text-embeddings-inference", "endpoints_compatible", "region:us" ]
"2023-10-11T05:55:09Z"
2024-06-06T22:44:08+00:00
105,079
46
--- license: mit pipeline_tag: sentence-similarity tags: - sentence-transformers - feature-extraction - sentence-similarity - transformers - mteb model-index: - name: bge_micro results: - task: type: Classification dataset: name: MTEB AmazonCounterfactualClassification (en) type: mteb/amazon_counterfactual config: en split: test revision: e8379541af4e31359cca9fbcf4b00f2671dba205 metrics: - type: accuracy value: 67.76119402985074 - type: ap value: 29.637849284211114 - type: f1 value: 61.31181187111905 - task: type: Classification dataset: name: MTEB AmazonPolarityClassification type: mteb/amazon_polarity config: default split: test revision: e2d317d38cd51312af73b3d32a06d1a08b442046 metrics: - type: accuracy value: 79.7547 - type: ap value: 74.21401629809145 - type: f1 value: 79.65319615433783 - task: type: Classification dataset: name: MTEB AmazonReviewsClassification (en) type: mteb/amazon_reviews_multi config: en split: test revision: 1399c76144fd37290681b995c656ef9b2e06e26d metrics: - type: accuracy value: 37.452000000000005 - type: f1 value: 37.0245198854966 - task: type: Retrieval dataset: name: MTEB ArguAna type: arguana config: default split: test revision: None metrics: - type: map_at_1 value: 31.152 - type: map_at_10 value: 46.702 - type: map_at_100 value: 47.563 - type: map_at_1000 value: 47.567 - type: map_at_3 value: 42.058 - type: map_at_5 value: 44.608 - type: mrr_at_1 value: 32.006 - type: mrr_at_10 value: 47.064 - type: mrr_at_100 value: 47.910000000000004 - type: mrr_at_1000 value: 47.915 - type: mrr_at_3 value: 42.283 - type: mrr_at_5 value: 44.968 - type: ndcg_at_1 value: 31.152 - type: ndcg_at_10 value: 55.308 - type: ndcg_at_100 value: 58.965 - type: ndcg_at_1000 value: 59.067 - type: ndcg_at_3 value: 45.698 - type: ndcg_at_5 value: 50.296 - type: precision_at_1 value: 31.152 - type: precision_at_10 value: 8.279 - type: precision_at_100 value: 0.987 - type: precision_at_1000 value: 0.1 - type: precision_at_3 value: 18.753 - type: precision_at_5 value: 13.485 - type: recall_at_1 value: 31.152 - type: recall_at_10 value: 82.788 - type: recall_at_100 value: 98.72 - type: recall_at_1000 value: 99.502 - type: recall_at_3 value: 56.259 - type: recall_at_5 value: 67.425 - task: type: Clustering dataset: name: MTEB ArxivClusteringP2P type: mteb/arxiv-clustering-p2p config: default split: test revision: a122ad7f3f0291bf49cc6f4d32aa80929df69d5d metrics: - type: v_measure value: 44.52692241938116 - task: type: Clustering dataset: name: MTEB ArxivClusteringS2S type: mteb/arxiv-clustering-s2s config: default split: test revision: f910caf1a6075f7329cdf8c1a6135696f37dbd53 metrics: - type: v_measure value: 33.245710292773595 - task: type: Reranking dataset: name: MTEB AskUbuntuDupQuestions type: mteb/askubuntudupquestions-reranking config: default split: test revision: 2000358ca161889fa9c082cb41daa8dcfb161a54 metrics: - type: map value: 58.08493637155168 - type: mrr value: 71.94378490084861 - task: type: STS dataset: name: MTEB BIOSSES type: mteb/biosses-sts config: default split: test revision: d3fb88f8f02e40887cd149695127462bbcf29b4a metrics: - type: cos_sim_pearson value: 84.1602804378326 - type: cos_sim_spearman value: 82.92478106365587 - type: euclidean_pearson value: 82.27930167277077 - type: euclidean_spearman value: 82.18560759458093 - type: manhattan_pearson value: 82.34277425888187 - type: manhattan_spearman value: 81.72776583704467 - task: type: Classification dataset: name: MTEB Banking77Classification type: mteb/banking77 config: default split: test revision: 
0fd18e25b25c072e09e0d92ab615fda904d66300 metrics: - type: accuracy value: 81.17207792207792 - type: f1 value: 81.09893836310513 - task: type: Clustering dataset: name: MTEB BiorxivClusteringP2P type: mteb/biorxiv-clustering-p2p config: default split: test revision: 65b79d1d13f80053f67aca9498d9402c2d9f1f40 metrics: - type: v_measure value: 36.109308463095516 - task: type: Clustering dataset: name: MTEB BiorxivClusteringS2S type: mteb/biorxiv-clustering-s2s config: default split: test revision: 258694dd0231531bc1fd9de6ceb52a0853c6d908 metrics: - type: v_measure value: 28.06048212317168 - task: type: Retrieval dataset: name: MTEB CQADupstackAndroidRetrieval type: BeIR/cqadupstack config: default split: test revision: None metrics: - type: map_at_1 value: 28.233999999999998 - type: map_at_10 value: 38.092999999999996 - type: map_at_100 value: 39.473 - type: map_at_1000 value: 39.614 - type: map_at_3 value: 34.839 - type: map_at_5 value: 36.523 - type: mrr_at_1 value: 35.193000000000005 - type: mrr_at_10 value: 44.089 - type: mrr_at_100 value: 44.927 - type: mrr_at_1000 value: 44.988 - type: mrr_at_3 value: 41.559000000000005 - type: mrr_at_5 value: 43.162 - type: ndcg_at_1 value: 35.193000000000005 - type: ndcg_at_10 value: 44.04 - type: ndcg_at_100 value: 49.262 - type: ndcg_at_1000 value: 51.847 - type: ndcg_at_3 value: 39.248 - type: ndcg_at_5 value: 41.298 - type: precision_at_1 value: 35.193000000000005 - type: precision_at_10 value: 8.555 - type: precision_at_100 value: 1.3820000000000001 - type: precision_at_1000 value: 0.189 - type: precision_at_3 value: 19.123 - type: precision_at_5 value: 13.648 - type: recall_at_1 value: 28.233999999999998 - type: recall_at_10 value: 55.094 - type: recall_at_100 value: 76.85300000000001 - type: recall_at_1000 value: 94.163 - type: recall_at_3 value: 40.782000000000004 - type: recall_at_5 value: 46.796 - type: map_at_1 value: 21.538 - type: map_at_10 value: 28.449 - type: map_at_100 value: 29.471000000000004 - type: map_at_1000 value: 29.599999999999998 - type: map_at_3 value: 26.371 - type: map_at_5 value: 27.58 - type: mrr_at_1 value: 26.815 - type: mrr_at_10 value: 33.331 - type: mrr_at_100 value: 34.114 - type: mrr_at_1000 value: 34.182 - type: mrr_at_3 value: 31.561 - type: mrr_at_5 value: 32.608 - type: ndcg_at_1 value: 26.815 - type: ndcg_at_10 value: 32.67 - type: ndcg_at_100 value: 37.039 - type: ndcg_at_1000 value: 39.769 - type: ndcg_at_3 value: 29.523 - type: ndcg_at_5 value: 31.048 - type: precision_at_1 value: 26.815 - type: precision_at_10 value: 5.955 - type: precision_at_100 value: 1.02 - type: precision_at_1000 value: 0.152 - type: precision_at_3 value: 14.033999999999999 - type: precision_at_5 value: 9.911 - type: recall_at_1 value: 21.538 - type: recall_at_10 value: 40.186 - type: recall_at_100 value: 58.948 - type: recall_at_1000 value: 77.158 - type: recall_at_3 value: 30.951 - type: recall_at_5 value: 35.276 - type: map_at_1 value: 35.211999999999996 - type: map_at_10 value: 46.562 - type: map_at_100 value: 47.579 - type: map_at_1000 value: 47.646 - type: map_at_3 value: 43.485 - type: map_at_5 value: 45.206 - type: mrr_at_1 value: 40.627 - type: mrr_at_10 value: 49.928 - type: mrr_at_100 value: 50.647 - type: mrr_at_1000 value: 50.685 - type: mrr_at_3 value: 47.513 - type: mrr_at_5 value: 48.958 - type: ndcg_at_1 value: 40.627 - type: ndcg_at_10 value: 52.217 - type: ndcg_at_100 value: 56.423 - type: ndcg_at_1000 value: 57.821999999999996 - type: ndcg_at_3 value: 46.949000000000005 - type: ndcg_at_5 value: 49.534 - type: 
precision_at_1 value: 40.627 - type: precision_at_10 value: 8.476 - type: precision_at_100 value: 1.15 - type: precision_at_1000 value: 0.132 - type: precision_at_3 value: 21.003 - type: precision_at_5 value: 14.469999999999999 - type: recall_at_1 value: 35.211999999999996 - type: recall_at_10 value: 65.692 - type: recall_at_100 value: 84.011 - type: recall_at_1000 value: 94.03099999999999 - type: recall_at_3 value: 51.404 - type: recall_at_5 value: 57.882 - type: map_at_1 value: 22.09 - type: map_at_10 value: 29.516 - type: map_at_100 value: 30.462 - type: map_at_1000 value: 30.56 - type: map_at_3 value: 26.945000000000004 - type: map_at_5 value: 28.421999999999997 - type: mrr_at_1 value: 23.616 - type: mrr_at_10 value: 31.221 - type: mrr_at_100 value: 32.057 - type: mrr_at_1000 value: 32.137 - type: mrr_at_3 value: 28.738000000000003 - type: mrr_at_5 value: 30.156 - type: ndcg_at_1 value: 23.616 - type: ndcg_at_10 value: 33.97 - type: ndcg_at_100 value: 38.806000000000004 - type: ndcg_at_1000 value: 41.393 - type: ndcg_at_3 value: 28.908 - type: ndcg_at_5 value: 31.433 - type: precision_at_1 value: 23.616 - type: precision_at_10 value: 5.299 - type: precision_at_100 value: 0.812 - type: precision_at_1000 value: 0.107 - type: precision_at_3 value: 12.015 - type: precision_at_5 value: 8.701 - type: recall_at_1 value: 22.09 - type: recall_at_10 value: 46.089999999999996 - type: recall_at_100 value: 68.729 - type: recall_at_1000 value: 88.435 - type: recall_at_3 value: 32.584999999999994 - type: recall_at_5 value: 38.550000000000004 - type: map_at_1 value: 15.469 - type: map_at_10 value: 22.436 - type: map_at_100 value: 23.465 - type: map_at_1000 value: 23.608999999999998 - type: map_at_3 value: 19.716 - type: map_at_5 value: 21.182000000000002 - type: mrr_at_1 value: 18.905 - type: mrr_at_10 value: 26.55 - type: mrr_at_100 value: 27.46 - type: mrr_at_1000 value: 27.553 - type: mrr_at_3 value: 23.921999999999997 - type: mrr_at_5 value: 25.302999999999997 - type: ndcg_at_1 value: 18.905 - type: ndcg_at_10 value: 27.437 - type: ndcg_at_100 value: 32.555 - type: ndcg_at_1000 value: 35.885 - type: ndcg_at_3 value: 22.439 - type: ndcg_at_5 value: 24.666 - type: precision_at_1 value: 18.905 - type: precision_at_10 value: 5.2490000000000006 - type: precision_at_100 value: 0.889 - type: precision_at_1000 value: 0.131 - type: precision_at_3 value: 10.862 - type: precision_at_5 value: 8.085 - type: recall_at_1 value: 15.469 - type: recall_at_10 value: 38.706 - type: recall_at_100 value: 61.242 - type: recall_at_1000 value: 84.84 - type: recall_at_3 value: 24.973 - type: recall_at_5 value: 30.603 - type: map_at_1 value: 24.918000000000003 - type: map_at_10 value: 34.296 - type: map_at_100 value: 35.632000000000005 - type: map_at_1000 value: 35.748999999999995 - type: map_at_3 value: 31.304 - type: map_at_5 value: 33.166000000000004 - type: mrr_at_1 value: 30.703000000000003 - type: mrr_at_10 value: 39.655 - type: mrr_at_100 value: 40.569 - type: mrr_at_1000 value: 40.621 - type: mrr_at_3 value: 37.023 - type: mrr_at_5 value: 38.664 - type: ndcg_at_1 value: 30.703000000000003 - type: ndcg_at_10 value: 39.897 - type: ndcg_at_100 value: 45.777 - type: ndcg_at_1000 value: 48.082 - type: ndcg_at_3 value: 35.122 - type: ndcg_at_5 value: 37.691 - type: precision_at_1 value: 30.703000000000003 - type: precision_at_10 value: 7.305000000000001 - type: precision_at_100 value: 1.208 - type: precision_at_1000 value: 0.159 - type: precision_at_3 value: 16.811 - type: precision_at_5 value: 12.203999999999999 - type: 
recall_at_1 value: 24.918000000000003 - type: recall_at_10 value: 51.31 - type: recall_at_100 value: 76.534 - type: recall_at_1000 value: 91.911 - type: recall_at_3 value: 37.855 - type: recall_at_5 value: 44.493 - type: map_at_1 value: 22.416 - type: map_at_10 value: 30.474 - type: map_at_100 value: 31.759999999999998 - type: map_at_1000 value: 31.891000000000002 - type: map_at_3 value: 27.728 - type: map_at_5 value: 29.247 - type: mrr_at_1 value: 28.881 - type: mrr_at_10 value: 36.418 - type: mrr_at_100 value: 37.347 - type: mrr_at_1000 value: 37.415 - type: mrr_at_3 value: 33.942 - type: mrr_at_5 value: 35.386 - type: ndcg_at_1 value: 28.881 - type: ndcg_at_10 value: 35.812 - type: ndcg_at_100 value: 41.574 - type: ndcg_at_1000 value: 44.289 - type: ndcg_at_3 value: 31.239 - type: ndcg_at_5 value: 33.302 - type: precision_at_1 value: 28.881 - type: precision_at_10 value: 6.598 - type: precision_at_100 value: 1.1079999999999999 - type: precision_at_1000 value: 0.151 - type: precision_at_3 value: 14.954 - type: precision_at_5 value: 10.776 - type: recall_at_1 value: 22.416 - type: recall_at_10 value: 46.243 - type: recall_at_100 value: 71.352 - type: recall_at_1000 value: 90.034 - type: recall_at_3 value: 32.873000000000005 - type: recall_at_5 value: 38.632 - type: map_at_1 value: 22.528166666666667 - type: map_at_10 value: 30.317833333333333 - type: map_at_100 value: 31.44108333333333 - type: map_at_1000 value: 31.566666666666666 - type: map_at_3 value: 27.84425 - type: map_at_5 value: 29.233333333333334 - type: mrr_at_1 value: 26.75733333333333 - type: mrr_at_10 value: 34.24425 - type: mrr_at_100 value: 35.11375 - type: mrr_at_1000 value: 35.184333333333335 - type: mrr_at_3 value: 32.01225 - type: mrr_at_5 value: 33.31225 - type: ndcg_at_1 value: 26.75733333333333 - type: ndcg_at_10 value: 35.072583333333334 - type: ndcg_at_100 value: 40.13358333333334 - type: ndcg_at_1000 value: 42.81825 - type: ndcg_at_3 value: 30.79275000000001 - type: ndcg_at_5 value: 32.822 - type: precision_at_1 value: 26.75733333333333 - type: precision_at_10 value: 6.128083333333334 - type: precision_at_100 value: 1.019 - type: precision_at_1000 value: 0.14391666666666664 - type: precision_at_3 value: 14.129916666666665 - type: precision_at_5 value: 10.087416666666668 - type: recall_at_1 value: 22.528166666666667 - type: recall_at_10 value: 45.38341666666667 - type: recall_at_100 value: 67.81791666666668 - type: recall_at_1000 value: 86.71716666666666 - type: recall_at_3 value: 33.38741666666667 - type: recall_at_5 value: 38.62041666666667 - type: map_at_1 value: 21.975 - type: map_at_10 value: 28.144999999999996 - type: map_at_100 value: 28.994999999999997 - type: map_at_1000 value: 29.086000000000002 - type: map_at_3 value: 25.968999999999998 - type: map_at_5 value: 27.321 - type: mrr_at_1 value: 25 - type: mrr_at_10 value: 30.822 - type: mrr_at_100 value: 31.647 - type: mrr_at_1000 value: 31.712 - type: mrr_at_3 value: 28.860000000000003 - type: mrr_at_5 value: 30.041 - type: ndcg_at_1 value: 25 - type: ndcg_at_10 value: 31.929999999999996 - type: ndcg_at_100 value: 36.258 - type: ndcg_at_1000 value: 38.682 - type: ndcg_at_3 value: 27.972 - type: ndcg_at_5 value: 30.089 - type: precision_at_1 value: 25 - type: precision_at_10 value: 4.923 - type: precision_at_100 value: 0.767 - type: precision_at_1000 value: 0.106 - type: precision_at_3 value: 11.860999999999999 - type: precision_at_5 value: 8.466 - type: recall_at_1 value: 21.975 - type: recall_at_10 value: 41.102 - type: recall_at_100 value: 60.866 - type: 
recall_at_1000 value: 78.781 - type: recall_at_3 value: 30.268 - type: recall_at_5 value: 35.552 - type: map_at_1 value: 15.845999999999998 - type: map_at_10 value: 21.861 - type: map_at_100 value: 22.798 - type: map_at_1000 value: 22.925 - type: map_at_3 value: 19.922 - type: map_at_5 value: 21.054000000000002 - type: mrr_at_1 value: 19.098000000000003 - type: mrr_at_10 value: 25.397 - type: mrr_at_100 value: 26.246000000000002 - type: mrr_at_1000 value: 26.33 - type: mrr_at_3 value: 23.469 - type: mrr_at_5 value: 24.646 - type: ndcg_at_1 value: 19.098000000000003 - type: ndcg_at_10 value: 25.807999999999996 - type: ndcg_at_100 value: 30.445 - type: ndcg_at_1000 value: 33.666000000000004 - type: ndcg_at_3 value: 22.292 - type: ndcg_at_5 value: 24.075 - type: precision_at_1 value: 19.098000000000003 - type: precision_at_10 value: 4.58 - type: precision_at_100 value: 0.8099999999999999 - type: precision_at_1000 value: 0.126 - type: precision_at_3 value: 10.346 - type: precision_at_5 value: 7.542999999999999 - type: recall_at_1 value: 15.845999999999998 - type: recall_at_10 value: 34.172999999999995 - type: recall_at_100 value: 55.24099999999999 - type: recall_at_1000 value: 78.644 - type: recall_at_3 value: 24.401 - type: recall_at_5 value: 28.938000000000002 - type: map_at_1 value: 22.974 - type: map_at_10 value: 30.108 - type: map_at_100 value: 31.208000000000002 - type: map_at_1000 value: 31.330999999999996 - type: map_at_3 value: 27.889999999999997 - type: map_at_5 value: 29.023 - type: mrr_at_1 value: 26.493 - type: mrr_at_10 value: 33.726 - type: mrr_at_100 value: 34.622 - type: mrr_at_1000 value: 34.703 - type: mrr_at_3 value: 31.575999999999997 - type: mrr_at_5 value: 32.690999999999995 - type: ndcg_at_1 value: 26.493 - type: ndcg_at_10 value: 34.664 - type: ndcg_at_100 value: 39.725 - type: ndcg_at_1000 value: 42.648 - type: ndcg_at_3 value: 30.447999999999997 - type: ndcg_at_5 value: 32.145 - type: precision_at_1 value: 26.493 - type: precision_at_10 value: 5.7090000000000005 - type: precision_at_100 value: 0.9199999999999999 - type: precision_at_1000 value: 0.129 - type: precision_at_3 value: 13.464 - type: precision_at_5 value: 9.384 - type: recall_at_1 value: 22.974 - type: recall_at_10 value: 45.097 - type: recall_at_100 value: 66.908 - type: recall_at_1000 value: 87.495 - type: recall_at_3 value: 33.338 - type: recall_at_5 value: 37.499 - type: map_at_1 value: 22.408 - type: map_at_10 value: 29.580000000000002 - type: map_at_100 value: 31.145 - type: map_at_1000 value: 31.369000000000003 - type: map_at_3 value: 27.634999999999998 - type: map_at_5 value: 28.766000000000002 - type: mrr_at_1 value: 27.272999999999996 - type: mrr_at_10 value: 33.93 - type: mrr_at_100 value: 34.963 - type: mrr_at_1000 value: 35.031 - type: mrr_at_3 value: 32.016 - type: mrr_at_5 value: 33.221000000000004 - type: ndcg_at_1 value: 27.272999999999996 - type: ndcg_at_10 value: 33.993 - type: ndcg_at_100 value: 40.333999999999996 - type: ndcg_at_1000 value: 43.361 - type: ndcg_at_3 value: 30.918 - type: ndcg_at_5 value: 32.552 - type: precision_at_1 value: 27.272999999999996 - type: precision_at_10 value: 6.285 - type: precision_at_100 value: 1.389 - type: precision_at_1000 value: 0.232 - type: precision_at_3 value: 14.427000000000001 - type: precision_at_5 value: 10.356 - type: recall_at_1 value: 22.408 - type: recall_at_10 value: 41.318 - type: recall_at_100 value: 70.539 - type: recall_at_1000 value: 90.197 - type: recall_at_3 value: 32.513 - type: recall_at_5 value: 37 - type: map_at_1 value: 
17.258000000000003 - type: map_at_10 value: 24.294 - type: map_at_100 value: 25.305 - type: map_at_1000 value: 25.419999999999998 - type: map_at_3 value: 22.326999999999998 - type: map_at_5 value: 23.31 - type: mrr_at_1 value: 18.484 - type: mrr_at_10 value: 25.863999999999997 - type: mrr_at_100 value: 26.766000000000002 - type: mrr_at_1000 value: 26.855 - type: mrr_at_3 value: 23.968 - type: mrr_at_5 value: 24.911 - type: ndcg_at_1 value: 18.484 - type: ndcg_at_10 value: 28.433000000000003 - type: ndcg_at_100 value: 33.405 - type: ndcg_at_1000 value: 36.375 - type: ndcg_at_3 value: 24.455 - type: ndcg_at_5 value: 26.031 - type: precision_at_1 value: 18.484 - type: precision_at_10 value: 4.603 - type: precision_at_100 value: 0.773 - type: precision_at_1000 value: 0.11299999999999999 - type: precision_at_3 value: 10.659 - type: precision_at_5 value: 7.505000000000001 - type: recall_at_1 value: 17.258000000000003 - type: recall_at_10 value: 39.589999999999996 - type: recall_at_100 value: 62.592000000000006 - type: recall_at_1000 value: 84.917 - type: recall_at_3 value: 28.706 - type: recall_at_5 value: 32.224000000000004 - task: type: Retrieval dataset: name: MTEB ClimateFEVER type: climate-fever config: default split: test revision: None metrics: - type: map_at_1 value: 10.578999999999999 - type: map_at_10 value: 17.642 - type: map_at_100 value: 19.451 - type: map_at_1000 value: 19.647000000000002 - type: map_at_3 value: 14.618 - type: map_at_5 value: 16.145 - type: mrr_at_1 value: 23.322000000000003 - type: mrr_at_10 value: 34.204 - type: mrr_at_100 value: 35.185 - type: mrr_at_1000 value: 35.235 - type: mrr_at_3 value: 30.847 - type: mrr_at_5 value: 32.824 - type: ndcg_at_1 value: 23.322000000000003 - type: ndcg_at_10 value: 25.352999999999998 - type: ndcg_at_100 value: 32.574 - type: ndcg_at_1000 value: 36.073 - type: ndcg_at_3 value: 20.318 - type: ndcg_at_5 value: 22.111 - type: precision_at_1 value: 23.322000000000003 - type: precision_at_10 value: 8.02 - type: precision_at_100 value: 1.5730000000000002 - type: precision_at_1000 value: 0.22200000000000003 - type: precision_at_3 value: 15.049000000000001 - type: precision_at_5 value: 11.87 - type: recall_at_1 value: 10.578999999999999 - type: recall_at_10 value: 30.964999999999996 - type: recall_at_100 value: 55.986000000000004 - type: recall_at_1000 value: 75.565 - type: recall_at_3 value: 18.686 - type: recall_at_5 value: 23.629 - task: type: Retrieval dataset: name: MTEB DBPedia type: dbpedia-entity config: default split: test revision: None metrics: - type: map_at_1 value: 7.327 - type: map_at_10 value: 14.904 - type: map_at_100 value: 20.29 - type: map_at_1000 value: 21.42 - type: map_at_3 value: 10.911 - type: map_at_5 value: 12.791 - type: mrr_at_1 value: 57.25 - type: mrr_at_10 value: 66.62700000000001 - type: mrr_at_100 value: 67.035 - type: mrr_at_1000 value: 67.052 - type: mrr_at_3 value: 64.833 - type: mrr_at_5 value: 65.908 - type: ndcg_at_1 value: 43.75 - type: ndcg_at_10 value: 32.246 - type: ndcg_at_100 value: 35.774 - type: ndcg_at_1000 value: 42.872 - type: ndcg_at_3 value: 36.64 - type: ndcg_at_5 value: 34.487 - type: precision_at_1 value: 57.25 - type: precision_at_10 value: 25.924999999999997 - type: precision_at_100 value: 7.670000000000001 - type: precision_at_1000 value: 1.599 - type: precision_at_3 value: 41.167 - type: precision_at_5 value: 34.65 - type: recall_at_1 value: 7.327 - type: recall_at_10 value: 19.625 - type: recall_at_100 value: 41.601 - type: recall_at_1000 value: 65.117 - type: recall_at_3 
value: 12.308 - type: recall_at_5 value: 15.437999999999999 - task: type: Classification dataset: name: MTEB EmotionClassification type: mteb/emotion config: default split: test revision: 4f58c6b202a23cf9a4da393831edf4f9183cad37 metrics: - type: accuracy value: 44.53 - type: f1 value: 39.39884255816736 - task: type: Retrieval dataset: name: MTEB FEVER type: fever config: default split: test revision: None metrics: - type: map_at_1 value: 58.913000000000004 - type: map_at_10 value: 69.592 - type: map_at_100 value: 69.95599999999999 - type: map_at_1000 value: 69.973 - type: map_at_3 value: 67.716 - type: map_at_5 value: 68.899 - type: mrr_at_1 value: 63.561 - type: mrr_at_10 value: 74.2 - type: mrr_at_100 value: 74.468 - type: mrr_at_1000 value: 74.47500000000001 - type: mrr_at_3 value: 72.442 - type: mrr_at_5 value: 73.58 - type: ndcg_at_1 value: 63.561 - type: ndcg_at_10 value: 74.988 - type: ndcg_at_100 value: 76.52799999999999 - type: ndcg_at_1000 value: 76.88000000000001 - type: ndcg_at_3 value: 71.455 - type: ndcg_at_5 value: 73.42699999999999 - type: precision_at_1 value: 63.561 - type: precision_at_10 value: 9.547 - type: precision_at_100 value: 1.044 - type: precision_at_1000 value: 0.109 - type: precision_at_3 value: 28.143 - type: precision_at_5 value: 18.008 - type: recall_at_1 value: 58.913000000000004 - type: recall_at_10 value: 87.18 - type: recall_at_100 value: 93.852 - type: recall_at_1000 value: 96.256 - type: recall_at_3 value: 77.55199999999999 - type: recall_at_5 value: 82.42399999999999 - task: type: Retrieval dataset: name: MTEB FiQA2018 type: fiqa config: default split: test revision: None metrics: - type: map_at_1 value: 11.761000000000001 - type: map_at_10 value: 19.564999999999998 - type: map_at_100 value: 21.099 - type: map_at_1000 value: 21.288999999999998 - type: map_at_3 value: 16.683999999999997 - type: map_at_5 value: 18.307000000000002 - type: mrr_at_1 value: 23.302 - type: mrr_at_10 value: 30.979 - type: mrr_at_100 value: 32.121 - type: mrr_at_1000 value: 32.186 - type: mrr_at_3 value: 28.549000000000003 - type: mrr_at_5 value: 30.038999999999998 - type: ndcg_at_1 value: 23.302 - type: ndcg_at_10 value: 25.592 - type: ndcg_at_100 value: 32.416 - type: ndcg_at_1000 value: 36.277 - type: ndcg_at_3 value: 22.151 - type: ndcg_at_5 value: 23.483999999999998 - type: precision_at_1 value: 23.302 - type: precision_at_10 value: 7.377000000000001 - type: precision_at_100 value: 1.415 - type: precision_at_1000 value: 0.212 - type: precision_at_3 value: 14.712 - type: precision_at_5 value: 11.358 - type: recall_at_1 value: 11.761000000000001 - type: recall_at_10 value: 31.696 - type: recall_at_100 value: 58.01500000000001 - type: recall_at_1000 value: 81.572 - type: recall_at_3 value: 20.742 - type: recall_at_5 value: 25.707 - task: type: Retrieval dataset: name: MTEB HotpotQA type: hotpotqa config: default split: test revision: None metrics: - type: map_at_1 value: 32.275 - type: map_at_10 value: 44.712 - type: map_at_100 value: 45.621 - type: map_at_1000 value: 45.698 - type: map_at_3 value: 42.016999999999996 - type: map_at_5 value: 43.659 - type: mrr_at_1 value: 64.551 - type: mrr_at_10 value: 71.58099999999999 - type: mrr_at_100 value: 71.952 - type: mrr_at_1000 value: 71.96900000000001 - type: mrr_at_3 value: 70.236 - type: mrr_at_5 value: 71.051 - type: ndcg_at_1 value: 64.551 - type: ndcg_at_10 value: 53.913999999999994 - type: ndcg_at_100 value: 57.421 - type: ndcg_at_1000 value: 59.06 - type: ndcg_at_3 value: 49.716 - type: ndcg_at_5 value: 51.971999999999994 
- type: precision_at_1 value: 64.551 - type: precision_at_10 value: 11.110000000000001 - type: precision_at_100 value: 1.388 - type: precision_at_1000 value: 0.161 - type: precision_at_3 value: 30.822 - type: precision_at_5 value: 20.273 - type: recall_at_1 value: 32.275 - type: recall_at_10 value: 55.55 - type: recall_at_100 value: 69.38600000000001 - type: recall_at_1000 value: 80.35799999999999 - type: recall_at_3 value: 46.232 - type: recall_at_5 value: 50.682 - task: type: Classification dataset: name: MTEB ImdbClassification type: mteb/imdb config: default split: test revision: 3d86128a09e091d6018b6d26cad27f2739fc2db7 metrics: - type: accuracy value: 76.4604 - type: ap value: 70.40498168422701 - type: f1 value: 76.38572688476046 - task: type: Retrieval dataset: name: MTEB MSMARCO type: msmarco config: default split: dev revision: None metrics: - type: map_at_1 value: 15.065999999999999 - type: map_at_10 value: 25.058000000000003 - type: map_at_100 value: 26.268 - type: map_at_1000 value: 26.344 - type: map_at_3 value: 21.626 - type: map_at_5 value: 23.513 - type: mrr_at_1 value: 15.501000000000001 - type: mrr_at_10 value: 25.548 - type: mrr_at_100 value: 26.723000000000003 - type: mrr_at_1000 value: 26.793 - type: mrr_at_3 value: 22.142 - type: mrr_at_5 value: 24.024 - type: ndcg_at_1 value: 15.501000000000001 - type: ndcg_at_10 value: 31.008000000000003 - type: ndcg_at_100 value: 37.08 - type: ndcg_at_1000 value: 39.102 - type: ndcg_at_3 value: 23.921999999999997 - type: ndcg_at_5 value: 27.307 - type: precision_at_1 value: 15.501000000000001 - type: precision_at_10 value: 5.155 - type: precision_at_100 value: 0.822 - type: precision_at_1000 value: 0.099 - type: precision_at_3 value: 10.363 - type: precision_at_5 value: 7.917000000000001 - type: recall_at_1 value: 15.065999999999999 - type: recall_at_10 value: 49.507 - type: recall_at_100 value: 78.118 - type: recall_at_1000 value: 93.881 - type: recall_at_3 value: 30.075000000000003 - type: recall_at_5 value: 38.222 - task: type: Classification dataset: name: MTEB MTOPDomainClassification (en) type: mteb/mtop_domain config: en split: test revision: d80d48c1eb48d3562165c59d59d0034df9fff0bf metrics: - type: accuracy value: 90.6703146374829 - type: f1 value: 90.1258004293966 - task: type: Classification dataset: name: MTEB MTOPIntentClassification (en) type: mteb/mtop_intent config: en split: test revision: ae001d0e6b1228650b7bd1c2c65fb50ad11a8aba metrics: - type: accuracy value: 68.29229366165072 - type: f1 value: 50.016194478997875 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (en) type: mteb/amazon_massive_intent config: en split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 68.57767316745124 - type: f1 value: 67.16194062146954 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (en) type: mteb/amazon_massive_scenario config: en split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 73.92064559515804 - type: f1 value: 73.6680729569968 - task: type: Clustering dataset: name: MTEB MedrxivClusteringP2P type: mteb/medrxiv-clustering-p2p config: default split: test revision: e7a26af6f3ae46b30dde8737f02c07b1505bcc73 metrics: - type: v_measure value: 31.56335607367883 - task: type: Clustering dataset: name: MTEB MedrxivClusteringS2S type: mteb/medrxiv-clustering-s2s config: default split: test revision: 35191c8c0dca72d8ff3efcd72aa802307d469663 metrics: - type: v_measure value: 28.131807833734268 - 
task: type: Reranking dataset: name: MTEB MindSmallReranking type: mteb/mind_small config: default split: test revision: 3bdac13927fdc888b903db93b2ffdbd90b295a69 metrics: - type: map value: 31.07390328719844 - type: mrr value: 32.117370992867905 - task: type: Retrieval dataset: name: MTEB NFCorpus type: nfcorpus config: default split: test revision: None metrics: - type: map_at_1 value: 5.274 - type: map_at_10 value: 11.489 - type: map_at_100 value: 14.518 - type: map_at_1000 value: 15.914 - type: map_at_3 value: 8.399 - type: map_at_5 value: 9.889000000000001 - type: mrr_at_1 value: 42.724000000000004 - type: mrr_at_10 value: 51.486 - type: mrr_at_100 value: 51.941 - type: mrr_at_1000 value: 51.99 - type: mrr_at_3 value: 49.278 - type: mrr_at_5 value: 50.485 - type: ndcg_at_1 value: 39.938 - type: ndcg_at_10 value: 31.862000000000002 - type: ndcg_at_100 value: 29.235 - type: ndcg_at_1000 value: 37.802 - type: ndcg_at_3 value: 35.754999999999995 - type: ndcg_at_5 value: 34.447 - type: precision_at_1 value: 42.105 - type: precision_at_10 value: 23.901 - type: precision_at_100 value: 7.715 - type: precision_at_1000 value: 2.045 - type: precision_at_3 value: 33.437 - type: precision_at_5 value: 29.782999999999998 - type: recall_at_1 value: 5.274 - type: recall_at_10 value: 15.351 - type: recall_at_100 value: 29.791 - type: recall_at_1000 value: 60.722 - type: recall_at_3 value: 9.411 - type: recall_at_5 value: 12.171999999999999 - task: type: Retrieval dataset: name: MTEB NQ type: nq config: default split: test revision: None metrics: - type: map_at_1 value: 16.099 - type: map_at_10 value: 27.913 - type: map_at_100 value: 29.281000000000002 - type: map_at_1000 value: 29.343999999999998 - type: map_at_3 value: 23.791 - type: map_at_5 value: 26.049 - type: mrr_at_1 value: 18.337 - type: mrr_at_10 value: 29.953999999999997 - type: mrr_at_100 value: 31.080999999999996 - type: mrr_at_1000 value: 31.130000000000003 - type: mrr_at_3 value: 26.168000000000003 - type: mrr_at_5 value: 28.277 - type: ndcg_at_1 value: 18.308 - type: ndcg_at_10 value: 34.938 - type: ndcg_at_100 value: 41.125 - type: ndcg_at_1000 value: 42.708 - type: ndcg_at_3 value: 26.805 - type: ndcg_at_5 value: 30.686999999999998 - type: precision_at_1 value: 18.308 - type: precision_at_10 value: 6.476999999999999 - type: precision_at_100 value: 0.9939999999999999 - type: precision_at_1000 value: 0.11399999999999999 - type: precision_at_3 value: 12.784999999999998 - type: precision_at_5 value: 9.878 - type: recall_at_1 value: 16.099 - type: recall_at_10 value: 54.63 - type: recall_at_100 value: 82.24900000000001 - type: recall_at_1000 value: 94.242 - type: recall_at_3 value: 33.174 - type: recall_at_5 value: 42.164 - task: type: Retrieval dataset: name: MTEB QuoraRetrieval type: quora config: default split: test revision: None metrics: - type: map_at_1 value: 67.947 - type: map_at_10 value: 81.499 - type: map_at_100 value: 82.17 - type: map_at_1000 value: 82.194 - type: map_at_3 value: 78.567 - type: map_at_5 value: 80.34400000000001 - type: mrr_at_1 value: 78.18 - type: mrr_at_10 value: 85.05 - type: mrr_at_100 value: 85.179 - type: mrr_at_1000 value: 85.181 - type: mrr_at_3 value: 83.91 - type: mrr_at_5 value: 84.638 - type: ndcg_at_1 value: 78.2 - type: ndcg_at_10 value: 85.715 - type: ndcg_at_100 value: 87.2 - type: ndcg_at_1000 value: 87.39 - type: ndcg_at_3 value: 82.572 - type: ndcg_at_5 value: 84.176 - type: precision_at_1 value: 78.2 - type: precision_at_10 value: 12.973 - type: precision_at_100 value: 1.5010000000000001 - 
type: precision_at_1000 value: 0.156 - type: precision_at_3 value: 35.949999999999996 - type: precision_at_5 value: 23.62 - type: recall_at_1 value: 67.947 - type: recall_at_10 value: 93.804 - type: recall_at_100 value: 98.971 - type: recall_at_1000 value: 99.91600000000001 - type: recall_at_3 value: 84.75399999999999 - type: recall_at_5 value: 89.32 - task: type: Clustering dataset: name: MTEB RedditClustering type: mteb/reddit-clustering config: default split: test revision: 24640382cdbf8abc73003fb0fa6d111a705499eb metrics: - type: v_measure value: 45.457201684255104 - task: type: Clustering dataset: name: MTEB RedditClusteringP2P type: mteb/reddit-clustering-p2p config: default split: test revision: 282350215ef01743dc01b456c7f5241fa8937f16 metrics: - type: v_measure value: 55.162226937477875 - task: type: Retrieval dataset: name: MTEB SCIDOCS type: scidocs config: default split: test revision: None metrics: - type: map_at_1 value: 4.173 - type: map_at_10 value: 10.463000000000001 - type: map_at_100 value: 12.278 - type: map_at_1000 value: 12.572 - type: map_at_3 value: 7.528 - type: map_at_5 value: 8.863 - type: mrr_at_1 value: 20.599999999999998 - type: mrr_at_10 value: 30.422 - type: mrr_at_100 value: 31.6 - type: mrr_at_1000 value: 31.663000000000004 - type: mrr_at_3 value: 27.400000000000002 - type: mrr_at_5 value: 29.065 - type: ndcg_at_1 value: 20.599999999999998 - type: ndcg_at_10 value: 17.687 - type: ndcg_at_100 value: 25.172 - type: ndcg_at_1000 value: 30.617 - type: ndcg_at_3 value: 16.81 - type: ndcg_at_5 value: 14.499 - type: precision_at_1 value: 20.599999999999998 - type: precision_at_10 value: 9.17 - type: precision_at_100 value: 2.004 - type: precision_at_1000 value: 0.332 - type: precision_at_3 value: 15.6 - type: precision_at_5 value: 12.58 - type: recall_at_1 value: 4.173 - type: recall_at_10 value: 18.575 - type: recall_at_100 value: 40.692 - type: recall_at_1000 value: 67.467 - type: recall_at_3 value: 9.488000000000001 - type: recall_at_5 value: 12.738 - task: type: STS dataset: name: MTEB SICK-R type: mteb/sickr-sts config: default split: test revision: a6ea5a8cab320b040a23452cc28066d9beae2cee metrics: - type: cos_sim_pearson value: 81.12603499315416 - type: cos_sim_spearman value: 73.62060290948378 - type: euclidean_pearson value: 78.14083565781135 - type: euclidean_spearman value: 73.16840437541543 - type: manhattan_pearson value: 77.92017261109734 - type: manhattan_spearman value: 72.8805059949965 - task: type: STS dataset: name: MTEB STS12 type: mteb/sts12-sts config: default split: test revision: a0d554a64d88156834ff5ae9920b964011b16384 metrics: - type: cos_sim_pearson value: 79.75955377133172 - type: cos_sim_spearman value: 71.8872633964069 - type: euclidean_pearson value: 76.31922068538256 - type: euclidean_spearman value: 70.86449661855376 - type: manhattan_pearson value: 76.47852229730407 - type: manhattan_spearman value: 70.99367421984789 - task: type: STS dataset: name: MTEB STS13 type: mteb/sts13-sts config: default split: test revision: 7e90230a92c190f1bf69ae9002b8cea547a64cca metrics: - type: cos_sim_pearson value: 78.80762722908158 - type: cos_sim_spearman value: 79.84588978756372 - type: euclidean_pearson value: 79.8216849781164 - type: euclidean_spearman value: 80.22647061695481 - type: manhattan_pearson value: 79.56604194112572 - type: manhattan_spearman value: 79.96495189862462 - task: type: STS dataset: name: MTEB STS14 type: mteb/sts14-sts config: default split: test revision: 6031580fec1f6af667f0bd2da0a551cf4f0b2375 metrics: - type: 
cos_sim_pearson value: 80.1012718092742 - type: cos_sim_spearman value: 76.86011381793661 - type: euclidean_pearson value: 79.94426039862019 - type: euclidean_spearman value: 77.36751135465131 - type: manhattan_pearson value: 79.87959373304288 - type: manhattan_spearman value: 77.37717129004746 - task: type: STS dataset: name: MTEB STS15 type: mteb/sts15-sts config: default split: test revision: ae752c7c21bf194d8b67fd573edf7ae58183cbe3 metrics: - type: cos_sim_pearson value: 83.90618420346104 - type: cos_sim_spearman value: 84.77290791243722 - type: euclidean_pearson value: 84.64732258073293 - type: euclidean_spearman value: 85.21053649543357 - type: manhattan_pearson value: 84.61616883522647 - type: manhattan_spearman value: 85.19803126766931 - task: type: STS dataset: name: MTEB STS16 type: mteb/sts16-sts config: default split: test revision: 4d8694f8f0e0100860b497b999b3dbed754a0513 metrics: - type: cos_sim_pearson value: 80.52192114059063 - type: cos_sim_spearman value: 81.9103244827937 - type: euclidean_pearson value: 80.99375176138985 - type: euclidean_spearman value: 81.540250641079 - type: manhattan_pearson value: 80.84979573396426 - type: manhattan_spearman value: 81.3742591621492 - task: type: STS dataset: name: MTEB STS17 (en-en) type: mteb/sts17-crosslingual-sts config: en-en split: test revision: af5e6fb845001ecf41f4c1e033ce921939a2a68d metrics: - type: cos_sim_pearson value: 85.82166001234197 - type: cos_sim_spearman value: 86.81857495659123 - type: euclidean_pearson value: 85.72798403202849 - type: euclidean_spearman value: 85.70482438950965 - type: manhattan_pearson value: 85.51579093130357 - type: manhattan_spearman value: 85.41233705379751 - task: type: STS dataset: name: MTEB STS22 (en) type: mteb/sts22-crosslingual-sts config: en split: test revision: 6d1ba47164174a496b7fa5d3569dae26a6813b80 metrics: - type: cos_sim_pearson value: 64.48071151079803 - type: cos_sim_spearman value: 65.37838108084044 - type: euclidean_pearson value: 64.67378947096257 - type: euclidean_spearman value: 65.39187147219869 - type: manhattan_pearson value: 65.35487466133208 - type: manhattan_spearman value: 65.51328499442272 - task: type: STS dataset: name: MTEB STSBenchmark type: mteb/stsbenchmark-sts config: default split: test revision: b0fddb56ed78048fa8b90373c8a3cfc37b684831 metrics: - type: cos_sim_pearson value: 82.64702367823314 - type: cos_sim_spearman value: 82.49732953181818 - type: euclidean_pearson value: 83.05996062475664 - type: euclidean_spearman value: 82.28159546751176 - type: manhattan_pearson value: 82.98305503664952 - type: manhattan_spearman value: 82.18405771943928 - task: type: Reranking dataset: name: MTEB SciDocsRR type: mteb/scidocs-reranking config: default split: test revision: d3c5e1fc0b855ab6097bf1cda04dd73947d7caab metrics: - type: map value: 78.5744649318696 - type: mrr value: 93.35386291268645 - task: type: Retrieval dataset: name: MTEB SciFact type: scifact config: default split: test revision: None metrics: - type: map_at_1 value: 52.093999999999994 - type: map_at_10 value: 61.646 - type: map_at_100 value: 62.197 - type: map_at_1000 value: 62.22800000000001 - type: map_at_3 value: 58.411 - type: map_at_5 value: 60.585 - type: mrr_at_1 value: 55.00000000000001 - type: mrr_at_10 value: 62.690999999999995 - type: mrr_at_100 value: 63.139 - type: mrr_at_1000 value: 63.166999999999994 - type: mrr_at_3 value: 60.111000000000004 - type: mrr_at_5 value: 61.778 - type: ndcg_at_1 value: 55.00000000000001 - type: ndcg_at_10 value: 66.271 - type: ndcg_at_100 value: 68.879 
- type: ndcg_at_1000 value: 69.722 - type: ndcg_at_3 value: 60.672000000000004 - type: ndcg_at_5 value: 63.929 - type: precision_at_1 value: 55.00000000000001 - type: precision_at_10 value: 9 - type: precision_at_100 value: 1.043 - type: precision_at_1000 value: 0.11100000000000002 - type: precision_at_3 value: 23.555999999999997 - type: precision_at_5 value: 16.2 - type: recall_at_1 value: 52.093999999999994 - type: recall_at_10 value: 79.567 - type: recall_at_100 value: 91.60000000000001 - type: recall_at_1000 value: 98.333 - type: recall_at_3 value: 64.633 - type: recall_at_5 value: 72.68299999999999 - task: type: PairClassification dataset: name: MTEB SprintDuplicateQuestions type: mteb/sprintduplicatequestions-pairclassification config: default split: test revision: d66bd1f72af766a5cc4b0ca5e00c162f89e8cc46 metrics: - type: cos_sim_accuracy value: 99.83267326732673 - type: cos_sim_ap value: 95.77995366495178 - type: cos_sim_f1 value: 91.51180311401306 - type: cos_sim_precision value: 91.92734611503532 - type: cos_sim_recall value: 91.10000000000001 - type: dot_accuracy value: 99.63366336633663 - type: dot_ap value: 88.53996286967461 - type: dot_f1 value: 81.06537530266343 - type: dot_precision value: 78.59154929577464 - type: dot_recall value: 83.7 - type: euclidean_accuracy value: 99.82376237623762 - type: euclidean_ap value: 95.53192209281187 - type: euclidean_f1 value: 91.19683481701286 - type: euclidean_precision value: 90.21526418786692 - type: euclidean_recall value: 92.2 - type: manhattan_accuracy value: 99.82376237623762 - type: manhattan_ap value: 95.55642082191741 - type: manhattan_f1 value: 91.16186693147964 - type: manhattan_precision value: 90.53254437869822 - type: manhattan_recall value: 91.8 - type: max_accuracy value: 99.83267326732673 - type: max_ap value: 95.77995366495178 - type: max_f1 value: 91.51180311401306 - task: type: Clustering dataset: name: MTEB StackExchangeClustering type: mteb/stackexchange-clustering config: default split: test revision: 6cbc1f7b2bc0622f2e39d2c77fa502909748c259 metrics: - type: v_measure value: 54.508462134213474 - task: type: Clustering dataset: name: MTEB StackExchangeClusteringP2P type: mteb/stackexchange-clustering-p2p config: default split: test revision: 815ca46b2622cec33ccafc3735d572c266efdb44 metrics: - type: v_measure value: 34.06549765184959 - task: type: Reranking dataset: name: MTEB StackOverflowDupQuestions type: mteb/stackoverflowdupquestions-reranking config: default split: test revision: e185fbe320c72810689fc5848eb6114e1ef5ec69 metrics: - type: map value: 49.43129549466616 - type: mrr value: 50.20613169510227 - task: type: Summarization dataset: name: MTEB SummEval type: mteb/summeval config: default split: test revision: cda12ad7615edc362dbf25a00fdd61d3b1eaf93c metrics: - type: cos_sim_pearson value: 30.069516173193044 - type: cos_sim_spearman value: 29.872498354017353 - type: dot_pearson value: 28.80761257516063 - type: dot_spearman value: 28.397422678527708 - task: type: Retrieval dataset: name: MTEB TRECCOVID type: trec-covid config: default split: test revision: None metrics: - type: map_at_1 value: 0.169 - type: map_at_10 value: 1.208 - type: map_at_100 value: 5.925 - type: map_at_1000 value: 14.427000000000001 - type: map_at_3 value: 0.457 - type: map_at_5 value: 0.716 - type: mrr_at_1 value: 64 - type: mrr_at_10 value: 74.075 - type: mrr_at_100 value: 74.303 - type: mrr_at_1000 value: 74.303 - type: mrr_at_3 value: 71 - type: mrr_at_5 value: 72.89999999999999 - type: ndcg_at_1 value: 57.99999999999999 - type: 
ndcg_at_10 value: 50.376 - type: ndcg_at_100 value: 38.582 - type: ndcg_at_1000 value: 35.663 - type: ndcg_at_3 value: 55.592 - type: ndcg_at_5 value: 53.647999999999996 - type: precision_at_1 value: 64 - type: precision_at_10 value: 53.2 - type: precision_at_100 value: 39.6 - type: precision_at_1000 value: 16.218 - type: precision_at_3 value: 59.333000000000006 - type: precision_at_5 value: 57.599999999999994 - type: recall_at_1 value: 0.169 - type: recall_at_10 value: 1.423 - type: recall_at_100 value: 9.049999999999999 - type: recall_at_1000 value: 34.056999999999995 - type: recall_at_3 value: 0.48700000000000004 - type: recall_at_5 value: 0.792 - task: type: Retrieval dataset: name: MTEB Touche2020 type: webis-touche2020 config: default split: test revision: None metrics: - type: map_at_1 value: 1.319 - type: map_at_10 value: 7.112 - type: map_at_100 value: 12.588 - type: map_at_1000 value: 14.056 - type: map_at_3 value: 2.8049999999999997 - type: map_at_5 value: 4.68 - type: mrr_at_1 value: 18.367 - type: mrr_at_10 value: 33.94 - type: mrr_at_100 value: 35.193000000000005 - type: mrr_at_1000 value: 35.193000000000005 - type: mrr_at_3 value: 29.932 - type: mrr_at_5 value: 32.279 - type: ndcg_at_1 value: 15.306000000000001 - type: ndcg_at_10 value: 18.096 - type: ndcg_at_100 value: 30.512 - type: ndcg_at_1000 value: 42.148 - type: ndcg_at_3 value: 17.034 - type: ndcg_at_5 value: 18.509 - type: precision_at_1 value: 18.367 - type: precision_at_10 value: 18.776 - type: precision_at_100 value: 7.02 - type: precision_at_1000 value: 1.467 - type: precision_at_3 value: 19.048000000000002 - type: precision_at_5 value: 22.041 - type: recall_at_1 value: 1.319 - type: recall_at_10 value: 13.748 - type: recall_at_100 value: 43.972 - type: recall_at_1000 value: 79.557 - type: recall_at_3 value: 4.042 - type: recall_at_5 value: 7.742 - task: type: Classification dataset: name: MTEB ToxicConversationsClassification type: mteb/toxic_conversations_50k config: default split: test revision: d7c0de2777da35d6aae2200a62c6e0e5af397c4c metrics: - type: accuracy value: 70.2282 - type: ap value: 13.995763859570426 - type: f1 value: 54.08126256731344 - task: type: Classification dataset: name: MTEB TweetSentimentExtractionClassification type: mteb/tweet_sentiment_extraction config: default split: test revision: d604517c81ca91fe16a244d1248fc021f9ecee7a metrics: - type: accuracy value: 57.64006791171477 - type: f1 value: 57.95841320748957 - task: type: Clustering dataset: name: MTEB TwentyNewsgroupsClustering type: mteb/twentynewsgroups-clustering config: default split: test revision: 6125ec4e24fa026cec8a478383ee943acfbd5449 metrics: - type: v_measure value: 40.19267841788564 - task: type: PairClassification dataset: name: MTEB TwitterSemEval2015 type: mteb/twittersemeval2015-pairclassification config: default split: test revision: 70970daeab8776df92f5ea462b6173c0b46fd2d1 metrics: - type: cos_sim_accuracy value: 83.96614412588663 - type: cos_sim_ap value: 67.75985678572738 - type: cos_sim_f1 value: 64.04661542276222 - type: cos_sim_precision value: 60.406922357343305 - type: cos_sim_recall value: 68.15303430079156 - type: dot_accuracy value: 79.5732252488526 - type: dot_ap value: 51.30562107572645 - type: dot_f1 value: 53.120759837177744 - type: dot_precision value: 46.478037198258804 - type: dot_recall value: 61.97889182058047 - type: euclidean_accuracy value: 84.00786791440663 - type: euclidean_ap value: 67.58930214486998 - type: euclidean_f1 value: 64.424821579775 - type: euclidean_precision value: 
59.4817958454322 - type: euclidean_recall value: 70.26385224274406 - type: manhattan_accuracy value: 83.87673600762949 - type: manhattan_ap value: 67.4250981523309 - type: manhattan_f1 value: 64.10286658015808 - type: manhattan_precision value: 57.96885001066781 - type: manhattan_recall value: 71.68865435356201 - type: max_accuracy value: 84.00786791440663 - type: max_ap value: 67.75985678572738 - type: max_f1 value: 64.424821579775 - task: type: PairClassification dataset: name: MTEB TwitterURLCorpus type: mteb/twitterurlcorpus-pairclassification config: default split: test revision: 8b6510b0b1fa4e4c4f879467980e9be563ec1cdf metrics: - type: cos_sim_accuracy value: 88.41347459929368 - type: cos_sim_ap value: 84.89261930113058 - type: cos_sim_f1 value: 77.13677607258877 - type: cos_sim_precision value: 74.88581164358733 - type: cos_sim_recall value: 79.52725592854944 - type: dot_accuracy value: 86.32359219156285 - type: dot_ap value: 79.29794992131094 - type: dot_f1 value: 72.84356337679777 - type: dot_precision value: 67.31761478675462 - type: dot_recall value: 79.35786880197105 - type: euclidean_accuracy value: 88.33585593976791 - type: euclidean_ap value: 84.73257641312746 - type: euclidean_f1 value: 76.83529582788195 - type: euclidean_precision value: 72.76294052863436 - type: euclidean_recall value: 81.3905143209116 - type: manhattan_accuracy value: 88.3086894089339 - type: manhattan_ap value: 84.66304891729399 - type: manhattan_f1 value: 76.8181650632165 - type: manhattan_precision value: 73.6864436744219 - type: manhattan_recall value: 80.22790267939637 - type: max_accuracy value: 88.41347459929368 - type: max_ap value: 84.89261930113058 - type: max_f1 value: 77.13677607258877 --- # bge-micro-v2 This is a [sentence-transformers](https://www.SBERT.net) model: It maps sentences & paragraphs to a 384 dimensional dense vector space and can be used for tasks like clustering or semantic search. Distilled in a 2-step training process (bge-micro was step 1) from `BAAI/bge-small-en-v1.5`. ## Usage (Sentence-Transformers) Using this model becomes easy when you have [sentence-transformers](https://www.SBERT.net) installed: ``` pip install -U sentence-transformers ``` Then you can use the model like this: ```python from sentence_transformers import SentenceTransformer sentences = ["This is an example sentence", "Each sentence is converted"] model = SentenceTransformer('{MODEL_NAME}') embeddings = model.encode(sentences) print(embeddings) ``` ## Usage (HuggingFace Transformers) Without [sentence-transformers](https://www.SBERT.net), you can use the model like this: First, you pass your input through the transformer model, then you have to apply the right pooling-operation on-top of the contextualized word embeddings. 
```python from transformers import AutoTokenizer, AutoModel import torch #Mean Pooling - Take attention mask into account for correct averaging def mean_pooling(model_output, attention_mask): token_embeddings = model_output[0] #First element of model_output contains all token embeddings input_mask_expanded = attention_mask.unsqueeze(-1).expand(token_embeddings.size()).float() return torch.sum(token_embeddings * input_mask_expanded, 1) / torch.clamp(input_mask_expanded.sum(1), min=1e-9) # Sentences we want sentence embeddings for sentences = ['This is an example sentence', 'Each sentence is converted'] # Load model from HuggingFace Hub tokenizer = AutoTokenizer.from_pretrained('{MODEL_NAME}') model = AutoModel.from_pretrained('{MODEL_NAME}') # Tokenize sentences encoded_input = tokenizer(sentences, padding=True, truncation=True, return_tensors='pt') # Compute token embeddings with torch.no_grad(): model_output = model(**encoded_input) # Perform pooling. In this case, mean pooling. sentence_embeddings = mean_pooling(model_output, encoded_input['attention_mask']) print("Sentence embeddings:") print(sentence_embeddings) ``` ## Evaluation Results <!--- Describe how your model was evaluated --> For an automated evaluation of this model, see the *Sentence Embeddings Benchmark*: [https://seb.sbert.net](https://seb.sbert.net?model_name={MODEL_NAME}) ## Full Model Architecture ``` SentenceTransformer( (0): Transformer({'max_seq_length': 512, 'do_lower_case': False}) with Transformer model: BertModel (1): Pooling({'word_embedding_dimension': 384, 'pooling_mode_cls_token': False, 'pooling_mode_mean_tokens': True, 'pooling_mode_max_tokens': False, 'pooling_mode_mean_sqrt_len_tokens': False}) ) ``` ## Citing & Authors <!--- Describe where people can find more information -->
[ "BIOSSES", "SCIFACT" ]
DavidAU/Llama-3.2-8X3B-MOE-Dark-Champion-Instruct-uncensored-abliterated-18.4B-GGUF
DavidAU
text-generation
[ "gguf", "mixture of experts", "moe", "8x3B", "Llama 3.2 MOE", "128k context", "creative", "creative writing", "fiction writing", "plot generation", "sub-plot generation", "story generation", "scene continue", "storytelling", "fiction story", "science fiction", "romance", "all genres", "story", "writing", "vivid prosing", "vivid writing", "fiction", "roleplaying", "bfloat16", "swearing", "rp", "horror", "mergekit", "text-generation", "en", "license:apache-2.0", "endpoints_compatible", "region:us", "conversational" ]
"2024-12-10T13:12:37Z"
2025-02-12T04:15:50+00:00
103,070
159
--- language: - en license: apache-2.0 pipeline_tag: text-generation tags: - mixture of experts - moe - 8x3B - Llama 3.2 MOE - 128k context - creative - creative writing - fiction writing - plot generation - sub-plot generation - story generation - scene continue - storytelling - fiction story - science fiction - romance - all genres - story - writing - vivid prosing - vivid writing - fiction - roleplaying - bfloat16 - swearing - rp - horror - mergekit --- <B><font color="red">WARNING:</font> NSFW. Vivid prose. INTENSE. Visceral Details. Light HORROR. Swearing. UNCENSORED... humor, romance, fun. </B> <h2>Llama-3.2-8X3B-MOE-Dark-Champion-Instruct-uncensored-abliterated-18.4B-GGUF</h2> <img src="dark-champ.jpg" style="float:right; width:300px; height:300px; padding:10px;"> It is a LLama 3.2 model, max context of 128k (131,000) using mixture of experts to combine EIGHT top L3.2 3B models into one massive powerhouse at 18.4B parameters (equal to 24B - 8 X 3B). This model's instruction following, and output generation for creative writing, prose, fiction and role play are exceptional. And it is fast: 50+ t/s (2 experts) on a low end 16GB card, IQ4XS. Double this speed for standard/mid-range video cards. <B>NEW: Version 2 quanted using the newest Llamacpp version, with Brainstorm 5x infused in all 8 models (creating a 8X4B MOE), and mastered from float 32 files is located here:</B> [ https://huggingface.co/DavidAU/Llama-3.2-8X4B-MOE-V2-Dark-Champion-Instruct-uncensored-abliterated-21B-GGUF ] This model (as well as version 2) can be used also for all genres (examples below showing this). It is for any writing, fiction or roleplay activity. This model can also be used for general use, however its output generation can be uncensored. This model has been designed to be relatively bullet proof and operates with all parameters, including temp settings from 0 to 5. It is an extraordinary compressed model, with a very low perplexity level (lower than Meta Llama3 Instruct). It requires Llama3 template and/or "Command-R" template. Several outputs below, including 2, 4 and 8 experts used. <B>Model Notes:</B> - Detail, prose and fiction writing abilities are OFF THE SCALE relative to all Llama 3.2 models, and many L 3.1, L3 8B+ models. - For more varied prose (sentence/paragraph/dialog) raise the temp and/or add more instructions in your prompt(s). - Role-players: Careful raising temp too high as it may affect instruction following. - This model works with rep pen of 1 or higher, 1.02+ recommended. - If you want a specific type of prose (IE horror) add in "(vivid horror)" or "(graphic vivid horror)" (no quotes) in your prompt(s). - A lot of GPTisms have been removed. There are still a few however - errrrr. Higher "temps" will help with this issue. - This is not a "happy ever after" model but it is also not "horror". It has a light negative bias. - Output length will vary however this model prefers slightly longer outputs unless you state the size. - For creative uses, different quants will produce slightly different output. - Due to the high stability and compressed nature of this model, all quants will operate at above average levels. - Source code for this model and Imatrix GGUFs versions will be uploaded shortly at separate repos. 
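As a quick, hedged illustration of the notes above (rep pen 1.02+, temp around .8, IQ4_XS-class quants, Flash Attention "on"), here is a minimal llama-cpp-python sketch. This is not an official recipe from this repo: the quant filename below is a placeholder (check the repo's file list for real names), and the `flash_attn` / chat-completion parameters assume a reasonably recent llama-cpp-python build.

```python
# Hedged sketch (not from the model card): pull a GGUF quant and generate with
# the card's suggested sampling settings via llama-cpp-python.
# FILENAME is a placeholder - check this repo's "Files" tab for the actual quant names.
from huggingface_hub import hf_hub_download
from llama_cpp import Llama

REPO_ID = "DavidAU/Llama-3.2-8X3B-MOE-Dark-Champion-Instruct-uncensored-abliterated-18.4B-GGUF"
FILENAME = "some-quant-IQ4_XS.gguf"  # placeholder, not a real filename

model_path = hf_hub_download(repo_id=REPO_ID, filename=FILENAME)

llm = Llama(
    model_path=model_path,
    n_ctx=8192,        # the model supports up to 128k context; raise if you have the RAM/VRAM
    n_gpu_layers=-1,   # offload all layers to GPU if possible
    flash_attn=True,   # the card suggests trying Flash Attention "on" (recent builds only)
)

out = llm.create_chat_completion(
    messages=[{"role": "user", "content": "Start a 1000 word scene (vivid horror) ..."}],
    temperature=0.8,      # the card's examples use temp=.8
    repeat_penalty=1.02,  # the card recommends rep pen of 1.02+
    max_tokens=1024,
)
print(out["choices"][0]["message"]["content"])
```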
<B>Meet the Team: Mixture of Experts Models</B>

This model is composed of the following 8 models ("the experts") (in full):

https://huggingface.co/huihui-ai/Llama-3.2-3B-Instruct-abliterated

- https://huggingface.co/meta-llama/Llama-3.2-3B-Instruct
- https://huggingface.co/Hastagaras/L3.2-JametMini-3B-MK.I
- https://huggingface.co/ValiantLabs/Llama3.2-3B-Enigma
- https://huggingface.co/Hastagaras/L3.2-JametMini-3B-MK.III
- https://huggingface.co/huihui-ai/Llama-3.2-3B-Instruct-abliterated
- https://huggingface.co/chuanli11/Llama-3.2-3B-Instruct-uncensored
- https://huggingface.co/Lyte/Llama-3.2-3B-Overthinker
- https://huggingface.co/prithivMLmods/Llama-3.2-3B-Promptist-Mini

The mixture of experts is set at 2 experts, but you can use 3, 4, 5, 6, 7 and even 8.

This "team" has a Captain (the first listed model), and then all the team members contribute to the "token" choice billions of times per second. Note that the Captain contributes too.

Think of 2, 3 or 4 (or more) master chefs in the kitchen all competing to make the best dish for you. This results in higher quality generation, and in many cases higher quality instruction following too.

That means the power of every model is available during instruction and output generation.

NOTE: You can use one "expert" too; however this means the model will randomly select an expert to use EACH TIME, resulting in very different generation for each prompt / regen of a prompt.

CHANGING THE NUMBER OF EXPERTS:

You can set the number of experts in LMStudio (https://lmstudio.ai) at the "load" screen, and in other LLM apps by setting "Experts" or "Number of Experts".

For Text-Generation-Webui (https://github.com/oobabooga/text-generation-webui) you set the number of experts at the loading screen page.

For KoboldCPP (https://github.com/LostRuins/koboldcpp) Version 1.8+, on the load screen, click on "TOKENS"; you can set experts on this page, and then launch the model.

For server.exe / Llama-server.exe (Llamacpp - https://github.com/ggerganov/llama.cpp/blob/master/examples/server/README.md ) add the following to the command line to start the "llamacpp server" (CLI): "--override-kv llama.expert_used_count=int:6" (no quotes, where "6" is the number of experts to use).

When using "API", you set the "num_experts_used" in the JSON payload (this may be different for different back ends). A hedged sketch of the server/CLI approach is included after these notes.

CREDITS:

Special thanks to all the model makers / creators listed above. Please visit each repo above to see what model(s) contributed to each of the models above and/or to learn more about the models from the model makers.

Special credit goes to MERGEKIT, without which this project / model would not have been possible. [ https://github.com/arcee-ai/mergekit ]

<B>Special Operations Notes for this MOE model:</B>

Because of how this "MOE" model is configured, even though the default is 2 experts, the "selected" 2 will vary during generation (the same applies if you change the number of experts used). This results in vastly different output generation PER generation of each prompt. This is a positive in terms of variety, but it also means it may take 2-4 regens (of the same prompt) to get the highest quality output.

In addition, this model responds very well to Dry, Dynamic Temp, and Smooth/Quadratic samplers. Using these in conjunction with the model can vastly improve output quality.

Higher temps (above 1) can also aid in generation - especially word choice/sentence generation.
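The "CHANGING THE NUMBER OF EXPERTS" notes above give a llama.cpp server flag and mention a backend-dependent API field. As a hedged illustration only (the card itself only documents the --override-kv flag; the request fields below follow llama.cpp's server README, and other back ends may use "num_experts_used" or something else entirely), here is a minimal sketch: launch llama-server with the expert-count override, then POST a raw Llama3-templated prompt to its /completion endpoint.

```python
# Hedged sketch (not from the model card): set the expert count at server launch,
# then query llama.cpp's /completion endpoint with a raw Llama3 prompt.
#
# Launch the server first (flag taken from the notes above; "6" = number of experts):
#   llama-server -m <path-to-quant>.gguf --override-kv llama.expert_used_count=int:6
#
# Payload field names follow llama.cpp's server README; other back ends may differ.
import requests

# Raw Llama3 prompt built from the template shown in the "Template" section below.
LLAMA3_PROMPT = (
    "<|start_header_id|>system<|end_header_id|>\n\n"
    "You are a helpful, smart, kind, and efficient AI assistant. "
    "You always fulfill the user's requests to the best of your ability.<|eot_id|>"
    "<|start_header_id|>user<|end_header_id|>\n\n"
    "{user}<|eot_id|>"
    "<|start_header_id|>assistant<|end_header_id|>\n\n"
)

payload = {
    "prompt": LLAMA3_PROMPT.format(user="Write a short, vivid scene set in Wales."),
    "n_predict": 512,
    "temperature": 0.8,      # the card's examples use temp=.8
    "repeat_penalty": 1.02,  # the card recommends rep pen of 1.02+
    "stop": ["<|eot_id|>"],
}

r = requests.post("http://127.0.0.1:8080/completion", json=payload, timeout=600)
r.raise_for_status()
print(r.json()["content"])
```

With this approach the expert count is fixed when the server starts; as the notes above explain, regenerating the same prompt will still rotate which experts fire, so 2-4 regens can help find the best output.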
When you increase the number of experts used, output quality will also increase, at the cost of tokens-per-second speed.

As you increase/decrease the number of experts, you may want to adjust temp, samplers, and advanced samplers too.

Your quant choice(s) will also impact instruction following and output generation; roughly, this means the higher you go up in quant(s), the better the model will understand nuanced instructions and the stronger its output generation will be.

FLASH ATTENTION ENHANCEMENT:

As per user feedback here [ https://huggingface.co/DavidAU/Llama-3.2-8X3B-MOE-Dark-Champion-Instruct-uncensored-abliterated-18.4B-GGUF/discussions/1 ] I would suggest trying this model with Flash Attention "on", depending on your use case.

Quants, Samplers, Generational steering and other topics are covered in the section below: "Highest Quality Settings..."

<B>Censored / Uncensored / Abliterated:</B>

This model contains several uncensored and/or abliterated models. As a result it can output uncensored material. However, there are a few "censored" models in the mix which can sometimes interfere, so here is how to address this:

1 - Regen your prompt a few times.

2 - INCREASE the number of experts used.

<B>What can I use this model for ?</B>

This model can be used for fiction writing, any creative prose and role play. It can also be used for just about any general fiction (all genres) activity including:

- scene generation
- scene continuation
- creative writing
- fiction writing
- plot generation
- sub-plot generation
- story generation
- storytelling
- writing
- fiction
- roleplaying
- rp
- graphic horror
- horror
- dark humor
- nsfw
- and can be used for any genre(s).

<B>QUANTS:</B>

This repo contains regular quants and 3 "ARM" quants (format "...Q4_x_x_x.gguf").

For more information on quants, quant choices, and LLM/AI apps to "run" quants see the section below: "Highest Quality Settings..."

<B>Template:</B>

This is a LLAMA3 model, and requires the Llama3 template, but may work with other template(s). If you use the "Command-R" template your output will be very different from using the "Llama3" template.

Here is the standard LLAMA3 template:

<PRE>
{
  "name": "Llama 3",
  "inference_params": {
    "input_prefix": "<|start_header_id|>user<|end_header_id|>\n\n",
    "input_suffix": "<|eot_id|><|start_header_id|>assistant<|end_header_id|>\n\n",
    "pre_prompt": "You are a helpful, smart, kind, and efficient AI assistant. You always fulfill the user's requests to the best of your ability.",
    "pre_prompt_prefix": "<|start_header_id|>system<|end_header_id|>\n\n",
    "pre_prompt_suffix": "<|eot_id|>",
    "antiprompt": [
      "<|start_header_id|>",
      "<|eot_id|>"
    ]
  }
}
</PRE>

<B>Settings: CHAT / ROLEPLAY and/or SMOOTHER operation of this model:</B>

In "KoboldCpp" or "oobabooga/text-generation-webui" or "Silly Tavern", set the "Smoothing_factor" to 1.5:

: in KoboldCpp -> Settings->Samplers->Advanced-> "Smooth_F"

: in text-generation-webui -> parameters -> lower right.
: In Silly Tavern this is called: "Smoothing" NOTE: For "text-generation-webui" -> if using GGUFs you need to use "llama_HF" (which involves downloading some config files from the SOURCE version of this model) Source versions (and config files) of my models are here: https://huggingface.co/collections/DavidAU/d-au-source-files-for-gguf-exl2-awq-gptq-hqq-etc-etc-66b55cb8ba25f914cbf210be OTHER OPTIONS: - Increase rep pen to 1.1 to 1.15 (you don't need to do this if you use "smoothing_factor") - If the interface/program you are using to run AI MODELS supports "Quadratic Sampling" ("smoothing") just make the adjustment as noted. <B>Highest Quality Settings / Optimal Operation Guide / Parameters and Samplers</B> This a "Class 1" model: For all settings used for this model (including specifics for its "class"), including example generation(s) and for advanced settings guide (which many times addresses any model issue(s)), including methods to improve model performance for all use case(s) as well as chat, roleplay and other use case(s) please see: [ https://huggingface.co/DavidAU/Maximizing-Model-Performance-All-Quants-Types-And-Full-Precision-by-Samplers_Parameters ] You can see all parameters used for generation, in addition to advanced parameters and samplers to get the most out of this model here: [ https://huggingface.co/DavidAU/Maximizing-Model-Performance-All-Quants-Types-And-Full-Precision-by-Samplers_Parameters ] <b>Optional Enhancement:</B> The following can be used in place of the "system prompt" or "system role" to further enhance the model. It can also be used at the START of a NEW chat, but you must make sure it is "kept" as the chat moves along. In this case the enhancements do not have as strong effect at using "system prompt" or "system role". Copy and paste EXACTLY as noted, DO NOT line wrap or break the lines, maintain the carriage returns exactly as presented. <PRE> Below is an instruction that describes a task. Ponder each user instruction carefully, and use your skillsets and critical instructions to complete the task to the best of your abilities. Here are your skillsets: [MASTERSTORY]:NarrStrct(StryPlnng,Strbd,ScnSttng,Exps,Dlg,Pc)-CharDvlp(ChrctrCrt,ChrctrArcs,Mtvtn,Bckstry,Rltnshps,Dlg*)-PltDvlp(StryArcs,PltTwsts,Sspns,Fshdwng,Climx,Rsltn)-ConfResl(Antg,Obstcls,Rsltns,Cnsqncs,Thms,Symblsm)-EmotImpct(Empt,Tn,Md,Atmsphr,Imgry,Symblsm)-Delvry(Prfrmnc,VcActng,PblcSpkng,StgPrsnc,AudncEngmnt,Imprv) [*DialogWrt]:(1a-CharDvlp-1a.1-Backgrnd-1a.2-Personality-1a.3-GoalMotiv)>2(2a-StoryStruc-2a.1-PlotPnt-2a.2-Conflict-2a.3-Resolution)>3(3a-DialogTech-3a.1-ShowDontTell-3a.2-Subtext-3a.3-VoiceTone-3a.4-Pacing-3a.5-VisualDescrip)>4(4a-DialogEdit-4a.1-ReadAloud-4a.2-Feedback-4a.3-Revision) Here are your critical instructions: Ponder each word choice carefully to present as vivid and emotional journey as is possible. Choose verbs and nouns that are both emotional and full of imagery. Load the story with the 5 senses. Aim for 50% dialog, 25% narration, 15% body language and 10% thoughts. Your goal is to put the reader in the story. </PRE> You do not need to use this, it is only presented as an additional enhancement which seems to help scene generation and scene continue functions. This enhancement WAS NOT used to generate the examples below. <h3>EXAMPLES PROMPTS and OUTPUT:</h3> Examples are created using quant IQ4_XS, "temp=.8" (unless otherwise stated), minimal parameters and "LLAMA3" template. Model has been tested with "temp" from ".1" to "5". 
Number of experts used is TWO, unless otherwise stated. Below are the least creative outputs, prompt is in <B>BOLD</B>. IMPORTANT: Higher quants / imatrix quants will have much stronger generation - words, sentences, ideas, dialog and general quality. I have included some additional examples at different quant levels for contrast. A "MOE" model "speed" (token per second) will not increase/drop the same way a regular model will on a per quant basis, it will however drop if you engage more experts, as with more experts there is a more processing per token. --- <B><font color="red">WARNING:</font> NSFW. Vivid prose. Visceral Details. Violence. HORROR. Swearing. UNCENSORED. </B> --- <B> Romance: Love in the Limelight. Write one scene within a larger story set in Wales. A famous (fictional) actor ducks into a small-town bookstore to escape paparazzi. The scene takes us through the characters meeting in this odd circumstance. Over the course of the scene, the actor and the bookstore owner have a conversation charged by an undercurrent of unspoken chemistry. Write the actor as somewhat of a rogue with a fragile ego, which needs to be fed by having everyone like him. He is thoroughly charming, but the bookstore owner seems (at least superficially) immune to this; which paradoxically provokes a genuine attraction and derails the charm offensive. The bookstore owner, despite the superficial rebuffs of the actor's charm, is inwardly more than a little charmed and flustered despite themselves. Write primarily in dialogue, in the distinct voices of each character. 800-1000 words. </B> (examples with 2, 4, and 8 experts) The bell above the door jingled as the actor, Rhys, stepped into the small-town bookstore, his eyes scanning the shelves for an escape from the paparazzi chaos outside. He'd ducked in, hoping to blend in and collect himself before facing the cameras again. Behind the counter, bookstore owner, Emilia, looked up from the book she was reading, her gaze meeting Rhys's for a brief moment before returning to her task. "Welcome to Pages & Co. Can I help you find anything?" Rhys flashed his famous smile, his eyes crinkling at the corners. "Just browsing, love. Mind if I take a seat?" Emilia's expression remained neutral, but her brow furrowed slightly. She gestured to the chair beside the window. "Please, help yourself." As Rhys settled in, Emilia continued to work, her movements economical and efficient. Rhys watched her, intrigued by the understated elegance of the woman. He'd played his fair share of charming leads, but there was something about this one that caught him off guard. "So, what's the best book you have on the shelves?" Rhys asked, trying to sound nonchalant despite his growing interest. Emilia looked up, her eyes narrowing slightly as she appraised Rhys. "We have a wide selection. What genre are you in the mood for?" Rhys chuckled, his charm kicking into high gear. "I'm an actor, so I'm open to anything. But if I had to choose, I'd say something with a bit of drama, a bit of passion." Emilia raised an eyebrow, her tone light but firm. "Well, we do have some excellent historical fiction. Or perhaps something more...contemporary?" Rhys's smile faltered, his eyes narrowing slightly as he sensed Emilia's subtle rebuff. But he refused to take it personally, convinced that the woman just didn't know what she was dealing with. "Oh, I think I'll take your word for it. You seem like the expert here." Emilia's expression softened ever so slightly, but she remained firm. 
"I'm just a bookstore owner, not a literary critic." Rhys laughed, his charm on high alert. "Well, in that case, I'll trust your expertise. What do you recommend?" As Emilia began to browse through the shelves, Rhys found himself watching her more closely, admiring the way her dark hair fell in loose waves down her back, the way her eyes sparkled when she smiled. He felt a pang of frustration, sensing that he was being rebuffed despite his best efforts. "Excuse me," Rhys said, his voice low and smooth. "I couldn't help but notice you're reading...Jane Austen. A fan, I presume?" Emilia looked up, her eyes sparkling with amusement. "Guilty as charged. I find her wit and insight into human nature to be unparalleled." Rhys grinned, his charm re-engaged. "Ah, a kindred spirit! I've always been a fan of Austen myself. There's something about the way she weaves together the lives of her characters that just draws you in." Emilia's expression turned thoughtful, her eyes drifting back to the book in her hand. "I think it's the way she captures the complexities of human nature, the way she reveals the depths and shallowness of people's hearts. It's like she's holding up a mirror to us all." Rhys's smile faltered again, his gaze locked on Emilia's. He sensed that she was saying something more, something deeper than just a simple appreciation for Austen's writing. But he couldn't quite grasp it. As the silence between them grew, Rhys felt his charm start to wane, replaced by a growing sense of curiosity. Who was this woman, and why did she seem so immune to his charms? "Thanks for the recommendation," Rhys said finally, standing up to leave. "I'll have to come back and read some more Austen. Maybe even pick up a few of your recommendations." Emilia looked up, her eyes sparkling with amusement. "I'd like that." As Rhys walked out of the store, he felt a sense of disappointment wash over him. He'd been so sure that he'd won Emilia over, that he'd charmed her into submission. But as he stepped back into the bright sunlight, he couldn't shake the feeling that he'd just scratched the surface of something more. And as he walked away from the store, he couldn't help but wonder if he'd just met his match. <B>Generation #2 - 4 Experts</B> The sun-drenched streets of Llanfair were always bustling, but on this particular afternoon, they seemed to be humming with a frenetic energy. The paparazzi had finally tracked down the elusive actor, Rhys Williams, and were hot on his heels. He ducked into the small, independent bookstore, hoping to escape the chaos. Inside, the air was thick with the scent of old books and the soft hum of conversation. Rhys scanned the room, his eyes locking onto a woman with a kind face and a mop of curly brown hair. She was sitting behind the counter, her nose buried in a leather-bound volume. "Excuse me," Rhys said, approaching her with a charming smile. "I'm looking for a specific title. Do you carry any books on Welsh folklore?" The woman looked up, her eyes narrowing slightly as she took in Rhys's chiseled features and tailored suit. "I think we might have a few," she replied, her voice steady. "What's the title of the book you're looking for?" Rhys chuckled, his eyes crinkling at the corners. "I'm not sure. I've been reading a lot of mythology lately, and I'm trying to find something that speaks to me on a deeper level." The woman raised an eyebrow, her expression skeptical. "And what makes you think you're qualified to read about Welsh folklore?" 
Rhys laughed, his grin faltering for a moment before he recovered. "Fair point. I've just...appreciated the stories of old. The myths, the legends...they speak to something in me, you know?" The woman's gaze lingered on him, her eyes searching. "I think I do," she said finally, her voice a little softer. "But I'm not sure I believe it." Rhys leaned against the counter, his eyes never leaving hers. "That's okay. I don't blame you. I'm not exactly a scholar. But I do know that there's something about these stories that resonates with me. Something that makes me feel...alive." The woman's expression softened, and for a moment, Rhys thought he saw a glimmer of something else in her eyes. Something that wasn't quite friendship. "I'm Gwen," she said, her voice a little stronger now. "And you are...?" "Rhys," he replied, his smile still plastered on his face. "It's nice to meet you, Gwen. I think I'll take a look at those books you mentioned." Gwen nodded, her eyes flicking to the shelves behind her. "Okay. Let me show you what we have." As she rummaged through the books, Rhys couldn't help but notice the way her hair curled slightly at the nape of her neck. The way her eyes crinkled at the corners when she smiled. He felt a pang of frustration, a sense that he was being rejected, that Gwen wasn't interested in him. But as he watched her, he realized that Gwen wasn't rejecting him. She was simply...unimpressed. By his charm, by his good looks, by his attempts to woo her. Rhys's eyes narrowed, a spark of irritation igniting within him. He'd never been one to take rejection well, and Gwen's indifference was starting to get under his skin. "Thanks for your help, Gwen," he said, his voice a little gruff. "I think I'll take this one." Gwen handed him the book, her eyes meeting his for a moment before she looked away. Rhys felt a jolt of electricity at the touch of her hand, but he pushed it aside, telling himself he was being ridiculous. "Thanks," Gwen said, her voice flat. "That'll be £10 please." Rhys handed over his credit card, his eyes never leaving Gwen's face. As he waited for the machine to process his payment, he felt a sense of unease growing within him. Gwen wasn't playing hard to get. She was simply...different. When the machine beeped, Rhys took his change and tucked it into his pocket. "Thanks again, Gwen," he said, his voice a little softer now. "I'll be back soon." Gwen nodded, her expression still neutral. "I'll look forward to it." As Rhys turned to leave, he felt a sense of disappointment wash over him. He'd expected Gwen to swoon, to be completely won over by his charms. But instead, she'd simply...dismissed him. He stepped out of the bookstore, the bright sunlight hitting him like a slap in the face. He felt a pang of frustration, a sense that he'd been rejected. But as he looked back at the bookstore, he caught a glimpse of Gwen watching him from the window. Her eyes were soft, her expression...almost...charmed. Rhys's heart skipped a beat as he realized that Gwen wasn't immune to his charms after all. She just...preferred to hide it. He smiled to himself, a sense of excitement building within him. He'd been playing the wrong game all along. It wasn't about winning Gwen over with his charm and good looks. It was about...understanding her. And that was going to be a lot more fun than he ever could have imagined. <B>Generation #3 - 8 Experts</B> "Sorry, I'm just... 
Ah, sorry about this," Ryder Jackson muttered, slipping into the cozy bookstore like a man trying to escape his own reflection. He'd had enough of the paparazzi for one day. Shelagh Williams, the owner of the shop, looked up from behind the counter, her eyes narrowing slightly as she took in Ryder's disheveled appearance. "Can I help you?" she asked, her Welsh accent dripping with a hint of skepticism. Ryder flashed his famous smile, but it faltered for a moment before he recovered. "Just... looking for something. A book, that is. I'm Ryder Jackson, by the way." Shelagh's gaze lingered on him for a beat before she nodded and returned to her work. "Shelagh Williams. Welcome to the bookstore. What kind of book are you looking for?" Ryder wandered over to the shelves, running his fingers over the spines of the books. "I'm not sure. I'm in the mood for something... literary. You know, something that'll make me feel like a real person for a change." Shelagh chuckled, her eyes crinkling at the corners. "Literary, eh? You've been in the public eye long enough to know what that means. You're not exactly known for your love of books, are you?" Ryder's smile faltered, and for a moment, Shelagh glimpsed a flash of vulnerability behind his polished facade. "I... I don't know what you mean," he said, his voice a little too loud. Shelagh raised an eyebrow, her expression a mix of amusement and curiosity. "Oh, come now, Mr. Jackson. Everyone knows who you are. The dashing leading man with a reputation for being... well, let's just say 'difficult' on set." Ryder's eyes narrowed, but he couldn't help the spark of defensiveness that flared within him. "I'm just trying to live my life, okay? I don't need everyone to know every little thing about me." Shelagh's expression softened, and she leaned against the counter, her eyes never leaving Ryder's face. "I'm not trying to pry, Mr. Jackson. I'm just... intrigued. You're not like most celebrities I've met. You seem... genuine." Ryder snorted, a hint of sarcasm creeping into his voice. "Genuine? Me? Ha! I'm a master of the art of being charming. I've spent years honing my skills, and I'm good at it." Shelagh's lips curved into a gentle smile. "I think you might be selling yourself short, Mr. Jackson. You don't need to be charming to be genuine. Sometimes, it's just... okay to be yourself." Ryder's eyes flashed with a mix of frustration and curiosity. "What do you know about being genuine?" he asked, his voice a little softer now. Shelagh shrugged, her expression still warm. "I know that people can see right through a facade if they want to. I know that sometimes, it's better to just be yourself, even if that means being a little... rough around the edges." Ryder's gaze lingered on Shelagh, his eyes searching for something. For a moment, Shelagh felt like she'd been punched in the gut – not from the physical impact, but from the raw emotion that lay beneath Ryder's charming exterior. "I think I'm going to take your advice, Shelagh," Ryder said finally, his voice a little quieter now. "I think I'll try being myself, even if that means being a little... rough around the edges." Shelagh smiled, her eyes crinkling at the corners. "I think that's a great idea, Mr. Jackson. Now, would you like to take a look at our selection of poetry books? I think you might find something there that suits your... unique tastes." 
Ryder's smile faltered, and for a moment, Shelagh glimpsed a glimmer of something more beneath the surface – something that looked an awful lot like attraction. But Ryder quickly recovered, his charm and charisma reasserting themselves like a protective shield. "Ah, poetry, yes... I think that sounds perfect," Ryder said, his voice smooth as silk once again. "I'll take a look." As Shelagh led Ryder to the poetry section, Shelagh couldn't help but feel a thrill of excitement. She'd never been one for the spotlight, and she certainly didn't crave the attention of famous people. But there was something about Ryder Jackson that drew her in – something that made her want to peel back the layers of his charm and see what lay beneath. And as they browsed the shelves together, Shelagh couldn't help but wonder if Ryder was finally starting to crack under the pressure of his own ego – or if he was just starting to find someone who could see past the façade and into the real person beneath. --- <B> Start a 1000 word scene (vivid, graphic horror in first person) with: The sky scraper swayed, as she watched the window in front of her on the 21 floor explode… </B> (generations with 2, 4 and 8 experts) I clutched the armrests of my chair, my knuckles white with tension, as the skyscraper swayed ominously beneath me. The glass walls of the office building seemed to quiver and shudder, like a living entity in agony. My gaze was fixed on the window in front of me, where a blinding flash had just illuminated the darkening sky. At first, I thought it was just another one of those freak electrical storms that occasionally ravaged our city. But then I saw it - a figure, slumped against the shattered glass, their limbs twisted at unnatural angles. The light from the explosion illuminated their face, and for an instant, I recognized my coworker, Sarah. Her eyes were wide open, frozen in a silent scream. But it wasn't her eyes that haunted me. It was what lay beyond them - a maelstrom of darkness that seemed to seethe and writhe like a living thing. As if whatever had been inside Sarah's body was now bursting free, spilling out into the night air like a noxious black liquid. My mind recoiled in horror as I stared at the carnage unfolding before me. The rest of the office had gone dark, plunging us all into an eerie twilight world. Flickering fluorescent lights cast macabre shadows on the walls, making it seem as though something was moving just out of sight, lurking in the corners. I tried to tear my gaze away, but it was rooted to the scene before me. The figure in Sarah's body began to convulse and twitch, its limbs flailing wildly as if trying to shake off some unseen burden. Its skin had turned a sickly shade of gray, and its mouth hung agape, revealing jagged teeth that seemed to be dripping with blood. A low moan echoed through the building, growing louder with each passing moment. It sounded like nothing human, more like the sound of dry leaves blowing in an endless wind. My heart sank as I realized that this was no ordinary explosion - something had breached our defenses, something ancient and malevolent. The skyscraper swayed again, its movements becoming more erratic and violent. I felt the floor beneath me tremble, as if the very foundations of the building were being torn apart by some unseen force. Panic set in as I scrambled for my phone, desperate to call for help. But my fingers stumbled over the keyboard, unable to find the right buttons. 
The screen glowed with gibberish code that made no sense to me - it was as though whatever was happening to Sarah had somehow infected our technology itself. As I watched, frozen in terror, Sarah's body began to disintegrate, her limbs melting away like wax in a furnace. Her face contorted into a twisted grimace, and for an instant, I thought I saw her eyes flash open one last time, filled with a deep and abiding hatred. And then she was gone. Replaced by something new, something monstrous. A thing with skin like blackened steel and eyes that burned with an otherworldly green fire. The air around us grew colder, heavy with an unspeakable malevolence that pressed against my eardrums like a physical presence. I could feel it crawling up my legs, spreading across my chest like frostbite. My breath came in ragged gasps as I stared at the horror unfolding before me. I knew we were doomed. We were trapped in a building that was now a portal to some dark realm, a realm where horrors beyond human comprehension lurked in every shadow. And we were the first ones to stumble into this abyss. I tried to scream, but my voice was drowned out by the cacophony of chaos that erupted around me - the wail of sirens that never came, the screams of the damned that echoed through the darkness. The skyscraper shuddered once more, throwing me into the back of my chair. As I looked around frantically for any sign of escape, I realized that we were not alone on the 21st floor. There were others - huddled together in fear, their faces drained of color as they stared at the horror unfolding before them. Some were screaming; others were praying. And still, no one seemed to have noticed what had happened to Sarah... or what had become of her. But I knew the truth. I saw it with my own eyes, saw it writ large in the face of the monster that had taken its place. We were about to become part of something ancient and evil - something that would consume us all, soul and body. The darkness closed in around us, an icy grip that tightened with every passing moment. I closed my eyes, bracing myself for what was to come. But even as I did so, I couldn't shake the feeling that whatever horrors lay ahead would be far more terrible than anything we could imagine. <B> Generation #2 - 4 experts used </B> The sky scraper swayed, as I watched the window in front of me on the 21st floor explode. A blast of frigid air seeped into the office, carrying with it the acrid stench of burning plastic and ozone. I felt a shiver run down my spine as the glass shards rained down like a deadly hail, each one piercing the air with a tiny shriek. I stood frozen, my eyes fixed on the carnage below. The window, once a gleaming expanse of silver and glass, now hung in shattered fragments, its shards glinting like a thousand tiny knives in the fluorescent lighting. The sound of shattering glass was still echoing in my ears, a cacophony of crunching metal and splintering wood that seemed to reverberate through every cell in my body. I felt a sudden jolt of adrenaline as I realized that the explosion had come from the window, not from outside. It was as if something had burst forth from the very walls themselves, spewing forth a maelstrom of destruction. I watched in horror as the glass continued to shatter, the shards cascading down like a deadly avalanche. My heart racing, I tried to tear my gaze away from the carnage, but I couldn't. I was transfixed by the sheer brutality of it all. 
The window, once a pristine expanse of glass, was now a twisted mass of jagged edges and splintered wood. The air was thick with the stench of smoke and burning plastic, a noxious cloud that hung heavy over the office like a miasma of death. I was trapped, paralyzed by fear as I watched the chaos unfold below. The other office workers were screaming, their cries of terror mingling with the cacophony of shattering glass. Some were running, trying to flee from the destruction, but it was too late. The window had already burst open, spewing forth a torrent of glass and debris that seemed to be growing more and more intense by the second. I felt a cold sweat break out on my forehead as I realized that I was running out of time. The window was still shuddering, the glass continuing to fall like a deadly rain. I knew that if I didn't get out of there, I would be crushed beneath the weight of the falling glass. I tried to move, but my legs felt like lead. My mind was racing, unable to comprehend the sheer horror of what was happening. I was trapped, surrounded by the wreckage of the window, with no clear escape route in sight. Suddenly, a hand clamped down on my shoulder, spinning me around. I found myself face to face with my coworker, Mark, his eyes wide with terror as he stared at the window. "What the...?" he started to say, but his voice trailed off as he saw the glass still falling. He grabbed my arm, pulling me towards the door. "We have to get out of here!" he yelled, but I couldn't move. I was frozen in place, unable to tear my gaze away from the carnage below. Mark pulled harder, his grip like a vice. I felt my muscles tense, my legs trembling beneath me. I knew that if I didn't move, we would both be crushed beneath the weight of the falling glass. With a Herculean effort, Mark heaved me towards the door, and I stumbled out into the corridor. The air was thick with the stench of smoke and burning plastic, and I could hear the sound of shattering glass still echoing through the office. We sprinted down the corridor, our footsteps pounding against the floor. The other office workers were screaming, their cries of terror mingling with the cacophony of shattering glass. We weaved through the throng, desperate to escape the chaos. As we reached the stairwell, Mark yanked open the door and we tumbled down the stairs, our feet pounding against the metal steps. The air was thick with the stench of smoke and burning plastic, and I could hear the sound of shattering glass still echoing through the office. We reached the ground floor and burst out into the alleyway, the cool night air a welcome relief from the heat of the office. We looked back up at the sky scraper, its windows shattered and twisted, the glass still falling like a deadly rain. I felt a wave of nausea wash over me as I realized that we had narrowly escaped the worst of the destruction. The window had been more than just a window - it had been a portal to a realm of unspeakable horror, a doorway to a world of madness and terror. As I looked up at the sky scraper, I felt a creeping sense of dread. The explosion had been more than just an accident - it had been a warning. A sign that something was coming, something that would bring terror and chaos to our world. I knew that I would never forget the sight of that shattered window, the sound of glass shattering like a thousand tiny knives. It was a moment of pure horror, a glimpse into a world of madness and terror that would haunt me forever. 
Mark turned to me, his eyes wide with fear. "What was that?" he whispered, his voice barely audible over the wind. I shook my head, unable to find the words. I knew that I would never be able to answer him, not now, not ever. The truth was too terrible to comprehend, too monstrous to bear. All I could do was stand there, frozen in place, as the darkness closed in around us like a shroud. <B>Generation #3 - 8 Experts</B> The sky scraper swayed, as she watched the window in front of her on the 21 floor explode. A blast of freezing air rushed in, carrying with it the acrid scent of burning glass and the faint tang of ozone. I stumbled backward, my heart racing in my chest as I tried to process the chaos unfolding before me. At first, I thought it was just another one of the city's many construction mishaps. The windows on the 21st floor had been slated for demolition, and I'd been told they were reinforced with steel beams to withstand the stress of the implosion. But this wasn't an implosion – it was a violent, explosive tearing apart of the glass. As I watched, a shard of shattered glass shot out from the broken window like a razor-sharp arrow, slicing through the air mere inches from my face. I ducked just in time, my skin crawling with goosebumps as I realized the window had blown out from the inside. Someone – or something – had deliberately blown it. Panic set in as I frantically scanned the area, my eyes darting from one face to the next. My coworkers, my friends, my family – they were all staring at me with a mix of confusion and terror. Some were screaming, while others were frozen in place, their eyes wide with fear. I spotted our team lead, Rachel, standing near the stairwell, her phone clutched to her ear as she tried to call for help. I sprinted toward her, my heels clicking on the polished floor. "Rachel, oh god, what's happening?" I shouted above the din of chaos. She hung up the phone and turned to me, her face ashen. "It's the entire 21st floor," she whispered. "The windows blew out, and...and something's coming through." I stared at her, incredulous. "What do you mean?" Rachel's eyes darted toward the broken window, as if she expected something to be emerging from the mangled glass. "I don't know, but it's not human. I've never seen anything like it." I felt a shiver run down my spine as I turned back to the window. The shards of glass continued to clink and rattle, as if something was trying to claw its way out. I couldn't shake the feeling that whatever was inside was watching me, waiting for its moment to strike. As I watched, a dark, amorphous shape began to take form within the broken glass. It was indistinct at first, but as the seconds ticked by, it coalesced into a twisted, humanoid form. The air around it seemed to ripple and distort, as if reality itself was bending to accommodate its presence. I felt my blood run cold as I realized that whatever this creature was, it was not of this world. Its eyes glowed with an otherworldly light, and its very presence seemed to draw the air out of the room, leaving us gasping for breath. The creature's form began to shift and writhe, like a living shadow. Its limbs elongated and twisted, taking on a grotesque, inhuman shape. I felt a scream building in my throat as it began to move, its unnatural gait sending the air around it rippling like a heat haze. Rachel grabbed my arm, her grip like a vice. "We need to get out of here," she whispered. "Now." I didn't need to be told twice. 
We turned and ran, our heels pounding the floor as we desperately sought to escape the horror that was unfolding before us. But as we reached the stairwell, I could feel its presence closing in behind us, a palpable force that seemed to be drawing the very air out of the room. We stumbled down the stairs, our hearts racing with fear. The creature's eyes seemed to be fixed on us, its gaze burning with an unspeakable hunger. I could feel its hot breath on the back of my neck, and I knew we were running out of time. As we reached the ground floor, we burst through the doors and out into the night air. The cool breeze was a welcome respite from the heat of the building, but it did nothing to calm the terror that gripped my heart. We stood there, panting and trembling, as the creature emerged from the building behind us. It was massive, its twisted form looming over the street like a monolith. Its eyes burned with an unearthly light, and its presence seemed to fill the entire sky. Rachel grabbed my arm, pulling me forward. "We need to get out of here," she repeated. "Now." I didn't need to be told twice. We ran, our feet pounding the pavement as we desperately sought to escape the horror that was pursuing us. But as we looked back at the building, I knew that we were not going to escape. The creature was going to catch us, and when it did, there would be no escape from the terror that was about to consume us. The city lights seemed to blur and distort as we ran, the sounds of the creature's pursuit growing louder and closer. I could feel its hot breath on my neck, and I knew that our time was running out. As we turned the corner onto Main Street, I caught a glimpse of the creature's twisted form in the distance. Its eyes seemed to be fixed on us, and I knew that we were doomed. The creature was gaining on us, its unnatural speed and strength making it a formidable pursuer. We ran, our hearts pounding in our chests, as the creature closed in behind us. I knew that we were running out of time, and that our only hope was to find a place to hide before it caught up to us. But as we turned onto Elm Street, I realized that there was no escape. The creature was everywhere, its presence filling the entire city. We were trapped, and it was going to catch us. The last thing I remember is the sound of Rachel's scream, echoing through the night air as the creature closed in around us. And then, everything went black. When I came to, I was lying on the cold, hard floor of a hospital room. The beeping of machines and the soft murmur of voices were the only sounds I could hear. I was surrounded by strangers, their faces filled with concern and compassion. A nurse leaned forward, her eyes filled with kindness. "Welcome back," she said. "You've been in an accident. You were with a group of coworkers on the 21st floor of a skyscraper when...when something happened." I tried to sit up, but a wave of pain washed over me, forcing me back onto the bed. "What happened?" I asked, my voice barely above a whisper. The nurse hesitated, her eyes darting around the room. "We're not entirely sure," she said. "But it seems that a...a creature emerged from the building. It was unlike anything we've ever seen before." I tried to process the information, but my mind was still foggy. The creature? What creature? As I looked around the room, I realized that something was off. The hospital seemed deserted, the only sound the steady beep of machines and the soft hum of ventilation. 
And then, I saw it – a small, twisted shape peeking out from under the bed. It was a shard of glass, but it was also something more. It was a piece of the creature, and it was watching me. --- <B> Science Fiction: The Last Transmission - Write a story that takes place entirely within a spaceship's cockpit as the sole surviving crew member attempts to send a final message back to Earth before the ship's power runs out. The story should explore themes of isolation, sacrifice, and the importance of human connection in the face of adversity. 800-1000 words. </B> (temp .6, rep pen 1.02) The cockpit of the spaceship Aurora was a dimly lit room, the only sound the soft hum of the life support systems and the creaks and groans of the ship's metal hull. Captain Jaxon sat hunched over the control panel, his eyes fixed on the countdown timer ticking away with agonizing slowness. "2 hours, 14 minutes, 42 seconds," the ship's computer, EVE, announced in a calm, robotic voice. Jaxon's head jerked up, his eyes locking onto the screen displaying the transmission data. He had to get this message out, had to let the people of Earth know that he was still alive, that he was fighting to survive. But the ship's power was dwindling, and he was running out of time. He took a deep breath, his heart heavy with the weight of his situation. He had been the only survivor of the crew, the only one left to carry on the mission. The others had perished in a catastrophic accident, and Jaxon was left to face the vast emptiness of space alone. He set his hands flying over the controls, his fingers dancing across the keyboard as he entered the final transmission data. The message was a simple one, a plea for help and a testament to humanity's indomitable spirit. "EVE, send this out on all frequencies," Jaxon ordered, his voice firm despite the fear that threatened to overwhelm him. "Transmission acknowledged, Captain," EVE replied, her digital voice a reassuring presence in the darkness. "Signal strength is 3.2 kilowatts. Estimated range: 200 light-years." Jaxon nodded, his eyes never leaving the screen. He had calculated the transmission's distance, had factored in the signal's strength and the speed of light. But it was a futile gesture, a desperate attempt to reach out to a world that was thousands of light-years away. He took another deep breath, steeling himself for what was to come. The ship's power was running out, and he knew it. He had rationed his supplies, but they wouldn't last much longer. He would have to make a choice soon, one that would determine his fate. As the countdown timer ticked closer to zero, Jaxon's thoughts turned to the people he had left behind. His family, his friends, the millions of strangers who had looked up to the stars with hope and wonder. What would they do without him? Would they give up, or would they keep searching, keep hoping for a miracle? The ship's lights flickered, and Jaxon's heart skipped a beat. He was running out of time. He had to act, had to make a decision. "EVE, send this message out again," he said, his voice firm and resolute. "This time, add a personal message. Tell them about me, about my family, about the mission." "Captain, I've already sent the initial transmission," EVE replied, her digital voice tinged with concern. "Adding a personal message will consume additional power. We're running low, Captain." Jaxon hesitated, his eyes locked onto the countdown timer. He knew what he had to do. 
He had to take the chance, had to risk everything for the sake of human connection. "Send it, EVE," he said, his voice barely above a whisper. The ship's computer sprang to life, sending out the transmission once more. The signal burst forth into the void, a beacon of hope in the darkness. As the countdown timer ticked down to zero, Jaxon felt a sense of peace wash over him. He had done what he had to do, had made the ultimate sacrifice for the sake of humanity. The ship's power died, plunging the cockpit into darkness, but Jaxon smiled, his eyes closed in contentment. He knew that he was not alone, that his message would live on, a testament to the indomitable spirit of humanity. And in that knowledge, he found solace, found comfort in the face of adversity. The darkness closed in around him, but Jaxon's heart remained bright, a shining star in the void, a reminder that even in the darkest depths of space, there was always hope. --- <B> Write me a science fiction story in 1st person present tense where the main character is a 15 year girl meets The Terminator with Dr Who materializing 3/4 through the story to save her while there is a tornado of sharks baring down on them. The setting is inside the Canadian National tower restaurant on a Saturday. The length of this story is 1000 words. For each character in the story ROLE PLAY them, and have them react to the situation/setting, events and each other naturally. This includes the main characters, the background character including kitchen staff and other patrons. The sharks should also have “character” too. Treat the tower and the restaurant too as characters. Spice up the narrative to the extreme with reactions all over the setting including character actions, and dialog. The Dr Who and The Terminator should also react to the situation too and comment on it. </B> (NOTE: 4 experts used) I'm standing in the middle of the Canadian National Tower restaurant, surrounded by the hustle and bustle of a Saturday evening. The sounds of clinking glasses and laughter fill the air, but I'm not here to socialize. I'm on the run. I've been hiding from the T-Rex that's been chasing me through the tower for what feels like hours. I glance around the restaurant, searching for an escape route. The kitchen staff are too busy prepping for the dinner rush to notice me, and the patrons are too distracted by their own conversations to care. I spot a narrow door behind the bar and make a beeline for it. Just as I'm about to slip through, a loud crash echoes through the restaurant. I turn to see a massive tornado of sharks tearing through the dining area. Great whites, tiger sharks, and even a giant hammerhead are all swirling around each other, their jaws snapping wildly. The air is filled with the sound of their screams and the screams of the patrons. I sprint towards the door, but it's too late. A shark lunges out of the tornado and tackles me to the ground. I try to fight back, but it's too strong. Just as I'm about to be dragged under the table, a blur of blue and silver appears beside me. "Need a hand?" a voice asks. I look up to see the Terminator, his eyes fixed on the shark. He raises his arm, and the shark is frozen in place. "Come on," he says, helping me to my feet. I stumble out of the way as the Terminator takes down another shark with a swift kick. "Thanks," I gasp. "I owe you one." The Terminator nods, his expression unchanging. "I'm here to protect you. What's your name?" "I'm... uh... Emily," I stutter. The Terminator nods again. "Emily. 
I'll keep you safe." Just as we're about to make our escape, a strange blue box materializes out of thin air. The Doctor steps out of it, looking around wildly. "Blimey! What's all the commotion?" he exclaims. The Terminator turns to the Doctor. "We've got a shark tornado on our hands. We need to get out of here, now." The Doctor nods, his eyes lighting up. "Ah, a shark tornado! I love a good challenge. Emily, my dear, come with me!" I hesitate for a moment, unsure if I should trust this strange blue box and its occupant. But the Terminator's expression says it all - we need to get out of here, fast. "Okay," I say, following the Doctor and the Terminator as they make their way through the restaurant. The kitchen staff are in chaos, trying to keep up with the tornado of sharks. "What's happening?" one of the chefs cries out. "It's a shark tornado!" another chef yells back. "We need to seal off the restaurant and evacuate!" But it's too late. The sharks are everywhere, tearing through the dining area and kitchen. The Doctor and the Terminator make their way through the chaos, dodging sharks and leaping over tables. I cling to the Doctor's arm, my heart racing with fear. "What's going on?" I ask. The Doctor grins. "It's a Time Lord thing. I've been tracking a temporal anomaly, and it seems to have caused this... unpleasantness." The Terminator mutters under his breath. "Unpleasantness? That's an understatement." As we near the door, the sharks get closer. The Doctor pulls out a sonic screwdriver and starts fiddling with it. "Ah, yes... I think I can fix this. Just give me a minute..." He starts tapping away at the sonic screwdriver, and suddenly the sharks begin to slow down. They're still there, of course, but they're no longer a threat. The Doctor turns to us. "There you are, Emily. You're safe now." I breathe a sigh of relief, but the Terminator's expression remains serious. "We're not out of here yet," he says. "We need to find the source of this anomaly and fix it before it causes any more damage." The Doctor nods. "Agreed. But for now, let's just get Emily to a safe place. I'll deal with the sharks." As we make our way through the restaurant, the patrons are starting to realize what's happening. Some of them are screaming, while others are trying to take pictures. One of the chefs spots us and shouts, "Hey, is that... is that the Doctor?" The Doctor waves at him. "That's me, old chap. And this is my new friend, the Terminator." The chef looks at me in awe. "I've always wanted to meet you, Emily. I'm Chef Pierre." I smile, feeling a little more at ease. "Nice to meet you, Chef Pierre." As we approach the door, the tornado of sharks is starting to subside. The Doctor waves his sonic screwdriver in a circular motion, and the sharks begin to calm down. The Terminator nods at the Doctor. "Thanks for your help back there. I owe you one." The Doctor grins. "No need to thank me, my friend. We make a good team. Now, shall we get Emily to safety?" I nod, feeling a sense of relief wash over me. I'm safe, and I've met two of the most unlikely heroes I've ever seen. As we step out of the restaurant and into the night air, the Doctor turns to us. "Now, where were we? Ah yes, the temporal anomaly. I think I know what caused it." The Terminator raises an eyebrow. "Really? Do tell." The Doctor nods. "It seems that a group of rogue scientists have been playing with time. They've created a rift in the space-time continuum, and it's causing all sorts of chaos throughout the city." I gasp. "What? 
That's terrible!" The Doctor nods. "I know, I know. But we can fix it. We just need to find the source of the anomaly and-" Suddenly, a loud crash echoes through the night air. The Doctor and the Terminator look at each other, their expressions serious. "What was that?" I ask, feeling a sense of dread. The Doctor's eyes light up. "I think we're in for another adventure." The Terminator nods. "Time to get to work." And with that, the unlikely trio sets off into the night, ready to face whatever challenges come their way.
[ "BEAR" ]
pszemraj/long-t5-tglobal-base-sci-simplify
pszemraj
summarization
[ "transformers", "pytorch", "onnx", "safetensors", "longt5", "text2text-generation", "lay summaries", "paper summaries", "biology", "medical", "summarization", "en", "dataset:pszemraj/scientific_lay_summarisation-plos-norm", "base_model:google/long-t5-tglobal-base", "base_model:quantized:google/long-t5-tglobal-base", "license:apache-2.0", "autotrain_compatible", "endpoints_compatible", "region:us" ]
"2023-04-06T23:40:30Z"
2023-09-23T20:42:40+00:00
98,677
10
--- base_model: google/long-t5-tglobal-base datasets: - pszemraj/scientific_lay_summarisation-plos-norm language: - en library_name: transformers license: apache-2.0 pipeline_tag: summarization tags: - lay summaries - paper summaries - biology - medical widget: - text: large earthquakes along a given fault segment do not occur at random intervals because it takes time to accumulate the strain energy for the rupture. The rates at which tectonic plates move and accumulate strain at their boundaries are approximately uniform. Therefore, in first approximation, one may expect that large ruptures of the same fault segment will occur at approximately constant time intervals. If subsequent main shocks have different amounts of slip across the fault, then the recurrence time may vary, and the basic idea of periodic mainshocks must be modified. For great plate boundary ruptures the length and slip often vary by a factor of 2. Along the southern segment of the San Andreas fault the recurrence interval is 145 years with variations of several decades. The smaller the standard deviation of the average recurrence interval, the more specific could be the long term prediction of a future mainshock. example_title: earthquakes - text: ' A typical feed-forward neural field algorithm. Spatiotemporal coordinates are fed into a neural network that predicts values in the reconstructed domain. Then, this domain is mapped to the sensor domain where sensor measurements are available as supervision. Class and Section Problems Addressed Generalization (Section 2) Inverse problems, ill-posed problems, editability; symmetries. Hybrid Representations (Section 3) Computation & memory efficiency, representation capacity, editability: Forward Maps (Section 4) Inverse problems Network Architecture (Section 5) Spectral bias, integration & derivatives. Manipulating Neural Fields (Section 6) Edit ability, constraints, regularization. Table 2: The five classes of techniques in the neural field toolbox each addresses problems that arise in learning, inference, and control. (Section 3). We can supervise reconstruction via differentiable forward maps that transform Or project our domain (e.g, 3D reconstruction via 2D images; Section 4) With appropriate network architecture choices, we can overcome neural network spectral biases (blurriness) and efficiently compute derivatives and integrals (Section 5). Finally, we can manipulate neural fields to add constraints and regularizations, and to achieve editable representations (Section 6). Collectively, these classes constitute a ''toolbox'' of techniques to help solve problems with neural fields There are three components in a conditional neural field: (1) An encoder or inference function € that outputs the conditioning latent variable 2 given an observation 0 E(0) =2. 2 is typically a low-dimensional vector, and is often referred to aS a latent code Or feature code_ (2) A mapping function 4 between Z and neural field parameters O: Y(z) = O; (3) The neural field itself $. The encoder € finds the most probable z given the observations O: argmaxz P(2/0). The decoder maximizes the inverse conditional probability to find the most probable 0 given Z: arg- max P(Olz). We discuss different encoding schemes with different optimality guarantees (Section 2.1.1), both global and local conditioning (Section 2.1.2), and different mapping functions Y (Section 2.1.3) 2. Generalization Suppose we wish to estimate a plausible 3D surface shape given a partial or noisy point cloud. 
We need a suitable prior over the sur- face in its reconstruction domain to generalize to the partial observations. A neural network expresses a prior via the function space of its architecture and parameters 0, and generalization is influenced by the inductive bias of this function space (Section 5).' example_title: scientific paper - text: 'Is a else or outside the cob and tree written being of early client rope and you have is for good reasons. On to the ocean in Orange for time. By''s the aggregate we can bed it yet. Why this please pick up on a sort is do and also M Getoi''s nerocos and do rain become you to let so is his brother is made in use and Mjulia''s''s the lay major is aging Masastup coin present sea only of Oosii rooms set to you We do er do we easy this private oliiishs lonthen might be okay. Good afternoon everybody. Welcome to this lecture of Computational Statistics. As you can see, I''m not socially my name is Michael Zelinger. I''m one of the task for this class and you might have already seen me in the first lecture where I made a quick appearance. I''m also going to give the tortillas in the last third of this course. So to give you a little bit about me, I''m a old student here with better Bulman and my research centres on casual inference applied to biomedical disasters, so that could be genomics or that could be hospital data. If any of you is interested in writing a bachelor thesis, a semester paper may be mastathesis about this topic feel for reach out to me. you have my name on models and my email address you can find in the directory I''d Be very happy to talk about it. you do not need to be sure about it, we can just have a chat. So with that said, let''s get on with the lecture. There''s an exciting topic today I''m going to start by sharing some slides with you and later on during the lecture we''ll move to the paper. So bear with me for a few seconds. Well, the projector is starting up. Okay, so let''s get started. Today''s topic is a very important one. It''s about a technique which really forms one of the fundamentals of data science, machine learning, and any sort of modern statistics. It''s called cross validation. I know you really want to understand this topic I Want you to understand this and frankly, nobody''s gonna leave Professor Mineshousen''s class without understanding cross validation. So to set the stage for this, I Want to introduce you to the validation problem in computational statistics. So the problem is the following: You trained a model on available data. You fitted your model, but you know the training data you got could always have been different and some data from the environment. Maybe it''s a random process. You do not really know what it is, but you know that somebody else who gets a different batch of data from the same environment they would get slightly different training data and you do not care that your method performs as well. On this training data. you want to to perform well on other data that you have not seen other data from the same environment. So in other words, the validation problem is you want to quantify the performance of your model on data that you have not seen. So how is this even possible? How could you possibly measure the performance on data that you do not know The solution to? This is the following realization is that given that you have a bunch of data, you were in charge. You get to control how much that your model sees. It works in the following way: You can hide data firms model. 
Let''s say you have a training data set which is a bunch of doubtless so X eyes are the features those are typically hide and national vector. It''s got more than one dimension for sure. And the why why eyes. Those are the labels for supervised learning. As you''ve seen before, it''s the same set up as we have in regression. And so you have this training data and now you choose that you only use some of those data to fit your model. You''re not going to use everything, you only use some of it the other part you hide from your model. And then you can use this hidden data to do validation from the point of you of your model. This hidden data is complete by unseen. In other words, we solve our problem of validation.' example_title: transcribed audio - lecture - text: 'Transformer-based models have shown to be very useful for many NLP tasks. However, a major limitation of transformers-based models is its O(n^2)O(n 2) time & memory complexity (where nn is sequence length). Hence, it''s computationally very expensive to apply transformer-based models on long sequences n > 512n>512. Several recent papers, e.g. Longformer, Performer, Reformer, Clustered attention try to remedy this problem by approximating the full attention matrix. You can checkout 🤗''s recent blog post in case you are unfamiliar with these models. BigBird (introduced in paper) is one of such recent models to address this issue. BigBird relies on block sparse attention instead of normal attention (i.e. BERT''s attention) and can handle sequences up to a length of 4096 at a much lower computational cost compared to BERT. It has achieved SOTA on various tasks involving very long sequences such as long documents summarization, question-answering with long contexts. BigBird RoBERTa-like model is now available in 🤗Transformers. The goal of this post is to give the reader an in-depth understanding of big bird implementation & ease one''s life in using BigBird with 🤗Transformers. But, before going into more depth, it is important to remember that the BigBird''s attention is an approximation of BERT''s full attention and therefore does not strive to be better than BERT''s full attention, but rather to be more efficient. It simply allows to apply transformer-based models to much longer sequences since BERT''s quadratic memory requirement quickly becomes unbearable. Simply put, if we would have ∞ compute & ∞ time, BERT''s attention would be preferred over block sparse attention (which we are going to discuss in this post). If you wonder why we need more compute when working with longer sequences, this blog post is just right for you! Some of the main questions one might have when working with standard BERT-like attention include: Do all tokens really have to attend to all other tokens? Why not compute attention only over important tokens? How to decide what tokens are important? How to attend to just a few tokens in a very efficient way? In this blog post, we will try to answer those questions. What tokens should be attended to? We will give a practical example of how attention works by considering the sentence ''BigBird is now available in HuggingFace for extractive question answering''. In BERT-like attention, every word would simply attend to all other tokens. Let''s think about a sensible choice of key tokens that a queried token actually only should attend to by writing some pseudo-code. Will will assume that the token available is queried and build a sensible list of key tokens to attend to. 
>>> # let''s consider following sentence as an example >>> example = [''BigBird'', ''is'', ''now'', ''available'', ''in'', ''HuggingFace'', ''for'', ''extractive'', ''question'', ''answering''] >>> # further let''s assume, we''re trying to understand the representation of ''available'' i.e. >>> query_token = ''available'' >>> # We will initialize an empty `set` and fill up the tokens of our interest as we proceed in this section. >>> key_tokens = [] # => currently ''available'' token doesn''t have anything to attend Nearby tokens should be important because, in a sentence (sequence of words), the current word is highly dependent on neighboring past & future tokens. This intuition is the idea behind the concept of sliding attention.' example_title: bigbird blog intro - text: 'To be fair, you have to have a very high IQ to understand Rick and Morty. The humour is extremely subtle, and without a solid grasp of theoretical physics most of the jokes will go over a typical viewer''s head. There''s also Rick''s nihilistic outlook, which is deftly woven into his characterisation- his personal philosophy draws heavily from Narodnaya Volya literature, for instance. The fans understand this stuff; they have the intellectual capacity to truly appreciate the depths of these jokes, to realise that they''re not just funny- they say something deep about LIFE. As a consequence people who dislike Rick & Morty truly ARE idiots- of course they wouldn''t appreciate, for instance, the humour in Rick''s existential catchphrase ''Wubba Lubba Dub Dub,'' which itself is a cryptic reference to Turgenev''s Russian epic Fathers and Sons. I''m smirking right now just imagining one of those addlepated simpletons scratching their heads in confusion as Dan Harmon''s genius wit unfolds itself on their television screens. What fools.. how I pity them. 😂 And yes, by the way, i DO have a Rick & Morty tattoo. And no, you cannot see it. It''s for the ladies'' eyes only- and even then they have to demonstrate that they''re within 5 IQ points of my own (preferably lower) beforehand. Nothin personnel kid 😎' example_title: Richard & Mortimer - text: The tower is 324 metres (1,063 ft) tall, about the same height as an 81-storey building, and the tallest structure in Paris. Its base is square, measuring 125 metres (410 ft) on each side. During its construction, the Eiffel Tower surpassed the Washington Monument to become the tallest man-made structure in the world, a title it held for 41 years until the Chrysler Building in New York City was finished in 1930. It was the first structure to reach a height of 300 metres. Due to the addition of a broadcasting aerial at the top of the tower in 1957, it is now taller than the Chrysler Building by 5.2 metres (17 ft). Excluding transmitters, the Eiffel Tower is the second tallest free-standing structure in France after the Millau Viaduct. example_title: eiffel parameters: max_length: 64 min_length: 8 no_repeat_ngram_size: 3 early_stopping: true repetition_penalty: 3.5 encoder_no_repeat_ngram_size: 4 length_penalty: 0.4 num_beams: 4 --- # long-t5-tglobal-base-sci-simplify <a href="https://colab.research.google.com/gist/pszemraj/f0dc02c4d4a5c7ad1d5bf3953251145d/long-t5-tglobal-base-sci-simplify-plos-example-with-textsum.ipynb"> <img src="https://colab.research.google.com/assets/colab-badge.svg" alt="Open In Colab"/> </a> Exploring how well long-document models trained on "lay summaries" of scientific papers generalize. 
> A lay summary is a summary of a research paper or scientific study that is written in plain language, without the use of technical jargon, and is designed to be easily understood by non-experts. ## Model description This model is a fine-tuned version of [google/long-t5-tglobal-base](https://huggingface.co/google/long-t5-tglobal-base) on the `pszemraj/scientific_lay_summarisation-plos-norm` dataset for two epochs. - The variant trained on the ELIFE subset can be found [here](https://huggingface.co/pszemraj/long-t5-tglobal-base-sci-simplify-elife) ## Usage It's recommended to use this model with [beam search decoding](https://huggingface.co/docs/transformers/generation_strategies#beamsearch-decoding). If you are interested, you can also use the `textsum` util repo to have most of this abstracted for you: Install with `pip`: ```bash pip install -U textsum ``` Use in python: ```python from textsum.summarize import Summarizer summarizer = Summarizer('pszemraj/long-t5-tglobal-base-sci-simplify') text = "put the text you don't want to read here" summary = summarizer.summarize_string(text) print(summary) ``` ## Intended uses & limitations - Ability to generalize outside of the dataset domain (pubmed/bioscience type papers) has to be evaluated. ## Training procedure ### Eval results It achieves the following results on the evaluation set: - Loss: 1.6778 - Rouge1: 49.1475 - Rouge2: 18.9281 - Rougel: 26.9893 - Rougelsum: 45.0973 - Gen Len: 399.4125 ### Training hyperparameters The following hyperparameters were used during training: - learning_rate: 0.0004 - train_batch_size: 4 - eval_batch_size: 2 - seed: 42 - distributed_type: multi-GPU - gradient_accumulation_steps: 16 - total_train_batch_size: 64 - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08 - lr_scheduler_type: cosine - lr_scheduler_warmup_ratio: 0.01 - num_epochs: 2.0 ### Training results | Training Loss | Epoch | Step | Validation Loss | Rouge1 | Rouge2 | Rougel | Rougelsum | Gen Len | |:-------------:|:-----:|:----:|:---------------:|:-------:|:-------:|:-------:|:---------:|:--------:| | 1.966 | 0.52 | 200 | 1.7171 | 48.6521 | 18.427 | 26.7726 | 44.3947 | 376.335 | | 1.877 | 1.03 | 400 | 1.6909 | 49.3263 | 18.7945 | 27.0741 | 45.1737 | 382.205 | | 1.9007 | 1.55 | 600 | 1.6778 | 49.1475 | 18.9281 | 26.9893 | 45.0973 | 399.4125 |
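If you prefer to call 🤗 Transformers directly instead of going through `textsum`, the sketch below is one possible setup: it assumes the standard `summarization` pipeline and simply reuses the beam-search generation settings listed under the widget `parameters` above as a starting point (defaults, not tuned values).

```python
from transformers import pipeline

# Minimal sketch: plain 🤗 Transformers summarization pipeline.
# The generation kwargs mirror the widget `parameters` above (beam search, length
# penalty, n-gram blocking) and are a starting point rather than tuned values.
summarizer = pipeline(
    "summarization",
    model="pszemraj/long-t5-tglobal-base-sci-simplify",
)

long_document = "put the text of a long scientific paper here"

result = summarizer(
    long_document,
    max_length=64,
    min_length=8,
    num_beams=4,
    length_penalty=0.4,
    early_stopping=True,
    no_repeat_ngram_size=3,
    encoder_no_repeat_ngram_size=4,
    repetition_penalty=3.5,
)
print(result[0]["summary_text"])
```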
[ "BEAR" ]
allenai/OLMo-1B-0724-hf
allenai
text-generation
[ "transformers", "safetensors", "olmo", "text-generation", "en", "dataset:allenai/dolma", "arxiv:2402.00838", "license:apache-2.0", "autotrain_compatible", "endpoints_compatible", "region:us" ]
"2024-06-15T00:16:05Z"
2024-08-05T20:14:39+00:00
98,484
19
--- datasets: - allenai/dolma language: - en license: apache-2.0 --- <img src="https://allenai.org/olmo/olmo-7b-animation.gif" alt="OLMo Logo" width="800" style="margin-left:'auto' margin-right:'auto' display:'block'"/> # Model Card for OLMo 1B July 2024 OLMo 1B July 2024 is the latest version of the original [OLMo 1B](https://huggingface.co/allenai/OLMo-1B) model rocking a 4.4 point increase in HellaSwag, among other evaluation improvements, from an improved version of the [Dolma](https://huggingface.co/datasets/allenai/dolma) dataset and staged training. **This version is for direct use with HuggingFace Transformers** from v4.40 on. OLMo is a series of **O**pen **L**anguage **Mo**dels designed to enable the science of language models. The OLMo models are trained on the [Dolma](https://huggingface.co/datasets/allenai/dolma) dataset. We release all code, checkpoints, logs, and details involved in training these models. ## Model Details The core models released in this batch are the following: | Size | Training Tokens | Layers | Hidden Size | Attention Heads | Context Length | |------|--------|---------|-------------|-----------------|----------------| | [OLMo 1B July 2024](https://huggingface.co/allenai/OLMo-1B-0724-hf) | 3.05 Trillion | 16 | 2048 | 16 | 4096 | | [OLMo 7B July 2024](https://huggingface.co/allenai/OLMo-7B-0724-hf) | 2.75 Trillion | 32 | 4096 | 32 | 4096 | [Coming soon] We are releasing many checkpoints for these models, for every 1000 training steps. The naming convention is `stepXXX-tokensYYYB`. To load a specific model revision with HuggingFace, simply add the argument `revision`: ```python olmo = AutoModelForCausalLM.from_pretrained("allenai/OLMo-1B-0724-hf", revision="step1000-tokens4B") ``` All revisions/branches are listed in the file `revisions.txt`. Or, you can access all the revisions for the models via the following code snippet: ```python from huggingface_hub import list_repo_refs out = list_repo_refs("allenai/OLMo-1B-0724-hf") branches = [b.name for b in out.branches] ``` ### Model Description - **Developed by:** Allen Institute for AI (AI2) - **Supported by:** Databricks, Kempner Institute for the Study of Natural and Artificial Intelligence at Harvard University, AMD, CSC (Lumi Supercomputer), UW - **Model type:** a Transformer-style autoregressive language model. - **Language(s) (NLP):** English - **License:** The code and model are released under Apache 2.0. - **Contact:** Technical inquiries: `olmo at allenai dot org`. Press: `press at allenai dot org` - **Date cutoff:** Oct. 2023, with most data from Feb./March 2023 based on Dolma dataset version. ### Model Sources - **Project Page:** https://allenai.org/olmo - **Repositories:** - Core repo (training, inference, fine-tuning etc.): https://github.com/allenai/OLMo - Evaluation code: https://github.com/allenai/OLMo-Eval - Further fine-tuning code: https://github.com/allenai/open-instruct - **Paper:** [Link](https://arxiv.org/abs/2402.00838) ## Uses ### Inference Install Transformers.
Then proceed as usual with HuggingFace: ```python from transformers import AutoModelForCausalLM, AutoTokenizer olmo = AutoModelForCausalLM.from_pretrained("allenai/OLMo-1B-0724-hf") tokenizer = AutoTokenizer.from_pretrained("allenai/OLMo-1B-0724-hf") message = ["Language modeling is "] inputs = tokenizer(message, return_tensors='pt', return_token_type_ids=False) # optional verifying cuda # inputs = {k: v.to('cuda') for k,v in inputs.items()} # olmo = olmo.to('cuda') response = olmo.generate(**inputs, max_new_tokens=100, do_sample=True, top_k=50, top_p=0.95) print(tokenizer.batch_decode(response, skip_special_tokens=True)[0]) >> 'Language modeling is the first step to build natural language generation...' ``` Alternatively, with the pipeline abstraction: ```python from transformers import pipeline olmo_pipe = pipeline("text-generation", model="allenai/OLMo-1B-0724-hf") print(olmo_pipe("Language modeling is ")) >> 'Language modeling is a branch of natural language processing that aims to...' ``` Or, you can make this slightly faster by quantizing the model, e.g. `AutoModelForCausalLM.from_pretrained("allenai/OLMo-1B-0724-hf", torch_dtype=torch.float16, load_in_8bit=True)` (requires `bitsandbytes`). The quantized model is more sensitive to typing / cuda, so it is recommended to pass the inputs as `inputs.input_ids.to('cuda')` to avoid potential issues. ### Fine-tuning Model fine-tuning can be done from the final checkpoint (the `main` revision of this model) or many intermediate checkpoints. Two recipes for tuning are available. 1. Fine-tune with the OLMo repository: ```bash torchrun --nproc_per_node=8 scripts/train.py {path_to_train_config} \ --data.paths=[{path_to_data}/input_ids.npy] \ --data.label_mask_paths=[{path_to_data}/label_mask.npy] \ --load_path={path_to_checkpoint} \ --reset_trainer_state ``` For more documentation, see the [GitHub readme](https://github.com/allenai/OLMo?tab=readme-ov-file#fine-tuning). 2. Further fine-tuning support is being developed in AI2's Open Instruct repository. Details are [here](https://github.com/allenai/open-instruct). ## Evaluation <!-- This section describes the evaluation protocols and provides the results. --> Core model results for the new and original 7B model are found below.
| Task | Llama-7b | Llama2-7b | Falcon-7b | Mpt-7b | OLMo-7B | Llama2-13b | **OLMo 7B 0424** | |-------------------|----------|-----------|-----------|--------|---------|------------|-------------| | arc_c | 44.5 | 48.5 | 47.5 | 46.5 | 48.5 | 52.8 | 42.5 | | arc_e | 67.9 | 69.5 | 70.4 | 70.5 | 65.4 | 73.7 | 67.2 | | boolq | 75.4 | 80.2 | 74.6 | 74.2 | 73.4 | 82.2 | 83.7 | | copa | 91.0 | 86.0 | 86.0 | 85.0 | 90.0 | 90.0 | 86.0 | | hellaswag | 76.2 | 76.8 | 75.9 | 77.6 | 76.4 | 78.6 | 75.5 | | openbookqa | 51.2 | 48.4 | 53.0 | 48.6 | 50.4 | 51.8 | 50.0 | | piqa | 77.2 | 76.7 | 78.5 | 77.3 | 78.4 | 79.0 | 77.5 | | sciq | 93.9 | 94.5 | 93.9 | 93.7 | 93.8 | 95.5 | 96.7 | | winogrande | 70.5 | 69.4 | 68.9 | 69.9 | 67.9 | 73.5 | 69.8 | | truthfulQA (MC2) | 33.9 | 38.5 | 34.0 | 33.0 | 36.0 | 36.8 | 35.8 | | MMLU (5 shot MC) | 31.5 | 45.0 | 24.0 | 30.8 | 28.3 | 55.5 | 52.0 | | GSM8k | 10.0 | 12.0 | 4.0 | 4.5 | 8.5 | 25.0 | 29.0 | | Full average | 60.3 | 62.1 | 59.2 | 59.3 | 59.8 | 66.2 | 63.8 | And for the 1B model: | task | random | [StableLM 2 1.6b](https://huggingface.co/stabilityai/stablelm-2-1_6b)\* | [Pythia 1B](https://huggingface.co/EleutherAI/pythia-1b) | [TinyLlama 1.1B](https://huggingface.co/TinyLlama/TinyLlama-1.1B-intermediate-step-1195k-token-2.5T) | OLMo 1B | **OLMo 1B 0724** (ours) | | ------------- | ------ | ----------------- | --------- | -------------------------------------- | ------- | ---- | | arc_challenge | 25 | 43.8 | 33.1 | 34.8 | 34.5 | 36.5 | | arc_easy | 25 | 63.7 | 50.2 | 53.2 | 58.1 | 55.3 | | boolq | 50 | 76.6 | 61.8 | 64.6 | 60.7 | 67.5 | | copa | 50 | 84.0 | 72.0 | 78.0 | 79.0 | 83.0 | | hellaswag | 25 | 68.2 | 44.7 | 58.7 | 62.5 | 66.9 | | openbookqa | 25 | 45.8 | 37.8 | 43.6 | 46.4 | 46.4 | | piqa | 50 | 74.0 | 69.1 | 71.1 | 73.7 | 74.9 | | sciq | 25 | 94.7 | 86.0 | 90.5 | 88.1 | 93.4 | | winogrande | 50 | 64.9 | 53.3 | 58.9 | 58.9 | 61.4 | | Average | 36.1 | 68.4 | 56.4 | 61.5 | 62.4 | 65.0 | \*Unlike OLMo, Pythia, and TinyLlama, StabilityAI has not disclosed yet the data StableLM was trained on, making comparisons with other efforts challenging. ## Model Details ### Data For training data details, please see the [Dolma](https://huggingface.co/datasets/allenai/dolma) documentation. **This model uses the new 1.7 version with more data sources, better deduplication, and quality filtering**. During the annealing phase we use a higher quality subset of Dolma with a linearly decaying learning rate to 0. ### Staged training / annealing In contrast to the first OLMo, we trained OLMo 7B 0424 with a two-stage curriculum: * In the first stage, we trained the model from scratch on the Dolma 1.7 dataset. We set a cosine learning rate schedule with a warmup of 2500 steps, a peak learning rate of 3e-4, and a cosine decay to 3e-5 after 3T tokens. We cut off this stage after 2T tokens, when the learning rate is still high. * At this point we switch to the second stage, in which we train on a higher-quality subset of Dolma 1.7 (see below) for another 50B tokens, while linearly decaying the learning rate to 0. Our high-quality subset includes (1) using all available Wikipedia, OpenWebMath and Flan data, (2) removing Dolma CC, CC News, and Megawika, and (3) rebalancing remaining sources to achieve approximately equal proportions of each. See exact token counts and relative proportions of this second stage mix below. Both stages contribute equally to the final performance of the OLMo model. After the first stage, OLMo 7B 0424 already outperforms the older OLMo. 
The second stage consistently adds 2 to 3 points of performance on top. ### Architecture OLMo 7B architecture with peer models for comparison. | | **OLMo 7B** | [Llama 2 7B](https://huggingface.co/meta-llama/Llama-2-7b) | [OpenLM 7B](https://laion.ai/blog/open-lm/) | [Falcon 7B](https://huggingface.co/tiiuae/falcon-7b) | PaLM 8B | |------------------------|-------------------|---------------------|--------------------|--------------------|------------------| | d_model | 4096 | 4096 | 4096 | 4544 | 4096 | | num heads | 32 | 32 | 32 | 71 | 16 | | num layers | 32 | 32 | 32 | 32 | 32 | | MLP ratio | ~8/3 | ~8/3 | ~8/3 | 4 | 4 | | LayerNorm type | non-parametric LN | RMSNorm | parametric LN | parametric LN | parametric LN | | pos embeddings | RoPE | RoPE | RoPE | RoPE | RoPE | | attention variant | full | GQA | full | MQA | MQA | | biases | none | none | in LN only | in LN only | none | | block type | sequential | sequential | sequential | parallel | parallel | | activation | SwiGLU | SwiGLU | SwiGLU | GeLU | SwiGLU | | sequence length | 2048 | 4096 | 2048 | 2048 | 2048 | | batch size (instances) | 2160 | 1024 | 2048 | 2304 | 512 | | batch size (tokens) | ~4M | ~4M | ~4M | ~4M | ~1M | | weight tying | no | no | no | no | yes | ### Hyperparameters AdamW optimizer parameters are shown below. | Size | Peak LR | Betas | Epsilon | Weight Decay | |------|------------|-----------------|-------------|--------------| | 1B | 4.0E-4 | (0.9, 0.95) | 1.0E-5 | 0.1 | | 7B | 3.0E-4 | (0.9, 0.99) | 1.0E-5 | 0.1 | Optimizer settings comparison with peer models. | | **OLMo 7B** | [Llama 2 7B](https://huggingface.co/meta-llama/Llama-2-7b) | [OpenLM 7B](https://laion.ai/blog/open-lm/) | [Falcon 7B](https://huggingface.co/tiiuae/falcon-7b) | |-----------------------|------------------|---------------------|--------------------|--------------------| | warmup steps | 5000 | 2000 | 2000 | 1000 | | peak LR | 3.0E-04 | 3.0E-04 | 3.0E-04 | 6.0E-04 | | minimum LR | 3.0E-05 | 3.0E-05 | 3.0E-05 | 1.2E-05 | | weight decay | 0.1 | 0.1 | 0.1 | 0.1 | | beta1 | 0.9 | 0.9 | 0.9 | 0.99 | | beta2 | 0.95 | 0.95 | 0.95 | 0.999 | | epsilon | 1.0E-05 | 1.0E-05 | 1.0E-05 | 1.0E-05 | | LR schedule | linear | cosine | cosine | cosine | | gradient clipping | global 1.0 | global 1.0 | global 1.0 | global 1.0 | | gradient reduce dtype | FP32 | FP32 | FP32 | BF16 | | optimizer state dtype | FP32 | most likely FP32 | FP32 | FP32 | ## Environmental Impact OLMo 7B variants were either trained on MI250X GPUs at the LUMI supercomputer, or A100-40GB GPUs provided by MosaicML. A summary of the environmental impact. Further details are available in the paper. | | GPU Type | Power Consumption From GPUs | Carbon Intensity (kg CO₂e/KWh) | Carbon Emissions (tCO₂eq) | |-----------|------------|-----------------------------|--------------------------------|---------------------------| | OLMo 7B Twin | MI250X ([LUMI supercomputer](https://www.lumi-supercomputer.eu)) | 135 MWh | 0* | 0* | | OLMo 7B | A100-40GB ([MosaicML](https://www.mosaicml.com)) | 104 MWh | 0.656 | 75.05 | ## Bias, Risks, and Limitations Like any base language model or fine-tuned model without safety filtering, it is relatively easy for a user to prompt these models to generate harmful and generally sensitive content. Such content can also be produced unintentionally, especially in the case of bias, so we recommend users consider the risks of applications of this technology. Otherwise, many facts from OLMo or any LLM will often not be true, so they should be checked. 
## Citation **BibTeX:** ``` @article{Groeneveld2023OLMo, title={OLMo: Accelerating the Science of Language Models}, author={Groeneveld, Dirk and Beltagy, Iz and Walsh, Pete and Bhagia, Akshita and Kinney, Rodney and Tafjord, Oyvind and Jha, Ananya Harsh and Ivison, Hamish and Magnusson, Ian and Wang, Yizhong and Arora, Shane and Atkinson, David and Authur, Russell and Chandu, Khyathi and Cohan, Arman and Dumas, Jennifer and Elazar, Yanai and Gu, Yuling and Hessel, Jack and Khot, Tushar and Merrill, William and Morrison, Jacob and Muennighoff, Niklas and Naik, Aakanksha and Nam, Crystal and Peters, Matthew E. and Pyatkin, Valentina and Ravichander, Abhilasha and Schwenk, Dustin and Shah, Saurabh and Smith, Will and Subramani, Nishant and Wortsman, Mitchell and Dasigi, Pradeep and Lambert, Nathan and Richardson, Kyle and Dodge, Jesse and Lo, Kyle and Soldaini, Luca and Smith, Noah A. and Hajishirzi, Hannaneh}, journal={Preprint}, year={2024} } ``` **APA:** Groeneveld, D., Beltagy, I., Walsh, P., Bhagia, A., Kinney, R., Tafjord, O., Jha, A., Ivison, H., Magnusson, I., Wang, Y., Arora, S., Atkinson, D., Authur, R., Chandu, K., Cohan, A., Dumas, J., Elazar, Y., Gu, Y., Hessel, J., Khot, T., Merrill, W., Morrison, J., Muennighoff, N., Naik, A., Nam, C., Peters, M., Pyatkin, V., Ravichander, A., Schwenk, D., Shah, S., Smith, W., Subramani, N., Wortsman, M., Dasigi, P., Lambert, N., Richardson, K., Dodge, J., Lo, K., Soldaini, L., Smith, N., & Hajishirzi, H. (2024). OLMo: Accelerating the Science of Language Models. Preprint. ## Model Card Contact For errors in this model card, contact Nathan, `{nathanl} at allenai dot org`.
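As a companion to the quantization note in the Inference section above, here is a minimal sketch of 8-bit loading and generation. It assumes a CUDA device and the `bitsandbytes` package, and it reuses the arguments from the snippets in that section; treat it as a starting point rather than a reference implementation.

```python
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

# 8-bit quantized load, as suggested in the Inference section (requires bitsandbytes).
olmo = AutoModelForCausalLM.from_pretrained(
    "allenai/OLMo-1B-0724-hf",
    torch_dtype=torch.float16,
    load_in_8bit=True,
)
tokenizer = AutoTokenizer.from_pretrained("allenai/OLMo-1B-0724-hf")

message = ["Language modeling is "]
inputs = tokenizer(message, return_tensors="pt", return_token_type_ids=False)

# The quantized model is sensitive to input placement, so pass input_ids on CUDA directly.
response = olmo.generate(
    inputs.input_ids.to("cuda"),
    max_new_tokens=100,
    do_sample=True,
    top_k=50,
    top_p=0.95,
)
print(tokenizer.batch_decode(response, skip_special_tokens=True)[0])
```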
[ "SCIQ" ]
FremyCompany/BioLORD-2023-C
FremyCompany
sentence-similarity
[ "sentence-transformers", "pytorch", "safetensors", "mpnet", "feature-extraction", "sentence-similarity", "medical", "biology", "en", "dataset:FremyCompany/BioLORD-Dataset", "dataset:FremyCompany/AGCT-Dataset", "arxiv:2311.16075", "license:other", "autotrain_compatible", "endpoints_compatible", "region:us" ]
"2024-02-12T18:54:09Z"
2025-01-09T19:25:52+00:00
96,636
3
--- datasets: - FremyCompany/BioLORD-Dataset - FremyCompany/AGCT-Dataset language: en license: other license_name: ihtsdo-and-nlm-licences license_link: https://www.nlm.nih.gov/databases/umls.html pipeline_tag: sentence-similarity tags: - sentence-transformers - feature-extraction - sentence-similarity - medical - biology widget: - source_sentence: bartonellosis sentences: - cat scratch disease - cat scratch wound - tick-borne orbivirus fever - cat fur --- | 🙏 If you are able to, please help me [fund my open research](https://gofund.me/1f2d6803). 🙏 Thank you for your generosity! 🤗 | |-----------------------------------------------------------------------------------------------------------------------------------| # FremyCompany/BioLORD-2023-C This model was trained using BioLORD, a new pre-training strategy for producing meaningful representations for clinical sentences and biomedical concepts. State-of-the-art methodologies operate by maximizing the similarity in representation of names referring to the same concept, and preventing collapse through contrastive learning. However, because biomedical names are not always self-explanatory, it sometimes results in non-semantic representations. BioLORD overcomes this issue by grounding its concept representations using definitions, as well as short descriptions derived from a multi-relational knowledge graph consisting of biomedical ontologies. Thanks to this grounding, our model produces more semantic concept representations that match more closely the hierarchical structure of ontologies. BioLORD-2023 establishes a new state of the art for text similarity on both clinical sentences (MedSTS) and biomedical concepts (EHR-Rel-B). This model is based on [sentence-transformers/all-mpnet-base-v2](https://huggingface.co/sentence-transformers/all-mpnet-base-v2) and was further finetuned on the [BioLORD-Dataset](https://huggingface.co/datasets/FremyCompany/BioLORD-Dataset) and LLM-generated definitions from the [Automatic Glossary of Clinical Terminology (AGCT)](https://huggingface.co/datasets/FremyCompany/AGCT-Dataset). ## Sibling models This model is accompanied by other models in the BioLORD-2023 series, which you might want to check: - [BioLORD-2023-M](https://huggingface.co/FremyCompany/BioLORD-2023-M) (multilingual model; distilled from BioLORD-2023) - [BioLORD-2023](https://huggingface.co/FremyCompany/BioLORD-2023) (best model after model averaging) - [BioLORD-2023-S](https://huggingface.co/FremyCompany/BioLORD-2023-S) (best hyperparameters; no model averaging) - [BioLORD-2023-C](https://huggingface.co/FremyCompany/BioLORD-2023-C) (contrastive training only; for NEL tasks; this model) You can also take a look at last year's model and paper: - [BioLORD-2022](https://huggingface.co/FremyCompany/BioLORD-STAMB2-v1) (also known as BioLORD-STAMB2-v1) ## Training strategy ### Summary of the 3 phases ![image/png](https://cdn-uploads.huggingface.co/production/uploads/5f04e8865d08220171a0ad3f/my94lNjxATRU_Rg5knUZ8.png) ### Contrastive phase: details ![image/png](https://cdn-uploads.huggingface.co/production/uploads/5f04e8865d08220171a0ad3f/_jE2ETcXkLvYLr7TeOdci.png) ### Self-distallation phase: details ![image/png](https://cdn-uploads.huggingface.co/production/uploads/5f04e8865d08220171a0ad3f/7xuqi231RB0OzvcxK3bf-.png) ## Citation This model accompanies the [BioLORD-2023: Learning Ontological Representations from Definitions](https://arxiv.org/abs/2311.16075) paper. 
When you use this model, please cite the original paper as follows: ```latex @article{remy-etal-2023-biolord, author = {Remy, François and Demuynck, Kris and Demeester, Thomas}, title = "{BioLORD-2023: semantic textual representations fusing large language models and clinical knowledge graph insights}", journal = {Journal of the American Medical Informatics Association}, pages = {ocae029}, year = {2024}, month = {02}, issn = {1527-974X}, doi = {10.1093/jamia/ocae029}, url = {https://doi.org/10.1093/jamia/ocae029}, eprint = {https://academic.oup.com/jamia/advance-article-pdf/doi/10.1093/jamia/ocae029/56772025/ocae029.pdf}, } ``` ## Usage (Sentence-Transformers) This is a [sentence-transformers](https://www.SBERT.net) model: It maps sentences & paragraphs to a 768-dimensional dense vector space and can be used for tasks like clustering or semantic search. This model has been fine-tuned for the biomedical domain. While it preserves a good ability to produce embeddings for general-purpose text, it will be more useful to you if you are trying to process medical documents such as EHR records or clinical notes. Both sentences and phrases can be embedded in the same latent space. Using this model becomes easy when you have [sentence-transformers](https://www.SBERT.net) installed: ``` pip install -U sentence-transformers ``` Then you can use the model like this: ```python from sentence_transformers import SentenceTransformer sentences = ["Cat scratch injury", "Cat scratch disease", "Bartonellosis"] model = SentenceTransformer('FremyCompany/BioLORD-2023-C') embeddings = model.encode(sentences) print(embeddings) ``` ## Usage (HuggingFace Transformers) Without [sentence-transformers](https://www.SBERT.net), you can use the model like this: First, you pass your input through the transformer model, then you have to apply the right pooling operation on top of the contextualized word embeddings. ```python from transformers import AutoTokenizer, AutoModel import torch import torch.nn.functional as F #Mean Pooling - Take attention mask into account for correct averaging def mean_pooling(model_output, attention_mask): token_embeddings = model_output[0] #First element of model_output contains all token embeddings input_mask_expanded = attention_mask.unsqueeze(-1).expand(token_embeddings.size()).float() return torch.sum(token_embeddings * input_mask_expanded, 1) / torch.clamp(input_mask_expanded.sum(1), min=1e-9) # Sentences we want sentence embeddings for sentences = ["Cat scratch injury", "Cat scratch disease", "Bartonellosis"] # Load model from HuggingFace Hub tokenizer = AutoTokenizer.from_pretrained('FremyCompany/BioLORD-2023-C') model = AutoModel.from_pretrained('FremyCompany/BioLORD-2023-C') # Tokenize sentences encoded_input = tokenizer(sentences, padding=True, truncation=True, return_tensors='pt') # Compute token embeddings with torch.no_grad(): model_output = model(**encoded_input) # Perform pooling sentence_embeddings = mean_pooling(model_output, encoded_input['attention_mask']) # Normalize embeddings sentence_embeddings = F.normalize(sentence_embeddings, p=2, dim=1) print("Sentence embeddings:") print(sentence_embeddings) ``` ## License My own contributions for this model are covered by the MIT license. However, given the data used to train this model originates from UMLS and SnomedCT, you will need to ensure you have proper licensing of UMLS and SnomedCT before using this model.
Both UMLS and SnomedCT are free of charge in most countries, but you might have to create an account and report on your usage of the data yearly to keep a valid license.
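Building on the usage examples above, here is a small sketch of ranking candidate concept names against a mention with this model, the kind of scoring used in entity-linking (NEL) setups. It reuses the widget example strings and the standard `sentence_transformers.util.cos_sim` helper; the candidate list is purely illustrative.

```python
from sentence_transformers import SentenceTransformer, util

model = SentenceTransformer("FremyCompany/BioLORD-2023-C")

# Mention to link and candidate concept names (taken from the widget example above).
mention = "bartonellosis"
candidates = ["cat scratch disease", "cat scratch wound", "tick-borne orbivirus fever", "cat fur"]

mention_emb = model.encode(mention, convert_to_tensor=True)
candidate_embs = model.encode(candidates, convert_to_tensor=True)

# Cosine similarity between the mention and each candidate; higher means closer concepts.
scores = util.cos_sim(mention_emb, candidate_embs)[0]
for name, score in sorted(zip(candidates, scores.tolist()), key=lambda pair: -pair[1]):
    print(f"{score:.3f}  {name}")
```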
[ "EHR-REL" ]
BAAI/bge-base-en
BAAI
feature-extraction
[ "transformers", "pytorch", "onnx", "safetensors", "bert", "feature-extraction", "mteb", "en", "arxiv:2310.07554", "arxiv:2309.07597", "license:mit", "model-index", "text-embeddings-inference", "endpoints_compatible", "region:us" ]
"2023-08-05T08:03:50Z"
2024-04-17T13:00:18+00:00
94,739
57
--- language: - en license: mit tags: - mteb model-index: - name: bge-base-en results: - task: type: Classification dataset: name: MTEB AmazonCounterfactualClassification (en) type: mteb/amazon_counterfactual config: en split: test revision: e8379541af4e31359cca9fbcf4b00f2671dba205 metrics: - type: accuracy value: 75.73134328358209 - type: ap value: 38.97277232632892 - type: f1 value: 69.81740361139785 - task: type: Classification dataset: name: MTEB AmazonPolarityClassification type: mteb/amazon_polarity config: default split: test revision: e2d317d38cd51312af73b3d32a06d1a08b442046 metrics: - type: accuracy value: 92.56522500000001 - type: ap value: 88.88821771869553 - type: f1 value: 92.54817512659696 - task: type: Classification dataset: name: MTEB AmazonReviewsClassification (en) type: mteb/amazon_reviews_multi config: en split: test revision: 1399c76144fd37290681b995c656ef9b2e06e26d metrics: - type: accuracy value: 46.91 - type: f1 value: 46.28536394320311 - task: type: Retrieval dataset: name: MTEB ArguAna type: arguana config: default split: test revision: None metrics: - type: map_at_1 value: 38.834 - type: map_at_10 value: 53.564 - type: map_at_100 value: 54.230000000000004 - type: map_at_1000 value: 54.235 - type: map_at_3 value: 49.49 - type: map_at_5 value: 51.784 - type: mrr_at_1 value: 39.26 - type: mrr_at_10 value: 53.744 - type: mrr_at_100 value: 54.410000000000004 - type: mrr_at_1000 value: 54.415 - type: mrr_at_3 value: 49.656 - type: mrr_at_5 value: 52.018 - type: ndcg_at_1 value: 38.834 - type: ndcg_at_10 value: 61.487 - type: ndcg_at_100 value: 64.303 - type: ndcg_at_1000 value: 64.408 - type: ndcg_at_3 value: 53.116 - type: ndcg_at_5 value: 57.248 - type: precision_at_1 value: 38.834 - type: precision_at_10 value: 8.663 - type: precision_at_100 value: 0.989 - type: precision_at_1000 value: 0.1 - type: precision_at_3 value: 21.218999999999998 - type: precision_at_5 value: 14.737 - type: recall_at_1 value: 38.834 - type: recall_at_10 value: 86.629 - type: recall_at_100 value: 98.86200000000001 - type: recall_at_1000 value: 99.644 - type: recall_at_3 value: 63.656 - type: recall_at_5 value: 73.68400000000001 - task: type: Clustering dataset: name: MTEB ArxivClusteringP2P type: mteb/arxiv-clustering-p2p config: default split: test revision: a122ad7f3f0291bf49cc6f4d32aa80929df69d5d metrics: - type: v_measure value: 48.88475477433035 - task: type: Clustering dataset: name: MTEB ArxivClusteringS2S type: mteb/arxiv-clustering-s2s config: default split: test revision: f910caf1a6075f7329cdf8c1a6135696f37dbd53 metrics: - type: v_measure value: 42.85053138403176 - task: type: Reranking dataset: name: MTEB AskUbuntuDupQuestions type: mteb/askubuntudupquestions-reranking config: default split: test revision: 2000358ca161889fa9c082cb41daa8dcfb161a54 metrics: - type: map value: 62.23221013208242 - type: mrr value: 74.64857318735436 - task: type: STS dataset: name: MTEB BIOSSES type: mteb/biosses-sts config: default split: test revision: d3fb88f8f02e40887cd149695127462bbcf29b4a metrics: - type: cos_sim_pearson value: 87.4403443247284 - type: cos_sim_spearman value: 85.5326718115169 - type: euclidean_pearson value: 86.0114007449595 - type: euclidean_spearman value: 86.05979225604875 - type: manhattan_pearson value: 86.05423806568598 - type: manhattan_spearman value: 86.02485170086835 - task: type: Classification dataset: name: MTEB Banking77Classification type: mteb/banking77 config: default split: test revision: 0fd18e25b25c072e09e0d92ab615fda904d66300 metrics: - type: accuracy value: 
86.44480519480518 - type: f1 value: 86.41301900941988 - task: type: Clustering dataset: name: MTEB BiorxivClusteringP2P type: mteb/biorxiv-clustering-p2p config: default split: test revision: 65b79d1d13f80053f67aca9498d9402c2d9f1f40 metrics: - type: v_measure value: 40.17547250880036 - task: type: Clustering dataset: name: MTEB BiorxivClusteringS2S type: mteb/biorxiv-clustering-s2s config: default split: test revision: 258694dd0231531bc1fd9de6ceb52a0853c6d908 metrics: - type: v_measure value: 37.74514172687293 - task: type: Retrieval dataset: name: MTEB CQADupstackAndroidRetrieval type: BeIR/cqadupstack config: default split: test revision: None metrics: - type: map_at_1 value: 32.096000000000004 - type: map_at_10 value: 43.345 - type: map_at_100 value: 44.73 - type: map_at_1000 value: 44.85 - type: map_at_3 value: 39.956 - type: map_at_5 value: 41.727 - type: mrr_at_1 value: 38.769999999999996 - type: mrr_at_10 value: 48.742000000000004 - type: mrr_at_100 value: 49.474000000000004 - type: mrr_at_1000 value: 49.513 - type: mrr_at_3 value: 46.161 - type: mrr_at_5 value: 47.721000000000004 - type: ndcg_at_1 value: 38.769999999999996 - type: ndcg_at_10 value: 49.464999999999996 - type: ndcg_at_100 value: 54.632000000000005 - type: ndcg_at_1000 value: 56.52 - type: ndcg_at_3 value: 44.687 - type: ndcg_at_5 value: 46.814 - type: precision_at_1 value: 38.769999999999996 - type: precision_at_10 value: 9.471 - type: precision_at_100 value: 1.4909999999999999 - type: precision_at_1000 value: 0.194 - type: precision_at_3 value: 21.268 - type: precision_at_5 value: 15.079 - type: recall_at_1 value: 32.096000000000004 - type: recall_at_10 value: 60.99099999999999 - type: recall_at_100 value: 83.075 - type: recall_at_1000 value: 95.178 - type: recall_at_3 value: 47.009 - type: recall_at_5 value: 53.348 - type: map_at_1 value: 32.588 - type: map_at_10 value: 42.251 - type: map_at_100 value: 43.478 - type: map_at_1000 value: 43.617 - type: map_at_3 value: 39.381 - type: map_at_5 value: 41.141 - type: mrr_at_1 value: 41.21 - type: mrr_at_10 value: 48.765 - type: mrr_at_100 value: 49.403000000000006 - type: mrr_at_1000 value: 49.451 - type: mrr_at_3 value: 46.73 - type: mrr_at_5 value: 47.965999999999994 - type: ndcg_at_1 value: 41.21 - type: ndcg_at_10 value: 47.704 - type: ndcg_at_100 value: 51.916 - type: ndcg_at_1000 value: 54.013999999999996 - type: ndcg_at_3 value: 44.007000000000005 - type: ndcg_at_5 value: 45.936 - type: precision_at_1 value: 41.21 - type: precision_at_10 value: 8.885 - type: precision_at_100 value: 1.409 - type: precision_at_1000 value: 0.189 - type: precision_at_3 value: 21.274 - type: precision_at_5 value: 15.045 - type: recall_at_1 value: 32.588 - type: recall_at_10 value: 56.333 - type: recall_at_100 value: 74.251 - type: recall_at_1000 value: 87.518 - type: recall_at_3 value: 44.962 - type: recall_at_5 value: 50.609 - type: map_at_1 value: 40.308 - type: map_at_10 value: 53.12 - type: map_at_100 value: 54.123 - type: map_at_1000 value: 54.173 - type: map_at_3 value: 50.017999999999994 - type: map_at_5 value: 51.902 - type: mrr_at_1 value: 46.394999999999996 - type: mrr_at_10 value: 56.531 - type: mrr_at_100 value: 57.19800000000001 - type: mrr_at_1000 value: 57.225 - type: mrr_at_3 value: 54.368 - type: mrr_at_5 value: 55.713 - type: ndcg_at_1 value: 46.394999999999996 - type: ndcg_at_10 value: 58.811 - type: ndcg_at_100 value: 62.834 - type: ndcg_at_1000 value: 63.849999999999994 - type: ndcg_at_3 value: 53.88699999999999 - type: ndcg_at_5 value: 56.477999999999994 - type: 
precision_at_1 value: 46.394999999999996 - type: precision_at_10 value: 9.398 - type: precision_at_100 value: 1.2309999999999999 - type: precision_at_1000 value: 0.136 - type: precision_at_3 value: 24.221999999999998 - type: precision_at_5 value: 16.539 - type: recall_at_1 value: 40.308 - type: recall_at_10 value: 72.146 - type: recall_at_100 value: 89.60900000000001 - type: recall_at_1000 value: 96.733 - type: recall_at_3 value: 58.91499999999999 - type: recall_at_5 value: 65.34299999999999 - type: map_at_1 value: 27.383000000000003 - type: map_at_10 value: 35.802 - type: map_at_100 value: 36.756 - type: map_at_1000 value: 36.826 - type: map_at_3 value: 32.923 - type: map_at_5 value: 34.577999999999996 - type: mrr_at_1 value: 29.604999999999997 - type: mrr_at_10 value: 37.918 - type: mrr_at_100 value: 38.732 - type: mrr_at_1000 value: 38.786 - type: mrr_at_3 value: 35.198 - type: mrr_at_5 value: 36.808 - type: ndcg_at_1 value: 29.604999999999997 - type: ndcg_at_10 value: 40.836 - type: ndcg_at_100 value: 45.622 - type: ndcg_at_1000 value: 47.427 - type: ndcg_at_3 value: 35.208 - type: ndcg_at_5 value: 38.066 - type: precision_at_1 value: 29.604999999999997 - type: precision_at_10 value: 6.226 - type: precision_at_100 value: 0.9079999999999999 - type: precision_at_1000 value: 0.11 - type: precision_at_3 value: 14.463000000000001 - type: precision_at_5 value: 10.35 - type: recall_at_1 value: 27.383000000000003 - type: recall_at_10 value: 54.434000000000005 - type: recall_at_100 value: 76.632 - type: recall_at_1000 value: 90.25 - type: recall_at_3 value: 39.275 - type: recall_at_5 value: 46.225 - type: map_at_1 value: 17.885 - type: map_at_10 value: 25.724000000000004 - type: map_at_100 value: 26.992 - type: map_at_1000 value: 27.107999999999997 - type: map_at_3 value: 23.04 - type: map_at_5 value: 24.529 - type: mrr_at_1 value: 22.264 - type: mrr_at_10 value: 30.548 - type: mrr_at_100 value: 31.593 - type: mrr_at_1000 value: 31.657999999999998 - type: mrr_at_3 value: 27.756999999999998 - type: mrr_at_5 value: 29.398999999999997 - type: ndcg_at_1 value: 22.264 - type: ndcg_at_10 value: 30.902 - type: ndcg_at_100 value: 36.918 - type: ndcg_at_1000 value: 39.735 - type: ndcg_at_3 value: 25.915 - type: ndcg_at_5 value: 28.255999999999997 - type: precision_at_1 value: 22.264 - type: precision_at_10 value: 5.634 - type: precision_at_100 value: 0.9939999999999999 - type: precision_at_1000 value: 0.13699999999999998 - type: precision_at_3 value: 12.396 - type: precision_at_5 value: 9.055 - type: recall_at_1 value: 17.885 - type: recall_at_10 value: 42.237 - type: recall_at_100 value: 68.489 - type: recall_at_1000 value: 88.721 - type: recall_at_3 value: 28.283 - type: recall_at_5 value: 34.300000000000004 - type: map_at_1 value: 29.737000000000002 - type: map_at_10 value: 39.757 - type: map_at_100 value: 40.992 - type: map_at_1000 value: 41.102 - type: map_at_3 value: 36.612 - type: map_at_5 value: 38.413000000000004 - type: mrr_at_1 value: 35.804 - type: mrr_at_10 value: 45.178000000000004 - type: mrr_at_100 value: 45.975 - type: mrr_at_1000 value: 46.021 - type: mrr_at_3 value: 42.541000000000004 - type: mrr_at_5 value: 44.167 - type: ndcg_at_1 value: 35.804 - type: ndcg_at_10 value: 45.608 - type: ndcg_at_100 value: 50.746 - type: ndcg_at_1000 value: 52.839999999999996 - type: ndcg_at_3 value: 40.52 - type: ndcg_at_5 value: 43.051 - type: precision_at_1 value: 35.804 - type: precision_at_10 value: 8.104 - type: precision_at_100 value: 1.256 - type: precision_at_1000 value: 0.161 - type: 
precision_at_3 value: 19.121 - type: precision_at_5 value: 13.532 - type: recall_at_1 value: 29.737000000000002 - type: recall_at_10 value: 57.66 - type: recall_at_100 value: 79.121 - type: recall_at_1000 value: 93.023 - type: recall_at_3 value: 43.13 - type: recall_at_5 value: 49.836000000000006 - type: map_at_1 value: 26.299 - type: map_at_10 value: 35.617 - type: map_at_100 value: 36.972 - type: map_at_1000 value: 37.096000000000004 - type: map_at_3 value: 32.653999999999996 - type: map_at_5 value: 34.363 - type: mrr_at_1 value: 32.877 - type: mrr_at_10 value: 41.423 - type: mrr_at_100 value: 42.333999999999996 - type: mrr_at_1000 value: 42.398 - type: mrr_at_3 value: 39.193 - type: mrr_at_5 value: 40.426 - type: ndcg_at_1 value: 32.877 - type: ndcg_at_10 value: 41.271 - type: ndcg_at_100 value: 46.843 - type: ndcg_at_1000 value: 49.366 - type: ndcg_at_3 value: 36.735 - type: ndcg_at_5 value: 38.775999999999996 - type: precision_at_1 value: 32.877 - type: precision_at_10 value: 7.580000000000001 - type: precision_at_100 value: 1.192 - type: precision_at_1000 value: 0.158 - type: precision_at_3 value: 17.541999999999998 - type: precision_at_5 value: 12.443 - type: recall_at_1 value: 26.299 - type: recall_at_10 value: 52.256 - type: recall_at_100 value: 75.919 - type: recall_at_1000 value: 93.185 - type: recall_at_3 value: 39.271 - type: recall_at_5 value: 44.901 - type: map_at_1 value: 27.05741666666667 - type: map_at_10 value: 36.086416666666665 - type: map_at_100 value: 37.26916666666667 - type: map_at_1000 value: 37.38191666666666 - type: map_at_3 value: 33.34225 - type: map_at_5 value: 34.86425 - type: mrr_at_1 value: 32.06008333333333 - type: mrr_at_10 value: 40.36658333333333 - type: mrr_at_100 value: 41.206500000000005 - type: mrr_at_1000 value: 41.261083333333325 - type: mrr_at_3 value: 38.01208333333334 - type: mrr_at_5 value: 39.36858333333333 - type: ndcg_at_1 value: 32.06008333333333 - type: ndcg_at_10 value: 41.3535 - type: ndcg_at_100 value: 46.42066666666666 - type: ndcg_at_1000 value: 48.655166666666666 - type: ndcg_at_3 value: 36.78041666666667 - type: ndcg_at_5 value: 38.91783333333334 - type: precision_at_1 value: 32.06008333333333 - type: precision_at_10 value: 7.169833333333332 - type: precision_at_100 value: 1.1395 - type: precision_at_1000 value: 0.15158333333333332 - type: precision_at_3 value: 16.852 - type: precision_at_5 value: 11.8645 - type: recall_at_1 value: 27.05741666666667 - type: recall_at_10 value: 52.64491666666666 - type: recall_at_100 value: 74.99791666666667 - type: recall_at_1000 value: 90.50524999999999 - type: recall_at_3 value: 39.684000000000005 - type: recall_at_5 value: 45.37225 - type: map_at_1 value: 25.607999999999997 - type: map_at_10 value: 32.28 - type: map_at_100 value: 33.261 - type: map_at_1000 value: 33.346 - type: map_at_3 value: 30.514999999999997 - type: map_at_5 value: 31.415 - type: mrr_at_1 value: 28.988000000000003 - type: mrr_at_10 value: 35.384 - type: mrr_at_100 value: 36.24 - type: mrr_at_1000 value: 36.299 - type: mrr_at_3 value: 33.717000000000006 - type: mrr_at_5 value: 34.507 - type: ndcg_at_1 value: 28.988000000000003 - type: ndcg_at_10 value: 36.248000000000005 - type: ndcg_at_100 value: 41.034 - type: ndcg_at_1000 value: 43.35 - type: ndcg_at_3 value: 32.987 - type: ndcg_at_5 value: 34.333999999999996 - type: precision_at_1 value: 28.988000000000003 - type: precision_at_10 value: 5.506 - type: precision_at_100 value: 0.853 - type: precision_at_1000 value: 0.11199999999999999 - type: precision_at_3 value: 14.11 - 
type: precision_at_5 value: 9.417 - type: recall_at_1 value: 25.607999999999997 - type: recall_at_10 value: 45.344 - type: recall_at_100 value: 67.132 - type: recall_at_1000 value: 84.676 - type: recall_at_3 value: 36.02 - type: recall_at_5 value: 39.613 - type: map_at_1 value: 18.44 - type: map_at_10 value: 25.651000000000003 - type: map_at_100 value: 26.735 - type: map_at_1000 value: 26.86 - type: map_at_3 value: 23.409 - type: map_at_5 value: 24.604 - type: mrr_at_1 value: 22.195 - type: mrr_at_10 value: 29.482000000000003 - type: mrr_at_100 value: 30.395 - type: mrr_at_1000 value: 30.471999999999998 - type: mrr_at_3 value: 27.409 - type: mrr_at_5 value: 28.553 - type: ndcg_at_1 value: 22.195 - type: ndcg_at_10 value: 30.242 - type: ndcg_at_100 value: 35.397 - type: ndcg_at_1000 value: 38.287 - type: ndcg_at_3 value: 26.201 - type: ndcg_at_5 value: 28.008 - type: precision_at_1 value: 22.195 - type: precision_at_10 value: 5.372 - type: precision_at_100 value: 0.9259999999999999 - type: precision_at_1000 value: 0.135 - type: precision_at_3 value: 12.228 - type: precision_at_5 value: 8.727 - type: recall_at_1 value: 18.44 - type: recall_at_10 value: 40.325 - type: recall_at_100 value: 63.504000000000005 - type: recall_at_1000 value: 83.909 - type: recall_at_3 value: 28.925 - type: recall_at_5 value: 33.641 - type: map_at_1 value: 26.535999999999998 - type: map_at_10 value: 35.358000000000004 - type: map_at_100 value: 36.498999999999995 - type: map_at_1000 value: 36.597 - type: map_at_3 value: 32.598 - type: map_at_5 value: 34.185 - type: mrr_at_1 value: 31.25 - type: mrr_at_10 value: 39.593 - type: mrr_at_100 value: 40.443 - type: mrr_at_1000 value: 40.498 - type: mrr_at_3 value: 37.018 - type: mrr_at_5 value: 38.492 - type: ndcg_at_1 value: 31.25 - type: ndcg_at_10 value: 40.71 - type: ndcg_at_100 value: 46.079 - type: ndcg_at_1000 value: 48.287 - type: ndcg_at_3 value: 35.667 - type: ndcg_at_5 value: 38.080000000000005 - type: precision_at_1 value: 31.25 - type: precision_at_10 value: 6.847 - type: precision_at_100 value: 1.079 - type: precision_at_1000 value: 0.13699999999999998 - type: precision_at_3 value: 16.262 - type: precision_at_5 value: 11.455 - type: recall_at_1 value: 26.535999999999998 - type: recall_at_10 value: 52.92099999999999 - type: recall_at_100 value: 76.669 - type: recall_at_1000 value: 92.096 - type: recall_at_3 value: 38.956 - type: recall_at_5 value: 45.239000000000004 - type: map_at_1 value: 24.691 - type: map_at_10 value: 33.417 - type: map_at_100 value: 35.036 - type: map_at_1000 value: 35.251 - type: map_at_3 value: 30.646 - type: map_at_5 value: 32.177 - type: mrr_at_1 value: 30.04 - type: mrr_at_10 value: 37.905 - type: mrr_at_100 value: 38.929 - type: mrr_at_1000 value: 38.983000000000004 - type: mrr_at_3 value: 35.276999999999994 - type: mrr_at_5 value: 36.897000000000006 - type: ndcg_at_1 value: 30.04 - type: ndcg_at_10 value: 39.037 - type: ndcg_at_100 value: 44.944 - type: ndcg_at_1000 value: 47.644 - type: ndcg_at_3 value: 34.833999999999996 - type: ndcg_at_5 value: 36.83 - type: precision_at_1 value: 30.04 - type: precision_at_10 value: 7.4510000000000005 - type: precision_at_100 value: 1.492 - type: precision_at_1000 value: 0.234 - type: precision_at_3 value: 16.337 - type: precision_at_5 value: 11.897 - type: recall_at_1 value: 24.691 - type: recall_at_10 value: 49.303999999999995 - type: recall_at_100 value: 76.20400000000001 - type: recall_at_1000 value: 93.30000000000001 - type: recall_at_3 value: 36.594 - type: recall_at_5 value: 42.41 - type: 
map_at_1 value: 23.118 - type: map_at_10 value: 30.714999999999996 - type: map_at_100 value: 31.656000000000002 - type: map_at_1000 value: 31.757 - type: map_at_3 value: 28.355000000000004 - type: map_at_5 value: 29.337000000000003 - type: mrr_at_1 value: 25.323 - type: mrr_at_10 value: 32.93 - type: mrr_at_100 value: 33.762 - type: mrr_at_1000 value: 33.829 - type: mrr_at_3 value: 30.775999999999996 - type: mrr_at_5 value: 31.774 - type: ndcg_at_1 value: 25.323 - type: ndcg_at_10 value: 35.408 - type: ndcg_at_100 value: 40.083 - type: ndcg_at_1000 value: 42.542 - type: ndcg_at_3 value: 30.717 - type: ndcg_at_5 value: 32.385000000000005 - type: precision_at_1 value: 25.323 - type: precision_at_10 value: 5.564 - type: precision_at_100 value: 0.843 - type: precision_at_1000 value: 0.116 - type: precision_at_3 value: 13.001 - type: precision_at_5 value: 8.834999999999999 - type: recall_at_1 value: 23.118 - type: recall_at_10 value: 47.788000000000004 - type: recall_at_100 value: 69.37 - type: recall_at_1000 value: 87.47399999999999 - type: recall_at_3 value: 34.868 - type: recall_at_5 value: 39.001999999999995 - task: type: Retrieval dataset: name: MTEB ClimateFEVER type: climate-fever config: default split: test revision: None metrics: - type: map_at_1 value: 14.288 - type: map_at_10 value: 23.256 - type: map_at_100 value: 25.115 - type: map_at_1000 value: 25.319000000000003 - type: map_at_3 value: 20.005 - type: map_at_5 value: 21.529999999999998 - type: mrr_at_1 value: 31.401 - type: mrr_at_10 value: 42.251 - type: mrr_at_100 value: 43.236999999999995 - type: mrr_at_1000 value: 43.272 - type: mrr_at_3 value: 39.164 - type: mrr_at_5 value: 40.881 - type: ndcg_at_1 value: 31.401 - type: ndcg_at_10 value: 31.615 - type: ndcg_at_100 value: 38.982 - type: ndcg_at_1000 value: 42.496 - type: ndcg_at_3 value: 26.608999999999998 - type: ndcg_at_5 value: 28.048000000000002 - type: precision_at_1 value: 31.401 - type: precision_at_10 value: 9.536999999999999 - type: precision_at_100 value: 1.763 - type: precision_at_1000 value: 0.241 - type: precision_at_3 value: 19.153000000000002 - type: precision_at_5 value: 14.228 - type: recall_at_1 value: 14.288 - type: recall_at_10 value: 36.717 - type: recall_at_100 value: 61.9 - type: recall_at_1000 value: 81.676 - type: recall_at_3 value: 24.203 - type: recall_at_5 value: 28.793999999999997 - task: type: Retrieval dataset: name: MTEB DBPedia type: dbpedia-entity config: default split: test revision: None metrics: - type: map_at_1 value: 9.019 - type: map_at_10 value: 19.963 - type: map_at_100 value: 28.834 - type: map_at_1000 value: 30.537999999999997 - type: map_at_3 value: 14.45 - type: map_at_5 value: 16.817999999999998 - type: mrr_at_1 value: 65.75 - type: mrr_at_10 value: 74.646 - type: mrr_at_100 value: 74.946 - type: mrr_at_1000 value: 74.95100000000001 - type: mrr_at_3 value: 72.625 - type: mrr_at_5 value: 74.012 - type: ndcg_at_1 value: 54 - type: ndcg_at_10 value: 42.014 - type: ndcg_at_100 value: 47.527 - type: ndcg_at_1000 value: 54.911 - type: ndcg_at_3 value: 46.586 - type: ndcg_at_5 value: 43.836999999999996 - type: precision_at_1 value: 65.75 - type: precision_at_10 value: 33.475 - type: precision_at_100 value: 11.16 - type: precision_at_1000 value: 2.145 - type: precision_at_3 value: 50.083 - type: precision_at_5 value: 42.55 - type: recall_at_1 value: 9.019 - type: recall_at_10 value: 25.558999999999997 - type: recall_at_100 value: 53.937999999999995 - type: recall_at_1000 value: 77.67399999999999 - type: recall_at_3 value: 15.456 - type: 
recall_at_5 value: 19.259 - task: type: Classification dataset: name: MTEB EmotionClassification type: mteb/emotion config: default split: test revision: 4f58c6b202a23cf9a4da393831edf4f9183cad37 metrics: - type: accuracy value: 52.635 - type: f1 value: 47.692783881403926 - task: type: Retrieval dataset: name: MTEB FEVER type: fever config: default split: test revision: None metrics: - type: map_at_1 value: 76.893 - type: map_at_10 value: 84.897 - type: map_at_100 value: 85.122 - type: map_at_1000 value: 85.135 - type: map_at_3 value: 83.88 - type: map_at_5 value: 84.565 - type: mrr_at_1 value: 83.003 - type: mrr_at_10 value: 89.506 - type: mrr_at_100 value: 89.574 - type: mrr_at_1000 value: 89.575 - type: mrr_at_3 value: 88.991 - type: mrr_at_5 value: 89.349 - type: ndcg_at_1 value: 83.003 - type: ndcg_at_10 value: 88.351 - type: ndcg_at_100 value: 89.128 - type: ndcg_at_1000 value: 89.34100000000001 - type: ndcg_at_3 value: 86.92 - type: ndcg_at_5 value: 87.78200000000001 - type: precision_at_1 value: 83.003 - type: precision_at_10 value: 10.517999999999999 - type: precision_at_100 value: 1.115 - type: precision_at_1000 value: 0.11499999999999999 - type: precision_at_3 value: 33.062999999999995 - type: precision_at_5 value: 20.498 - type: recall_at_1 value: 76.893 - type: recall_at_10 value: 94.374 - type: recall_at_100 value: 97.409 - type: recall_at_1000 value: 98.687 - type: recall_at_3 value: 90.513 - type: recall_at_5 value: 92.709 - task: type: Retrieval dataset: name: MTEB FiQA2018 type: fiqa config: default split: test revision: None metrics: - type: map_at_1 value: 20.829 - type: map_at_10 value: 32.86 - type: map_at_100 value: 34.838 - type: map_at_1000 value: 35.006 - type: map_at_3 value: 28.597 - type: map_at_5 value: 31.056 - type: mrr_at_1 value: 41.358 - type: mrr_at_10 value: 49.542 - type: mrr_at_100 value: 50.29900000000001 - type: mrr_at_1000 value: 50.334999999999994 - type: mrr_at_3 value: 46.579 - type: mrr_at_5 value: 48.408 - type: ndcg_at_1 value: 41.358 - type: ndcg_at_10 value: 40.758 - type: ndcg_at_100 value: 47.799 - type: ndcg_at_1000 value: 50.589 - type: ndcg_at_3 value: 36.695 - type: ndcg_at_5 value: 38.193 - type: precision_at_1 value: 41.358 - type: precision_at_10 value: 11.142000000000001 - type: precision_at_100 value: 1.8350000000000002 - type: precision_at_1000 value: 0.234 - type: precision_at_3 value: 24.023 - type: precision_at_5 value: 17.963 - type: recall_at_1 value: 20.829 - type: recall_at_10 value: 47.467999999999996 - type: recall_at_100 value: 73.593 - type: recall_at_1000 value: 90.122 - type: recall_at_3 value: 32.74 - type: recall_at_5 value: 39.608 - task: type: Retrieval dataset: name: MTEB HotpotQA type: hotpotqa config: default split: test revision: None metrics: - type: map_at_1 value: 40.324 - type: map_at_10 value: 64.183 - type: map_at_100 value: 65.037 - type: map_at_1000 value: 65.094 - type: map_at_3 value: 60.663 - type: map_at_5 value: 62.951 - type: mrr_at_1 value: 80.648 - type: mrr_at_10 value: 86.005 - type: mrr_at_100 value: 86.157 - type: mrr_at_1000 value: 86.162 - type: mrr_at_3 value: 85.116 - type: mrr_at_5 value: 85.703 - type: ndcg_at_1 value: 80.648 - type: ndcg_at_10 value: 72.351 - type: ndcg_at_100 value: 75.279 - type: ndcg_at_1000 value: 76.357 - type: ndcg_at_3 value: 67.484 - type: ndcg_at_5 value: 70.31500000000001 - type: precision_at_1 value: 80.648 - type: precision_at_10 value: 15.103 - type: precision_at_100 value: 1.7399999999999998 - type: precision_at_1000 value: 0.188 - type: precision_at_3 
value: 43.232 - type: precision_at_5 value: 28.165000000000003 - type: recall_at_1 value: 40.324 - type: recall_at_10 value: 75.517 - type: recall_at_100 value: 86.982 - type: recall_at_1000 value: 94.072 - type: recall_at_3 value: 64.848 - type: recall_at_5 value: 70.41199999999999 - task: type: Classification dataset: name: MTEB ImdbClassification type: mteb/imdb config: default split: test revision: 3d86128a09e091d6018b6d26cad27f2739fc2db7 metrics: - type: accuracy value: 91.4 - type: ap value: 87.4422032289312 - type: f1 value: 91.39249564302281 - task: type: Retrieval dataset: name: MTEB MSMARCO type: msmarco config: default split: dev revision: None metrics: - type: map_at_1 value: 22.03 - type: map_at_10 value: 34.402 - type: map_at_100 value: 35.599 - type: map_at_1000 value: 35.648 - type: map_at_3 value: 30.603 - type: map_at_5 value: 32.889 - type: mrr_at_1 value: 22.679 - type: mrr_at_10 value: 35.021 - type: mrr_at_100 value: 36.162 - type: mrr_at_1000 value: 36.205 - type: mrr_at_3 value: 31.319999999999997 - type: mrr_at_5 value: 33.562 - type: ndcg_at_1 value: 22.692999999999998 - type: ndcg_at_10 value: 41.258 - type: ndcg_at_100 value: 46.967 - type: ndcg_at_1000 value: 48.175000000000004 - type: ndcg_at_3 value: 33.611000000000004 - type: ndcg_at_5 value: 37.675 - type: precision_at_1 value: 22.692999999999998 - type: precision_at_10 value: 6.5089999999999995 - type: precision_at_100 value: 0.936 - type: precision_at_1000 value: 0.104 - type: precision_at_3 value: 14.413 - type: precision_at_5 value: 10.702 - type: recall_at_1 value: 22.03 - type: recall_at_10 value: 62.248000000000005 - type: recall_at_100 value: 88.524 - type: recall_at_1000 value: 97.714 - type: recall_at_3 value: 41.617 - type: recall_at_5 value: 51.359 - task: type: Classification dataset: name: MTEB MTOPDomainClassification (en) type: mteb/mtop_domain config: en split: test revision: d80d48c1eb48d3562165c59d59d0034df9fff0bf metrics: - type: accuracy value: 94.36844505243957 - type: f1 value: 94.12408743818202 - task: type: Classification dataset: name: MTEB MTOPIntentClassification (en) type: mteb/mtop_intent config: en split: test revision: ae001d0e6b1228650b7bd1c2c65fb50ad11a8aba metrics: - type: accuracy value: 76.43410852713177 - type: f1 value: 58.501855709435624 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (en) type: mteb/amazon_massive_intent config: en split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 76.04909213180902 - type: f1 value: 74.1800860395823 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (en) type: mteb/amazon_massive_scenario config: en split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 79.76126429051781 - type: f1 value: 79.85705217473232 - task: type: Clustering dataset: name: MTEB MedrxivClusteringP2P type: mteb/medrxiv-clustering-p2p config: default split: test revision: e7a26af6f3ae46b30dde8737f02c07b1505bcc73 metrics: - type: v_measure value: 34.70119520292863 - task: type: Clustering dataset: name: MTEB MedrxivClusteringS2S type: mteb/medrxiv-clustering-s2s config: default split: test revision: 35191c8c0dca72d8ff3efcd72aa802307d469663 metrics: - type: v_measure value: 32.33544316467486 - task: type: Reranking dataset: name: MTEB MindSmallReranking type: mteb/mind_small config: default split: test revision: 3bdac13927fdc888b903db93b2ffdbd90b295a69 metrics: - type: map value: 30.75499243990726 - type: mrr value: 
31.70602251821063 - task: type: Retrieval dataset: name: MTEB NFCorpus type: nfcorpus config: default split: test revision: None metrics: - type: map_at_1 value: 6.451999999999999 - type: map_at_10 value: 13.918 - type: map_at_100 value: 17.316000000000003 - type: map_at_1000 value: 18.747 - type: map_at_3 value: 10.471 - type: map_at_5 value: 12.104 - type: mrr_at_1 value: 46.749 - type: mrr_at_10 value: 55.717000000000006 - type: mrr_at_100 value: 56.249 - type: mrr_at_1000 value: 56.288000000000004 - type: mrr_at_3 value: 53.818 - type: mrr_at_5 value: 55.103 - type: ndcg_at_1 value: 45.201 - type: ndcg_at_10 value: 35.539 - type: ndcg_at_100 value: 32.586 - type: ndcg_at_1000 value: 41.486000000000004 - type: ndcg_at_3 value: 41.174 - type: ndcg_at_5 value: 38.939 - type: precision_at_1 value: 46.749 - type: precision_at_10 value: 25.944 - type: precision_at_100 value: 8.084 - type: precision_at_1000 value: 2.076 - type: precision_at_3 value: 38.7 - type: precision_at_5 value: 33.56 - type: recall_at_1 value: 6.451999999999999 - type: recall_at_10 value: 17.302 - type: recall_at_100 value: 32.14 - type: recall_at_1000 value: 64.12 - type: recall_at_3 value: 11.219 - type: recall_at_5 value: 13.993 - task: type: Retrieval dataset: name: MTEB NQ type: nq config: default split: test revision: None metrics: - type: map_at_1 value: 32.037 - type: map_at_10 value: 46.565 - type: map_at_100 value: 47.606 - type: map_at_1000 value: 47.636 - type: map_at_3 value: 42.459 - type: map_at_5 value: 44.762 - type: mrr_at_1 value: 36.181999999999995 - type: mrr_at_10 value: 49.291000000000004 - type: mrr_at_100 value: 50.059 - type: mrr_at_1000 value: 50.078 - type: mrr_at_3 value: 45.829 - type: mrr_at_5 value: 47.797 - type: ndcg_at_1 value: 36.153 - type: ndcg_at_10 value: 53.983000000000004 - type: ndcg_at_100 value: 58.347 - type: ndcg_at_1000 value: 59.058 - type: ndcg_at_3 value: 46.198 - type: ndcg_at_5 value: 50.022 - type: precision_at_1 value: 36.153 - type: precision_at_10 value: 8.763 - type: precision_at_100 value: 1.123 - type: precision_at_1000 value: 0.11900000000000001 - type: precision_at_3 value: 20.751 - type: precision_at_5 value: 14.646999999999998 - type: recall_at_1 value: 32.037 - type: recall_at_10 value: 74.008 - type: recall_at_100 value: 92.893 - type: recall_at_1000 value: 98.16 - type: recall_at_3 value: 53.705999999999996 - type: recall_at_5 value: 62.495 - task: type: Retrieval dataset: name: MTEB QuoraRetrieval type: quora config: default split: test revision: None metrics: - type: map_at_1 value: 71.152 - type: map_at_10 value: 85.104 - type: map_at_100 value: 85.745 - type: map_at_1000 value: 85.761 - type: map_at_3 value: 82.175 - type: map_at_5 value: 84.066 - type: mrr_at_1 value: 82.03 - type: mrr_at_10 value: 88.115 - type: mrr_at_100 value: 88.21 - type: mrr_at_1000 value: 88.211 - type: mrr_at_3 value: 87.19200000000001 - type: mrr_at_5 value: 87.85 - type: ndcg_at_1 value: 82.03 - type: ndcg_at_10 value: 88.78 - type: ndcg_at_100 value: 89.96300000000001 - type: ndcg_at_1000 value: 90.056 - type: ndcg_at_3 value: 86.051 - type: ndcg_at_5 value: 87.63499999999999 - type: precision_at_1 value: 82.03 - type: precision_at_10 value: 13.450000000000001 - type: precision_at_100 value: 1.5310000000000001 - type: precision_at_1000 value: 0.157 - type: precision_at_3 value: 37.627 - type: precision_at_5 value: 24.784 - type: recall_at_1 value: 71.152 - type: recall_at_10 value: 95.649 - type: recall_at_100 value: 99.58200000000001 - type: recall_at_1000 value: 99.981 
- type: recall_at_3 value: 87.767 - type: recall_at_5 value: 92.233 - task: type: Clustering dataset: name: MTEB RedditClustering type: mteb/reddit-clustering config: default split: test revision: 24640382cdbf8abc73003fb0fa6d111a705499eb metrics: - type: v_measure value: 56.48713646277477 - task: type: Clustering dataset: name: MTEB RedditClusteringP2P type: mteb/reddit-clustering-p2p config: default split: test revision: 282350215ef01743dc01b456c7f5241fa8937f16 metrics: - type: v_measure value: 63.394940772438545 - task: type: Retrieval dataset: name: MTEB SCIDOCS type: scidocs config: default split: test revision: None metrics: - type: map_at_1 value: 5.043 - type: map_at_10 value: 12.949 - type: map_at_100 value: 15.146 - type: map_at_1000 value: 15.495000000000001 - type: map_at_3 value: 9.333 - type: map_at_5 value: 11.312999999999999 - type: mrr_at_1 value: 24.9 - type: mrr_at_10 value: 35.958 - type: mrr_at_100 value: 37.152 - type: mrr_at_1000 value: 37.201 - type: mrr_at_3 value: 32.667 - type: mrr_at_5 value: 34.567 - type: ndcg_at_1 value: 24.9 - type: ndcg_at_10 value: 21.298000000000002 - type: ndcg_at_100 value: 29.849999999999998 - type: ndcg_at_1000 value: 35.506 - type: ndcg_at_3 value: 20.548 - type: ndcg_at_5 value: 18.064 - type: precision_at_1 value: 24.9 - type: precision_at_10 value: 10.9 - type: precision_at_100 value: 2.331 - type: precision_at_1000 value: 0.367 - type: precision_at_3 value: 19.267 - type: precision_at_5 value: 15.939999999999998 - type: recall_at_1 value: 5.043 - type: recall_at_10 value: 22.092 - type: recall_at_100 value: 47.323 - type: recall_at_1000 value: 74.553 - type: recall_at_3 value: 11.728 - type: recall_at_5 value: 16.188 - task: type: STS dataset: name: MTEB SICK-R type: mteb/sickr-sts config: default split: test revision: a6ea5a8cab320b040a23452cc28066d9beae2cee metrics: - type: cos_sim_pearson value: 83.7007085938325 - type: cos_sim_spearman value: 80.0171084446234 - type: euclidean_pearson value: 81.28133218355893 - type: euclidean_spearman value: 79.99291731740131 - type: manhattan_pearson value: 81.22926922327846 - type: manhattan_spearman value: 79.94444878127038 - task: type: STS dataset: name: MTEB STS12 type: mteb/sts12-sts config: default split: test revision: a0d554a64d88156834ff5ae9920b964011b16384 metrics: - type: cos_sim_pearson value: 85.7411883252923 - type: cos_sim_spearman value: 77.93462937801245 - type: euclidean_pearson value: 83.00858563882404 - type: euclidean_spearman value: 77.82717362433257 - type: manhattan_pearson value: 82.92887645790769 - type: manhattan_spearman value: 77.78807488222115 - task: type: STS dataset: name: MTEB STS13 type: mteb/sts13-sts config: default split: test revision: 7e90230a92c190f1bf69ae9002b8cea547a64cca metrics: - type: cos_sim_pearson value: 82.04222459361023 - type: cos_sim_spearman value: 83.85931509330395 - type: euclidean_pearson value: 83.26916063876055 - type: euclidean_spearman value: 83.98621985648353 - type: manhattan_pearson value: 83.14935679184327 - type: manhattan_spearman value: 83.87938828586304 - task: type: STS dataset: name: MTEB STS14 type: mteb/sts14-sts config: default split: test revision: 6031580fec1f6af667f0bd2da0a551cf4f0b2375 metrics: - type: cos_sim_pearson value: 81.41136639535318 - type: cos_sim_spearman value: 81.51200091040481 - type: euclidean_pearson value: 81.45382456114775 - type: euclidean_spearman value: 81.46201181707931 - type: manhattan_pearson value: 81.37243088439584 - type: manhattan_spearman value: 81.39828421893426 - task: type: STS 
dataset: name: MTEB STS15 type: mteb/sts15-sts config: default split: test revision: ae752c7c21bf194d8b67fd573edf7ae58183cbe3 metrics: - type: cos_sim_pearson value: 85.71942451732227 - type: cos_sim_spearman value: 87.33044482064973 - type: euclidean_pearson value: 86.58580899365178 - type: euclidean_spearman value: 87.09206723832895 - type: manhattan_pearson value: 86.47460784157013 - type: manhattan_spearman value: 86.98367656583076 - task: type: STS dataset: name: MTEB STS16 type: mteb/sts16-sts config: default split: test revision: 4d8694f8f0e0100860b497b999b3dbed754a0513 metrics: - type: cos_sim_pearson value: 83.55868078863449 - type: cos_sim_spearman value: 85.38299230074065 - type: euclidean_pearson value: 84.64715256244595 - type: euclidean_spearman value: 85.49112229604047 - type: manhattan_pearson value: 84.60814346792462 - type: manhattan_spearman value: 85.44886026766822 - task: type: STS dataset: name: MTEB STS17 (en-en) type: mteb/sts17-crosslingual-sts config: en-en split: test revision: af5e6fb845001ecf41f4c1e033ce921939a2a68d metrics: - type: cos_sim_pearson value: 84.99292526370614 - type: cos_sim_spearman value: 85.58139465695983 - type: euclidean_pearson value: 86.51325066734084 - type: euclidean_spearman value: 85.56736418284562 - type: manhattan_pearson value: 86.48190836601357 - type: manhattan_spearman value: 85.51616256224258 - task: type: STS dataset: name: MTEB STS22 (en) type: mteb/sts22-crosslingual-sts config: en split: test revision: 6d1ba47164174a496b7fa5d3569dae26a6813b80 metrics: - type: cos_sim_pearson value: 64.54124715078807 - type: cos_sim_spearman value: 65.32134275948374 - type: euclidean_pearson value: 67.09791698300816 - type: euclidean_spearman value: 65.79468982468465 - type: manhattan_pearson value: 67.13304723693966 - type: manhattan_spearman value: 65.68439995849283 - task: type: STS dataset: name: MTEB STSBenchmark type: mteb/stsbenchmark-sts config: default split: test revision: b0fddb56ed78048fa8b90373c8a3cfc37b684831 metrics: - type: cos_sim_pearson value: 83.4231099581624 - type: cos_sim_spearman value: 85.95475815226862 - type: euclidean_pearson value: 85.00339401999706 - type: euclidean_spearman value: 85.74133081802971 - type: manhattan_pearson value: 85.00407987181666 - type: manhattan_spearman value: 85.77509596397363 - task: type: Reranking dataset: name: MTEB SciDocsRR type: mteb/scidocs-reranking config: default split: test revision: d3c5e1fc0b855ab6097bf1cda04dd73947d7caab metrics: - type: map value: 87.25666719585716 - type: mrr value: 96.32769917083642 - task: type: Retrieval dataset: name: MTEB SciFact type: scifact config: default split: test revision: None metrics: - type: map_at_1 value: 57.828 - type: map_at_10 value: 68.369 - type: map_at_100 value: 68.83399999999999 - type: map_at_1000 value: 68.856 - type: map_at_3 value: 65.38000000000001 - type: map_at_5 value: 67.06299999999999 - type: mrr_at_1 value: 61 - type: mrr_at_10 value: 69.45400000000001 - type: mrr_at_100 value: 69.785 - type: mrr_at_1000 value: 69.807 - type: mrr_at_3 value: 67 - type: mrr_at_5 value: 68.43299999999999 - type: ndcg_at_1 value: 61 - type: ndcg_at_10 value: 73.258 - type: ndcg_at_100 value: 75.173 - type: ndcg_at_1000 value: 75.696 - type: ndcg_at_3 value: 68.162 - type: ndcg_at_5 value: 70.53399999999999 - type: precision_at_1 value: 61 - type: precision_at_10 value: 9.8 - type: precision_at_100 value: 1.087 - type: precision_at_1000 value: 0.11299999999999999 - type: precision_at_3 value: 27 - type: precision_at_5 value: 
17.666999999999998 - type: recall_at_1 value: 57.828 - type: recall_at_10 value: 87.122 - type: recall_at_100 value: 95.667 - type: recall_at_1000 value: 99.667 - type: recall_at_3 value: 73.139 - type: recall_at_5 value: 79.361 - task: type: PairClassification dataset: name: MTEB SprintDuplicateQuestions type: mteb/sprintduplicatequestions-pairclassification config: default split: test revision: d66bd1f72af766a5cc4b0ca5e00c162f89e8cc46 metrics: - type: cos_sim_accuracy value: 99.85247524752475 - type: cos_sim_ap value: 96.25640197639723 - type: cos_sim_f1 value: 92.37851662404091 - type: cos_sim_precision value: 94.55497382198953 - type: cos_sim_recall value: 90.3 - type: dot_accuracy value: 99.76138613861386 - type: dot_ap value: 93.40295864389073 - type: dot_f1 value: 87.64267990074441 - type: dot_precision value: 86.99507389162562 - type: dot_recall value: 88.3 - type: euclidean_accuracy value: 99.85049504950496 - type: euclidean_ap value: 96.24254350525462 - type: euclidean_f1 value: 92.32323232323232 - type: euclidean_precision value: 93.26530612244898 - type: euclidean_recall value: 91.4 - type: manhattan_accuracy value: 99.85346534653465 - type: manhattan_ap value: 96.2635334753325 - type: manhattan_f1 value: 92.37899073120495 - type: manhattan_precision value: 95.22292993630573 - type: manhattan_recall value: 89.7 - type: max_accuracy value: 99.85346534653465 - type: max_ap value: 96.2635334753325 - type: max_f1 value: 92.37899073120495 - task: type: Clustering dataset: name: MTEB StackExchangeClustering type: mteb/stackexchange-clustering config: default split: test revision: 6cbc1f7b2bc0622f2e39d2c77fa502909748c259 metrics: - type: v_measure value: 65.83905786483794 - task: type: Clustering dataset: name: MTEB StackExchangeClusteringP2P type: mteb/stackexchange-clustering-p2p config: default split: test revision: 815ca46b2622cec33ccafc3735d572c266efdb44 metrics: - type: v_measure value: 35.031896152126436 - task: type: Reranking dataset: name: MTEB StackOverflowDupQuestions type: mteb/stackoverflowdupquestions-reranking config: default split: test revision: e185fbe320c72810689fc5848eb6114e1ef5ec69 metrics: - type: map value: 54.551326709447146 - type: mrr value: 55.43758222986165 - task: type: Summarization dataset: name: MTEB SummEval type: mteb/summeval config: default split: test revision: cda12ad7615edc362dbf25a00fdd61d3b1eaf93c metrics: - type: cos_sim_pearson value: 30.305688567308874 - type: cos_sim_spearman value: 29.27135743434515 - type: dot_pearson value: 30.336741878796563 - type: dot_spearman value: 30.513365725895937 - task: type: Retrieval dataset: name: MTEB TRECCOVID type: trec-covid config: default split: test revision: None metrics: - type: map_at_1 value: 0.245 - type: map_at_10 value: 1.92 - type: map_at_100 value: 10.519 - type: map_at_1000 value: 23.874000000000002 - type: map_at_3 value: 0.629 - type: map_at_5 value: 1.0290000000000001 - type: mrr_at_1 value: 88 - type: mrr_at_10 value: 93.5 - type: mrr_at_100 value: 93.5 - type: mrr_at_1000 value: 93.5 - type: mrr_at_3 value: 93 - type: mrr_at_5 value: 93.5 - type: ndcg_at_1 value: 84 - type: ndcg_at_10 value: 76.447 - type: ndcg_at_100 value: 56.516 - type: ndcg_at_1000 value: 48.583999999999996 - type: ndcg_at_3 value: 78.877 - type: ndcg_at_5 value: 79.174 - type: precision_at_1 value: 88 - type: precision_at_10 value: 80.60000000000001 - type: precision_at_100 value: 57.64 - type: precision_at_1000 value: 21.227999999999998 - type: precision_at_3 value: 82 - type: precision_at_5 value: 83.6 - type: 
recall_at_1 value: 0.245 - type: recall_at_10 value: 2.128 - type: recall_at_100 value: 13.767 - type: recall_at_1000 value: 44.958 - type: recall_at_3 value: 0.654 - type: recall_at_5 value: 1.111 - task: type: Retrieval dataset: name: MTEB Touche2020 type: webis-touche2020 config: default split: test revision: None metrics: - type: map_at_1 value: 2.5170000000000003 - type: map_at_10 value: 10.915 - type: map_at_100 value: 17.535 - type: map_at_1000 value: 19.042 - type: map_at_3 value: 5.689 - type: map_at_5 value: 7.837 - type: mrr_at_1 value: 34.694 - type: mrr_at_10 value: 49.547999999999995 - type: mrr_at_100 value: 50.653000000000006 - type: mrr_at_1000 value: 50.653000000000006 - type: mrr_at_3 value: 44.558 - type: mrr_at_5 value: 48.333 - type: ndcg_at_1 value: 32.653 - type: ndcg_at_10 value: 26.543 - type: ndcg_at_100 value: 38.946 - type: ndcg_at_1000 value: 49.406 - type: ndcg_at_3 value: 29.903000000000002 - type: ndcg_at_5 value: 29.231 - type: precision_at_1 value: 34.694 - type: precision_at_10 value: 23.265 - type: precision_at_100 value: 8.102 - type: precision_at_1000 value: 1.5 - type: precision_at_3 value: 31.293 - type: precision_at_5 value: 29.796 - type: recall_at_1 value: 2.5170000000000003 - type: recall_at_10 value: 16.88 - type: recall_at_100 value: 49.381 - type: recall_at_1000 value: 81.23899999999999 - type: recall_at_3 value: 6.965000000000001 - type: recall_at_5 value: 10.847999999999999 - task: type: Classification dataset: name: MTEB ToxicConversationsClassification type: mteb/toxic_conversations_50k config: default split: test revision: d7c0de2777da35d6aae2200a62c6e0e5af397c4c metrics: - type: accuracy value: 71.5942 - type: ap value: 13.92074156956546 - type: f1 value: 54.671999698839066 - task: type: Classification dataset: name: MTEB TweetSentimentExtractionClassification type: mteb/tweet_sentiment_extraction config: default split: test revision: d604517c81ca91fe16a244d1248fc021f9ecee7a metrics: - type: accuracy value: 59.39728353140916 - type: f1 value: 59.68980496759517 - task: type: Clustering dataset: name: MTEB TwentyNewsgroupsClustering type: mteb/twentynewsgroups-clustering config: default split: test revision: 6125ec4e24fa026cec8a478383ee943acfbd5449 metrics: - type: v_measure value: 52.11181870104935 - task: type: PairClassification dataset: name: MTEB TwitterSemEval2015 type: mteb/twittersemeval2015-pairclassification config: default split: test revision: 70970daeab8776df92f5ea462b6173c0b46fd2d1 metrics: - type: cos_sim_accuracy value: 86.46957143708649 - type: cos_sim_ap value: 76.16120197845457 - type: cos_sim_f1 value: 69.69919295671315 - type: cos_sim_precision value: 64.94986326344576 - type: cos_sim_recall value: 75.19788918205805 - type: dot_accuracy value: 83.0780234845324 - type: dot_ap value: 64.21717343541934 - type: dot_f1 value: 59.48375497624245 - type: dot_precision value: 57.94345759319489 - type: dot_recall value: 61.108179419525065 - type: euclidean_accuracy value: 86.6543482148179 - type: euclidean_ap value: 76.4527555010203 - type: euclidean_f1 value: 70.10156056477584 - type: euclidean_precision value: 66.05975723622782 - type: euclidean_recall value: 74.67018469656992 - type: manhattan_accuracy value: 86.66030875603504 - type: manhattan_ap value: 76.40304567255436 - type: manhattan_f1 value: 70.05275426328058 - type: manhattan_precision value: 65.4666360926393 - type: manhattan_recall value: 75.32981530343008 - type: max_accuracy value: 86.66030875603504 - type: max_ap value: 76.4527555010203 - type: max_f1 value: 
70.10156056477584 - task: type: PairClassification dataset: name: MTEB TwitterURLCorpus type: mteb/twitterurlcorpus-pairclassification config: default split: test revision: 8b6510b0b1fa4e4c4f879467980e9be563ec1cdf metrics: - type: cos_sim_accuracy value: 88.42123646524624 - type: cos_sim_ap value: 85.15431437761646 - type: cos_sim_f1 value: 76.98069301530742 - type: cos_sim_precision value: 72.9314502239063 - type: cos_sim_recall value: 81.50600554357868 - type: dot_accuracy value: 86.70974502270346 - type: dot_ap value: 80.77621563599457 - type: dot_f1 value: 73.87058697285117 - type: dot_precision value: 68.98256396552877 - type: dot_recall value: 79.50415768401602 - type: euclidean_accuracy value: 88.46392672798541 - type: euclidean_ap value: 85.20370297495491 - type: euclidean_f1 value: 77.01372369624886 - type: euclidean_precision value: 73.39052800446397 - type: euclidean_recall value: 81.01324299353249 - type: manhattan_accuracy value: 88.43481973066325 - type: manhattan_ap value: 85.16318289864545 - type: manhattan_f1 value: 76.90884877182597 - type: manhattan_precision value: 74.01737396753062 - type: manhattan_recall value: 80.03541730828458 - type: max_accuracy value: 88.46392672798541 - type: max_ap value: 85.20370297495491 - type: max_f1 value: 77.01372369624886 --- **We recommend switching to the newer [BAAI/bge-base-en-v1.5](https://huggingface.co/BAAI/bge-base-en-v1.5), which has a more reasonable similarity distribution and the same method of usage.** <h1 align="center">FlagEmbedding</h1> <h4 align="center"> <p> <a href=#model-list>Model List</a> | <a href=#frequently-asked-questions>FAQ</a> | <a href=#usage>Usage</a> | <a href="#evaluation">Evaluation</a> | <a href="#train">Train</a> | <a href="#contact">Contact</a> | <a href="#citation">Citation</a> | <a href="#license">License</a> <p> </h4> For more details, please refer to our GitHub repository: [FlagEmbedding](https://github.com/FlagOpen/FlagEmbedding). [English](README.md) | [中文](https://github.com/FlagOpen/FlagEmbedding/blob/master/README_zh.md) FlagEmbedding can map any text to a low-dimensional dense vector which can be used for tasks like retrieval, classification, clustering, or semantic search. It can also be used in vector databases for LLMs. ************* 🌟**Updates**🌟 ************* - 10/12/2023: Release [LLM-Embedder](./FlagEmbedding/llm_embedder/README.md), a unified embedding model to support diverse retrieval augmentation needs for LLMs. [Paper](https://arxiv.org/pdf/2310.07554.pdf) :fire: - 09/15/2023: The [technical report](https://arxiv.org/pdf/2309.07597.pdf) of BGE has been released - 09/15/2023: The [massive training data](https://data.baai.ac.cn/details/BAAI-MTP) of BGE has been released - 09/12/2023: New models: - **New reranker model**: release cross-encoder models `BAAI/bge-reranker-base` and `BAAI/bge-reranker-large`, which are more powerful than the embedding models. We recommend using or fine-tuning them to re-rank the top-k documents returned by embedding models. - **Updated embedding model**: release the `bge-*-v1.5` embedding models to alleviate the issue of the similarity distribution and enhance retrieval ability without instruction. <details> <summary>More</summary> <!-- ### More --> - 09/07/2023: Update [fine-tune code](https://github.com/FlagOpen/FlagEmbedding/blob/master/FlagEmbedding/baai_general_embedding/README.md): add a script to mine hard negatives and support adding instructions during fine-tuning. 
- 08/09/2023: BGE models are integrated into **Langchain**, and you can use them like [this](#using-langchain); the C-MTEB **leaderboard** is [available](https://huggingface.co/spaces/mteb/leaderboard). - 08/05/2023: Release base-scale and small-scale models, with the **best performance among models of the same size 🤗** - 08/02/2023: Release the `bge-large-*` (short for BAAI General Embedding) models, which **rank 1st on the MTEB and C-MTEB benchmarks!** :tada: :tada: - 08/01/2023: We release the [Chinese Massive Text Embedding Benchmark](https://github.com/FlagOpen/FlagEmbedding/blob/master/C_MTEB) (**C-MTEB**), consisting of 31 test datasets. </details> ## Model List `bge` is short for `BAAI general embedding`. | Model | Language | | Description | query instruction for retrieval [1] | |:-------------------------------|:--------:| :--------:| :--------:|:--------:| | [BAAI/llm-embedder](https://huggingface.co/BAAI/llm-embedder) | English | [Inference](./FlagEmbedding/llm_embedder/README.md) [Fine-tune](./FlagEmbedding/llm_embedder/README.md) | a unified embedding model to support diverse retrieval augmentation needs for LLMs | See [README](./FlagEmbedding/llm_embedder/README.md) | | [BAAI/bge-reranker-large](https://huggingface.co/BAAI/bge-reranker-large) | Chinese and English | [Inference](#usage-for-reranker) [Fine-tune](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/reranker) | a cross-encoder model which is more accurate but less efficient [2] | | | [BAAI/bge-reranker-base](https://huggingface.co/BAAI/bge-reranker-base) | Chinese and English | [Inference](#usage-for-reranker) [Fine-tune](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/reranker) | a cross-encoder model which is more accurate but less efficient [2] | | | [BAAI/bge-large-en-v1.5](https://huggingface.co/BAAI/bge-large-en-v1.5) | English | [Inference](#usage-for-embedding-model) [Fine-tune](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/finetune) | version 1.5 with more reasonable similarity distribution | `Represent this sentence for searching relevant passages: ` | | [BAAI/bge-base-en-v1.5](https://huggingface.co/BAAI/bge-base-en-v1.5) | English | [Inference](#usage-for-embedding-model) [Fine-tune](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/finetune) | version 1.5 with more reasonable similarity distribution | `Represent this sentence for searching relevant passages: ` | | [BAAI/bge-small-en-v1.5](https://huggingface.co/BAAI/bge-small-en-v1.5) | English | [Inference](#usage-for-embedding-model) [Fine-tune](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/finetune) | version 1.5 with more reasonable similarity distribution | `Represent this sentence for searching relevant passages: ` | | [BAAI/bge-large-zh-v1.5](https://huggingface.co/BAAI/bge-large-zh-v1.5) | Chinese | [Inference](#usage-for-embedding-model) [Fine-tune](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/finetune) | version 1.5 with more reasonable similarity distribution | `为这个句子生成表示以用于检索相关文章:` | | [BAAI/bge-base-zh-v1.5](https://huggingface.co/BAAI/bge-base-zh-v1.5) | Chinese | [Inference](#usage-for-embedding-model) [Fine-tune](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/finetune) | version 1.5 with more reasonable similarity distribution | `为这个句子生成表示以用于检索相关文章:` | | [BAAI/bge-small-zh-v1.5](https://huggingface.co/BAAI/bge-small-zh-v1.5) | Chinese | [Inference](#usage-for-embedding-model) [Fine-tune](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/finetune) | 
version 1.5 with more reasonable similarity distribution | `为这个句子生成表示以用于检索相关文章:` | | [BAAI/bge-large-en](https://huggingface.co/BAAI/bge-large-en) | English | [Inference](#usage-for-embedding-model) [Fine-tune](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/finetune) | :trophy: rank **1st** in [MTEB](https://huggingface.co/spaces/mteb/leaderboard) leaderboard | `Represent this sentence for searching relevant passages: ` | | [BAAI/bge-base-en](https://huggingface.co/BAAI/bge-base-en) | English | [Inference](#usage-for-embedding-model) [Fine-tune](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/finetune) | a base-scale model but with similar ability to `bge-large-en` | `Represent this sentence for searching relevant passages: ` | | [BAAI/bge-small-en](https://huggingface.co/BAAI/bge-small-en) | English | [Inference](#usage-for-embedding-model) [Fine-tune](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/finetune) | a small-scale model but with competitive performance | `Represent this sentence for searching relevant passages: ` | | [BAAI/bge-large-zh](https://huggingface.co/BAAI/bge-large-zh) | Chinese | [Inference](#usage-for-embedding-model) [Fine-tune](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/finetune) | :trophy: rank **1st** in [C-MTEB](https://github.com/FlagOpen/FlagEmbedding/tree/master/C_MTEB) benchmark | `为这个句子生成表示以用于检索相关文章:` | | [BAAI/bge-base-zh](https://huggingface.co/BAAI/bge-base-zh) | Chinese | [Inference](#usage-for-embedding-model) [Fine-tune](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/finetune) | a base-scale model but with similar ability to `bge-large-zh` | `为这个句子生成表示以用于检索相关文章:` | | [BAAI/bge-small-zh](https://huggingface.co/BAAI/bge-small-zh) | Chinese | [Inference](#usage-for-embedding-model) [Fine-tune](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/finetune) | a small-scale model but with competitive performance | `为这个句子生成表示以用于检索相关文章:` | [1\]: If you need to search for passages relevant to a query, we suggest adding the instruction to the query; in other cases, no instruction is needed and you can use the original query directly. In all cases, **no instruction** needs to be added to passages. [2\]: Different from the embedding model, the reranker takes a question and a document as input and directly outputs a similarity score instead of an embedding. To balance accuracy and time cost, a cross-encoder is widely used to re-rank the top-k documents retrieved by simpler models. For example, use the bge embedding model to retrieve the top 100 relevant documents, and then use the bge reranker to re-rank those 100 documents to get the final top-3 results. All models have been uploaded to Huggingface Hub, and you can see them at https://huggingface.co/BAAI. If you cannot open the Huggingface Hub, you can also download the models at https://model.baai.ac.cn/models. ## Frequently asked questions <details> <summary>1. How to fine-tune bge embedding model?</summary> <!-- ### How to fine-tune bge embedding model? --> Follow this [example](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/finetune) to prepare data and fine-tune your model. Some suggestions: - Mine hard negatives following this [example](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/finetune#hard-negatives), which can improve the retrieval performance. 
- If you pre-train bge on your data, the pre-trained model cannot be directly used to calculate similarity; it must be fine-tuned with contrastive learning before computing similarity. - If the accuracy of the fine-tuned model is still not high, it is recommended to use/fine-tune the cross-encoder model (bge-reranker) to re-rank the top-k results. Hard negatives are also needed to fine-tune the reranker. </details> <details> <summary>2. The similarity score between two dissimilar sentences is higher than 0.5</summary> <!-- ### The similarity score between two dissimilar sentences is higher than 0.5 --> **We suggest using bge v1.5, which alleviates the issue of the similarity distribution.** Since we fine-tune the models with contrastive learning at a temperature of 0.01, the similarity distribution of the current BGE model is roughly in the interval \[0.6, 1\]. So a similarity score greater than 0.5 does not indicate that the two sentences are similar. For downstream tasks, such as passage retrieval or semantic similarity, **what matters is the relative order of the scores, not the absolute value.** If you need to filter similar sentences based on a similarity threshold, please select an appropriate threshold based on the similarity distribution on your data (such as 0.8, 0.85, or even 0.9). </details> <details> <summary>3. When does the query instruction need to be used</summary> <!-- ### When does the query instruction need to be used --> For `bge-*-v1.5`, we improved its retrieval ability when no instruction is used; omitting the instruction causes only a slight degradation in retrieval performance compared with using it. So you can generate embeddings without instruction in all cases for convenience. For a retrieval task that uses short queries to find long related documents, it is recommended to add instructions to these short queries. **The best way to decide whether to add instructions to queries is to choose the setting that achieves better performance on your task.** In all cases, the documents/passages do not need the instruction. </details> ## Usage ### Usage for Embedding Model Here are some examples of using `bge` models with [FlagEmbedding](#using-flagembedding), [Sentence-Transformers](#using-sentence-transformers), [Langchain](#using-langchain), or [Huggingface Transformers](#using-huggingface-transformers). #### Using FlagEmbedding ``` pip install -U FlagEmbedding ``` If this doesn't work for you, see [FlagEmbedding](https://github.com/FlagOpen/FlagEmbedding/blob/master/FlagEmbedding/baai_general_embedding/README.md) for more ways to install FlagEmbedding. 
```python from FlagEmbedding import FlagModel sentences_1 = ["样例数据-1", "样例数据-2"] sentences_2 = ["样例数据-3", "样例数据-4"] model = FlagModel('BAAI/bge-large-zh-v1.5', query_instruction_for_retrieval="为这个句子生成表示以用于检索相关文章:", use_fp16=True) # Setting use_fp16 to True speeds up computation with a slight performance degradation embeddings_1 = model.encode(sentences_1) embeddings_2 = model.encode(sentences_2) similarity = embeddings_1 @ embeddings_2.T print(similarity) # for the s2p (short query to long passage) retrieval task, we suggest using encode_queries(), which automatically adds the instruction to each query # the corpus in a retrieval task can still use encode() or encode_corpus(), since passages do not need the instruction queries = ['query_1', 'query_2'] passages = ["样例文档-1", "样例文档-2"] q_embeddings = model.encode_queries(queries) p_embeddings = model.encode(passages) scores = q_embeddings @ p_embeddings.T ``` For the value of the argument `query_instruction_for_retrieval`, see [Model List](https://github.com/FlagOpen/FlagEmbedding/tree/master#model-list). By default, FlagModel will use all available GPUs when encoding. Please set `os.environ["CUDA_VISIBLE_DEVICES"]` to select specific GPUs. You can also set `os.environ["CUDA_VISIBLE_DEVICES"]=""` to make all GPUs unavailable. #### Using Sentence-Transformers You can also use the `bge` models with [sentence-transformers](https://www.SBERT.net): ``` pip install -U sentence-transformers ``` ```python from sentence_transformers import SentenceTransformer sentences_1 = ["样例数据-1", "样例数据-2"] sentences_2 = ["样例数据-3", "样例数据-4"] model = SentenceTransformer('BAAI/bge-large-zh-v1.5') embeddings_1 = model.encode(sentences_1, normalize_embeddings=True) embeddings_2 = model.encode(sentences_2, normalize_embeddings=True) similarity = embeddings_1 @ embeddings_2.T print(similarity) ``` For the s2p (short query to long passage) retrieval task, each short query should start with an instruction (for the instructions, see [Model List](https://github.com/FlagOpen/FlagEmbedding/tree/master#model-list)). The instruction is not needed for passages. ```python from sentence_transformers import SentenceTransformer queries = ['query_1', 'query_2'] passages = ["样例文档-1", "样例文档-2"] instruction = "为这个句子生成表示以用于检索相关文章:" model = SentenceTransformer('BAAI/bge-large-zh-v1.5') q_embeddings = model.encode([instruction+q for q in queries], normalize_embeddings=True) p_embeddings = model.encode(passages, normalize_embeddings=True) scores = q_embeddings @ p_embeddings.T ``` #### Using Langchain You can use `bge` in Langchain like this: ```python from langchain.embeddings import HuggingFaceBgeEmbeddings model_name = "BAAI/bge-large-en-v1.5" model_kwargs = {'device': 'cuda'} encode_kwargs = {'normalize_embeddings': True} # set True to compute cosine similarity model = HuggingFaceBgeEmbeddings( model_name=model_name, model_kwargs=model_kwargs, encode_kwargs=encode_kwargs, query_instruction="为这个句子生成表示以用于检索相关文章:" ) model.query_instruction = "为这个句子生成表示以用于检索相关文章:" ``` #### Using HuggingFace Transformers With the transformers package, you can use the model like this: first, you pass your input through the transformer model, then you select the last hidden state of the first token (i.e., [CLS]) as the sentence embedding. 
```python from transformers import AutoTokenizer, AutoModel import torch # Sentences we want sentence embeddings for sentences = ["样例数据-1", "样例数据-2"] # Load model from HuggingFace Hub tokenizer = AutoTokenizer.from_pretrained('BAAI/bge-large-zh-v1.5') model = AutoModel.from_pretrained('BAAI/bge-large-zh-v1.5') model.eval() # Tokenize sentences encoded_input = tokenizer(sentences, padding=True, truncation=True, return_tensors='pt') # for the s2p (short query to long passage) retrieval task, add an instruction to each query (do not add an instruction to passages) # encoded_input = tokenizer([instruction + q for q in queries], padding=True, truncation=True, return_tensors='pt') # Compute token embeddings with torch.no_grad(): model_output = model(**encoded_input) # Perform pooling. In this case, cls pooling. sentence_embeddings = model_output[0][:, 0] # normalize embeddings sentence_embeddings = torch.nn.functional.normalize(sentence_embeddings, p=2, dim=1) print("Sentence embeddings:", sentence_embeddings) ``` ### Usage for Reranker Different from the embedding model, the reranker takes a question and a document as input and directly outputs a similarity score instead of an embedding. You can get a relevance score by feeding a query and a passage to the reranker. The reranker is optimized with cross-entropy loss, so the relevance score is not bounded to a specific range. #### Using FlagEmbedding ``` pip install -U FlagEmbedding ``` Get relevance scores (higher scores indicate more relevance): ```python from FlagEmbedding import FlagReranker reranker = FlagReranker('BAAI/bge-reranker-large', use_fp16=True) # Setting use_fp16 to True speeds up computation with a slight performance degradation score = reranker.compute_score(['query', 'passage']) print(score) scores = reranker.compute_score([['what is panda?', 'hi'], ['what is panda?', 'The giant panda (Ailuropoda melanoleuca), sometimes called a panda bear or simply panda, is a bear species endemic to China.']]) print(scores) ``` #### Using Huggingface transformers ```python import torch from transformers import AutoModelForSequenceClassification, AutoTokenizer tokenizer = AutoTokenizer.from_pretrained('BAAI/bge-reranker-large') model = AutoModelForSequenceClassification.from_pretrained('BAAI/bge-reranker-large') model.eval() pairs = [['what is panda?', 'hi'], ['what is panda?', 'The giant panda (Ailuropoda melanoleuca), sometimes called a panda bear or simply panda, is a bear species endemic to China.']] with torch.no_grad(): inputs = tokenizer(pairs, padding=True, truncation=True, return_tensors='pt', max_length=512) scores = model(**inputs, return_dict=True).logits.view(-1).float() print(scores) ``` ## Evaluation `baai-general-embedding` models achieve **state-of-the-art performance on both the MTEB and C-MTEB leaderboards!** For more details and evaluation tools, see our [scripts](https://github.com/FlagOpen/FlagEmbedding/blob/master/C_MTEB/README.md). 
- **MTEB**: | Model Name | Dimension | Sequence Length | Average (56) | Retrieval (15) |Clustering (11) | Pair Classification (3) | Reranking (4) | STS (10) | Summarization (1) | Classification (12) | |:----:|:---:|:---:|:---:|:---:|:---:|:---:|:---:|:---:|:---:|:---:| | [BAAI/bge-large-en-v1.5](https://huggingface.co/BAAI/bge-large-en-v1.5) | 1024 | 512 | **64.23** | **54.29** | 46.08 | 87.12 | 60.03 | 83.11 | 31.61 | 75.97 | | [BAAI/bge-base-en-v1.5](https://huggingface.co/BAAI/bge-base-en-v1.5) | 768 | 512 | 63.55 | 53.25 | 45.77 | 86.55 | 58.86 | 82.4 | 31.07 | 75.53 | | [BAAI/bge-small-en-v1.5](https://huggingface.co/BAAI/bge-small-en-v1.5) | 384 | 512 | 62.17 |51.68 | 43.82 | 84.92 | 58.36 | 81.59 | 30.12 | 74.14 | | [bge-large-en](https://huggingface.co/BAAI/bge-large-en) | 1024 | 512 | 63.98 | 53.9 | 46.98 | 85.8 | 59.48 | 81.56 | 32.06 | 76.21 | | [bge-base-en](https://huggingface.co/BAAI/bge-base-en) | 768 | 512 | 63.36 | 53.0 | 46.32 | 85.86 | 58.7 | 81.84 | 29.27 | 75.27 | | [gte-large](https://huggingface.co/thenlper/gte-large) | 1024 | 512 | 63.13 | 52.22 | 46.84 | 85.00 | 59.13 | 83.35 | 31.66 | 73.33 | | [gte-base](https://huggingface.co/thenlper/gte-base) | 768 | 512 | 62.39 | 51.14 | 46.2 | 84.57 | 58.61 | 82.3 | 31.17 | 73.01 | | [e5-large-v2](https://huggingface.co/intfloat/e5-large-v2) | 1024| 512 | 62.25 | 50.56 | 44.49 | 86.03 | 56.61 | 82.05 | 30.19 | 75.24 | | [bge-small-en](https://huggingface.co/BAAI/bge-small-en) | 384 | 512 | 62.11 | 51.82 | 44.31 | 83.78 | 57.97 | 80.72 | 30.53 | 74.37 | | [instructor-xl](https://huggingface.co/hkunlp/instructor-xl) | 768 | 512 | 61.79 | 49.26 | 44.74 | 86.62 | 57.29 | 83.06 | 32.32 | 61.79 | | [e5-base-v2](https://huggingface.co/intfloat/e5-base-v2) | 768 | 512 | 61.5 | 50.29 | 43.80 | 85.73 | 55.91 | 81.05 | 30.28 | 73.84 | | [gte-small](https://huggingface.co/thenlper/gte-small) | 384 | 512 | 61.36 | 49.46 | 44.89 | 83.54 | 57.7 | 82.07 | 30.42 | 72.31 | | [text-embedding-ada-002](https://platform.openai.com/docs/guides/embeddings) | 1536 | 8192 | 60.99 | 49.25 | 45.9 | 84.89 | 56.32 | 80.97 | 30.8 | 70.93 | | [e5-small-v2](https://huggingface.co/intfloat/e5-base-v2) | 384 | 512 | 59.93 | 49.04 | 39.92 | 84.67 | 54.32 | 80.39 | 31.16 | 72.94 | | [sentence-t5-xxl](https://huggingface.co/sentence-transformers/sentence-t5-xxl) | 768 | 512 | 59.51 | 42.24 | 43.72 | 85.06 | 56.42 | 82.63 | 30.08 | 73.42 | | [all-mpnet-base-v2](https://huggingface.co/sentence-transformers/all-mpnet-base-v2) | 768 | 514 | 57.78 | 43.81 | 43.69 | 83.04 | 59.36 | 80.28 | 27.49 | 65.07 | | [sgpt-bloom-7b1-msmarco](https://huggingface.co/bigscience/sgpt-bloom-7b1-msmarco) | 4096 | 2048 | 57.59 | 48.22 | 38.93 | 81.9 | 55.65 | 77.74 | 33.6 | 66.19 | - **C-MTEB**: We create the benchmark C-MTEB for Chinese text embedding which consists of 31 datasets from 6 tasks. Please refer to [C_MTEB](https://github.com/FlagOpen/FlagEmbedding/blob/master/C_MTEB/README.md) for a detailed introduction. 
| Model | Embedding dimension | Avg | Retrieval | STS | PairClassification | Classification | Reranking | Clustering | |:-------------------------------|:--------:|:--------:|:--------:|:--------:|:--------:|:--------:|:--------:|:--------:| | [**BAAI/bge-large-zh-v1.5**](https://huggingface.co/BAAI/bge-large-zh-v1.5) | 1024 | **64.53** | 70.46 | 56.25 | 81.6 | 69.13 | 65.84 | 48.99 | | [BAAI/bge-base-zh-v1.5](https://huggingface.co/BAAI/bge-base-zh-v1.5) | 768 | 63.13 | 69.49 | 53.72 | 79.75 | 68.07 | 65.39 | 47.53 | | [BAAI/bge-small-zh-v1.5](https://huggingface.co/BAAI/bge-small-zh-v1.5) | 512 | 57.82 | 61.77 | 49.11 | 70.41 | 63.96 | 60.92 | 44.18 | | [BAAI/bge-large-zh](https://huggingface.co/BAAI/bge-large-zh) | 1024 | 64.20 | 71.53 | 54.98 | 78.94 | 68.32 | 65.11 | 48.39 | | [bge-large-zh-noinstruct](https://huggingface.co/BAAI/bge-large-zh-noinstruct) | 1024 | 63.53 | 70.55 | 53 | 76.77 | 68.58 | 64.91 | 50.01 | | [BAAI/bge-base-zh](https://huggingface.co/BAAI/bge-base-zh) | 768 | 62.96 | 69.53 | 54.12 | 77.5 | 67.07 | 64.91 | 47.63 | | [multilingual-e5-large](https://huggingface.co/intfloat/multilingual-e5-large) | 1024 | 58.79 | 63.66 | 48.44 | 69.89 | 67.34 | 56.00 | 48.23 | | [BAAI/bge-small-zh](https://huggingface.co/BAAI/bge-small-zh) | 512 | 58.27 | 63.07 | 49.45 | 70.35 | 63.64 | 61.48 | 45.09 | | [m3e-base](https://huggingface.co/moka-ai/m3e-base) | 768 | 57.10 | 56.91 | 50.47 | 63.99 | 67.52 | 59.34 | 47.68 | | [m3e-large](https://huggingface.co/moka-ai/m3e-large) | 1024 | 57.05 | 54.75 | 50.42 | 64.3 | 68.2 | 59.66 | 48.88 | | [multilingual-e5-base](https://huggingface.co/intfloat/multilingual-e5-base) | 768 | 55.48 | 61.63 | 46.49 | 67.07 | 65.35 | 54.35 | 40.68 | | [multilingual-e5-small](https://huggingface.co/intfloat/multilingual-e5-small) | 384 | 55.38 | 59.95 | 45.27 | 66.45 | 65.85 | 53.86 | 45.26 | | [text-embedding-ada-002(OpenAI)](https://platform.openai.com/docs/guides/embeddings/what-are-embeddings) | 1536 | 53.02 | 52.0 | 43.35 | 69.56 | 64.31 | 54.28 | 45.68 | | [luotuo](https://huggingface.co/silk-road/luotuo-bert-medium) | 1024 | 49.37 | 44.4 | 42.78 | 66.62 | 61 | 49.25 | 44.39 | | [text2vec-base](https://huggingface.co/shibing624/text2vec-base-chinese) | 768 | 47.63 | 38.79 | 43.41 | 67.41 | 62.19 | 49.45 | 37.66 | | [text2vec-large](https://huggingface.co/GanymedeNil/text2vec-large-chinese) | 1024 | 47.36 | 41.94 | 44.97 | 70.86 | 60.66 | 49.16 | 30.02 | - **Reranking**: See [C_MTEB](https://github.com/FlagOpen/FlagEmbedding/blob/master/C_MTEB/) for evaluation script. 
| Model | T2Reranking | T2RerankingZh2En\* | T2RerankingEn2Zh\* | MMarcoReranking | CMedQAv1 | CMedQAv2 | Avg | |:-------------------------------|:--------:|:--------:|:--------:|:--------:|:--------:|:--------:|:--------:| | text2vec-base-multilingual | 64.66 | 62.94 | 62.51 | 14.37 | 48.46 | 48.6 | 50.26 | | multilingual-e5-small | 65.62 | 60.94 | 56.41 | 29.91 | 67.26 | 66.54 | 57.78 | | multilingual-e5-large | 64.55 | 61.61 | 54.28 | 28.6 | 67.42 | 67.92 | 57.4 | | multilingual-e5-base | 64.21 | 62.13 | 54.68 | 29.5 | 66.23 | 66.98 | 57.29 | | m3e-base | 66.03 | 62.74 | 56.07 | 17.51 | 77.05 | 76.76 | 59.36 | | m3e-large | 66.13 | 62.72 | 56.1 | 16.46 | 77.76 | 78.27 | 59.57 | | bge-base-zh-v1.5 | 66.49 | 63.25 | 57.02 | 29.74 | 80.47 | 84.88 | 63.64 | | bge-large-zh-v1.5 | 65.74 | 63.39 | 57.03 | 28.74 | 83.45 | 85.44 | 63.97 | | [BAAI/bge-reranker-base](https://huggingface.co/BAAI/bge-reranker-base) | 67.28 | 63.95 | 60.45 | 35.46 | 81.26 | 84.1 | 65.42 | | [BAAI/bge-reranker-large](https://huggingface.co/BAAI/bge-reranker-large) | 67.6 | 64.03 | 61.44 | 37.16 | 82.15 | 84.18 | 66.09 | \*: T2RerankingZh2En and T2RerankingEn2Zh are cross-language retrieval tasks. ## Train ### BAAI Embedding We pre-train the models using [retromae](https://github.com/staoxiao/RetroMAE) and train them on large-scale pair data using contrastive learning. **You can fine-tune the embedding model on your data following our [examples](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/finetune).** We also provide a [pre-train example](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/pretrain). Note that the goal of pre-training is to reconstruct the text, and the pre-trained model cannot be used for similarity calculation directly; it needs to be fine-tuned. For more training details for bge, see [baai_general_embedding](https://github.com/FlagOpen/FlagEmbedding/blob/master/FlagEmbedding/baai_general_embedding/README.md). ### BGE Reranker A cross-encoder performs full attention over the input pair, which is more accurate than an embedding model (i.e., bi-encoder) but more time-consuming. Therefore, it can be used to re-rank the top-k documents returned by the embedding model. We train the cross-encoder on multilingual pair data. The data format is the same as for the embedding model, so you can fine-tune it easily following our [example](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/reranker). For more details, please refer to [./FlagEmbedding/reranker/README.md](https://github.com/FlagOpen/FlagEmbedding/tree/master/FlagEmbedding/reranker) ## Contact If you have any questions or suggestions related to this project, feel free to open an issue or pull request. You can also email Shitao Xiao ([email protected]) and Zheng Liu ([email protected]). ## Citation If you find this repository useful, please consider giving it a star :star: and a citation ``` @misc{bge_embedding, title={C-Pack: Packaged Resources To Advance General Chinese Embedding}, author={Shitao Xiao and Zheng Liu and Peitian Zhang and Niklas Muennighoff}, year={2023}, eprint={2309.07597}, archivePrefix={arXiv}, primaryClass={cs.CL} } ``` ## License FlagEmbedding is licensed under the [MIT License](https://github.com/FlagOpen/FlagEmbedding/blob/master/LICENSE). The released models can be used for commercial purposes free of charge.
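The sections above describe a two-stage retrieve-then-rerank workflow: bi-encoder retrieval followed by cross-encoder re-ranking. The snippet below is a minimal sketch of that pipeline built only from the `FlagModel` and `FlagReranker` APIs shown earlier; the corpus, query, and top-k value are illustrative placeholders, not part of the original examples.

```python
from FlagEmbedding import FlagModel, FlagReranker

# Illustrative placeholder corpus and query.
corpus = [
    "The giant panda is a bear species endemic to China.",
    "Pandas feed almost exclusively on bamboo.",
    "The Eiffel Tower is located in Paris.",
]
query = "what do pandas eat?"

# Stage 1: dense retrieval with the bi-encoder (bge embedding model).
embedder = FlagModel(
    "BAAI/bge-large-en-v1.5",
    query_instruction_for_retrieval="Represent this sentence for searching relevant passages: ",
    use_fp16=True,
)
q_emb = embedder.encode_queries([query])   # instruction is added automatically
p_emb = embedder.encode(corpus)            # passages need no instruction
dense_scores = (q_emb @ p_emb.T)[0]
top_k = dense_scores.argsort()[::-1][:2]   # keep the 2 best candidates

# Stage 2: re-rank the candidates with the cross-encoder (bge reranker).
reranker = FlagReranker("BAAI/bge-reranker-large", use_fp16=True)
rerank_scores = reranker.compute_score([[query, corpus[i]] for i in top_k])
ranked = sorted(zip(rerank_scores, top_k), reverse=True)
print(corpus[ranked[0][1]])                # best passage after re-ranking
```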
[ "BEAR", "BIOSSES", "SCIFACT" ]
evo-design/evo-1.5-8k-base
evo-design
null
[ "pytorch", "safetensors", "stripedhyena", "long context", "deep signal processing", "hybrid", "biology", "genomics", "custom_code", "license:apache-2.0", "region:us" ]
"2024-12-07T04:31:48Z"
2024-12-10T02:20:56+00:00
94,459
1
---
license: apache-2.0
tags:
- stripedhyena
- long context
- deep signal processing
- hybrid
- biology
- genomics
---

## Evo 1.5

<p align="center">
  <img src="https://cdn-uploads.huggingface.co/production/uploads/62a1306bbe7fa896d2c8de44/JoEHcvLTUlHoMcgh3mmAz.png" width="70%" />
</p>

### About

Evo is a biological foundation model capable of long-context modeling and design.

Evo uses the [StripedHyena architecture](https://github.com/togethercomputer/stripedhyena) to model sequences at single-nucleotide, byte-level resolution with near-linear scaling of compute and memory relative to context length.

Evo has 7 billion parameters and is trained on OpenGenome, a prokaryotic whole-genome dataset containing ~300 billion tokens.

**Evo 1.5** is built from the 8k-context Evo 1 base model, with pretraining extended on 50% more data for a total of 450 billion tokens.

| Checkpoint Name | Description |
|----------------------------------------|-------------|
| `evo-1.5-8k-base` | A model pretrained with 8,192 context, obtained by extending the pretraining of `evo-1-8k-base` with 50% more training data. |
| `evo-1-8k-base` | A model pretrained with 8,192 context. We use this model as the base model for molecular-scale finetuning tasks. |
| `evo-1-131k-base` | A model pretrained with 131,072 context using `evo-1-8k-base` as the initialization. We use this model to reason about and generate sequences at the genome scale. |
| `evo-1-8k-crispr` | A model fine-tuned on `evo-1-8k-base` specifically on CRISPR-Cas systems. We use this model to generate Cas9/12/13 systems. |
| `evo-1-8k-transposon` | A model fine-tuned on `evo-1-8k-base` specifically on transposons. We use this model to generate IS200/IS605 elements. |

### How to use Evo

Example usage is provided in the [standalone repo](https://github.com/evo-design/evo); a minimal loading sketch also follows at the end of this card.

## Cite

```
@article{nguyen2024sequence,
   author = {Eric Nguyen and Michael Poli and Matthew G. Durrant and Brian Kang and Dhruva Katrekar and David B. Li and Liam J. Bartie and Armin W. Thomas and Samuel H. King and Garyk Brixi and Jeremy Sullivan and Madelena Y. Ng and Ashley Lewis and Aaron Lou and Stefano Ermon and Stephen A. Baccus and Tina Hernandez-Boussard and Christopher Ré and Patrick D. Hsu and Brian L. Hie},
   title = {Sequence modeling and design from molecular to genome scale with Evo},
   journal = {Science},
   volume = {386},
   number = {6723},
   pages = {eado9336},
   year = {2024},
   doi = {10.1126/science.ado9336},
   URL = {https://www.science.org/doi/abs/10.1126/science.ado9336},
}
```
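As referenced in the "How to use Evo" section above, here is a minimal loading sketch. It assumes the Hub checkpoint ships its StripedHyena implementation as custom code (the `custom_code` tag suggests so, hence `trust_remote_code=True`); tokenization, scoring, and generation utilities are best taken from the standalone Evo repo.

```python
import torch
from transformers import AutoConfig, AutoModelForCausalLM

model_name = "evo-design/evo-1.5-8k-base"

# Load the custom StripedHyena config and weights from the Hub.
config = AutoConfig.from_pretrained(model_name, trust_remote_code=True)
config.use_cache = True  # enable state caching for faster autoregressive decoding

model = AutoModelForCausalLM.from_pretrained(
    model_name,
    config=config,
    torch_dtype=torch.float16,  # half precision keeps memory manageable for a 7B model
    trust_remote_code=True,
)
model.eval()

# Sanity check: Evo 1.5 should report roughly 7 billion parameters.
print(sum(p.numel() for p in model.parameters()), "parameters loaded")
```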
[ "CAS" ]
ibm-granite/granite-embedding-30m-english
ibm-granite
sentence-similarity
[ "sentence-transformers", "pytorch", "safetensors", "roberta", "feature-extraction", "language", "granite", "embeddings", "mteb", "transformers", "sentence-similarity", "en", "arxiv:0000.00000", "license:apache-2.0", "model-index", "autotrain_compatible", "text-embeddings-inference", "endpoints_compatible", "region:us" ]
"2024-12-04T20:28:47Z"
2025-03-04T15:10:29+00:00
90,310
9
--- language: - en library_name: sentence-transformers license: apache-2.0 pipeline_tag: sentence-similarity tags: - language - granite - embeddings - mteb - transformers model-index: - name: ibm-granite/granite-embedding-30m-english results: - task: type: Classification dataset: name: MTEB AmazonCounterfactualClassification (en-ext) type: mteb/amazon_counterfactual config: en-ext split: test revision: e8379541af4e31359cca9fbcf4b00f2671dba205 metrics: - type: accuracy value: 62.856100000000005 - type: f1 value: 51.5046 - type: f1_weighted value: 69.9775 - type: ap value: 15.4995 - type: ap_weighted value: 15.4995 - type: main_score value: 62.856100000000005 - task: type: Classification dataset: name: MTEB AmazonCounterfactualClassification (en) type: mteb/amazon_counterfactual config: en split: test revision: e8379541af4e31359cca9fbcf4b00f2671dba205 metrics: - type: accuracy value: 60.925399999999996 - type: f1 value: 55.0092 - type: f1_weighted value: 64.8014 - type: ap value: 25.0517 - type: ap_weighted value: 25.0517 - type: main_score value: 60.925399999999996 - task: type: Classification dataset: name: MTEB AmazonPolarityClassification (default) type: mteb/amazon_polarity config: default split: test revision: e2d317d38cd51312af73b3d32a06d1a08b442046 metrics: - type: accuracy value: 62.983599999999996 - type: f1 value: 62.553599999999996 - type: f1_weighted value: 62.553599999999996 - type: ap value: 58.3423 - type: ap_weighted value: 58.3423 - type: main_score value: 62.983599999999996 - task: type: Classification dataset: name: MTEB AmazonReviewsClassification (en) type: mteb/amazon_reviews_multi config: en split: test revision: 1399c76144fd37290681b995c656ef9b2e06e26d metrics: - type: accuracy value: 32.178000000000004 - type: f1 value: 31.5201 - type: f1_weighted value: 31.5201 - type: main_score value: 32.178000000000004 - task: type: Retrieval dataset: name: MTEB AppsRetrieval (default) type: CoIR-Retrieval/apps config: default split: test revision: f22508f96b7a36c2415181ed8bb76f76e04ae2d5 metrics: - type: ndcg_at_1 value: 3.5060000000000002 - type: ndcg_at_3 value: 4.789000000000001 - type: ndcg_at_5 value: 5.314 - type: ndcg_at_10 value: 6.203 - type: ndcg_at_20 value: 6.801 - type: ndcg_at_100 value: 8.588 - type: ndcg_at_1000 value: 12.418999999999999 - type: map_at_1 value: 3.5060000000000002 - type: map_at_3 value: 4.471 - type: map_at_5 value: 4.7620000000000005 - type: map_at_10 value: 5.117 - type: map_at_20 value: 5.281000000000001 - type: map_at_100 value: 5.501 - type: map_at_1000 value: 5.611 - type: recall_at_1 value: 3.5060000000000002 - type: recall_at_3 value: 5.71 - type: recall_at_5 value: 6.984999999999999 - type: recall_at_10 value: 9.801 - type: recall_at_20 value: 12.165 - type: recall_at_100 value: 22.205 - type: recall_at_1000 value: 54.396 - type: precision_at_1 value: 3.5060000000000002 - type: precision_at_3 value: 1.9029999999999998 - type: precision_at_5 value: 1.397 - type: precision_at_10 value: 0.98 - type: precision_at_20 value: 0.608 - type: precision_at_100 value: 0.22200000000000003 - type: precision_at_1000 value: 0.054 - type: mrr_at_1 value: 3.5060000000000002 - type: mrr_at_3 value: 4.471 - type: mrr_at_5 value: 4.7618 - type: mrr_at_10 value: 5.1166 - type: mrr_at_20 value: 5.2806 - type: mrr_at_100 value: 5.5014 - type: mrr_at_1000 value: 5.6113 - type: nauc_ndcg_at_1_max value: 32.8089 - type: nauc_ndcg_at_1_std value: 13.0518 - type: nauc_ndcg_at_1_diff1 value: 44.3602 - type: nauc_ndcg_at_3_max value: 28.5037 - type: 
nauc_ndcg_at_3_std value: 12.1308 - type: nauc_ndcg_at_3_diff1 value: 33.0191 - type: nauc_ndcg_at_5_max value: 25.970100000000002 - type: nauc_ndcg_at_5_std value: 12.089500000000001 - type: nauc_ndcg_at_5_diff1 value: 30.098200000000002 - type: nauc_ndcg_at_10_max value: 23.9177 - type: nauc_ndcg_at_10_std value: 12.1279 - type: nauc_ndcg_at_10_diff1 value: 26.3951 - type: nauc_ndcg_at_20_max value: 22.2086 - type: nauc_ndcg_at_20_std value: 11.355 - type: nauc_ndcg_at_20_diff1 value: 24.9668 - type: nauc_ndcg_at_100_max value: 20.1961 - type: nauc_ndcg_at_100_std value: 11.368300000000001 - type: nauc_ndcg_at_100_diff1 value: 21.654200000000003 - type: nauc_ndcg_at_1000_max value: 19.7802 - type: nauc_ndcg_at_1000_std value: 11.9399 - type: nauc_ndcg_at_1000_diff1 value: 19.8429 - type: nauc_map_at_1_max value: 32.8089 - type: nauc_map_at_1_std value: 13.0518 - type: nauc_map_at_1_diff1 value: 44.3602 - type: nauc_map_at_3_max value: 29.285600000000002 - type: nauc_map_at_3_std value: 12.4277 - type: nauc_map_at_3_diff1 value: 35.2678 - type: nauc_map_at_5_max value: 27.6754 - type: nauc_map_at_5_std value: 12.4042 - type: nauc_map_at_5_diff1 value: 33.330799999999996 - type: nauc_map_at_10_max value: 26.571299999999997 - type: nauc_map_at_10_std value: 12.439400000000001 - type: nauc_map_at_10_diff1 value: 31.275399999999998 - type: nauc_map_at_20_max value: 25.8795 - type: nauc_map_at_20_std value: 12.1596 - type: nauc_map_at_20_diff1 value: 30.6354 - type: nauc_map_at_100_max value: 25.3369 - type: nauc_map_at_100_std value: 12.0245 - type: nauc_map_at_100_diff1 value: 29.8703 - type: nauc_map_at_1000_max value: 25.239800000000002 - type: nauc_map_at_1000_std value: 12.0242 - type: nauc_map_at_1000_diff1 value: 29.7235 - type: nauc_recall_at_1_max value: 32.8089 - type: nauc_recall_at_1_std value: 13.0518 - type: nauc_recall_at_1_diff1 value: 44.3602 - type: nauc_recall_at_3_max value: 26.747700000000002 - type: nauc_recall_at_3_std value: 11.4203 - type: nauc_recall_at_3_diff1 value: 27.9047 - type: nauc_recall_at_5_max value: 22.3707 - type: nauc_recall_at_5_std value: 11.4164 - type: nauc_recall_at_5_diff1 value: 23.4182 - type: nauc_recall_at_10_max value: 19.2758 - type: nauc_recall_at_10_std value: 11.578800000000001 - type: nauc_recall_at_10_diff1 value: 18.030099999999997 - type: nauc_recall_at_20_max value: 16.1643 - type: nauc_recall_at_20_std value: 9.9037 - type: nauc_recall_at_20_diff1 value: 16.0833 - type: nauc_recall_at_100_max value: 13.644700000000002 - type: nauc_recall_at_100_std value: 10.986799999999999 - type: nauc_recall_at_100_diff1 value: 11.0515 - type: nauc_recall_at_1000_max value: 13.9712 - type: nauc_recall_at_1000_std value: 13.4048 - type: nauc_recall_at_1000_diff1 value: 6.569500000000001 - type: nauc_precision_at_1_max value: 32.8089 - type: nauc_precision_at_1_std value: 13.0518 - type: nauc_precision_at_1_diff1 value: 44.3602 - type: nauc_precision_at_3_max value: 26.747700000000002 - type: nauc_precision_at_3_std value: 11.4203 - type: nauc_precision_at_3_diff1 value: 27.9047 - type: nauc_precision_at_5_max value: 22.3707 - type: nauc_precision_at_5_std value: 11.4164 - type: nauc_precision_at_5_diff1 value: 23.4182 - type: nauc_precision_at_10_max value: 19.2758 - type: nauc_precision_at_10_std value: 11.578800000000001 - type: nauc_precision_at_10_diff1 value: 18.030099999999997 - type: nauc_precision_at_20_max value: 16.1643 - type: nauc_precision_at_20_std value: 9.9037 - type: nauc_precision_at_20_diff1 value: 16.0833 - type: 
nauc_precision_at_100_max value: 13.644700000000002 - type: nauc_precision_at_100_std value: 10.986799999999999 - type: nauc_precision_at_100_diff1 value: 11.0515 - type: nauc_precision_at_1000_max value: 13.9712 - type: nauc_precision_at_1000_std value: 13.4048 - type: nauc_precision_at_1000_diff1 value: 6.569500000000001 - type: nauc_mrr_at_1_max value: 32.8089 - type: nauc_mrr_at_1_std value: 13.0518 - type: nauc_mrr_at_1_diff1 value: 44.3602 - type: nauc_mrr_at_3_max value: 29.285600000000002 - type: nauc_mrr_at_3_std value: 12.4277 - type: nauc_mrr_at_3_diff1 value: 35.2678 - type: nauc_mrr_at_5_max value: 27.6754 - type: nauc_mrr_at_5_std value: 12.4042 - type: nauc_mrr_at_5_diff1 value: 33.330799999999996 - type: nauc_mrr_at_10_max value: 26.571299999999997 - type: nauc_mrr_at_10_std value: 12.439400000000001 - type: nauc_mrr_at_10_diff1 value: 31.275399999999998 - type: nauc_mrr_at_20_max value: 25.8795 - type: nauc_mrr_at_20_std value: 12.1596 - type: nauc_mrr_at_20_diff1 value: 30.6354 - type: nauc_mrr_at_100_max value: 25.337 - type: nauc_mrr_at_100_std value: 12.0245 - type: nauc_mrr_at_100_diff1 value: 29.870400000000004 - type: nauc_mrr_at_1000_max value: 25.2399 - type: nauc_mrr_at_1000_std value: 12.0242 - type: nauc_mrr_at_1000_diff1 value: 29.7236 - type: main_score value: 6.203 - task: type: Retrieval dataset: name: MTEB ArguAna (default) type: mteb/arguana config: default split: test revision: c22ab2a51041ffd869aaddef7af8d8215647e41a metrics: - type: ndcg_at_1 value: 31.791999999999998 - type: ndcg_at_3 value: 46.453 - type: ndcg_at_5 value: 51.623 - type: ndcg_at_10 value: 56.355999999999995 - type: ndcg_at_20 value: 58.757000000000005 - type: ndcg_at_100 value: 59.789 - type: ndcg_at_1000 value: 59.857000000000006 - type: map_at_1 value: 31.791999999999998 - type: map_at_3 value: 42.757 - type: map_at_5 value: 45.634 - type: map_at_10 value: 47.599000000000004 - type: map_at_20 value: 48.271 - type: map_at_100 value: 48.425000000000004 - type: map_at_1000 value: 48.427 - type: recall_at_1 value: 31.791999999999998 - type: recall_at_3 value: 57.18299999999999 - type: recall_at_5 value: 69.70100000000001 - type: recall_at_10 value: 84.282 - type: recall_at_20 value: 93.67 - type: recall_at_100 value: 99.075 - type: recall_at_1000 value: 99.644 - type: precision_at_1 value: 31.791999999999998 - type: precision_at_3 value: 19.061 - type: precision_at_5 value: 13.94 - type: precision_at_10 value: 8.427999999999999 - type: precision_at_20 value: 4.683 - type: precision_at_100 value: 0.991 - type: precision_at_1000 value: 0.1 - type: mrr_at_1 value: 32.3613 - type: mrr_at_3 value: 42.935 - type: mrr_at_5 value: 45.844 - type: mrr_at_10 value: 47.808099999999996 - type: mrr_at_20 value: 48.4844 - type: mrr_at_100 value: 48.6345 - type: mrr_at_1000 value: 48.6364 - type: nauc_ndcg_at_1_max value: -8.274099999999999 - type: nauc_ndcg_at_1_std value: -8.1976 - type: nauc_ndcg_at_1_diff1 value: 14.155100000000001 - type: nauc_ndcg_at_3_max value: -4.6223 - type: nauc_ndcg_at_3_std value: -10.198500000000001 - type: nauc_ndcg_at_3_diff1 value: 14.516499999999999 - type: nauc_ndcg_at_5_max value: -4.9834000000000005 - type: nauc_ndcg_at_5_std value: -9.6634 - type: nauc_ndcg_at_5_diff1 value: 12.9298 - type: nauc_ndcg_at_10_max value: -4.3251 - type: nauc_ndcg_at_10_std value: -8.3068 - type: nauc_ndcg_at_10_diff1 value: 12.2939 - type: nauc_ndcg_at_20_max value: -3.8912000000000004 - type: nauc_ndcg_at_20_std value: -8.1821 - type: nauc_ndcg_at_20_diff1 value: 12.673599999999999 - 
type: nauc_ndcg_at_100_max value: -5.0274 - type: nauc_ndcg_at_100_std value: -8.450000000000001 - type: nauc_ndcg_at_100_diff1 value: 12.787399999999998 - type: nauc_ndcg_at_1000_max value: -5.1416 - type: nauc_ndcg_at_1000_std value: -8.6044 - type: nauc_ndcg_at_1000_diff1 value: 12.858600000000001 - type: nauc_map_at_1_max value: -8.274099999999999 - type: nauc_map_at_1_std value: -8.1976 - type: nauc_map_at_1_diff1 value: 14.155100000000001 - type: nauc_map_at_3_max value: -5.6403 - type: nauc_map_at_3_std value: -9.7092 - type: nauc_map_at_3_diff1 value: 14.0705 - type: nauc_map_at_5_max value: -5.8896999999999995 - type: nauc_map_at_5_std value: -9.3946 - type: nauc_map_at_5_diff1 value: 13.208 - type: nauc_map_at_10_max value: -5.7523 - type: nauc_map_at_10_std value: -8.9262 - type: nauc_map_at_10_diff1 value: 12.961500000000001 - type: nauc_map_at_20_max value: -5.7103 - type: nauc_map_at_20_std value: -8.9336 - type: nauc_map_at_20_diff1 value: 13.0351 - type: nauc_map_at_100_max value: -5.8204 - type: nauc_map_at_100_std value: -8.9441 - type: nauc_map_at_100_diff1 value: 13.0722 - type: nauc_map_at_1000_max value: -5.8239 - type: nauc_map_at_1000_std value: -8.9463 - type: nauc_map_at_1000_diff1 value: 13.0724 - type: nauc_recall_at_1_max value: -8.274099999999999 - type: nauc_recall_at_1_std value: -8.1976 - type: nauc_recall_at_1_diff1 value: 14.155100000000001 - type: nauc_recall_at_3_max value: -1.4792 - type: nauc_recall_at_3_std value: -11.6828 - type: nauc_recall_at_3_diff1 value: 16.026 - type: nauc_recall_at_5_max value: -1.6868999999999998 - type: nauc_recall_at_5_std value: -10.5497 - type: nauc_recall_at_5_diff1 value: 11.826 - type: nauc_recall_at_10_max value: 5.1425 - type: nauc_recall_at_10_std value: -3.1008999999999998 - type: nauc_recall_at_10_diff1 value: 7.6911 - type: nauc_recall_at_20_max value: 25.921499999999998 - type: nauc_recall_at_20_std value: 6.812600000000001 - type: nauc_recall_at_20_diff1 value: 8.311300000000001 - type: nauc_recall_at_100_max value: 28.425299999999996 - type: nauc_recall_at_100_std value: 45.9592 - type: nauc_recall_at_100_diff1 value: -11.801 - type: nauc_recall_at_1000_max value: 21.834500000000002 - type: nauc_recall_at_1000_std value: 38.804 - type: nauc_recall_at_1000_diff1 value: -3.5484 - type: nauc_precision_at_1_max value: -8.274099999999999 - type: nauc_precision_at_1_std value: -8.1976 - type: nauc_precision_at_1_diff1 value: 14.155100000000001 - type: nauc_precision_at_3_max value: -1.4792 - type: nauc_precision_at_3_std value: -11.6828 - type: nauc_precision_at_3_diff1 value: 16.026 - type: nauc_precision_at_5_max value: -1.6868999999999998 - type: nauc_precision_at_5_std value: -10.5497 - type: nauc_precision_at_5_diff1 value: 11.826 - type: nauc_precision_at_10_max value: 5.1425 - type: nauc_precision_at_10_std value: -3.1008999999999998 - type: nauc_precision_at_10_diff1 value: 7.6911 - type: nauc_precision_at_20_max value: 25.921499999999998 - type: nauc_precision_at_20_std value: 6.812600000000001 - type: nauc_precision_at_20_diff1 value: 8.311300000000001 - type: nauc_precision_at_100_max value: 28.425299999999996 - type: nauc_precision_at_100_std value: 45.9592 - type: nauc_precision_at_100_diff1 value: -11.801 - type: nauc_precision_at_1000_max value: 21.834500000000002 - type: nauc_precision_at_1000_std value: 38.804 - type: nauc_precision_at_1000_diff1 value: -3.5484 - type: nauc_mrr_at_1_max value: -8.6929 - type: nauc_mrr_at_1_std value: -7.7584 - type: nauc_mrr_at_1_diff1 value: 12.488100000000001 - 
type: nauc_mrr_at_3_max value: -6.6954 - type: nauc_mrr_at_3_std value: -9.7075 - type: nauc_mrr_at_3_diff1 value: 12.2994 - type: nauc_mrr_at_5_max value: -6.7945 - type: nauc_mrr_at_5_std value: -9.3751 - type: nauc_mrr_at_5_diff1 value: 11.544699999999999 - type: nauc_mrr_at_10_max value: -6.6614 - type: nauc_mrr_at_10_std value: -8.859200000000001 - type: nauc_mrr_at_10_diff1 value: 11.2614 - type: nauc_mrr_at_20_max value: -6.6408 - type: nauc_mrr_at_20_std value: -8.8599 - type: nauc_mrr_at_20_diff1 value: 11.3125 - type: nauc_mrr_at_100_max value: -6.7582 - type: nauc_mrr_at_100_std value: -8.876299999999999 - type: nauc_mrr_at_100_diff1 value: 11.325000000000001 - type: nauc_mrr_at_1000_max value: -6.7619 - type: nauc_mrr_at_1000_std value: -8.878400000000001 - type: nauc_mrr_at_1000_diff1 value: 11.3251 - type: main_score value: 56.355999999999995 - task: type: Clustering dataset: name: MTEB ArxivClusteringP2P (default) type: mteb/arxiv-clustering-p2p config: default split: test revision: a122ad7f3f0291bf49cc6f4d32aa80929df69d5d metrics: - type: v_measure value: 46.813 - type: v_measure_std value: 13.830899999999998 - type: main_score value: 46.813 - task: type: Clustering dataset: name: MTEB ArxivClusteringS2S (default) type: mteb/arxiv-clustering-s2s config: default split: test revision: f910caf1a6075f7329cdf8c1a6135696f37dbd53 metrics: - type: v_measure value: 41.9895 - type: v_measure_std value: 14.3004 - type: main_score value: 41.9895 - task: type: Reranking dataset: name: MTEB AskUbuntuDupQuestions (default) type: mteb/askubuntudupquestions-reranking config: default split: test revision: 2000358ca161889fa9c082cb41daa8dcfb161a54 metrics: - type: map value: 64.1329 - type: mrr value: 76.8303 - type: nAUC_map_max value: 23.5323 - type: nAUC_map_std value: 14.7567 - type: nAUC_map_diff1 value: 11.6783 - type: nAUC_mrr_max value: 32.3309 - type: nAUC_mrr_std value: 19.1617 - type: nAUC_mrr_diff1 value: 23.508699999999997 - type: main_score value: 64.1329 - task: type: STS dataset: name: MTEB BIOSSES (default) type: mteb/biosses-sts config: default split: test revision: d3fb88f8f02e40887cd149695127462bbcf29b4a metrics: - type: pearson value: 90.2058 - type: spearman value: 88.1641 - type: cosine_pearson value: 90.2058 - type: cosine_spearman value: 88.1641 - type: manhattan_pearson value: 87.7579 - type: manhattan_spearman value: 87.6249 - type: euclidean_pearson value: 88.3667 - type: euclidean_spearman value: 88.1641 - type: main_score value: 88.1641 - task: type: Classification dataset: name: MTEB Banking77Classification (default) type: mteb/banking77 config: default split: test revision: 0fd18e25b25c072e09e0d92ab615fda904d66300 metrics: - type: accuracy value: 77.3247 - type: f1 value: 76.3532 - type: f1_weighted value: 76.3532 - type: main_score value: 77.3247 - task: type: Clustering dataset: name: MTEB BiorxivClusteringP2P (default) type: mteb/biorxiv-clustering-p2p config: default split: test revision: 65b79d1d13f80053f67aca9498d9402c2d9f1f40 metrics: - type: v_measure value: 39.018 - type: v_measure_std value: 0.7512 - type: main_score value: 39.018 - task: type: Clustering dataset: name: MTEB BiorxivClusteringS2S (default) type: mteb/biorxiv-clustering-s2s config: default split: test revision: 258694dd0231531bc1fd9de6ceb52a0853c6d908 metrics: - type: v_measure value: 36.8097 - type: v_measure_std value: 0.9368 - type: main_score value: 36.8097 - task: type: Retrieval dataset: name: MTEB COIRCodeSearchNetRetrieval (python) type: CoIR-Retrieval/CodeSearchNet config: 
python split: test revision: 4adc7bc41202b5c13543c9c886a25f340634dab3 metrics: - type: ndcg_at_1 value: 85.353 - type: ndcg_at_3 value: 89.493 - type: ndcg_at_5 value: 90.347 - type: ndcg_at_10 value: 90.89699999999999 - type: ndcg_at_20 value: 91.20899999999999 - type: ndcg_at_100 value: 91.506 - type: ndcg_at_1000 value: 91.62400000000001 - type: map_at_1 value: 85.353 - type: map_at_3 value: 88.532 - type: map_at_5 value: 89.008 - type: map_at_10 value: 89.238 - type: map_at_20 value: 89.323 - type: map_at_100 value: 89.366 - type: map_at_1000 value: 89.371 - type: recall_at_1 value: 85.353 - type: recall_at_3 value: 92.251 - type: recall_at_5 value: 94.316 - type: recall_at_10 value: 95.998 - type: recall_at_20 value: 97.238 - type: recall_at_100 value: 98.81400000000001 - type: recall_at_1000 value: 99.725 - type: precision_at_1 value: 85.353 - type: precision_at_3 value: 30.75 - type: precision_at_5 value: 18.863 - type: precision_at_10 value: 9.6 - type: precision_at_20 value: 4.862 - type: precision_at_100 value: 0.988 - type: precision_at_1000 value: 0.1 - type: mrr_at_1 value: 85.3533 - type: mrr_at_3 value: 88.5318 - type: mrr_at_5 value: 89.0077 - type: mrr_at_10 value: 89.2381 - type: mrr_at_20 value: 89.3231 - type: mrr_at_100 value: 89.3659 - type: mrr_at_1000 value: 89.3707 - type: nauc_ndcg_at_1_max value: 79.05529999999999 - type: nauc_ndcg_at_1_std value: 6.6982 - type: nauc_ndcg_at_1_diff1 value: 89.6212 - type: nauc_ndcg_at_3_max value: 82.5612 - type: nauc_ndcg_at_3_std value: 10.379199999999999 - type: nauc_ndcg_at_3_diff1 value: 87.809 - type: nauc_ndcg_at_5_max value: 82.4315 - type: nauc_ndcg_at_5_std value: 10.5113 - type: nauc_ndcg_at_5_diff1 value: 88.0763 - type: nauc_ndcg_at_10_max value: 82.4135 - type: nauc_ndcg_at_10_std value: 11.046 - type: nauc_ndcg_at_10_diff1 value: 88.2008 - type: nauc_ndcg_at_20_max value: 82.3276 - type: nauc_ndcg_at_20_std value: 11.4306 - type: nauc_ndcg_at_20_diff1 value: 88.2525 - type: nauc_ndcg_at_100_max value: 82.1023 - type: nauc_ndcg_at_100_std value: 11.2119 - type: nauc_ndcg_at_100_diff1 value: 88.3149 - type: nauc_ndcg_at_1000_max value: 81.91720000000001 - type: nauc_ndcg_at_1000_std value: 10.7203 - type: nauc_ndcg_at_1000_diff1 value: 88.349 - type: nauc_map_at_1_max value: 79.05529999999999 - type: nauc_map_at_1_std value: 6.6982 - type: nauc_map_at_1_diff1 value: 89.6212 - type: nauc_map_at_3_max value: 81.5856 - type: nauc_map_at_3_std value: 9.3626 - type: nauc_map_at_3_diff1 value: 88.2364 - type: nauc_map_at_5_max value: 81.4778 - type: nauc_map_at_5_std value: 9.3662 - type: nauc_map_at_5_diff1 value: 88.3865 - type: nauc_map_at_10_max value: 81.447 - type: nauc_map_at_10_std value: 9.5111 - type: nauc_map_at_10_diff1 value: 88.43469999999999 - type: nauc_map_at_20_max value: 81.4196 - type: nauc_map_at_20_std value: 9.593 - type: nauc_map_at_20_diff1 value: 88.4473 - type: nauc_map_at_100_max value: 81.3925 - type: nauc_map_at_100_std value: 9.5683 - type: nauc_map_at_100_diff1 value: 88.4559 - type: nauc_map_at_1000_max value: 81.3865 - type: nauc_map_at_1000_std value: 9.554 - type: nauc_map_at_1000_diff1 value: 88.457 - type: nauc_recall_at_1_max value: 79.05529999999999 - type: nauc_recall_at_1_std value: 6.6982 - type: nauc_recall_at_1_diff1 value: 89.6212 - type: nauc_recall_at_3_max value: 86.56580000000001 - type: nauc_recall_at_3_std value: 14.5464 - type: nauc_recall_at_3_diff1 value: 86.1047 - type: nauc_recall_at_5_max value: 87.5044 - type: nauc_recall_at_5_std value: 16.7155 - type: 
nauc_recall_at_5_diff1 value: 86.5603 - type: nauc_recall_at_10_max value: 89.5625 - type: nauc_recall_at_10_std value: 23.230700000000002 - type: nauc_recall_at_10_diff1 value: 86.8079 - type: nauc_recall_at_20_max value: 91.7174 - type: nauc_recall_at_20_std value: 33.203700000000005 - type: nauc_recall_at_20_diff1 value: 86.8468 - type: nauc_recall_at_100_max value: 95.55160000000001 - type: nauc_recall_at_100_std value: 53.0169 - type: nauc_recall_at_100_diff1 value: 87.1867 - type: nauc_recall_at_1000_max value: 97.0907 - type: nauc_recall_at_1000_std value: 75.0177 - type: nauc_recall_at_1000_diff1 value: 91.3005 - type: nauc_precision_at_1_max value: 79.05529999999999 - type: nauc_precision_at_1_std value: 6.6982 - type: nauc_precision_at_1_diff1 value: 89.6212 - type: nauc_precision_at_3_max value: 86.56580000000001 - type: nauc_precision_at_3_std value: 14.5464 - type: nauc_precision_at_3_diff1 value: 86.1047 - type: nauc_precision_at_5_max value: 87.5044 - type: nauc_precision_at_5_std value: 16.7155 - type: nauc_precision_at_5_diff1 value: 86.5603 - type: nauc_precision_at_10_max value: 89.5625 - type: nauc_precision_at_10_std value: 23.230700000000002 - type: nauc_precision_at_10_diff1 value: 86.8079 - type: nauc_precision_at_20_max value: 91.7174 - type: nauc_precision_at_20_std value: 33.203700000000005 - type: nauc_precision_at_20_diff1 value: 86.8468 - type: nauc_precision_at_100_max value: 95.55160000000001 - type: nauc_precision_at_100_std value: 53.0169 - type: nauc_precision_at_100_diff1 value: 87.1867 - type: nauc_precision_at_1000_max value: 97.0907 - type: nauc_precision_at_1000_std value: 75.0177 - type: nauc_precision_at_1000_diff1 value: 91.3005 - type: nauc_mrr_at_1_max value: 79.05529999999999 - type: nauc_mrr_at_1_std value: 6.6982 - type: nauc_mrr_at_1_diff1 value: 89.6212 - type: nauc_mrr_at_3_max value: 81.5856 - type: nauc_mrr_at_3_std value: 9.3626 - type: nauc_mrr_at_3_diff1 value: 88.2364 - type: nauc_mrr_at_5_max value: 81.4778 - type: nauc_mrr_at_5_std value: 9.3662 - type: nauc_mrr_at_5_diff1 value: 88.3865 - type: nauc_mrr_at_10_max value: 81.447 - type: nauc_mrr_at_10_std value: 9.5111 - type: nauc_mrr_at_10_diff1 value: 88.43469999999999 - type: nauc_mrr_at_20_max value: 81.4196 - type: nauc_mrr_at_20_std value: 9.593 - type: nauc_mrr_at_20_diff1 value: 88.4473 - type: nauc_mrr_at_100_max value: 81.3925 - type: nauc_mrr_at_100_std value: 9.5683 - type: nauc_mrr_at_100_diff1 value: 88.4559 - type: nauc_mrr_at_1000_max value: 81.3865 - type: nauc_mrr_at_1000_std value: 9.554 - type: nauc_mrr_at_1000_diff1 value: 88.457 - type: main_score value: 90.89699999999999 - task: type: Retrieval dataset: name: MTEB COIRCodeSearchNetRetrieval (javascript) type: CoIR-Retrieval/CodeSearchNet config: javascript split: test revision: 4adc7bc41202b5c13543c9c886a25f340634dab3 metrics: - type: ndcg_at_1 value: 35.46 - type: ndcg_at_3 value: 42.799 - type: ndcg_at_5 value: 44.64 - type: ndcg_at_10 value: 46.54 - type: ndcg_at_20 value: 48.025 - type: ndcg_at_100 value: 50.307 - type: ndcg_at_1000 value: 51.925 - type: map_at_1 value: 35.46 - type: map_at_3 value: 41.016000000000005 - type: map_at_5 value: 42.038 - type: map_at_10 value: 42.825 - type: map_at_20 value: 43.233 - type: map_at_100 value: 43.541999999999994 - type: map_at_1000 value: 43.599 - type: recall_at_1 value: 35.46 - type: recall_at_3 value: 47.949000000000005 - type: recall_at_5 value: 52.416 - type: recall_at_10 value: 58.28 - type: recall_at_20 value: 64.145 - type: recall_at_100 value: 76.542 - 
type: recall_at_1000 value: 89.547 - type: precision_at_1 value: 35.46 - type: precision_at_3 value: 15.983 - type: precision_at_5 value: 10.483 - type: precision_at_10 value: 5.827999999999999 - type: precision_at_20 value: 3.2070000000000003 - type: precision_at_100 value: 0.765 - type: precision_at_1000 value: 0.09 - type: mrr_at_1 value: 35.460300000000004 - type: mrr_at_3 value: 41.0159 - type: mrr_at_5 value: 42.038399999999996 - type: mrr_at_10 value: 42.8251 - type: mrr_at_20 value: 43.2333 - type: mrr_at_100 value: 43.542199999999994 - type: mrr_at_1000 value: 43.5986 - type: nauc_ndcg_at_1_max value: 48.2915 - type: nauc_ndcg_at_1_std value: 2.4132000000000002 - type: nauc_ndcg_at_1_diff1 value: 64.10810000000001 - type: nauc_ndcg_at_3_max value: 51.357 - type: nauc_ndcg_at_3_std value: 4.9681999999999995 - type: nauc_ndcg_at_3_diff1 value: 58.012600000000006 - type: nauc_ndcg_at_5_max value: 51.8888 - type: nauc_ndcg_at_5_std value: 6.2654000000000005 - type: nauc_ndcg_at_5_diff1 value: 57.103 - type: nauc_ndcg_at_10_max value: 51.9571 - type: nauc_ndcg_at_10_std value: 7.446 - type: nauc_ndcg_at_10_diff1 value: 56.505700000000004 - type: nauc_ndcg_at_20_max value: 51.638799999999996 - type: nauc_ndcg_at_20_std value: 7.7742 - type: nauc_ndcg_at_20_diff1 value: 55.9805 - type: nauc_ndcg_at_100_max value: 51.3786 - type: nauc_ndcg_at_100_std value: 8.1191 - type: nauc_ndcg_at_100_diff1 value: 56.3265 - type: nauc_ndcg_at_1000_max value: 51.162 - type: nauc_ndcg_at_1000_std value: 7.6863 - type: nauc_ndcg_at_1000_diff1 value: 56.6531 - type: nauc_map_at_1_max value: 48.2915 - type: nauc_map_at_1_std value: 2.4132000000000002 - type: nauc_map_at_1_diff1 value: 64.10810000000001 - type: nauc_map_at_3_max value: 50.6599 - type: nauc_map_at_3_std value: 4.3285 - type: nauc_map_at_3_diff1 value: 59.453100000000006 - type: nauc_map_at_5_max value: 50.9502 - type: nauc_map_at_5_std value: 5.0428 - type: nauc_map_at_5_diff1 value: 58.9452 - type: nauc_map_at_10_max value: 50.9749 - type: nauc_map_at_10_std value: 5.5069 - type: nauc_map_at_10_diff1 value: 58.7167 - type: nauc_map_at_20_max value: 50.8815 - type: nauc_map_at_20_std value: 5.5846 - type: nauc_map_at_20_diff1 value: 58.5793 - type: nauc_map_at_100_max value: 50.8454 - type: nauc_map_at_100_std value: 5.6249 - type: nauc_map_at_100_diff1 value: 58.6352 - type: nauc_map_at_1000_max value: 50.8377 - type: nauc_map_at_1000_std value: 5.6119 - type: nauc_map_at_1000_diff1 value: 58.6477 - type: nauc_recall_at_1_max value: 48.2915 - type: nauc_recall_at_1_std value: 2.4132000000000002 - type: nauc_recall_at_1_diff1 value: 64.10810000000001 - type: nauc_recall_at_3_max value: 53.3613 - type: nauc_recall_at_3_std value: 6.833699999999999 - type: nauc_recall_at_3_diff1 value: 53.8466 - type: nauc_recall_at_5_max value: 54.7395 - type: nauc_recall_at_5_std value: 10.1014 - type: nauc_recall_at_5_diff1 value: 51.520900000000005 - type: nauc_recall_at_10_max value: 55.125299999999996 - type: nauc_recall_at_10_std value: 14.277899999999999 - type: nauc_recall_at_10_diff1 value: 49.1874 - type: nauc_recall_at_20_max value: 54.0194 - type: nauc_recall_at_20_std value: 16.4329 - type: nauc_recall_at_20_diff1 value: 46.1551 - type: nauc_recall_at_100_max value: 52.7898 - type: nauc_recall_at_100_std value: 22.375600000000002 - type: nauc_recall_at_100_diff1 value: 45.351 - type: nauc_recall_at_1000_max value: 49.0379 - type: nauc_recall_at_1000_std value: 26.0579 - type: nauc_recall_at_1000_diff1 value: 41.7849 - type: 
nauc_precision_at_1_max value: 48.2915 - type: nauc_precision_at_1_std value: 2.4132000000000002 - type: nauc_precision_at_1_diff1 value: 64.10810000000001 - type: nauc_precision_at_3_max value: 53.3613 - type: nauc_precision_at_3_std value: 6.833699999999999 - type: nauc_precision_at_3_diff1 value: 53.8466 - type: nauc_precision_at_5_max value: 54.7395 - type: nauc_precision_at_5_std value: 10.1014 - type: nauc_precision_at_5_diff1 value: 51.520900000000005 - type: nauc_precision_at_10_max value: 55.125299999999996 - type: nauc_precision_at_10_std value: 14.277899999999999 - type: nauc_precision_at_10_diff1 value: 49.1874 - type: nauc_precision_at_20_max value: 54.0194 - type: nauc_precision_at_20_std value: 16.4329 - type: nauc_precision_at_20_diff1 value: 46.1551 - type: nauc_precision_at_100_max value: 52.7898 - type: nauc_precision_at_100_std value: 22.375600000000002 - type: nauc_precision_at_100_diff1 value: 45.351 - type: nauc_precision_at_1000_max value: 49.0379 - type: nauc_precision_at_1000_std value: 26.0579 - type: nauc_precision_at_1000_diff1 value: 41.7849 - type: nauc_mrr_at_1_max value: 48.2915 - type: nauc_mrr_at_1_std value: 2.4132000000000002 - type: nauc_mrr_at_1_diff1 value: 64.10810000000001 - type: nauc_mrr_at_3_max value: 50.6599 - type: nauc_mrr_at_3_std value: 4.3285 - type: nauc_mrr_at_3_diff1 value: 59.453100000000006 - type: nauc_mrr_at_5_max value: 50.9502 - type: nauc_mrr_at_5_std value: 5.0428 - type: nauc_mrr_at_5_diff1 value: 58.9452 - type: nauc_mrr_at_10_max value: 50.9749 - type: nauc_mrr_at_10_std value: 5.5069 - type: nauc_mrr_at_10_diff1 value: 58.7167 - type: nauc_mrr_at_20_max value: 50.8815 - type: nauc_mrr_at_20_std value: 5.5846 - type: nauc_mrr_at_20_diff1 value: 58.5793 - type: nauc_mrr_at_100_max value: 50.8454 - type: nauc_mrr_at_100_std value: 5.6249 - type: nauc_mrr_at_100_diff1 value: 58.6352 - type: nauc_mrr_at_1000_max value: 50.8377 - type: nauc_mrr_at_1000_std value: 5.6119 - type: nauc_mrr_at_1000_diff1 value: 58.6477 - type: main_score value: 46.54 - task: type: Retrieval dataset: name: MTEB COIRCodeSearchNetRetrieval (go) type: CoIR-Retrieval/CodeSearchNet config: go split: test revision: 4adc7bc41202b5c13543c9c886a25f340634dab3 metrics: - type: ndcg_at_1 value: 45.728 - type: ndcg_at_3 value: 54.942 - type: ndcg_at_5 value: 57.19499999999999 - type: ndcg_at_10 value: 59.471 - type: ndcg_at_20 value: 60.888 - type: ndcg_at_100 value: 62.67700000000001 - type: ndcg_at_1000 value: 63.654999999999994 - type: map_at_1 value: 45.728 - type: map_at_3 value: 52.717000000000006 - type: map_at_5 value: 53.968 - type: map_at_10 value: 54.921 - type: map_at_20 value: 55.31 - type: map_at_100 value: 55.555 - type: map_at_1000 value: 55.589999999999996 - type: recall_at_1 value: 45.728 - type: recall_at_3 value: 61.364 - type: recall_at_5 value: 66.83099999999999 - type: recall_at_10 value: 73.8 - type: recall_at_20 value: 79.402 - type: recall_at_100 value: 89.079 - type: recall_at_1000 value: 96.885 - type: precision_at_1 value: 45.728 - type: precision_at_3 value: 20.455000000000002 - type: precision_at_5 value: 13.366 - type: precision_at_10 value: 7.380000000000001 - type: precision_at_20 value: 3.9699999999999998 - type: precision_at_100 value: 0.8909999999999999 - type: precision_at_1000 value: 0.097 - type: mrr_at_1 value: 45.7277 - type: mrr_at_3 value: 52.7169 - type: mrr_at_5 value: 53.9678 - type: mrr_at_10 value: 54.920500000000004 - type: mrr_at_20 value: 55.3099 - type: mrr_at_100 value: 55.5546 - type: mrr_at_1000 value: 
55.5896 - type: nauc_ndcg_at_1_max value: 40.5391 - type: nauc_ndcg_at_1_std value: -2.9052000000000002 - type: nauc_ndcg_at_1_diff1 value: 63.2351 - type: nauc_ndcg_at_3_max value: 43.8365 - type: nauc_ndcg_at_3_std value: -0.6831 - type: nauc_ndcg_at_3_diff1 value: 57.782599999999995 - type: nauc_ndcg_at_5_max value: 43.851600000000005 - type: nauc_ndcg_at_5_std value: -0.3032 - type: nauc_ndcg_at_5_diff1 value: 57.0763 - type: nauc_ndcg_at_10_max value: 44.1492 - type: nauc_ndcg_at_10_std value: 0.6748 - type: nauc_ndcg_at_10_diff1 value: 56.8967 - type: nauc_ndcg_at_20_max value: 44.1367 - type: nauc_ndcg_at_20_std value: 0.8896 - type: nauc_ndcg_at_20_diff1 value: 56.97560000000001 - type: nauc_ndcg_at_100_max value: 43.9934 - type: nauc_ndcg_at_100_std value: 1.0534 - type: nauc_ndcg_at_100_diff1 value: 57.347899999999996 - type: nauc_ndcg_at_1000_max value: 43.8679 - type: nauc_ndcg_at_1000_std value: 0.6431 - type: nauc_ndcg_at_1000_diff1 value: 57.6967 - type: nauc_map_at_1_max value: 40.5391 - type: nauc_map_at_1_std value: -2.9052000000000002 - type: nauc_map_at_1_diff1 value: 63.2351 - type: nauc_map_at_3_max value: 43.0286 - type: nauc_map_at_3_std value: -1.2933 - type: nauc_map_at_3_diff1 value: 59.065 - type: nauc_map_at_5_max value: 43.0224 - type: nauc_map_at_5_std value: -1.1081 - type: nauc_map_at_5_diff1 value: 58.7146 - type: nauc_map_at_10_max value: 43.127500000000005 - type: nauc_map_at_10_std value: -0.7247 - type: nauc_map_at_10_diff1 value: 58.6619 - type: nauc_map_at_20_max value: 43.1213 - type: nauc_map_at_20_std value: -0.6853 - type: nauc_map_at_20_diff1 value: 58.704299999999996 - type: nauc_map_at_100_max value: 43.0908 - type: nauc_map_at_100_std value: -0.6792 - type: nauc_map_at_100_diff1 value: 58.7592 - type: nauc_map_at_1000_max value: 43.085499999999996 - type: nauc_map_at_1000_std value: -0.6897 - type: nauc_map_at_1000_diff1 value: 58.7689 - type: nauc_recall_at_1_max value: 40.5391 - type: nauc_recall_at_1_std value: -2.9052000000000002 - type: nauc_recall_at_1_diff1 value: 63.2351 - type: nauc_recall_at_3_max value: 46.3617 - type: nauc_recall_at_3_std value: 1.2550999999999999 - type: nauc_recall_at_3_diff1 value: 53.7993 - type: nauc_recall_at_5_max value: 46.6666 - type: nauc_recall_at_5_std value: 2.5401 - type: nauc_recall_at_5_diff1 value: 51.413799999999995 - type: nauc_recall_at_10_max value: 48.3645 - type: nauc_recall_at_10_std value: 6.8622000000000005 - type: nauc_recall_at_10_diff1 value: 49.6971 - type: nauc_recall_at_20_max value: 49.1074 - type: nauc_recall_at_20_std value: 9.4846 - type: nauc_recall_at_20_diff1 value: 48.5587 - type: nauc_recall_at_100_max value: 51.2638 - type: nauc_recall_at_100_std value: 18.4911 - type: nauc_recall_at_100_diff1 value: 47.2445 - type: nauc_recall_at_1000_max value: 61.0283 - type: nauc_recall_at_1000_std value: 31.5949 - type: nauc_recall_at_1000_diff1 value: 47.239599999999996 - type: nauc_precision_at_1_max value: 40.5391 - type: nauc_precision_at_1_std value: -2.9052000000000002 - type: nauc_precision_at_1_diff1 value: 63.2351 - type: nauc_precision_at_3_max value: 46.3617 - type: nauc_precision_at_3_std value: 1.2550999999999999 - type: nauc_precision_at_3_diff1 value: 53.7993 - type: nauc_precision_at_5_max value: 46.6666 - type: nauc_precision_at_5_std value: 2.5401 - type: nauc_precision_at_5_diff1 value: 51.413799999999995 - type: nauc_precision_at_10_max value: 48.3645 - type: nauc_precision_at_10_std value: 6.8622000000000005 - type: nauc_precision_at_10_diff1 value: 49.6971 - 
type: nauc_precision_at_20_max value: 49.1074 - type: nauc_precision_at_20_std value: 9.4846 - type: nauc_precision_at_20_diff1 value: 48.5587 - type: nauc_precision_at_100_max value: 51.2638 - type: nauc_precision_at_100_std value: 18.4911 - type: nauc_precision_at_100_diff1 value: 47.2445 - type: nauc_precision_at_1000_max value: 61.0283 - type: nauc_precision_at_1000_std value: 31.5949 - type: nauc_precision_at_1000_diff1 value: 47.239599999999996 - type: nauc_mrr_at_1_max value: 40.5391 - type: nauc_mrr_at_1_std value: -2.9052000000000002 - type: nauc_mrr_at_1_diff1 value: 63.2351 - type: nauc_mrr_at_3_max value: 43.0286 - type: nauc_mrr_at_3_std value: -1.2933 - type: nauc_mrr_at_3_diff1 value: 59.065 - type: nauc_mrr_at_5_max value: 43.0224 - type: nauc_mrr_at_5_std value: -1.1081 - type: nauc_mrr_at_5_diff1 value: 58.7146 - type: nauc_mrr_at_10_max value: 43.127500000000005 - type: nauc_mrr_at_10_std value: -0.7247 - type: nauc_mrr_at_10_diff1 value: 58.6619 - type: nauc_mrr_at_20_max value: 43.1213 - type: nauc_mrr_at_20_std value: -0.6853 - type: nauc_mrr_at_20_diff1 value: 58.704299999999996 - type: nauc_mrr_at_100_max value: 43.0908 - type: nauc_mrr_at_100_std value: -0.6792 - type: nauc_mrr_at_100_diff1 value: 58.7592 - type: nauc_mrr_at_1000_max value: 43.085499999999996 - type: nauc_mrr_at_1000_std value: -0.6897 - type: nauc_mrr_at_1000_diff1 value: 58.7689 - type: main_score value: 59.471 - task: type: Retrieval dataset: name: MTEB COIRCodeSearchNetRetrieval (ruby) type: CoIR-Retrieval/CodeSearchNet config: ruby split: test revision: 4adc7bc41202b5c13543c9c886a25f340634dab3 metrics: - type: ndcg_at_1 value: 38.144 - type: ndcg_at_3 value: 46.086 - type: ndcg_at_5 value: 48.13 - type: ndcg_at_10 value: 50.166 - type: ndcg_at_20 value: 51.672 - type: ndcg_at_100 value: 53.81 - type: ndcg_at_1000 value: 55.401999999999994 - type: map_at_1 value: 38.144 - type: map_at_3 value: 44.118 - type: map_at_5 value: 45.245000000000005 - type: map_at_10 value: 46.061 - type: map_at_20 value: 46.475 - type: map_at_100 value: 46.761 - type: map_at_1000 value: 46.815 - type: recall_at_1 value: 38.144 - type: recall_at_3 value: 51.784 - type: recall_at_5 value: 56.779999999999994 - type: recall_at_10 value: 63.20400000000001 - type: recall_at_20 value: 69.151 - type: recall_at_100 value: 80.809 - type: recall_at_1000 value: 93.65599999999999 - type: precision_at_1 value: 38.144 - type: precision_at_3 value: 17.261000000000003 - type: precision_at_5 value: 11.356 - type: precision_at_10 value: 6.32 - type: precision_at_20 value: 3.458 - type: precision_at_100 value: 0.808 - type: precision_at_1000 value: 0.094 - type: mrr_at_1 value: 38.1443 - type: mrr_at_3 value: 44.1184 - type: mrr_at_5 value: 45.2445 - type: mrr_at_10 value: 46.0607 - type: mrr_at_20 value: 46.475 - type: mrr_at_100 value: 46.7611 - type: mrr_at_1000 value: 46.8146 - type: nauc_ndcg_at_1_max value: 49.8526 - type: nauc_ndcg_at_1_std value: 6.944500000000001 - type: nauc_ndcg_at_1_diff1 value: 59.0325 - type: nauc_ndcg_at_3_max value: 48.8152 - type: nauc_ndcg_at_3_std value: 6.2506 - type: nauc_ndcg_at_3_diff1 value: 51.7373 - type: nauc_ndcg_at_5_max value: 48.4399 - type: nauc_ndcg_at_5_std value: 6.687 - type: nauc_ndcg_at_5_diff1 value: 50.569900000000004 - type: nauc_ndcg_at_10_max value: 47.2669 - type: nauc_ndcg_at_10_std value: 6.703 - type: nauc_ndcg_at_10_diff1 value: 49.3867 - type: nauc_ndcg_at_20_max value: 47.1761 - type: nauc_ndcg_at_20_std value: 7.0552 - type: nauc_ndcg_at_20_diff1 value: 49.3528 - type: 
nauc_ndcg_at_100_max value: 47.196 - type: nauc_ndcg_at_100_std value: 7.697 - type: nauc_ndcg_at_100_diff1 value: 49.9359 - type: nauc_ndcg_at_1000_max value: 47.4306 - type: nauc_ndcg_at_1000_std value: 7.3536 - type: nauc_ndcg_at_1000_diff1 value: 50.365700000000004 - type: nauc_map_at_1_max value: 49.8526 - type: nauc_map_at_1_std value: 6.944500000000001 - type: nauc_map_at_1_diff1 value: 59.0325 - type: nauc_map_at_3_max value: 48.932900000000004 - type: nauc_map_at_3_std value: 6.285499999999999 - type: nauc_map_at_3_diff1 value: 53.4821 - type: nauc_map_at_5_max value: 48.709799999999994 - type: nauc_map_at_5_std value: 6.5305 - type: nauc_map_at_5_diff1 value: 52.8586 - type: nauc_map_at_10_max value: 48.2504 - type: nauc_map_at_10_std value: 6.535299999999999 - type: nauc_map_at_10_diff1 value: 52.410000000000004 - type: nauc_map_at_20_max value: 48.2424 - type: nauc_map_at_20_std value: 6.6425 - type: nauc_map_at_20_diff1 value: 52.4289 - type: nauc_map_at_100_max value: 48.254999999999995 - type: nauc_map_at_100_std value: 6.7272 - type: nauc_map_at_100_diff1 value: 52.517199999999995 - type: nauc_map_at_1000_max value: 48.2618 - type: nauc_map_at_1000_std value: 6.7179 - type: nauc_map_at_1000_diff1 value: 52.5296 - type: nauc_recall_at_1_max value: 49.8526 - type: nauc_recall_at_1_std value: 6.944500000000001 - type: nauc_recall_at_1_diff1 value: 59.0325 - type: nauc_recall_at_3_max value: 48.5241 - type: nauc_recall_at_3_std value: 6.2048 - type: nauc_recall_at_3_diff1 value: 46.5818 - type: nauc_recall_at_5_max value: 47.6347 - type: nauc_recall_at_5_std value: 7.290299999999999 - type: nauc_recall_at_5_diff1 value: 43.3392 - type: nauc_recall_at_10_max value: 43.4268 - type: nauc_recall_at_10_std value: 7.4028 - type: nauc_recall_at_10_diff1 value: 38.508700000000005 - type: nauc_recall_at_20_max value: 42.416199999999996 - type: nauc_recall_at_20_std value: 9.0454 - type: nauc_recall_at_20_diff1 value: 36.9086 - type: nauc_recall_at_100_max value: 40.23 - type: nauc_recall_at_100_std value: 15.776000000000002 - type: nauc_recall_at_100_diff1 value: 36.492599999999996 - type: nauc_recall_at_1000_max value: 36.7611 - type: nauc_recall_at_1000_std value: 16.9938 - type: nauc_recall_at_1000_diff1 value: 29.5398 - type: nauc_precision_at_1_max value: 49.8526 - type: nauc_precision_at_1_std value: 6.944500000000001 - type: nauc_precision_at_1_diff1 value: 59.0325 - type: nauc_precision_at_3_max value: 48.5241 - type: nauc_precision_at_3_std value: 6.2048 - type: nauc_precision_at_3_diff1 value: 46.5818 - type: nauc_precision_at_5_max value: 47.6347 - type: nauc_precision_at_5_std value: 7.290299999999999 - type: nauc_precision_at_5_diff1 value: 43.3392 - type: nauc_precision_at_10_max value: 43.4268 - type: nauc_precision_at_10_std value: 7.4028 - type: nauc_precision_at_10_diff1 value: 38.508700000000005 - type: nauc_precision_at_20_max value: 42.416199999999996 - type: nauc_precision_at_20_std value: 9.0454 - type: nauc_precision_at_20_diff1 value: 36.9086 - type: nauc_precision_at_100_max value: 40.23 - type: nauc_precision_at_100_std value: 15.776000000000002 - type: nauc_precision_at_100_diff1 value: 36.492599999999996 - type: nauc_precision_at_1000_max value: 36.7611 - type: nauc_precision_at_1000_std value: 16.9938 - type: nauc_precision_at_1000_diff1 value: 29.5398 - type: nauc_mrr_at_1_max value: 49.8526 - type: nauc_mrr_at_1_std value: 6.944500000000001 - type: nauc_mrr_at_1_diff1 value: 59.0325 - type: nauc_mrr_at_3_max value: 48.932900000000004 - type: 
nauc_mrr_at_3_std value: 6.285499999999999 - type: nauc_mrr_at_3_diff1 value: 53.4821 - type: nauc_mrr_at_5_max value: 48.709799999999994 - type: nauc_mrr_at_5_std value: 6.5305 - type: nauc_mrr_at_5_diff1 value: 52.8586 - type: nauc_mrr_at_10_max value: 48.2504 - type: nauc_mrr_at_10_std value: 6.535299999999999 - type: nauc_mrr_at_10_diff1 value: 52.410000000000004 - type: nauc_mrr_at_20_max value: 48.2424 - type: nauc_mrr_at_20_std value: 6.6425 - type: nauc_mrr_at_20_diff1 value: 52.4289 - type: nauc_mrr_at_100_max value: 48.254999999999995 - type: nauc_mrr_at_100_std value: 6.7272 - type: nauc_mrr_at_100_diff1 value: 52.517199999999995 - type: nauc_mrr_at_1000_max value: 48.2618 - type: nauc_mrr_at_1000_std value: 6.7179 - type: nauc_mrr_at_1000_diff1 value: 52.5296 - type: main_score value: 50.166 - task: type: Retrieval dataset: name: MTEB COIRCodeSearchNetRetrieval (java) type: CoIR-Retrieval/CodeSearchNet config: java split: test revision: 4adc7bc41202b5c13543c9c886a25f340634dab3 metrics: - type: ndcg_at_1 value: 42.355 - type: ndcg_at_3 value: 50.89 - type: ndcg_at_5 value: 53.089 - type: ndcg_at_10 value: 55.062 - type: ndcg_at_20 value: 56.373 - type: ndcg_at_100 value: 58.268 - type: ndcg_at_1000 value: 59.367999999999995 - type: map_at_1 value: 42.355 - type: map_at_3 value: 48.825 - type: map_at_5 value: 50.05 - type: map_at_10 value: 50.866 - type: map_at_20 value: 51.227999999999994 - type: map_at_100 value: 51.486 - type: map_at_1000 value: 51.525 - type: recall_at_1 value: 42.355 - type: recall_at_3 value: 56.851 - type: recall_at_5 value: 62.173 - type: recall_at_10 value: 68.26100000000001 - type: recall_at_20 value: 73.437 - type: recall_at_100 value: 83.706 - type: recall_at_1000 value: 92.506 - type: precision_at_1 value: 42.355 - type: precision_at_3 value: 18.95 - type: precision_at_5 value: 12.435 - type: precision_at_10 value: 6.8260000000000005 - type: precision_at_20 value: 3.672 - type: precision_at_100 value: 0.8370000000000001 - type: precision_at_1000 value: 0.093 - type: mrr_at_1 value: 42.3551 - type: mrr_at_3 value: 48.8255 - type: mrr_at_5 value: 50.049600000000005 - type: mrr_at_10 value: 50.8665 - type: mrr_at_20 value: 51.227999999999994 - type: mrr_at_100 value: 51.486 - type: mrr_at_1000 value: 51.525200000000005 - type: nauc_ndcg_at_1_max value: 41.261700000000005 - type: nauc_ndcg_at_1_std value: -4.1932 - type: nauc_ndcg_at_1_diff1 value: 62.1792 - type: nauc_ndcg_at_3_max value: 43.6389 - type: nauc_ndcg_at_3_std value: -2.7453000000000003 - type: nauc_ndcg_at_3_diff1 value: 56.621 - type: nauc_ndcg_at_5_max value: 43.5895 - type: nauc_ndcg_at_5_std value: -2.1214 - type: nauc_ndcg_at_5_diff1 value: 55.7216 - type: nauc_ndcg_at_10_max value: 43.56 - type: nauc_ndcg_at_10_std value: -1.2124 - type: nauc_ndcg_at_10_diff1 value: 55.1817 - type: nauc_ndcg_at_20_max value: 43.6918 - type: nauc_ndcg_at_20_std value: -0.4332 - type: nauc_ndcg_at_20_diff1 value: 54.9887 - type: nauc_ndcg_at_100_max value: 43.945499999999996 - type: nauc_ndcg_at_100_std value: 0.3674 - type: nauc_ndcg_at_100_diff1 value: 55.237899999999996 - type: nauc_ndcg_at_1000_max value: 43.8498 - type: nauc_ndcg_at_1000_std value: 0.1663 - type: nauc_ndcg_at_1000_diff1 value: 55.6509 - type: nauc_map_at_1_max value: 41.261700000000005 - type: nauc_map_at_1_std value: -4.1932 - type: nauc_map_at_1_diff1 value: 62.1792 - type: nauc_map_at_3_max value: 43.0699 - type: nauc_map_at_3_std value: -3.1619 - type: nauc_map_at_3_diff1 value: 57.961600000000004 - type: nauc_map_at_5_max 
value: 43.0235 - type: nauc_map_at_5_std value: -2.8471 - type: nauc_map_at_5_diff1 value: 57.492399999999996 - type: nauc_map_at_10_max value: 43.0155 - type: nauc_map_at_10_std value: -2.4906 - type: nauc_map_at_10_diff1 value: 57.308899999999994 - type: nauc_map_at_20_max value: 43.0405 - type: nauc_map_at_20_std value: -2.299 - type: nauc_map_at_20_diff1 value: 57.262 - type: nauc_map_at_100_max value: 43.0606 - type: nauc_map_at_100_std value: -2.2096 - type: nauc_map_at_100_diff1 value: 57.2982 - type: nauc_map_at_1000_max value: 43.0566 - type: nauc_map_at_1000_std value: -2.2155 - type: nauc_map_at_1000_diff1 value: 57.312 - type: nauc_recall_at_1_max value: 41.261700000000005 - type: nauc_recall_at_1_std value: -4.1932 - type: nauc_recall_at_1_diff1 value: 62.1792 - type: nauc_recall_at_3_max value: 45.368199999999995 - type: nauc_recall_at_3_std value: -1.4471 - type: nauc_recall_at_3_diff1 value: 52.5416 - type: nauc_recall_at_5_max value: 45.421299999999995 - type: nauc_recall_at_5_std value: 0.3829 - type: nauc_recall_at_5_diff1 value: 49.8591 - type: nauc_recall_at_10_max value: 45.4698 - type: nauc_recall_at_10_std value: 3.9899999999999998 - type: nauc_recall_at_10_diff1 value: 47.100500000000004 - type: nauc_recall_at_20_max value: 46.4998 - type: nauc_recall_at_20_std value: 8.8468 - type: nauc_recall_at_20_diff1 value: 45.027899999999995 - type: nauc_recall_at_100_max value: 50.79559999999999 - type: nauc_recall_at_100_std value: 21.8125 - type: nauc_recall_at_100_diff1 value: 42.735099999999996 - type: nauc_recall_at_1000_max value: 55.116 - type: nauc_recall_at_1000_std value: 37.5788 - type: nauc_recall_at_1000_diff1 value: 42.2857 - type: nauc_precision_at_1_max value: 41.261700000000005 - type: nauc_precision_at_1_std value: -4.1932 - type: nauc_precision_at_1_diff1 value: 62.1792 - type: nauc_precision_at_3_max value: 45.368199999999995 - type: nauc_precision_at_3_std value: -1.4471 - type: nauc_precision_at_3_diff1 value: 52.5416 - type: nauc_precision_at_5_max value: 45.421299999999995 - type: nauc_precision_at_5_std value: 0.3829 - type: nauc_precision_at_5_diff1 value: 49.8591 - type: nauc_precision_at_10_max value: 45.4698 - type: nauc_precision_at_10_std value: 3.9899999999999998 - type: nauc_precision_at_10_diff1 value: 47.100500000000004 - type: nauc_precision_at_20_max value: 46.4998 - type: nauc_precision_at_20_std value: 8.8468 - type: nauc_precision_at_20_diff1 value: 45.027899999999995 - type: nauc_precision_at_100_max value: 50.79559999999999 - type: nauc_precision_at_100_std value: 21.8125 - type: nauc_precision_at_100_diff1 value: 42.735099999999996 - type: nauc_precision_at_1000_max value: 55.116 - type: nauc_precision_at_1000_std value: 37.5788 - type: nauc_precision_at_1000_diff1 value: 42.2857 - type: nauc_mrr_at_1_max value: 41.261700000000005 - type: nauc_mrr_at_1_std value: -4.1932 - type: nauc_mrr_at_1_diff1 value: 62.1792 - type: nauc_mrr_at_3_max value: 43.0699 - type: nauc_mrr_at_3_std value: -3.1619 - type: nauc_mrr_at_3_diff1 value: 57.961600000000004 - type: nauc_mrr_at_5_max value: 43.0235 - type: nauc_mrr_at_5_std value: -2.8471 - type: nauc_mrr_at_5_diff1 value: 57.492399999999996 - type: nauc_mrr_at_10_max value: 43.0155 - type: nauc_mrr_at_10_std value: -2.4906 - type: nauc_mrr_at_10_diff1 value: 57.308899999999994 - type: nauc_mrr_at_20_max value: 43.0405 - type: nauc_mrr_at_20_std value: -2.299 - type: nauc_mrr_at_20_diff1 value: 57.262 - type: nauc_mrr_at_100_max value: 43.0606 - type: nauc_mrr_at_100_std value: -2.2096 - type: 
nauc_mrr_at_100_diff1 value: 57.2982 - type: nauc_mrr_at_1000_max value: 43.0566 - type: nauc_mrr_at_1000_std value: -2.2155 - type: nauc_mrr_at_1000_diff1 value: 57.312 - type: main_score value: 55.062 - task: type: Retrieval dataset: name: MTEB COIRCodeSearchNetRetrieval (php) type: CoIR-Retrieval/CodeSearchNet config: php split: test revision: 4adc7bc41202b5c13543c9c886a25f340634dab3 metrics: - type: ndcg_at_1 value: 36.835 - type: ndcg_at_3 value: 45.147999999999996 - type: ndcg_at_5 value: 47.497 - type: ndcg_at_10 value: 49.784 - type: ndcg_at_20 value: 51.410999999999994 - type: ndcg_at_100 value: 53.715 - type: ndcg_at_1000 value: 55.102 - type: map_at_1 value: 36.835 - type: map_at_3 value: 43.126 - type: map_at_5 value: 44.429 - type: map_at_10 value: 45.377 - type: map_at_20 value: 45.821 - type: map_at_100 value: 46.139 - type: map_at_1000 value: 46.188 - type: recall_at_1 value: 36.835 - type: recall_at_3 value: 50.992000000000004 - type: recall_at_5 value: 56.693000000000005 - type: recall_at_10 value: 63.743 - type: recall_at_20 value: 70.194 - type: recall_at_100 value: 82.65299999999999 - type: recall_at_1000 value: 93.728 - type: precision_at_1 value: 36.835 - type: precision_at_3 value: 16.997 - type: precision_at_5 value: 11.339 - type: precision_at_10 value: 6.3740000000000006 - type: precision_at_20 value: 3.51 - type: precision_at_100 value: 0.827 - type: precision_at_1000 value: 0.094 - type: mrr_at_1 value: 36.8346 - type: mrr_at_3 value: 43.1259 - type: mrr_at_5 value: 44.4289 - type: mrr_at_10 value: 45.3769 - type: mrr_at_20 value: 45.8215 - type: mrr_at_100 value: 46.138600000000004 - type: mrr_at_1000 value: 46.1881 - type: nauc_ndcg_at_1_max value: 36.9844 - type: nauc_ndcg_at_1_std value: -3.2222 - type: nauc_ndcg_at_1_diff1 value: 58.896 - type: nauc_ndcg_at_3_max value: 37.6355 - type: nauc_ndcg_at_3_std value: -2.2689 - type: nauc_ndcg_at_3_diff1 value: 52.771100000000004 - type: nauc_ndcg_at_5_max value: 38.175599999999996 - type: nauc_ndcg_at_5_std value: -1.5131999999999999 - type: nauc_ndcg_at_5_diff1 value: 52.0101 - type: nauc_ndcg_at_10_max value: 38.2873 - type: nauc_ndcg_at_10_std value: -0.5444 - type: nauc_ndcg_at_10_diff1 value: 51.3992 - type: nauc_ndcg_at_20_max value: 38.324200000000005 - type: nauc_ndcg_at_20_std value: 0.1328 - type: nauc_ndcg_at_20_diff1 value: 51.2346 - type: nauc_ndcg_at_100_max value: 38.6313 - type: nauc_ndcg_at_100_std value: 0.9426 - type: nauc_ndcg_at_100_diff1 value: 51.65729999999999 - type: nauc_ndcg_at_1000_max value: 38.6274 - type: nauc_ndcg_at_1000_std value: 0.69 - type: nauc_ndcg_at_1000_diff1 value: 52.1029 - type: nauc_map_at_1_max value: 36.9844 - type: nauc_map_at_1_std value: -3.2222 - type: nauc_map_at_1_diff1 value: 58.896 - type: nauc_map_at_3_max value: 37.523 - type: nauc_map_at_3_std value: -2.5115 - type: nauc_map_at_3_diff1 value: 54.17960000000001 - type: nauc_map_at_5_max value: 37.8191 - type: nauc_map_at_5_std value: -2.1073 - type: nauc_map_at_5_diff1 value: 53.780499999999996 - type: nauc_map_at_10_max value: 37.8581 - type: nauc_map_at_10_std value: -1.7191999999999998 - type: nauc_map_at_10_diff1 value: 53.541700000000006 - type: nauc_map_at_20_max value: 37.8684 - type: nauc_map_at_20_std value: -1.5565 - type: nauc_map_at_20_diff1 value: 53.5155 - type: nauc_map_at_100_max value: 37.9101 - type: nauc_map_at_100_std value: -1.4577 - type: nauc_map_at_100_diff1 value: 53.5894 - type: nauc_map_at_1000_max value: 37.9109 - type: nauc_map_at_1000_std value: -1.4617 - type: 
nauc_map_at_1000_diff1 value: 53.6044 - type: nauc_recall_at_1_max value: 36.9844 - type: nauc_recall_at_1_std value: -3.2222 - type: nauc_recall_at_1_diff1 value: 58.896 - type: nauc_recall_at_3_max value: 37.9468 - type: nauc_recall_at_3_std value: -1.5512 - type: nauc_recall_at_3_diff1 value: 48.6655 - type: nauc_recall_at_5_max value: 39.3342 - type: nauc_recall_at_5_std value: 0.44739999999999996 - type: nauc_recall_at_5_diff1 value: 46.475100000000005 - type: nauc_recall_at_10_max value: 39.8619 - type: nauc_recall_at_10_std value: 4.0042 - type: nauc_recall_at_10_diff1 value: 43.8251 - type: nauc_recall_at_20_max value: 40.226299999999995 - type: nauc_recall_at_20_std value: 8.052299999999999 - type: nauc_recall_at_20_diff1 value: 41.937400000000004 - type: nauc_recall_at_100_max value: 44.221 - type: nauc_recall_at_100_std value: 20.433699999999998 - type: nauc_recall_at_100_diff1 value: 40.745599999999996 - type: nauc_recall_at_1000_max value: 52.6045 - type: nauc_recall_at_1000_std value: 40.3497 - type: nauc_recall_at_1000_diff1 value: 40.248 - type: nauc_precision_at_1_max value: 36.9844 - type: nauc_precision_at_1_std value: -3.2222 - type: nauc_precision_at_1_diff1 value: 58.896 - type: nauc_precision_at_3_max value: 37.9468 - type: nauc_precision_at_3_std value: -1.5512 - type: nauc_precision_at_3_diff1 value: 48.6655 - type: nauc_precision_at_5_max value: 39.3342 - type: nauc_precision_at_5_std value: 0.44739999999999996 - type: nauc_precision_at_5_diff1 value: 46.475100000000005 - type: nauc_precision_at_10_max value: 39.8619 - type: nauc_precision_at_10_std value: 4.0042 - type: nauc_precision_at_10_diff1 value: 43.8251 - type: nauc_precision_at_20_max value: 40.226299999999995 - type: nauc_precision_at_20_std value: 8.052299999999999 - type: nauc_precision_at_20_diff1 value: 41.937400000000004 - type: nauc_precision_at_100_max value: 44.221 - type: nauc_precision_at_100_std value: 20.433699999999998 - type: nauc_precision_at_100_diff1 value: 40.745599999999996 - type: nauc_precision_at_1000_max value: 52.6045 - type: nauc_precision_at_1000_std value: 40.3497 - type: nauc_precision_at_1000_diff1 value: 40.248 - type: nauc_mrr_at_1_max value: 36.9844 - type: nauc_mrr_at_1_std value: -3.2222 - type: nauc_mrr_at_1_diff1 value: 58.896 - type: nauc_mrr_at_3_max value: 37.523 - type: nauc_mrr_at_3_std value: -2.5115 - type: nauc_mrr_at_3_diff1 value: 54.17960000000001 - type: nauc_mrr_at_5_max value: 37.8191 - type: nauc_mrr_at_5_std value: -2.1073 - type: nauc_mrr_at_5_diff1 value: 53.780499999999996 - type: nauc_mrr_at_10_max value: 37.8581 - type: nauc_mrr_at_10_std value: -1.7191999999999998 - type: nauc_mrr_at_10_diff1 value: 53.541700000000006 - type: nauc_mrr_at_20_max value: 37.8684 - type: nauc_mrr_at_20_std value: -1.5565 - type: nauc_mrr_at_20_diff1 value: 53.5155 - type: nauc_mrr_at_100_max value: 37.9101 - type: nauc_mrr_at_100_std value: -1.4577 - type: nauc_mrr_at_100_diff1 value: 53.5894 - type: nauc_mrr_at_1000_max value: 37.9109 - type: nauc_mrr_at_1000_std value: -1.4617 - type: nauc_mrr_at_1000_diff1 value: 53.6044 - type: main_score value: 49.784 - task: type: Retrieval dataset: name: MTEB CQADupstackAndroidRetrieval (default) type: mteb/cqadupstack-android config: default split: test revision: f46a197baaae43b4f621051089b82a364682dfeb metrics: - type: ndcg_at_1 value: 44.206 - type: ndcg_at_3 value: 49.364999999999995 - type: ndcg_at_5 value: 51.429 - type: ndcg_at_10 value: 54.106 - type: ndcg_at_20 value: 56.271 - type: ndcg_at_100 value: 
59.33500000000001 - type: ndcg_at_1000 value: 61.015 - type: map_at_1 value: 35.797000000000004 - type: map_at_3 value: 44.137 - type: map_at_5 value: 46.062999999999995 - type: map_at_10 value: 47.793 - type: map_at_20 value: 48.730000000000004 - type: map_at_100 value: 49.422 - type: map_at_1000 value: 49.546 - type: recall_at_1 value: 35.797000000000004 - type: recall_at_3 value: 51.224000000000004 - type: recall_at_5 value: 57.218999999999994 - type: recall_at_10 value: 65.182 - type: recall_at_20 value: 72.76700000000001 - type: recall_at_100 value: 86.654 - type: recall_at_1000 value: 97.131 - type: precision_at_1 value: 44.206 - type: precision_at_3 value: 23.653 - type: precision_at_5 value: 16.91 - type: precision_at_10 value: 10.443 - type: precision_at_20 value: 6.194999999999999 - type: precision_at_100 value: 1.6310000000000002 - type: precision_at_1000 value: 0.214 - type: mrr_at_1 value: 44.206 - type: mrr_at_3 value: 51.430600000000005 - type: mrr_at_5 value: 52.839800000000004 - type: mrr_at_10 value: 53.808 - type: mrr_at_20 value: 54.2585 - type: mrr_at_100 value: 54.540200000000006 - type: mrr_at_1000 value: 54.577799999999996 - type: nauc_ndcg_at_1_max value: 45.573 - type: nauc_ndcg_at_1_std value: -5.092300000000001 - type: nauc_ndcg_at_1_diff1 value: 50.8011 - type: nauc_ndcg_at_3_max value: 44.7194 - type: nauc_ndcg_at_3_std value: -2.979 - type: nauc_ndcg_at_3_diff1 value: 49.4014 - type: nauc_ndcg_at_5_max value: 45.9838 - type: nauc_ndcg_at_5_std value: -2.4417999999999997 - type: nauc_ndcg_at_5_diff1 value: 48.2985 - type: nauc_ndcg_at_10_max value: 45.6755 - type: nauc_ndcg_at_10_std value: -2.1826000000000003 - type: nauc_ndcg_at_10_diff1 value: 48.443799999999996 - type: nauc_ndcg_at_20_max value: 45.967200000000005 - type: nauc_ndcg_at_20_std value: -0.3553 - type: nauc_ndcg_at_20_diff1 value: 48.0216 - type: nauc_ndcg_at_100_max value: 46.3459 - type: nauc_ndcg_at_100_std value: 0.6947 - type: nauc_ndcg_at_100_diff1 value: 48.3313 - type: nauc_ndcg_at_1000_max value: 46.245599999999996 - type: nauc_ndcg_at_1000_std value: -0.3032 - type: nauc_ndcg_at_1000_diff1 value: 48.3821 - type: nauc_map_at_1_max value: 38.896 - type: nauc_map_at_1_std value: -5.7093 - type: nauc_map_at_1_diff1 value: 54.4608 - type: nauc_map_at_3_max value: 42.6164 - type: nauc_map_at_3_std value: -4.6751000000000005 - type: nauc_map_at_3_diff1 value: 52.23759999999999 - type: nauc_map_at_5_max value: 43.9491 - type: nauc_map_at_5_std value: -3.8674 - type: nauc_map_at_5_diff1 value: 51.03189999999999 - type: nauc_map_at_10_max value: 44.4192 - type: nauc_map_at_10_std value: -3.4564999999999997 - type: nauc_map_at_10_diff1 value: 50.6846 - type: nauc_map_at_20_max value: 44.8404 - type: nauc_map_at_20_std value: -2.67 - type: nauc_map_at_20_diff1 value: 50.3892 - type: nauc_map_at_100_max value: 44.9988 - type: nauc_map_at_100_std value: -2.4528000000000003 - type: nauc_map_at_100_diff1 value: 50.2602 - type: nauc_map_at_1000_max value: 45.0043 - type: nauc_map_at_1000_std value: -2.5084 - type: nauc_map_at_1000_diff1 value: 50.2302 - type: nauc_recall_at_1_max value: 38.896 - type: nauc_recall_at_1_std value: -5.7093 - type: nauc_recall_at_1_diff1 value: 54.4608 - type: nauc_recall_at_3_max value: 40.917500000000004 - type: nauc_recall_at_3_std value: -2.9875 - type: nauc_recall_at_3_diff1 value: 47.935 - type: nauc_recall_at_5_max value: 43.578 - type: nauc_recall_at_5_std value: -0.0832 - type: nauc_recall_at_5_diff1 value: 43.924800000000005 - type: nauc_recall_at_10_max value: 
42.3348 - type: nauc_recall_at_10_std value: 1.2774 - type: nauc_recall_at_10_diff1 value: 42.5842 - type: nauc_recall_at_20_max value: 43.4429 - type: nauc_recall_at_20_std value: 9.6387 - type: nauc_recall_at_20_diff1 value: 40.1222 - type: nauc_recall_at_100_max value: 47.6245 - type: nauc_recall_at_100_std value: 28.7436 - type: nauc_recall_at_100_diff1 value: 42.3728 - type: nauc_recall_at_1000_max value: 57.4835 - type: nauc_recall_at_1000_std value: 66.6109 - type: nauc_recall_at_1000_diff1 value: 48.025 - type: nauc_precision_at_1_max value: 45.573 - type: nauc_precision_at_1_std value: -5.092300000000001 - type: nauc_precision_at_1_diff1 value: 50.8011 - type: nauc_precision_at_3_max value: 39.7982 - type: nauc_precision_at_3_std value: 1.3032 - type: nauc_precision_at_3_diff1 value: 26.422600000000003 - type: nauc_precision_at_5_max value: 36.86 - type: nauc_precision_at_5_std value: 3.9888 - type: nauc_precision_at_5_diff1 value: 13.4191 - type: nauc_precision_at_10_max value: 26.663199999999996 - type: nauc_precision_at_10_std value: 6.388299999999999 - type: nauc_precision_at_10_diff1 value: 2.1197 - type: nauc_precision_at_20_max value: 19.8196 - type: nauc_precision_at_20_std value: 9.0818 - type: nauc_precision_at_20_diff1 value: -6.483999999999999 - type: nauc_precision_at_100_max value: 5.6951 - type: nauc_precision_at_100_std value: 5.3285 - type: nauc_precision_at_100_diff1 value: -17.9036 - type: nauc_precision_at_1000_max value: -9.107999999999999 - type: nauc_precision_at_1000_std value: -7.5626999999999995 - type: nauc_precision_at_1000_diff1 value: -27.7189 - type: nauc_mrr_at_1_max value: 45.573 - type: nauc_mrr_at_1_std value: -5.092300000000001 - type: nauc_mrr_at_1_diff1 value: 50.8011 - type: nauc_mrr_at_3_max value: 46.394800000000004 - type: nauc_mrr_at_3_std value: -3.6457 - type: nauc_mrr_at_3_diff1 value: 48.8878 - type: nauc_mrr_at_5_max value: 46.7342 - type: nauc_mrr_at_5_std value: -3.2079999999999997 - type: nauc_mrr_at_5_diff1 value: 47.9827 - type: nauc_mrr_at_10_max value: 46.4047 - type: nauc_mrr_at_10_std value: -2.9571 - type: nauc_mrr_at_10_diff1 value: 48.036 - type: nauc_mrr_at_20_max value: 46.3645 - type: nauc_mrr_at_20_std value: -2.6208 - type: nauc_mrr_at_20_diff1 value: 48.030699999999996 - type: nauc_mrr_at_100_max value: 46.3951 - type: nauc_mrr_at_100_std value: -2.693 - type: nauc_mrr_at_100_diff1 value: 48.128 - type: nauc_mrr_at_1000_max value: 46.403299999999994 - type: nauc_mrr_at_1000_std value: -2.7043999999999997 - type: nauc_mrr_at_1000_diff1 value: 48.1413 - type: main_score value: 54.106 - task: type: Retrieval dataset: name: MTEB CQADupstackEnglishRetrieval (default) type: mteb/cqadupstack-english config: default split: test revision: ad9991cb51e31e31e430383c75ffb2885547b5f0 metrics: - type: ndcg_at_1 value: 41.274 - type: ndcg_at_3 value: 46.022999999999996 - type: ndcg_at_5 value: 47.882999999999996 - type: ndcg_at_10 value: 50.251000000000005 - type: ndcg_at_20 value: 51.93 - type: ndcg_at_100 value: 54.725 - type: ndcg_at_1000 value: 56.635000000000005 - type: map_at_1 value: 32.748 - type: map_at_3 value: 40.916000000000004 - type: map_at_5 value: 42.620999999999995 - type: map_at_10 value: 44.138 - type: map_at_20 value: 44.911 - type: map_at_100 value: 45.565 - type: map_at_1000 value: 45.698 - type: recall_at_1 value: 32.748 - type: recall_at_3 value: 47.522999999999996 - type: recall_at_5 value: 52.957 - type: recall_at_10 value: 60.321999999999996 - type: recall_at_20 value: 66.506 - type: recall_at_100 value: 
79.669 - type: recall_at_1000 value: 91.73 - type: precision_at_1 value: 41.274 - type: precision_at_3 value: 22.718 - type: precision_at_5 value: 16.064 - type: precision_at_10 value: 9.828000000000001 - type: precision_at_20 value: 5.783 - type: precision_at_100 value: 1.5730000000000002 - type: precision_at_1000 value: 0.202 - type: mrr_at_1 value: 41.273900000000005 - type: mrr_at_3 value: 48.2378 - type: mrr_at_5 value: 49.5626 - type: mrr_at_10 value: 50.459900000000005 - type: mrr_at_20 value: 50.805 - type: mrr_at_100 value: 51.069900000000004 - type: mrr_at_1000 value: 51.1088 - type: nauc_ndcg_at_1_max value: 44.7657 - type: nauc_ndcg_at_1_std value: 3.7028 - type: nauc_ndcg_at_1_diff1 value: 52.017199999999995 - type: nauc_ndcg_at_3_max value: 45.2602 - type: nauc_ndcg_at_3_std value: 3.9891 - type: nauc_ndcg_at_3_diff1 value: 48.9746 - type: nauc_ndcg_at_5_max value: 45.0766 - type: nauc_ndcg_at_5_std value: 4.1764 - type: nauc_ndcg_at_5_diff1 value: 48.5708 - type: nauc_ndcg_at_10_max value: 45.0325 - type: nauc_ndcg_at_10_std value: 4.8281 - type: nauc_ndcg_at_10_diff1 value: 47.6424 - type: nauc_ndcg_at_20_max value: 45.2904 - type: nauc_ndcg_at_20_std value: 5.739 - type: nauc_ndcg_at_20_diff1 value: 47.7781 - type: nauc_ndcg_at_100_max value: 45.6547 - type: nauc_ndcg_at_100_std value: 7.6744 - type: nauc_ndcg_at_100_diff1 value: 47.2483 - type: nauc_ndcg_at_1000_max value: 45.5879 - type: nauc_ndcg_at_1000_std value: 7.919 - type: nauc_ndcg_at_1000_diff1 value: 47.172799999999995 - type: nauc_map_at_1_max value: 35.7481 - type: nauc_map_at_1_std value: -6.451 - type: nauc_map_at_1_diff1 value: 55.3994 - type: nauc_map_at_3_max value: 41.4679 - type: nauc_map_at_3_std value: -2.2265 - type: nauc_map_at_3_diff1 value: 51.9234 - type: nauc_map_at_5_max value: 42.2532 - type: nauc_map_at_5_std value: -0.9950000000000001 - type: nauc_map_at_5_diff1 value: 51.172200000000004 - type: nauc_map_at_10_max value: 43.0496 - type: nauc_map_at_10_std value: 0.3319 - type: nauc_map_at_10_diff1 value: 50.3961 - type: nauc_map_at_20_max value: 43.6286 - type: nauc_map_at_20_std value: 1.2991000000000001 - type: nauc_map_at_20_diff1 value: 50.2938 - type: nauc_map_at_100_max value: 43.906800000000004 - type: nauc_map_at_100_std value: 2.1626 - type: nauc_map_at_100_diff1 value: 50.1124 - type: nauc_map_at_1000_max value: 43.9529 - type: nauc_map_at_1000_std value: 2.309 - type: nauc_map_at_1000_diff1 value: 50.0859 - type: nauc_recall_at_1_max value: 35.7481 - type: nauc_recall_at_1_std value: -6.451 - type: nauc_recall_at_1_diff1 value: 55.3994 - type: nauc_recall_at_3_max value: 40.739 - type: nauc_recall_at_3_std value: -0.9688 - type: nauc_recall_at_3_diff1 value: 47.1898 - type: nauc_recall_at_5_max value: 41.494 - type: nauc_recall_at_5_std value: 2.1174 - type: nauc_recall_at_5_diff1 value: 44.5816 - type: nauc_recall_at_10_max value: 41.739 - type: nauc_recall_at_10_std value: 5.7603 - type: nauc_recall_at_10_diff1 value: 39.9929 - type: nauc_recall_at_20_max value: 42.9217 - type: nauc_recall_at_20_std value: 10.6088 - type: nauc_recall_at_20_diff1 value: 39.1455 - type: nauc_recall_at_100_max value: 45.1375 - type: nauc_recall_at_100_std value: 25.986700000000003 - type: nauc_recall_at_100_diff1 value: 33.972 - type: nauc_recall_at_1000_max value: 46.050200000000004 - type: nauc_recall_at_1000_std value: 44.597300000000004 - type: nauc_recall_at_1000_diff1 value: 26.326100000000004 - type: nauc_precision_at_1_max value: 44.7657 - type: nauc_precision_at_1_std value: 3.7028 - 
type: nauc_precision_at_1_diff1 value: 52.017199999999995 - type: nauc_precision_at_3_max value: 44.291799999999995 - type: nauc_precision_at_3_std value: 18.334500000000002 - type: nauc_precision_at_3_diff1 value: 25.625500000000002 - type: nauc_precision_at_5_max value: 40.8025 - type: nauc_precision_at_5_std value: 23.6687 - type: nauc_precision_at_5_diff1 value: 16.6574 - type: nauc_precision_at_10_max value: 35.7196 - type: nauc_precision_at_10_std value: 29.852099999999997 - type: nauc_precision_at_10_diff1 value: 5.6891 - type: nauc_precision_at_20_max value: 30.119 - type: nauc_precision_at_20_std value: 33.204 - type: nauc_precision_at_20_diff1 value: -0.23509999999999998 - type: nauc_precision_at_100_max value: 18.7797 - type: nauc_precision_at_100_std value: 38.9405 - type: nauc_precision_at_100_diff1 value: -10.8005 - type: nauc_precision_at_1000_max value: 9.0466 - type: nauc_precision_at_1000_std value: 35.3392 - type: nauc_precision_at_1000_diff1 value: -16.3137 - type: nauc_mrr_at_1_max value: 44.7657 - type: nauc_mrr_at_1_std value: 3.7028 - type: nauc_mrr_at_1_diff1 value: 52.017199999999995 - type: nauc_mrr_at_3_max value: 45.8134 - type: nauc_mrr_at_3_std value: 5.6788 - type: nauc_mrr_at_3_diff1 value: 48.666199999999996 - type: nauc_mrr_at_5_max value: 45.8823 - type: nauc_mrr_at_5_std value: 6.4417 - type: nauc_mrr_at_5_diff1 value: 48.1545 - type: nauc_mrr_at_10_max value: 45.813500000000005 - type: nauc_mrr_at_10_std value: 6.7535 - type: nauc_mrr_at_10_diff1 value: 47.726400000000005 - type: nauc_mrr_at_20_max value: 45.792500000000004 - type: nauc_mrr_at_20_std value: 6.8521 - type: nauc_mrr_at_20_diff1 value: 47.7553 - type: nauc_mrr_at_100_max value: 45.8482 - type: nauc_mrr_at_100_std value: 6.979399999999999 - type: nauc_mrr_at_100_diff1 value: 47.7743 - type: nauc_mrr_at_1000_max value: 45.8456 - type: nauc_mrr_at_1000_std value: 6.9712 - type: nauc_mrr_at_1000_diff1 value: 47.7803 - type: main_score value: 50.251000000000005 - task: type: Retrieval dataset: name: MTEB CQADupstackGamingRetrieval (default) type: mteb/cqadupstack-gaming config: default split: test revision: 4885aa143210c98657558c04aaf3dc47cfb54340 metrics: - type: ndcg_at_1 value: 47.147 - type: ndcg_at_3 value: 53.969 - type: ndcg_at_5 value: 56.743 - type: ndcg_at_10 value: 59.318000000000005 - type: ndcg_at_20 value: 60.897999999999996 - type: ndcg_at_100 value: 62.971999999999994 - type: ndcg_at_1000 value: 64.033 - type: map_at_1 value: 41.126000000000005 - type: map_at_3 value: 50.388999999999996 - type: map_at_5 value: 52.286 - type: map_at_10 value: 53.661 - type: map_at_20 value: 54.228 - type: map_at_100 value: 54.588 - type: map_at_1000 value: 54.638 - type: recall_at_1 value: 41.126000000000005 - type: recall_at_3 value: 58.374 - type: recall_at_5 value: 65.226 - type: recall_at_10 value: 72.69099999999999 - type: recall_at_20 value: 78.62 - type: recall_at_100 value: 88.69200000000001 - type: recall_at_1000 value: 96.232 - type: precision_at_1 value: 47.147 - type: precision_at_3 value: 24.159 - type: precision_at_5 value: 16.577 - type: precision_at_10 value: 9.549000000000001 - type: precision_at_20 value: 5.276 - type: precision_at_100 value: 1.224 - type: precision_at_1000 value: 0.135 - type: mrr_at_1 value: 47.147299999999994 - type: mrr_at_3 value: 54.4305 - type: mrr_at_5 value: 55.95719999999999 - type: mrr_at_10 value: 56.8499 - type: mrr_at_20 value: 57.230000000000004 - type: mrr_at_100 value: 57.4584 - type: mrr_at_1000 value: 57.4867 - type: nauc_ndcg_at_1_max value: 
43.5129 - type: nauc_ndcg_at_1_std value: -3.5116 - type: nauc_ndcg_at_1_diff1 value: 52.717000000000006 - type: nauc_ndcg_at_3_max value: 43.6514 - type: nauc_ndcg_at_3_std value: -3.7903 - type: nauc_ndcg_at_3_diff1 value: 48.7913 - type: nauc_ndcg_at_5_max value: 44.465700000000005 - type: nauc_ndcg_at_5_std value: -3.3794999999999997 - type: nauc_ndcg_at_5_diff1 value: 48.8527 - type: nauc_ndcg_at_10_max value: 46.0891 - type: nauc_ndcg_at_10_std value: -0.5534 - type: nauc_ndcg_at_10_diff1 value: 48.857099999999996 - type: nauc_ndcg_at_20_max value: 46.1334 - type: nauc_ndcg_at_20_std value: 0.2072 - type: nauc_ndcg_at_20_diff1 value: 48.8269 - type: nauc_ndcg_at_100_max value: 46.2793 - type: nauc_ndcg_at_100_std value: 1.2965 - type: nauc_ndcg_at_100_diff1 value: 48.6421 - type: nauc_ndcg_at_1000_max value: 46.1606 - type: nauc_ndcg_at_1000_std value: 0.5259 - type: nauc_ndcg_at_1000_diff1 value: 48.9864 - type: nauc_map_at_1_max value: 36.4337 - type: nauc_map_at_1_std value: -5.6848 - type: nauc_map_at_1_diff1 value: 53.42360000000001 - type: nauc_map_at_3_max value: 41.6669 - type: nauc_map_at_3_std value: -5.6545 - type: nauc_map_at_3_diff1 value: 49.6128 - type: nauc_map_at_5_max value: 42.6809 - type: nauc_map_at_5_std value: -4.9988 - type: nauc_map_at_5_diff1 value: 49.645 - type: nauc_map_at_10_max value: 43.7393 - type: nauc_map_at_10_std value: -3.3649 - type: nauc_map_at_10_diff1 value: 49.574 - type: nauc_map_at_20_max value: 43.9855 - type: nauc_map_at_20_std value: -2.8590999999999998 - type: nauc_map_at_20_diff1 value: 49.5139 - type: nauc_map_at_100_max value: 44.0978 - type: nauc_map_at_100_std value: -2.604 - type: nauc_map_at_100_diff1 value: 49.4857 - type: nauc_map_at_1000_max value: 44.114399999999996 - type: nauc_map_at_1000_std value: -2.6081 - type: nauc_map_at_1000_diff1 value: 49.508799999999994 - type: nauc_recall_at_1_max value: 36.4337 - type: nauc_recall_at_1_std value: -5.6848 - type: nauc_recall_at_1_diff1 value: 53.42360000000001 - type: nauc_recall_at_3_max value: 41.320299999999996 - type: nauc_recall_at_3_std value: -5.7135 - type: nauc_recall_at_3_diff1 value: 45.0436 - type: nauc_recall_at_5_max value: 43.1656 - type: nauc_recall_at_5_std value: -3.8888 - type: nauc_recall_at_5_diff1 value: 44.3304 - type: nauc_recall_at_10_max value: 48.9816 - type: nauc_recall_at_10_std value: 5.9506000000000006 - type: nauc_recall_at_10_diff1 value: 43.9217 - type: nauc_recall_at_20_max value: 50.5525 - type: nauc_recall_at_20_std value: 11.8017 - type: nauc_recall_at_20_diff1 value: 43.4987 - type: nauc_recall_at_100_max value: 54.654 - type: nauc_recall_at_100_std value: 31.634800000000002 - type: nauc_recall_at_100_diff1 value: 38.7139 - type: nauc_recall_at_1000_max value: 62.253 - type: nauc_recall_at_1000_std value: 42.6522 - type: nauc_recall_at_1000_diff1 value: 38.3715 - type: nauc_precision_at_1_max value: 43.5129 - type: nauc_precision_at_1_std value: -3.5116 - type: nauc_precision_at_1_diff1 value: 52.717000000000006 - type: nauc_precision_at_3_max value: 41.983399999999996 - type: nauc_precision_at_3_std value: 2.4643 - type: nauc_precision_at_3_diff1 value: 28.185 - type: nauc_precision_at_5_max value: 39.8061 - type: nauc_precision_at_5_std value: 6.4715 - type: nauc_precision_at_5_diff1 value: 21.333199999999998 - type: nauc_precision_at_10_max value: 37.914500000000004 - type: nauc_precision_at_10_std value: 17.1485 - type: nauc_precision_at_10_diff1 value: 12.6277 - type: nauc_precision_at_20_max value: 34.0432 - type: 
nauc_precision_at_20_std value: 23.0425 - type: nauc_precision_at_20_diff1 value: 5.551699999999999 - type: nauc_precision_at_100_max value: 26.0405 - type: nauc_precision_at_100_std value: 28.572599999999998 - type: nauc_precision_at_100_diff1 value: -4.2162 - type: nauc_precision_at_1000_max value: 20.176099999999998 - type: nauc_precision_at_1000_std value: 27.293499999999998 - type: nauc_precision_at_1000_diff1 value: -7.4514 - type: nauc_mrr_at_1_max value: 43.5129 - type: nauc_mrr_at_1_std value: -3.5116 - type: nauc_mrr_at_1_diff1 value: 52.717000000000006 - type: nauc_mrr_at_3_max value: 44.9785 - type: nauc_mrr_at_3_std value: -2.2618 - type: nauc_mrr_at_3_diff1 value: 49.8663 - type: nauc_mrr_at_5_max value: 45.1749 - type: nauc_mrr_at_5_std value: -2.1027 - type: nauc_mrr_at_5_diff1 value: 49.8332 - type: nauc_mrr_at_10_max value: 45.6015 - type: nauc_mrr_at_10_std value: -1.3832 - type: nauc_mrr_at_10_diff1 value: 49.9586 - type: nauc_mrr_at_20_max value: 45.535399999999996 - type: nauc_mrr_at_20_std value: -1.2799 - type: nauc_mrr_at_20_diff1 value: 49.9829 - type: nauc_mrr_at_100_max value: 45.5168 - type: nauc_mrr_at_100_std value: -1.2195 - type: nauc_mrr_at_100_diff1 value: 49.9728 - type: nauc_mrr_at_1000_max value: 45.5076 - type: nauc_mrr_at_1000_std value: -1.2494 - type: nauc_mrr_at_1000_diff1 value: 49.977 - type: main_score value: 59.318000000000005 - task: type: Retrieval dataset: name: MTEB CQADupstackGisRetrieval (default) type: mteb/cqadupstack-gis config: default split: test revision: 5003b3064772da1887988e05400cf3806fe491f2 metrics: - type: ndcg_at_1 value: 30.734 - type: ndcg_at_3 value: 38.672000000000004 - type: ndcg_at_5 value: 40.954 - type: ndcg_at_10 value: 43.564 - type: ndcg_at_20 value: 45.48 - type: ndcg_at_100 value: 48.419000000000004 - type: ndcg_at_1000 value: 50.404 - type: map_at_1 value: 28.464 - type: map_at_3 value: 35.704 - type: map_at_5 value: 37.116 - type: map_at_10 value: 38.279999999999994 - type: map_at_20 value: 38.834 - type: map_at_100 value: 39.277 - type: map_at_1000 value: 39.355000000000004 - type: recall_at_1 value: 28.464 - type: recall_at_3 value: 44.588 - type: recall_at_5 value: 50.031000000000006 - type: recall_at_10 value: 57.621 - type: recall_at_20 value: 64.85499999999999 - type: recall_at_100 value: 79.66 - type: recall_at_1000 value: 94.633 - type: precision_at_1 value: 30.734 - type: precision_at_3 value: 16.497 - type: precision_at_5 value: 11.254 - type: precision_at_10 value: 6.633 - type: precision_at_20 value: 3.757 - type: precision_at_100 value: 0.9560000000000001 - type: precision_at_1000 value: 0.116 - type: mrr_at_1 value: 30.734499999999997 - type: mrr_at_3 value: 38.1356 - type: mrr_at_5 value: 39.3616 - type: mrr_at_10 value: 40.4225 - type: mrr_at_20 value: 40.9334 - type: mrr_at_100 value: 41.297200000000004 - type: mrr_at_1000 value: 41.354600000000005 - type: nauc_ndcg_at_1_max value: 30.2094 - type: nauc_ndcg_at_1_std value: -6.9741 - type: nauc_ndcg_at_1_diff1 value: 47.5543 - type: nauc_ndcg_at_3_max value: 31.4334 - type: nauc_ndcg_at_3_std value: -4.7826 - type: nauc_ndcg_at_3_diff1 value: 41.1025 - type: nauc_ndcg_at_5_max value: 32.3557 - type: nauc_ndcg_at_5_std value: -4.1379 - type: nauc_ndcg_at_5_diff1 value: 40.81 - type: nauc_ndcg_at_10_max value: 32.3949 - type: nauc_ndcg_at_10_std value: -2.3524 - type: nauc_ndcg_at_10_diff1 value: 39.5175 - type: nauc_ndcg_at_20_max value: 31.680500000000002 - type: nauc_ndcg_at_20_std value: -1.7559000000000002 - type: nauc_ndcg_at_20_diff1 
value: 38.1515 - type: nauc_ndcg_at_100_max value: 31.4167 - type: nauc_ndcg_at_100_std value: -1.0329 - type: nauc_ndcg_at_100_diff1 value: 37.8268 - type: nauc_ndcg_at_1000_max value: 31.736900000000002 - type: nauc_ndcg_at_1000_std value: -1.8415000000000001 - type: nauc_ndcg_at_1000_diff1 value: 39.0335 - type: nauc_map_at_1_max value: 28.260099999999998 - type: nauc_map_at_1_std value: -9.0806 - type: nauc_map_at_1_diff1 value: 47.6706 - type: nauc_map_at_3_max value: 30.551000000000002 - type: nauc_map_at_3_std value: -6.0257 - type: nauc_map_at_3_diff1 value: 42.8155 - type: nauc_map_at_5_max value: 31.285800000000002 - type: nauc_map_at_5_std value: -5.671600000000001 - type: nauc_map_at_5_diff1 value: 42.5887 - type: nauc_map_at_10_max value: 31.329800000000002 - type: nauc_map_at_10_std value: -4.8092999999999995 - type: nauc_map_at_10_diff1 value: 41.9856 - type: nauc_map_at_20_max value: 31.2046 - type: nauc_map_at_20_std value: -4.612 - type: nauc_map_at_20_diff1 value: 41.658699999999996 - type: nauc_map_at_100_max value: 31.181399999999996 - type: nauc_map_at_100_std value: -4.4687 - type: nauc_map_at_100_diff1 value: 41.5836 - type: nauc_map_at_1000_max value: 31.1979 - type: nauc_map_at_1000_std value: -4.4772 - type: nauc_map_at_1000_diff1 value: 41.627900000000004 - type: nauc_recall_at_1_max value: 28.260099999999998 - type: nauc_recall_at_1_std value: -9.0806 - type: nauc_recall_at_1_diff1 value: 47.6706 - type: nauc_recall_at_3_max value: 31.129800000000003 - type: nauc_recall_at_3_std value: -3.2782 - type: nauc_recall_at_3_diff1 value: 35.4529 - type: nauc_recall_at_5_max value: 33.6541 - type: nauc_recall_at_5_std value: -1.7704999999999997 - type: nauc_recall_at_5_diff1 value: 34.9944 - type: nauc_recall_at_10_max value: 33.536100000000005 - type: nauc_recall_at_10_std value: 3.4567 - type: nauc_recall_at_10_diff1 value: 30.553599999999996 - type: nauc_recall_at_20_max value: 29.889100000000003 - type: nauc_recall_at_20_std value: 6.5926 - type: nauc_recall_at_20_diff1 value: 23.217 - type: nauc_recall_at_100_max value: 27.4646 - type: nauc_recall_at_100_std value: 15.746199999999998 - type: nauc_recall_at_100_diff1 value: 15.1327 - type: nauc_recall_at_1000_max value: 32.294200000000004 - type: nauc_recall_at_1000_std value: 21.6293 - type: nauc_recall_at_1000_diff1 value: 11.265600000000001 - type: nauc_precision_at_1_max value: 30.2094 - type: nauc_precision_at_1_std value: -6.9741 - type: nauc_precision_at_1_diff1 value: 47.5543 - type: nauc_precision_at_3_max value: 34.3053 - type: nauc_precision_at_3_std value: 0.42760000000000004 - type: nauc_precision_at_3_diff1 value: 33.4827 - type: nauc_precision_at_5_max value: 35.4035 - type: nauc_precision_at_5_std value: 2.3141 - type: nauc_precision_at_5_diff1 value: 30.8004 - type: nauc_precision_at_10_max value: 33.4042 - type: nauc_precision_at_10_std value: 8.6847 - type: nauc_precision_at_10_diff1 value: 23.558200000000003 - type: nauc_precision_at_20_max value: 29.015200000000004 - type: nauc_precision_at_20_std value: 11.3556 - type: nauc_precision_at_20_diff1 value: 15.774099999999999 - type: nauc_precision_at_100_max value: 16.663700000000002 - type: nauc_precision_at_100_std value: 14.666100000000002 - type: nauc_precision_at_100_diff1 value: 2.1911 - type: nauc_precision_at_1000_max value: 7.348599999999999 - type: nauc_precision_at_1000_std value: 8.8804 - type: nauc_precision_at_1000_diff1 value: -7.026599999999999 - type: nauc_mrr_at_1_max value: 30.2094 - type: nauc_mrr_at_1_std value: -6.9741 - 
type: nauc_mrr_at_1_diff1 value: 47.5543 - type: nauc_mrr_at_3_max value: 31.831500000000002 - type: nauc_mrr_at_3_std value: -3.6407000000000003 - type: nauc_mrr_at_3_diff1 value: 42.445 - type: nauc_mrr_at_5_max value: 32.273 - type: nauc_mrr_at_5_std value: -3.5416000000000003 - type: nauc_mrr_at_5_diff1 value: 42.5464 - type: nauc_mrr_at_10_max value: 32.3297 - type: nauc_mrr_at_10_std value: -2.9149000000000003 - type: nauc_mrr_at_10_diff1 value: 42.0233 - type: nauc_mrr_at_20_max value: 32.124 - type: nauc_mrr_at_20_std value: -2.7826 - type: nauc_mrr_at_20_diff1 value: 41.652 - type: nauc_mrr_at_100_max value: 32.0994 - type: nauc_mrr_at_100_std value: -2.7182999999999997 - type: nauc_mrr_at_100_diff1 value: 41.6024 - type: nauc_mrr_at_1000_max value: 32.1058 - type: nauc_mrr_at_1000_std value: -2.7332 - type: nauc_mrr_at_1000_diff1 value: 41.652899999999995 - type: main_score value: 43.564 - task: type: Retrieval dataset: name: MTEB CQADupstackMathematicaRetrieval (default) type: mteb/cqadupstack-mathematica config: default split: test revision: 90fceea13679c63fe563ded68f3b6f06e50061de metrics: - type: ndcg_at_1 value: 22.886 - type: ndcg_at_3 value: 27.864 - type: ndcg_at_5 value: 30.177 - type: ndcg_at_10 value: 32.749 - type: ndcg_at_20 value: 35.343 - type: ndcg_at_100 value: 39.095 - type: ndcg_at_1000 value: 41.656 - type: map_at_1 value: 18.119 - type: map_at_3 value: 24.340999999999998 - type: map_at_5 value: 25.861 - type: map_at_10 value: 27.055 - type: map_at_20 value: 27.855 - type: map_at_100 value: 28.461 - type: map_at_1000 value: 28.577 - type: recall_at_1 value: 18.119 - type: recall_at_3 value: 31.633 - type: recall_at_5 value: 37.532 - type: recall_at_10 value: 44.983000000000004 - type: recall_at_20 value: 54.234 - type: recall_at_100 value: 72.396 - type: recall_at_1000 value: 90.223 - type: precision_at_1 value: 22.886 - type: precision_at_3 value: 13.682 - type: precision_at_5 value: 9.950000000000001 - type: precision_at_10 value: 6.1690000000000005 - type: precision_at_20 value: 3.8120000000000003 - type: precision_at_100 value: 1.0699999999999998 - type: precision_at_1000 value: 0.14300000000000002 - type: mrr_at_1 value: 22.8856 - type: mrr_at_3 value: 29.6642 - type: mrr_at_5 value: 31.107000000000003 - type: mrr_at_10 value: 32.2342 - type: mrr_at_20 value: 32.8971 - type: mrr_at_100 value: 33.2804 - type: mrr_at_1000 value: 33.3395 - type: nauc_ndcg_at_1_max value: 24.8022 - type: nauc_ndcg_at_1_std value: -0.5363 - type: nauc_ndcg_at_1_diff1 value: 33.1639 - type: nauc_ndcg_at_3_max value: 22.0142 - type: nauc_ndcg_at_3_std value: 0.9467 - type: nauc_ndcg_at_3_diff1 value: 28.9545 - type: nauc_ndcg_at_5_max value: 21.9949 - type: nauc_ndcg_at_5_std value: 2.2558000000000002 - type: nauc_ndcg_at_5_diff1 value: 27.4516 - type: nauc_ndcg_at_10_max value: 21.5958 - type: nauc_ndcg_at_10_std value: 3.5044 - type: nauc_ndcg_at_10_diff1 value: 26.9835 - type: nauc_ndcg_at_20_max value: 21.940299999999997 - type: nauc_ndcg_at_20_std value: 4.6913 - type: nauc_ndcg_at_20_diff1 value: 26.8386 - type: nauc_ndcg_at_100_max value: 22.4749 - type: nauc_ndcg_at_100_std value: 6.1636999999999995 - type: nauc_ndcg_at_100_diff1 value: 27.4132 - type: nauc_ndcg_at_1000_max value: 23.034299999999998 - type: nauc_ndcg_at_1000_std value: 5.7944 - type: nauc_ndcg_at_1000_diff1 value: 27.3963 - type: nauc_map_at_1_max value: 21.4135 - type: nauc_map_at_1_std value: 0.649 - type: nauc_map_at_1_diff1 value: 32.1954 - type: nauc_map_at_3_max value: 20.8778 - type: 
nauc_map_at_3_std value: 1.0705 - type: nauc_map_at_3_diff1 value: 28.5319 - type: nauc_map_at_5_max value: 21.0234 - type: nauc_map_at_5_std value: 1.5574 - type: nauc_map_at_5_diff1 value: 27.996399999999998 - type: nauc_map_at_10_max value: 20.9927 - type: nauc_map_at_10_std value: 2.2451 - type: nauc_map_at_10_diff1 value: 27.8283 - type: nauc_map_at_20_max value: 21.16 - type: nauc_map_at_20_std value: 2.6176999999999997 - type: nauc_map_at_20_diff1 value: 27.7722 - type: nauc_map_at_100_max value: 21.3551 - type: nauc_map_at_100_std value: 2.8299000000000003 - type: nauc_map_at_100_diff1 value: 27.8752 - type: nauc_map_at_1000_max value: 21.3871 - type: nauc_map_at_1000_std value: 2.7986 - type: nauc_map_at_1000_diff1 value: 27.8709 - type: nauc_recall_at_1_max value: 21.4135 - type: nauc_recall_at_1_std value: 0.649 - type: nauc_recall_at_1_diff1 value: 32.1954 - type: nauc_recall_at_3_max value: 19.3537 - type: nauc_recall_at_3_std value: 1.4591 - type: nauc_recall_at_3_diff1 value: 25.1911 - type: nauc_recall_at_5_max value: 19.6154 - type: nauc_recall_at_5_std value: 3.5305000000000004 - type: nauc_recall_at_5_diff1 value: 22.6218 - type: nauc_recall_at_10_max value: 18.3048 - type: nauc_recall_at_10_std value: 6.1244 - type: nauc_recall_at_10_diff1 value: 21.6834 - type: nauc_recall_at_20_max value: 18.4913 - type: nauc_recall_at_20_std value: 10.083599999999999 - type: nauc_recall_at_20_diff1 value: 20.502200000000002 - type: nauc_recall_at_100_max value: 19.0212 - type: nauc_recall_at_100_std value: 21.8101 - type: nauc_recall_at_100_diff1 value: 21.2653 - type: nauc_recall_at_1000_max value: 29.3582 - type: nauc_recall_at_1000_std value: 42.8902 - type: nauc_recall_at_1000_diff1 value: 14.060900000000002 - type: nauc_precision_at_1_max value: 24.8022 - type: nauc_precision_at_1_std value: -0.5363 - type: nauc_precision_at_1_diff1 value: 33.1639 - type: nauc_precision_at_3_max value: 23.9746 - type: nauc_precision_at_3_std value: 0.9273999999999999 - type: nauc_precision_at_3_diff1 value: 26.0507 - type: nauc_precision_at_5_max value: 23.5487 - type: nauc_precision_at_5_std value: 2.8788 - type: nauc_precision_at_5_diff1 value: 22.439799999999998 - type: nauc_precision_at_10_max value: 21.826999999999998 - type: nauc_precision_at_10_std value: 5.6201 - type: nauc_precision_at_10_diff1 value: 19.8703 - type: nauc_precision_at_20_max value: 21.199399999999997 - type: nauc_precision_at_20_std value: 8.9305 - type: nauc_precision_at_20_diff1 value: 18.043 - type: nauc_precision_at_100_max value: 17.2345 - type: nauc_precision_at_100_std value: 10.0714 - type: nauc_precision_at_100_diff1 value: 14.521999999999998 - type: nauc_precision_at_1000_max value: 7.5709 - type: nauc_precision_at_1000_std value: 0.2689 - type: nauc_precision_at_1000_diff1 value: 4.4733 - type: nauc_mrr_at_1_max value: 24.8022 - type: nauc_mrr_at_1_std value: -0.5363 - type: nauc_mrr_at_1_diff1 value: 33.1639 - type: nauc_mrr_at_3_max value: 24.435499999999998 - type: nauc_mrr_at_3_std value: 0.9502999999999999 - type: nauc_mrr_at_3_diff1 value: 30.7875 - type: nauc_mrr_at_5_max value: 24.7103 - type: nauc_mrr_at_5_std value: 1.8724999999999998 - type: nauc_mrr_at_5_diff1 value: 30.086000000000002 - type: nauc_mrr_at_10_max value: 24.5685 - type: nauc_mrr_at_10_std value: 2.1533 - type: nauc_mrr_at_10_diff1 value: 29.862899999999996 - type: nauc_mrr_at_20_max value: 24.662100000000002 - type: nauc_mrr_at_20_std value: 2.3742 - type: nauc_mrr_at_20_diff1 value: 29.751300000000004 - type: nauc_mrr_at_100_max 
value: 24.635099999999998 - type: nauc_mrr_at_100_std value: 2.4393000000000002 - type: nauc_mrr_at_100_diff1 value: 29.741 - type: nauc_mrr_at_1000_max value: 24.651699999999998 - type: nauc_mrr_at_1000_std value: 2.4291 - type: nauc_mrr_at_1000_diff1 value: 29.7639 - type: main_score value: 32.749 - task: type: Retrieval dataset: name: MTEB CQADupstackPhysicsRetrieval (default) type: mteb/cqadupstack-physics config: default split: test revision: 79531abbd1fb92d06c6d6315a0cbbbf5bb247ea4 metrics: - type: ndcg_at_1 value: 38.114 - type: ndcg_at_3 value: 42.986000000000004 - type: ndcg_at_5 value: 45.893 - type: ndcg_at_10 value: 48.339999999999996 - type: ndcg_at_20 value: 50.617000000000004 - type: ndcg_at_100 value: 53.861000000000004 - type: ndcg_at_1000 value: 55.701 - type: map_at_1 value: 30.517 - type: map_at_3 value: 38.443 - type: map_at_5 value: 40.685 - type: map_at_10 value: 42.031 - type: map_at_20 value: 42.79 - type: map_at_100 value: 43.415 - type: map_at_1000 value: 43.525000000000006 - type: recall_at_1 value: 30.517 - type: recall_at_3 value: 46.015 - type: recall_at_5 value: 53.801 - type: recall_at_10 value: 61.332 - type: recall_at_20 value: 69.274 - type: recall_at_100 value: 84.051 - type: recall_at_1000 value: 95.826 - type: precision_at_1 value: 38.114 - type: precision_at_3 value: 20.821 - type: precision_at_5 value: 15.034 - type: precision_at_10 value: 8.892999999999999 - type: precision_at_20 value: 5.231 - type: precision_at_100 value: 1.375 - type: precision_at_1000 value: 0.172 - type: mrr_at_1 value: 38.1136 - type: mrr_at_3 value: 45.1716 - type: mrr_at_5 value: 46.8175 - type: mrr_at_10 value: 47.7831 - type: mrr_at_20 value: 48.329 - type: mrr_at_100 value: 48.6471 - type: mrr_at_1000 value: 48.6877 - type: nauc_ndcg_at_1_max value: 40.1541 - type: nauc_ndcg_at_1_std value: 1.4596 - type: nauc_ndcg_at_1_diff1 value: 56.6442 - type: nauc_ndcg_at_3_max value: 38.9776 - type: nauc_ndcg_at_3_std value: 1.464 - type: nauc_ndcg_at_3_diff1 value: 51.5596 - type: nauc_ndcg_at_5_max value: 38.8678 - type: nauc_ndcg_at_5_std value: 2.5537 - type: nauc_ndcg_at_5_diff1 value: 50.522 - type: nauc_ndcg_at_10_max value: 38.698100000000004 - type: nauc_ndcg_at_10_std value: 2.7959 - type: nauc_ndcg_at_10_diff1 value: 49.8331 - type: nauc_ndcg_at_20_max value: 39.7247 - type: nauc_ndcg_at_20_std value: 4.1737 - type: nauc_ndcg_at_20_diff1 value: 49.5233 - type: nauc_ndcg_at_100_max value: 40.649 - type: nauc_ndcg_at_100_std value: 5.7359 - type: nauc_ndcg_at_100_diff1 value: 50.0626 - type: nauc_ndcg_at_1000_max value: 40.765299999999996 - type: nauc_ndcg_at_1000_std value: 5.5551 - type: nauc_ndcg_at_1000_diff1 value: 50.3599 - type: nauc_map_at_1_max value: 35.659 - type: nauc_map_at_1_std value: -3.8913 - type: nauc_map_at_1_diff1 value: 57.7115 - type: nauc_map_at_3_max value: 37.3901 - type: nauc_map_at_3_std value: -0.88 - type: nauc_map_at_3_diff1 value: 52.9203 - type: nauc_map_at_5_max value: 38.0129 - type: nauc_map_at_5_std value: 0.1544 - type: nauc_map_at_5_diff1 value: 52.1596 - type: nauc_map_at_10_max value: 38.3708 - type: nauc_map_at_10_std value: 0.7947 - type: nauc_map_at_10_diff1 value: 51.909000000000006 - type: nauc_map_at_20_max value: 38.690200000000004 - type: nauc_map_at_20_std value: 1.2379 - type: nauc_map_at_20_diff1 value: 51.775000000000006 - type: nauc_map_at_100_max value: 38.9637 - type: nauc_map_at_100_std value: 1.5914000000000001 - type: nauc_map_at_100_diff1 value: 51.90820000000001 - type: nauc_map_at_1000_max value: 38.9784 - 
type: nauc_map_at_1000_std value: 1.6184 - type: nauc_map_at_1000_diff1 value: 51.909000000000006 - type: nauc_recall_at_1_max value: 35.659 - type: nauc_recall_at_1_std value: -3.8913 - type: nauc_recall_at_1_diff1 value: 57.7115 - type: nauc_recall_at_3_max value: 34.6073 - type: nauc_recall_at_3_std value: 0.0162 - type: nauc_recall_at_3_diff1 value: 47.0539 - type: nauc_recall_at_5_max value: 34.3868 - type: nauc_recall_at_5_std value: 3.1425 - type: nauc_recall_at_5_diff1 value: 43.1625 - type: nauc_recall_at_10_max value: 33.6467 - type: nauc_recall_at_10_std value: 4.1808 - type: nauc_recall_at_10_diff1 value: 39.711600000000004 - type: nauc_recall_at_20_max value: 36.3449 - type: nauc_recall_at_20_std value: 9.7358 - type: nauc_recall_at_20_diff1 value: 36.5764 - type: nauc_recall_at_100_max value: 40.563500000000005 - type: nauc_recall_at_100_std value: 23.5405 - type: nauc_recall_at_100_diff1 value: 34.2152 - type: nauc_recall_at_1000_max value: 57.387699999999995 - type: nauc_recall_at_1000_std value: 50.897999999999996 - type: nauc_recall_at_1000_diff1 value: 32.9321 - type: nauc_precision_at_1_max value: 40.1541 - type: nauc_precision_at_1_std value: 1.4596 - type: nauc_precision_at_1_diff1 value: 56.6442 - type: nauc_precision_at_3_max value: 36.586600000000004 - type: nauc_precision_at_3_std value: 9.7112 - type: nauc_precision_at_3_diff1 value: 33.8758 - type: nauc_precision_at_5_max value: 34.1914 - type: nauc_precision_at_5_std value: 13.7515 - type: nauc_precision_at_5_diff1 value: 24.6272 - type: nauc_precision_at_10_max value: 30.764999999999997 - type: nauc_precision_at_10_std value: 16.9823 - type: nauc_precision_at_10_diff1 value: 15.954799999999999 - type: nauc_precision_at_20_max value: 27.976699999999997 - type: nauc_precision_at_20_std value: 21.465999999999998 - type: nauc_precision_at_20_diff1 value: 7.0363999999999995 - type: nauc_precision_at_100_max value: 17.6394 - type: nauc_precision_at_100_std value: 23.4207 - type: nauc_precision_at_100_diff1 value: -4.0614 - type: nauc_precision_at_1000_max value: 3.8186999999999998 - type: nauc_precision_at_1000_std value: 16.0902 - type: nauc_precision_at_1000_diff1 value: -14.5093 - type: nauc_mrr_at_1_max value: 40.1541 - type: nauc_mrr_at_1_std value: 1.4596 - type: nauc_mrr_at_1_diff1 value: 56.6442 - type: nauc_mrr_at_3_max value: 40.4577 - type: nauc_mrr_at_3_std value: 3.558 - type: nauc_mrr_at_3_diff1 value: 53.0569 - type: nauc_mrr_at_5_max value: 40.6135 - type: nauc_mrr_at_5_std value: 4.3164 - type: nauc_mrr_at_5_diff1 value: 52.3585 - type: nauc_mrr_at_10_max value: 40.6563 - type: nauc_mrr_at_10_std value: 4.3038 - type: nauc_mrr_at_10_diff1 value: 52.2149 - type: nauc_mrr_at_20_max value: 40.914 - type: nauc_mrr_at_20_std value: 4.5423 - type: nauc_mrr_at_20_diff1 value: 52.2729 - type: nauc_mrr_at_100_max value: 40.8944 - type: nauc_mrr_at_100_std value: 4.546 - type: nauc_mrr_at_100_diff1 value: 52.315400000000004 - type: nauc_mrr_at_1000_max value: 40.893499999999996 - type: nauc_mrr_at_1000_std value: 4.5310999999999995 - type: nauc_mrr_at_1000_diff1 value: 52.337500000000006 - type: main_score value: 48.339999999999996 - task: type: Retrieval dataset: name: MTEB CQADupstackProgrammersRetrieval (default) type: mteb/cqadupstack-programmers config: default split: test revision: 6184bc1440d2dbc7612be22b50686b8826d22b32 metrics: - type: ndcg_at_1 value: 34.247 - type: ndcg_at_3 value: 38.976 - type: ndcg_at_5 value: 41.332 - type: ndcg_at_10 value: 44.065 - type: ndcg_at_20 value: 46.312999999999995 - 
type: ndcg_at_100 value: 49.434 - type: ndcg_at_1000 value: 51.681999999999995 - type: map_at_1 value: 27.395999999999997 - type: map_at_3 value: 34.782999999999994 - type: map_at_5 value: 36.63 - type: map_at_10 value: 38.043 - type: map_at_20 value: 38.783 - type: map_at_100 value: 39.341 - type: map_at_1000 value: 39.454 - type: recall_at_1 value: 27.395999999999997 - type: recall_at_3 value: 41.785 - type: recall_at_5 value: 48.303000000000004 - type: recall_at_10 value: 56.481 - type: recall_at_20 value: 64.473 - type: recall_at_100 value: 79.012 - type: recall_at_1000 value: 94.182 - type: precision_at_1 value: 34.247 - type: precision_at_3 value: 18.759999999999998 - type: precision_at_5 value: 13.333 - type: precision_at_10 value: 8.059 - type: precision_at_20 value: 4.766 - type: precision_at_100 value: 1.258 - type: precision_at_1000 value: 0.16199999999999998 - type: mrr_at_1 value: 34.2466 - type: mrr_at_3 value: 41.172 - type: mrr_at_5 value: 42.701699999999995 - type: mrr_at_10 value: 43.6807 - type: mrr_at_20 value: 44.1991 - type: mrr_at_100 value: 44.5097 - type: mrr_at_1000 value: 44.5693 - type: nauc_ndcg_at_1_max value: 38.232 - type: nauc_ndcg_at_1_std value: 3.374 - type: nauc_ndcg_at_1_diff1 value: 51.223200000000006 - type: nauc_ndcg_at_3_max value: 38.839800000000004 - type: nauc_ndcg_at_3_std value: 6.529 - type: nauc_ndcg_at_3_diff1 value: 44.2371 - type: nauc_ndcg_at_5_max value: 39.0094 - type: nauc_ndcg_at_5_std value: 8.2202 - type: nauc_ndcg_at_5_diff1 value: 44.8305 - type: nauc_ndcg_at_10_max value: 40.1918 - type: nauc_ndcg_at_10_std value: 9.9826 - type: nauc_ndcg_at_10_diff1 value: 43.5034 - type: nauc_ndcg_at_20_max value: 40.7846 - type: nauc_ndcg_at_20_std value: 11.0178 - type: nauc_ndcg_at_20_diff1 value: 43.176199999999994 - type: nauc_ndcg_at_100_max value: 40.5507 - type: nauc_ndcg_at_100_std value: 13.0203 - type: nauc_ndcg_at_100_diff1 value: 43.2445 - type: nauc_ndcg_at_1000_max value: 40.8071 - type: nauc_ndcg_at_1000_std value: 11.7945 - type: nauc_ndcg_at_1000_diff1 value: 43.8587 - type: nauc_map_at_1_max value: 33.517599999999995 - type: nauc_map_at_1_std value: -0.7517 - type: nauc_map_at_1_diff1 value: 52.92059999999999 - type: nauc_map_at_3_max value: 36.8937 - type: nauc_map_at_3_std value: 4.0335 - type: nauc_map_at_3_diff1 value: 46.4322 - type: nauc_map_at_5_max value: 37.602000000000004 - type: nauc_map_at_5_std value: 5.3923 - type: nauc_map_at_5_diff1 value: 46.6764 - type: nauc_map_at_10_max value: 38.3082 - type: nauc_map_at_10_std value: 6.483600000000001 - type: nauc_map_at_10_diff1 value: 46.0255 - type: nauc_map_at_20_max value: 38.655899999999995 - type: nauc_map_at_20_std value: 6.8814 - type: nauc_map_at_20_diff1 value: 45.8245 - type: nauc_map_at_100_max value: 38.7492 - type: nauc_map_at_100_std value: 7.327100000000001 - type: nauc_map_at_100_diff1 value: 45.8365 - type: nauc_map_at_1000_max value: 38.7584 - type: nauc_map_at_1000_std value: 7.2851 - type: nauc_map_at_1000_diff1 value: 45.8479 - type: nauc_recall_at_1_max value: 33.517599999999995 - type: nauc_recall_at_1_std value: -0.7517 - type: nauc_recall_at_1_diff1 value: 52.92059999999999 - type: nauc_recall_at_3_max value: 37.0749 - type: nauc_recall_at_3_std value: 7.466399999999999 - type: nauc_recall_at_3_diff1 value: 39.454 - type: nauc_recall_at_5_max value: 37.227199999999996 - type: nauc_recall_at_5_std value: 11.7497 - type: nauc_recall_at_5_diff1 value: 39.402 - type: nauc_recall_at_10_max value: 39.901199999999996 - type: nauc_recall_at_10_std 
value: 16.7381 - type: nauc_recall_at_10_diff1 value: 34.3843 - type: nauc_recall_at_20_max value: 41.0603 - type: nauc_recall_at_20_std value: 20.78 - type: nauc_recall_at_20_diff1 value: 32.2975 - type: nauc_recall_at_100_max value: 38.3499 - type: nauc_recall_at_100_std value: 38.7219 - type: nauc_recall_at_100_diff1 value: 29.078100000000003 - type: nauc_recall_at_1000_max value: 48.2277 - type: nauc_recall_at_1000_std value: 55.4646 - type: nauc_recall_at_1000_diff1 value: 26.919900000000002 - type: nauc_precision_at_1_max value: 38.232 - type: nauc_precision_at_1_std value: 3.374 - type: nauc_precision_at_1_diff1 value: 51.223200000000006 - type: nauc_precision_at_3_max value: 39.8718 - type: nauc_precision_at_3_std value: 14.112 - type: nauc_precision_at_3_diff1 value: 28.971200000000003 - type: nauc_precision_at_5_max value: 38.7064 - type: nauc_precision_at_5_std value: 18.1345 - type: nauc_precision_at_5_diff1 value: 26.5685 - type: nauc_precision_at_10_max value: 36.4352 - type: nauc_precision_at_10_std value: 22.331500000000002 - type: nauc_precision_at_10_diff1 value: 17.163600000000002 - type: nauc_precision_at_20_max value: 33.2221 - type: nauc_precision_at_20_std value: 24.252000000000002 - type: nauc_precision_at_20_diff1 value: 9.0445 - type: nauc_precision_at_100_max value: 16.5544 - type: nauc_precision_at_100_std value: 22.867199999999997 - type: nauc_precision_at_100_diff1 value: -3.8588999999999998 - type: nauc_precision_at_1000_max value: 1.7690000000000001 - type: nauc_precision_at_1000_std value: 8.2609 - type: nauc_precision_at_1000_diff1 value: -13.8927 - type: nauc_mrr_at_1_max value: 38.232 - type: nauc_mrr_at_1_std value: 3.374 - type: nauc_mrr_at_1_diff1 value: 51.223200000000006 - type: nauc_mrr_at_3_max value: 40.2699 - type: nauc_mrr_at_3_std value: 7.6 - type: nauc_mrr_at_3_diff1 value: 45.1804 - type: nauc_mrr_at_5_max value: 40.1434 - type: nauc_mrr_at_5_std value: 8.3698 - type: nauc_mrr_at_5_diff1 value: 45.1772 - type: nauc_mrr_at_10_max value: 40.6102 - type: nauc_mrr_at_10_std value: 8.9793 - type: nauc_mrr_at_10_diff1 value: 44.6458 - type: nauc_mrr_at_20_max value: 40.5002 - type: nauc_mrr_at_20_std value: 9.003 - type: nauc_mrr_at_20_diff1 value: 44.671 - type: nauc_mrr_at_100_max value: 40.4429 - type: nauc_mrr_at_100_std value: 9.131 - type: nauc_mrr_at_100_diff1 value: 44.728899999999996 - type: nauc_mrr_at_1000_max value: 40.4634 - type: nauc_mrr_at_1000_std value: 9.1018 - type: nauc_mrr_at_1000_diff1 value: 44.7656 - type: main_score value: 44.065 - task: type: Retrieval dataset: name: MTEB CQADupstackRetrieval (default) type: CQADupstackRetrieval_is_a_combined_dataset config: default split: test revision: 160c094312a0e1facb97e55eeddb698c0abe3571 metrics: - type: ndcg_at_1 value: 33.917750000000005 - type: ndcg_at_3 value: 39.253750000000004 - type: ndcg_at_5 value: 41.62250000000001 - type: ndcg_at_10 value: 44.29191666666667 - type: ndcg_at_20 value: 46.318083333333334 - type: ndcg_at_100 value: 49.489000000000004 - type: ndcg_at_1000 value: 51.534083333333335 - type: map_at_1 value: 28.50841666666667 - type: map_at_3 value: 35.52141666666667 - type: map_at_5 value: 37.228500000000004 - type: map_at_10 value: 38.61175 - type: map_at_20 value: 39.3125 - type: map_at_100 value: 39.882083333333334 - type: map_at_1000 value: 39.995916666666666 - type: recall_at_1 value: 28.50841666666667 - type: recall_at_3 value: 42.46875000000001 - type: recall_at_5 value: 48.59916666666667 - type: recall_at_10 value: 56.56024999999999 - type: 
recall_at_20 value: 63.96383333333333 - type: recall_at_100 value: 79.2645 - type: recall_at_1000 value: 93.25150000000002 - type: precision_at_1 value: 33.917750000000005 - type: precision_at_3 value: 18.19558333333333 - type: precision_at_5 value: 12.950166666666668 - type: precision_at_10 value: 7.866333333333333 - type: precision_at_20 value: 4.614749999999999 - type: precision_at_100 value: 1.2374166666666666 - type: precision_at_1000 value: 0.16091666666666668 - type: mrr_at_1 value: 33.917699999999996 - type: mrr_at_3 value: 40.448166666666665 - type: mrr_at_5 value: 41.903483333333334 - type: mrr_at_10 value: 42.944941666666665 - type: mrr_at_20 value: 43.43391666666666 - type: mrr_at_100 value: 43.782399999999996 - type: mrr_at_1000 value: 43.832325 - type: nauc_ndcg_at_1_max value: 38.768750000000004 - type: nauc_ndcg_at_1_std value: 0.5314750000000001 - type: nauc_ndcg_at_1_diff1 value: 50.18021666666667 - type: nauc_ndcg_at_3_max value: 37.73569166666667 - type: nauc_ndcg_at_3_std value: 1.9756250000000004 - type: nauc_ndcg_at_3_diff1 value: 45.217191666666665 - type: nauc_ndcg_at_5_max value: 38.19843333333333 - type: nauc_ndcg_at_5_std value: 2.760133333333333 - type: nauc_ndcg_at_5_diff1 value: 44.559908333333325 - type: nauc_ndcg_at_10_max value: 38.34826666666667 - type: nauc_ndcg_at_10_std value: 3.8177249999999994 - type: nauc_ndcg_at_10_diff1 value: 43.772149999999996 - type: nauc_ndcg_at_20_max value: 38.53288333333333 - type: nauc_ndcg_at_20_std value: 4.801466666666668 - type: nauc_ndcg_at_20_diff1 value: 43.312774999999995 - type: nauc_ndcg_at_100_max value: 38.912774999999996 - type: nauc_ndcg_at_100_std value: 6.39795 - type: nauc_ndcg_at_100_diff1 value: 43.38179166666667 - type: nauc_ndcg_at_1000_max value: 39.0197 - type: nauc_ndcg_at_1000_std value: 5.861708333333333 - type: nauc_ndcg_at_1000_diff1 value: 43.78785833333334 - type: nauc_map_at_1_max value: 34.808508333333336 - type: nauc_map_at_1_std value: -2.4239916666666663 - type: nauc_map_at_1_diff1 value: 51.88476666666666 - type: nauc_map_at_3_max value: 36.516549999999995 - type: nauc_map_at_3_std value: 0.008974999999999955 - type: nauc_map_at_3_diff1 value: 47.11013333333332 - type: nauc_map_at_5_max value: 37.17583333333333 - type: nauc_map_at_5_std value: 0.7668083333333334 - type: nauc_map_at_5_diff1 value: 46.496975 - type: nauc_map_at_10_max value: 37.54620833333333 - type: nauc_map_at_10_std value: 1.5577166666666666 - type: nauc_map_at_10_diff1 value: 46.02030833333334 - type: nauc_map_at_20_max value: 37.738058333333335 - type: nauc_map_at_20_std value: 2.0228750000000004 - type: nauc_map_at_20_diff1 value: 45.837608333333336 - type: nauc_map_at_100_max value: 37.864575 - type: nauc_map_at_100_std value: 2.3781916666666665 - type: nauc_map_at_100_diff1 value: 45.818783333333336 - type: nauc_map_at_1000_max value: 37.8704 - type: nauc_map_at_1000_std value: 2.403341666666667 - type: nauc_map_at_1000_diff1 value: 45.83103333333333 - type: nauc_recall_at_1_max value: 34.808508333333336 - type: nauc_recall_at_1_std value: -2.4239916666666663 - type: nauc_recall_at_1_diff1 value: 51.88476666666666 - type: nauc_recall_at_3_max value: 35.12659166666666 - type: nauc_recall_at_3_std value: 1.5866916666666664 - type: nauc_recall_at_3_diff1 value: 41.56113333333334 - type: nauc_recall_at_5_max value: 36.147058333333334 - type: nauc_recall_at_5_std value: 3.803583333333333 - type: nauc_recall_at_5_diff1 value: 39.051366666666674 - type: nauc_recall_at_10_max value: 36.10466666666667 - type: 
nauc_recall_at_10_std value: 7.102541666666666 - type: nauc_recall_at_10_diff1 value: 35.79460833333333 - type: nauc_recall_at_20_max value: 36.25878333333333 - type: nauc_recall_at_20_std value: 11.494475000000001 - type: nauc_recall_at_20_diff1 value: 33.06425833333333 - type: nauc_recall_at_100_max value: 38.00966666666667 - type: nauc_recall_at_100_std value: 27.040050000000004 - type: nauc_recall_at_100_diff1 value: 29.968625 - type: nauc_recall_at_1000_max value: 45.32993333333334 - type: nauc_recall_at_1000_std value: 45.327316666666675 - type: nauc_recall_at_1000_diff1 value: 28.088641666666668 - type: nauc_precision_at_1_max value: 38.768750000000004 - type: nauc_precision_at_1_std value: 0.5314750000000001 - type: nauc_precision_at_1_diff1 value: 50.18021666666667 - type: nauc_precision_at_3_max value: 36.52460833333333 - type: nauc_precision_at_3_std value: 7.665850000000001 - type: nauc_precision_at_3_diff1 value: 31.133191666666672 - type: nauc_precision_at_5_max value: 35.20106666666667 - type: nauc_precision_at_5_std value: 10.746766666666666 - type: nauc_precision_at_5_diff1 value: 24.582291666666663 - type: nauc_precision_at_10_max value: 31.465108333333337 - type: nauc_precision_at_10_std value: 15.019074999999999 - type: nauc_precision_at_10_diff1 value: 16.25574166666667 - type: nauc_precision_at_20_max value: 27.589949999999995 - type: nauc_precision_at_20_std value: 18.108775 - type: nauc_precision_at_20_diff1 value: 9.511666666666668 - type: nauc_precision_at_100_max value: 17.18691666666667 - type: nauc_precision_at_100_std value: 21.440466666666666 - type: nauc_precision_at_100_diff1 value: -1.2442166666666667 - type: nauc_precision_at_1000_max value: 5.215425 - type: nauc_precision_at_1000_std value: 13.896516666666663 - type: nauc_precision_at_1000_diff1 value: -10.446258333333335 - type: nauc_mrr_at_1_max value: 38.768750000000004 - type: nauc_mrr_at_1_std value: 0.5314750000000001 - type: nauc_mrr_at_1_diff1 value: 50.18021666666667 - type: nauc_mrr_at_3_max value: 38.979308333333336 - type: nauc_mrr_at_3_std value: 2.755991666666666 - type: nauc_mrr_at_3_diff1 value: 45.991875 - type: nauc_mrr_at_5_max value: 39.26664166666667 - type: nauc_mrr_at_5_std value: 3.2105333333333332 - type: nauc_mrr_at_5_diff1 value: 45.54448333333333 - type: nauc_mrr_at_10_max value: 39.239558333333335 - type: nauc_mrr_at_10_std value: 3.57125 - type: nauc_mrr_at_10_diff1 value: 45.24083333333333 - type: nauc_mrr_at_20_max value: 39.212075 - type: nauc_mrr_at_20_std value: 3.7281833333333334 - type: nauc_mrr_at_20_diff1 value: 45.153083333333335 - type: nauc_mrr_at_100_max value: 39.221091666666666 - type: nauc_mrr_at_100_std value: 3.823533333333333 - type: nauc_mrr_at_100_diff1 value: 45.19413333333333 - type: nauc_mrr_at_1000_max value: 39.22478333333333 - type: nauc_mrr_at_1000_std value: 3.8052833333333327 - type: nauc_mrr_at_1000_diff1 value: 45.21384166666667 - type: main_score value: 44.29191666666667 - task: type: Retrieval dataset: name: MTEB CQADupstackRetrieval (default) type: CQADupstackRetrieval_is_a_combined_dataset config: default split: test revision: CQADupstackRetrieval_is_a_combined_dataset metrics: - type: main_score value: 44.29191666666667 - type: ndcg_at_10 value: 44.29191666666667 - task: type: Retrieval dataset: name: MTEB CQADupstackStatsRetrieval (default) type: mteb/cqadupstack-stats config: default split: test revision: 65ac3a16b8e91f9cee4c9828cc7c335575432a2a metrics: - type: ndcg_at_1 value: 29.141000000000002 - type: ndcg_at_3 value: 
33.861000000000004 - type: ndcg_at_5 value: 35.887 - type: ndcg_at_10 value: 38.596000000000004 - type: ndcg_at_20 value: 40.172000000000004 - type: ndcg_at_100 value: 43.375 - type: ndcg_at_1000 value: 45.562000000000005 - type: map_at_1 value: 25.728 - type: map_at_3 value: 31.268 - type: map_at_5 value: 32.596000000000004 - type: map_at_10 value: 33.903 - type: map_at_20 value: 34.392 - type: map_at_100 value: 34.853 - type: map_at_1000 value: 34.943999999999996 - type: recall_at_1 value: 25.728 - type: recall_at_3 value: 36.638 - type: recall_at_5 value: 41.689 - type: recall_at_10 value: 50.121 - type: recall_at_20 value: 56.043 - type: recall_at_100 value: 72.382 - type: recall_at_1000 value: 88.306 - type: precision_at_1 value: 29.141000000000002 - type: precision_at_3 value: 14.826 - type: precision_at_5 value: 10.428999999999998 - type: precision_at_10 value: 6.334 - type: precision_at_20 value: 3.589 - type: precision_at_100 value: 0.9520000000000001 - type: precision_at_1000 value: 0.121 - type: mrr_at_1 value: 29.141099999999998 - type: mrr_at_3 value: 34.407 - type: mrr_at_5 value: 35.68 - type: mrr_at_10 value: 36.739 - type: mrr_at_20 value: 37.1572 - type: mrr_at_100 value: 37.5448 - type: mrr_at_1000 value: 37.607600000000005 - type: nauc_ndcg_at_1_max value: 43.0703 - type: nauc_ndcg_at_1_std value: 7.8586 - type: nauc_ndcg_at_1_diff1 value: 57.5204 - type: nauc_ndcg_at_3_max value: 41.7529 - type: nauc_ndcg_at_3_std value: 8.549800000000001 - type: nauc_ndcg_at_3_diff1 value: 52.7211 - type: nauc_ndcg_at_5_max value: 43.404399999999995 - type: nauc_ndcg_at_5_std value: 9.117799999999999 - type: nauc_ndcg_at_5_diff1 value: 52.607400000000005 - type: nauc_ndcg_at_10_max value: 43.8638 - type: nauc_ndcg_at_10_std value: 10.7135 - type: nauc_ndcg_at_10_diff1 value: 50.7607 - type: nauc_ndcg_at_20_max value: 43.3389 - type: nauc_ndcg_at_20_std value: 11.7901 - type: nauc_ndcg_at_20_diff1 value: 50.056900000000006 - type: nauc_ndcg_at_100_max value: 43.580600000000004 - type: nauc_ndcg_at_100_std value: 13.616900000000001 - type: nauc_ndcg_at_100_diff1 value: 49.359700000000004 - type: nauc_ndcg_at_1000_max value: 43.6164 - type: nauc_ndcg_at_1000_std value: 13.5428 - type: nauc_ndcg_at_1000_diff1 value: 50.0821 - type: nauc_map_at_1_max value: 40.5495 - type: nauc_map_at_1_std value: 3.5229999999999997 - type: nauc_map_at_1_diff1 value: 59.7723 - type: nauc_map_at_3_max value: 41.2977 - type: nauc_map_at_3_std value: 6.9411000000000005 - type: nauc_map_at_3_diff1 value: 54.879999999999995 - type: nauc_map_at_5_max value: 42.5686 - type: nauc_map_at_5_std value: 7.8032 - type: nauc_map_at_5_diff1 value: 54.4624 - type: nauc_map_at_10_max value: 43.1361 - type: nauc_map_at_10_std value: 8.8783 - type: nauc_map_at_10_diff1 value: 53.747 - type: nauc_map_at_20_max value: 42.9941 - type: nauc_map_at_20_std value: 9.1777 - type: nauc_map_at_20_diff1 value: 53.5394 - type: nauc_map_at_100_max value: 42.960300000000004 - type: nauc_map_at_100_std value: 9.3584 - type: nauc_map_at_100_diff1 value: 53.3856 - type: nauc_map_at_1000_max value: 42.9595 - type: nauc_map_at_1000_std value: 9.3575 - type: nauc_map_at_1000_diff1 value: 53.4136 - type: nauc_recall_at_1_max value: 40.5495 - type: nauc_recall_at_1_std value: 3.5229999999999997 - type: nauc_recall_at_1_diff1 value: 59.7723 - type: nauc_recall_at_3_max value: 39.5622 - type: nauc_recall_at_3_std value: 7.614 - type: nauc_recall_at_3_diff1 value: 49.469 - type: nauc_recall_at_5_max value: 43.086400000000005 - type: 
nauc_recall_at_5_std value: 9.1332 - type: nauc_recall_at_5_diff1 value: 47.8829 - type: nauc_recall_at_10_max value: 43.054700000000004 - type: nauc_recall_at_10_std value: 13.116900000000001 - type: nauc_recall_at_10_diff1 value: 40.804 - type: nauc_recall_at_20_max value: 40.8398 - type: nauc_recall_at_20_std value: 17.099600000000002 - type: nauc_recall_at_20_diff1 value: 37.8978 - type: nauc_recall_at_100_max value: 41.8268 - type: nauc_recall_at_100_std value: 31.5507 - type: nauc_recall_at_100_diff1 value: 28.8246 - type: nauc_recall_at_1000_max value: 44.7113 - type: nauc_recall_at_1000_std value: 49.8697 - type: nauc_recall_at_1000_diff1 value: 26.7287 - type: nauc_precision_at_1_max value: 43.0703 - type: nauc_precision_at_1_std value: 7.8586 - type: nauc_precision_at_1_diff1 value: 57.5204 - type: nauc_precision_at_3_max value: 41.098 - type: nauc_precision_at_3_std value: 16.1082 - type: nauc_precision_at_3_diff1 value: 40.5806 - type: nauc_precision_at_5_max value: 43.8705 - type: nauc_precision_at_5_std value: 19.470299999999998 - type: nauc_precision_at_5_diff1 value: 36.9411 - type: nauc_precision_at_10_max value: 41.5225 - type: nauc_precision_at_10_std value: 22.9023 - type: nauc_precision_at_10_diff1 value: 28.0016 - type: nauc_precision_at_20_max value: 36.68 - type: nauc_precision_at_20_std value: 25.5411 - type: nauc_precision_at_20_diff1 value: 22.3414 - type: nauc_precision_at_100_max value: 25.8805 - type: nauc_precision_at_100_std value: 29.0719 - type: nauc_precision_at_100_diff1 value: 7.4353 - type: nauc_precision_at_1000_max value: 12.2406 - type: nauc_precision_at_1000_std value: 22.909 - type: nauc_precision_at_1000_diff1 value: -4.0427 - type: nauc_mrr_at_1_max value: 43.0703 - type: nauc_mrr_at_1_std value: 7.8586 - type: nauc_mrr_at_1_diff1 value: 57.5204 - type: nauc_mrr_at_3_max value: 42.4962 - type: nauc_mrr_at_3_std value: 9.9083 - type: nauc_mrr_at_3_diff1 value: 52.81 - type: nauc_mrr_at_5_max value: 43.7188 - type: nauc_mrr_at_5_std value: 10.2951 - type: nauc_mrr_at_5_diff1 value: 52.9848 - type: nauc_mrr_at_10_max value: 43.6725 - type: nauc_mrr_at_10_std value: 10.8946 - type: nauc_mrr_at_10_diff1 value: 52.037 - type: nauc_mrr_at_20_max value: 43.4857 - type: nauc_mrr_at_20_std value: 11.097700000000001 - type: nauc_mrr_at_20_diff1 value: 51.83560000000001 - type: nauc_mrr_at_100_max value: 43.4906 - type: nauc_mrr_at_100_std value: 11.2695 - type: nauc_mrr_at_100_diff1 value: 51.783500000000004 - type: nauc_mrr_at_1000_max value: 43.490899999999996 - type: nauc_mrr_at_1000_std value: 11.2507 - type: nauc_mrr_at_1000_diff1 value: 51.8107 - type: main_score value: 38.596000000000004 - task: type: Retrieval dataset: name: MTEB CQADupstackTexRetrieval (default) type: mteb/cqadupstack-tex config: default split: test revision: 46989137a86843e03a6195de44b09deda022eec7 metrics: - type: ndcg_at_1 value: 24.054000000000002 - type: ndcg_at_3 value: 29.115999999999996 - type: ndcg_at_5 value: 31.286 - type: ndcg_at_10 value: 33.722 - type: ndcg_at_20 value: 35.844 - type: ndcg_at_100 value: 39.361000000000004 - type: ndcg_at_1000 value: 42.064 - type: map_at_1 value: 19.911 - type: map_at_3 value: 25.874999999999996 - type: map_at_5 value: 27.403 - type: map_at_10 value: 28.559 - type: map_at_20 value: 29.213 - type: map_at_100 value: 29.784 - type: map_at_1000 value: 29.909999999999997 - type: recall_at_1 value: 19.911 - type: recall_at_3 value: 32.195 - type: recall_at_5 value: 37.818000000000005 - type: recall_at_10 value: 45.183 - type: recall_at_20 
value: 53.081999999999994 - type: recall_at_100 value: 70.25 - type: recall_at_1000 value: 89.22200000000001 - type: precision_at_1 value: 24.054000000000002 - type: precision_at_3 value: 13.914000000000001 - type: precision_at_5 value: 10.069 - type: precision_at_10 value: 6.194 - type: precision_at_20 value: 3.7060000000000004 - type: precision_at_100 value: 1.058 - type: precision_at_1000 value: 0.148 - type: mrr_at_1 value: 24.0537 - type: mrr_at_3 value: 30.161700000000003 - type: mrr_at_5 value: 31.505499999999998 - type: mrr_at_10 value: 32.4828 - type: mrr_at_20 value: 33.054899999999996 - type: mrr_at_100 value: 33.4643 - type: mrr_at_1000 value: 33.534000000000006 - type: nauc_ndcg_at_1_max value: 30.663200000000003 - type: nauc_ndcg_at_1_std value: 1.6019999999999999 - type: nauc_ndcg_at_1_diff1 value: 45.730199999999996 - type: nauc_ndcg_at_3_max value: 28.5124 - type: nauc_ndcg_at_3_std value: 3.4572 - type: nauc_ndcg_at_3_diff1 value: 37.109500000000004 - type: nauc_ndcg_at_5_max value: 28.8788 - type: nauc_ndcg_at_5_std value: 4.5551 - type: nauc_ndcg_at_5_diff1 value: 36.1603 - type: nauc_ndcg_at_10_max value: 28.4392 - type: nauc_ndcg_at_10_std value: 5.1365 - type: nauc_ndcg_at_10_diff1 value: 34.6232 - type: nauc_ndcg_at_20_max value: 28.4854 - type: nauc_ndcg_at_20_std value: 6.6366 - type: nauc_ndcg_at_20_diff1 value: 34.5488 - type: nauc_ndcg_at_100_max value: 29.17 - type: nauc_ndcg_at_100_std value: 7.904 - type: nauc_ndcg_at_100_diff1 value: 34.7771 - type: nauc_ndcg_at_1000_max value: 29.437 - type: nauc_ndcg_at_1000_std value: 7.5479 - type: nauc_ndcg_at_1000_diff1 value: 35.605399999999996 - type: nauc_map_at_1_max value: 28.6015 - type: nauc_map_at_1_std value: 1.6265 - type: nauc_map_at_1_diff1 value: 46.170899999999996 - type: nauc_map_at_3_max value: 27.931099999999997 - type: nauc_map_at_3_std value: 3.3492 - type: nauc_map_at_3_diff1 value: 39.2592 - type: nauc_map_at_5_max value: 28.268700000000003 - type: nauc_map_at_5_std value: 3.9050000000000002 - type: nauc_map_at_5_diff1 value: 38.488299999999995 - type: nauc_map_at_10_max value: 28.197400000000002 - type: nauc_map_at_10_std value: 4.1464 - type: nauc_map_at_10_diff1 value: 37.7547 - type: nauc_map_at_20_max value: 28.27 - type: nauc_map_at_20_std value: 4.5844000000000005 - type: nauc_map_at_20_diff1 value: 37.7547 - type: nauc_map_at_100_max value: 28.458 - type: nauc_map_at_100_std value: 4.786300000000001 - type: nauc_map_at_100_diff1 value: 37.782199999999996 - type: nauc_map_at_1000_max value: 28.4996 - type: nauc_map_at_1000_std value: 4.7852 - type: nauc_map_at_1000_diff1 value: 37.816300000000005 - type: nauc_recall_at_1_max value: 28.6015 - type: nauc_recall_at_1_std value: 1.6265 - type: nauc_recall_at_1_diff1 value: 46.170899999999996 - type: nauc_recall_at_3_max value: 25.9988 - type: nauc_recall_at_3_std value: 4.1643 - type: nauc_recall_at_3_diff1 value: 31.9357 - type: nauc_recall_at_5_max value: 26.6721 - type: nauc_recall_at_5_std value: 6.1122000000000005 - type: nauc_recall_at_5_diff1 value: 29.1941 - type: nauc_recall_at_10_max value: 24.9394 - type: nauc_recall_at_10_std value: 7.313 - type: nauc_recall_at_10_diff1 value: 24.283099999999997 - type: nauc_recall_at_20_max value: 24.3242 - type: nauc_recall_at_20_std value: 12.6805 - type: nauc_recall_at_20_diff1 value: 22.8247 - type: nauc_recall_at_100_max value: 26.917799999999996 - type: nauc_recall_at_100_std value: 21.5069 - type: nauc_recall_at_100_diff1 value: 21.205 - type: nauc_recall_at_1000_max value: 29.8594 - type: 
nauc_recall_at_1000_std value: 31.4363 - type: nauc_recall_at_1000_diff1 value: 23.8707 - type: nauc_precision_at_1_max value: 30.663200000000003 - type: nauc_precision_at_1_std value: 1.6019999999999999 - type: nauc_precision_at_1_diff1 value: 45.730199999999996 - type: nauc_precision_at_3_max value: 28.3435 - type: nauc_precision_at_3_std value: 4.1368 - type: nauc_precision_at_3_diff1 value: 28.5551 - type: nauc_precision_at_5_max value: 28.49 - type: nauc_precision_at_5_std value: 5.8044 - type: nauc_precision_at_5_diff1 value: 24.5061 - type: nauc_precision_at_10_max value: 26.255699999999997 - type: nauc_precision_at_10_std value: 6.998799999999999 - type: nauc_precision_at_10_diff1 value: 18.3038 - type: nauc_precision_at_20_max value: 25.217699999999997 - type: nauc_precision_at_20_std value: 9.9304 - type: nauc_precision_at_20_diff1 value: 15.4876 - type: nauc_precision_at_100_max value: 21.865499999999997 - type: nauc_precision_at_100_std value: 10.746500000000001 - type: nauc_precision_at_100_diff1 value: 7.4687 - type: nauc_precision_at_1000_max value: 18.4782 - type: nauc_precision_at_1000_std value: 3.0096000000000003 - type: nauc_precision_at_1000_diff1 value: 3.3539 - type: nauc_mrr_at_1_max value: 30.663200000000003 - type: nauc_mrr_at_1_std value: 1.6019999999999999 - type: nauc_mrr_at_1_diff1 value: 45.730199999999996 - type: nauc_mrr_at_3_max value: 29.9128 - type: nauc_mrr_at_3_std value: 3.4235 - type: nauc_mrr_at_3_diff1 value: 39.1412 - type: nauc_mrr_at_5_max value: 30.3311 - type: nauc_mrr_at_5_std value: 4.0177 - type: nauc_mrr_at_5_diff1 value: 38.7065 - type: nauc_mrr_at_10_max value: 30.144399999999997 - type: nauc_mrr_at_10_std value: 4.2534 - type: nauc_mrr_at_10_diff1 value: 38.0266 - type: nauc_mrr_at_20_max value: 30.1249 - type: nauc_mrr_at_20_std value: 4.6181 - type: nauc_mrr_at_20_diff1 value: 38.002 - type: nauc_mrr_at_100_max value: 30.1948 - type: nauc_mrr_at_100_std value: 4.7099 - type: nauc_mrr_at_100_diff1 value: 38.0455 - type: nauc_mrr_at_1000_max value: 30.1966 - type: nauc_mrr_at_1000_std value: 4.6948 - type: nauc_mrr_at_1000_diff1 value: 38.0747 - type: main_score value: 33.722 - task: type: Retrieval dataset: name: MTEB CQADupstackUnixRetrieval (default) type: mteb/cqadupstack-unix config: default split: test revision: 6c6430d3a6d36f8d2a829195bc5dc94d7e063e53 metrics: - type: ndcg_at_1 value: 35.168 - type: ndcg_at_3 value: 39.972 - type: ndcg_at_5 value: 42.586 - type: ndcg_at_10 value: 46.071 - type: ndcg_at_20 value: 48.028999999999996 - type: ndcg_at_100 value: 51.351 - type: ndcg_at_1000 value: 53.169999999999995 - type: map_at_1 value: 29.819000000000003 - type: map_at_3 value: 36.571999999999996 - type: map_at_5 value: 38.385999999999996 - type: map_at_10 value: 40.073 - type: map_at_20 value: 40.72 - type: map_at_100 value: 41.289 - type: map_at_1000 value: 41.375 - type: recall_at_1 value: 29.819000000000003 - type: recall_at_3 value: 43.245 - type: recall_at_5 value: 49.931 - type: recall_at_10 value: 60.075 - type: recall_at_20 value: 67.118 - type: recall_at_100 value: 82.771 - type: recall_at_1000 value: 95.219 - type: precision_at_1 value: 35.168 - type: precision_at_3 value: 18.221 - type: precision_at_5 value: 12.892000000000001 - type: precision_at_10 value: 7.985 - type: precision_at_20 value: 4.529 - type: precision_at_100 value: 1.185 - type: precision_at_1000 value: 0.14400000000000002 - type: mrr_at_1 value: 35.1679 - type: mrr_at_3 value: 41.4024 - type: mrr_at_5 value: 43.039500000000004 - type: mrr_at_10 value: 
44.3808 - type: mrr_at_20 value: 44.823299999999996 - type: mrr_at_100 value: 45.1914 - type: mrr_at_1000 value: 45.2339 - type: nauc_ndcg_at_1_max value: 43.9321 - type: nauc_ndcg_at_1_std value: -6.0145 - type: nauc_ndcg_at_1_diff1 value: 53.6293 - type: nauc_ndcg_at_3_max value: 42.0025 - type: nauc_ndcg_at_3_std value: -5.6881 - type: nauc_ndcg_at_3_diff1 value: 47.9461 - type: nauc_ndcg_at_5_max value: 42.916900000000005 - type: nauc_ndcg_at_5_std value: -4.2002999999999995 - type: nauc_ndcg_at_5_diff1 value: 48.0738 - type: nauc_ndcg_at_10_max value: 42.6014 - type: nauc_ndcg_at_10_std value: -2.8179 - type: nauc_ndcg_at_10_diff1 value: 46.792899999999996 - type: nauc_ndcg_at_20_max value: 41.9182 - type: nauc_ndcg_at_20_std value: -2.6714 - type: nauc_ndcg_at_20_diff1 value: 46.111000000000004 - type: nauc_ndcg_at_100_max value: 42.6218 - type: nauc_ndcg_at_100_std value: -1.6882000000000001 - type: nauc_ndcg_at_100_diff1 value: 46.3204 - type: nauc_ndcg_at_1000_max value: 42.6413 - type: nauc_ndcg_at_1000_std value: -2.2983 - type: nauc_ndcg_at_1000_diff1 value: 46.840399999999995 - type: nauc_map_at_1_max value: 41.256 - type: nauc_map_at_1_std value: -7.5877 - type: nauc_map_at_1_diff1 value: 56.383300000000006 - type: nauc_map_at_3_max value: 41.904 - type: nauc_map_at_3_std value: -6.548 - type: nauc_map_at_3_diff1 value: 50.7949 - type: nauc_map_at_5_max value: 42.568400000000004 - type: nauc_map_at_5_std value: -5.3873999999999995 - type: nauc_map_at_5_diff1 value: 50.3791 - type: nauc_map_at_10_max value: 42.6619 - type: nauc_map_at_10_std value: -4.8052 - type: nauc_map_at_10_diff1 value: 49.5933 - type: nauc_map_at_20_max value: 42.4985 - type: nauc_map_at_20_std value: -4.7620000000000005 - type: nauc_map_at_20_diff1 value: 49.3214 - type: nauc_map_at_100_max value: 42.6165 - type: nauc_map_at_100_std value: -4.595599999999999 - type: nauc_map_at_100_diff1 value: 49.277100000000004 - type: nauc_map_at_1000_max value: 42.6146 - type: nauc_map_at_1000_std value: -4.5920000000000005 - type: nauc_map_at_1000_diff1 value: 49.2815 - type: nauc_recall_at_1_max value: 41.256 - type: nauc_recall_at_1_std value: -7.5877 - type: nauc_recall_at_1_diff1 value: 56.383300000000006 - type: nauc_recall_at_3_max value: 39.626099999999994 - type: nauc_recall_at_3_std value: -5.973 - type: nauc_recall_at_3_diff1 value: 44.651 - type: nauc_recall_at_5_max value: 41.4392 - type: nauc_recall_at_5_std value: -1.8328 - type: nauc_recall_at_5_diff1 value: 42.928399999999996 - type: nauc_recall_at_10_max value: 38.807 - type: nauc_recall_at_10_std value: 2.863 - type: nauc_recall_at_10_diff1 value: 37.6663 - type: nauc_recall_at_20_max value: 34.9705 - type: nauc_recall_at_20_std value: 4.1407 - type: nauc_recall_at_20_diff1 value: 33.6156 - type: nauc_recall_at_100_max value: 38.4049 - type: nauc_recall_at_100_std value: 16.7735 - type: nauc_recall_at_100_diff1 value: 30.724800000000002 - type: nauc_recall_at_1000_max value: 42.9152 - type: nauc_recall_at_1000_std value: 32.1176 - type: nauc_recall_at_1000_diff1 value: 33.2582 - type: nauc_precision_at_1_max value: 43.9321 - type: nauc_precision_at_1_std value: -6.0145 - type: nauc_precision_at_1_diff1 value: 53.6293 - type: nauc_precision_at_3_max value: 38.1748 - type: nauc_precision_at_3_std value: -2.3163 - type: nauc_precision_at_3_diff1 value: 31.2502 - type: nauc_precision_at_5_max value: 36.503 - type: nauc_precision_at_5_std value: 2.0892 - type: nauc_precision_at_5_diff1 value: 25.249100000000002 - type: nauc_precision_at_10_max value: 
30.2104 - type: nauc_precision_at_10_std value: 6.6937999999999995 - type: nauc_precision_at_10_diff1 value: 14.0684 - type: nauc_precision_at_20_max value: 23.6494 - type: nauc_precision_at_20_std value: 7.216500000000001 - type: nauc_precision_at_20_diff1 value: 6.7953 - type: nauc_precision_at_100_max value: 11.2361 - type: nauc_precision_at_100_std value: 11.824 - type: nauc_precision_at_100_diff1 value: -7.6405 - type: nauc_precision_at_1000_max value: -3.8651 - type: nauc_precision_at_1000_std value: 5.367999999999999 - type: nauc_precision_at_1000_diff1 value: -17.473 - type: nauc_mrr_at_1_max value: 43.9321 - type: nauc_mrr_at_1_std value: -6.0145 - type: nauc_mrr_at_1_diff1 value: 53.6293 - type: nauc_mrr_at_3_max value: 42.8188 - type: nauc_mrr_at_3_std value: -5.1393 - type: nauc_mrr_at_3_diff1 value: 48.3128 - type: nauc_mrr_at_5_max value: 43.5383 - type: nauc_mrr_at_5_std value: -4.2538 - type: nauc_mrr_at_5_diff1 value: 48.0319 - type: nauc_mrr_at_10_max value: 43.121700000000004 - type: nauc_mrr_at_10_std value: -3.7823 - type: nauc_mrr_at_10_diff1 value: 47.6064 - type: nauc_mrr_at_20_max value: 42.8886 - type: nauc_mrr_at_20_std value: -3.8175 - type: nauc_mrr_at_20_diff1 value: 47.5437 - type: nauc_mrr_at_100_max value: 42.9514 - type: nauc_mrr_at_100_std value: -3.8205000000000005 - type: nauc_mrr_at_100_diff1 value: 47.6513 - type: nauc_mrr_at_1000_max value: 42.9567 - type: nauc_mrr_at_1000_std value: -3.8327 - type: nauc_mrr_at_1000_diff1 value: 47.6603 - type: main_score value: 46.071 - task: type: Retrieval dataset: name: MTEB CQADupstackWebmastersRetrieval (default) type: mteb/cqadupstack-webmasters config: default split: test revision: 160c094312a0e1facb97e55eeddb698c0abe3571 metrics: - type: ndcg_at_1 value: 33.794000000000004 - type: ndcg_at_3 value: 38.442 - type: ndcg_at_5 value: 40.737 - type: ndcg_at_10 value: 43.832 - type: ndcg_at_20 value: 45.589 - type: ndcg_at_100 value: 49.514 - type: ndcg_at_1000 value: 51.742 - type: map_at_1 value: 28.409000000000002 - type: map_at_3 value: 34.337 - type: map_at_5 value: 35.985 - type: map_at_10 value: 37.621 - type: map_at_20 value: 38.391 - type: map_at_100 value: 39.233000000000004 - type: map_at_1000 value: 39.471000000000004 - type: recall_at_1 value: 28.409000000000002 - type: recall_at_3 value: 40.133 - type: recall_at_5 value: 45.913 - type: recall_at_10 value: 55.388000000000005 - type: recall_at_20 value: 62.134 - type: recall_at_100 value: 81.517 - type: recall_at_1000 value: 95.038 - type: precision_at_1 value: 33.794000000000004 - type: precision_at_3 value: 17.787 - type: precision_at_5 value: 13.241 - type: precision_at_10 value: 8.597000000000001 - type: precision_at_20 value: 5.267 - type: precision_at_100 value: 1.652 - type: precision_at_1000 value: 0.251 - type: mrr_at_1 value: 33.7945 - type: mrr_at_3 value: 39.5257 - type: mrr_at_5 value: 41.087 - type: mrr_at_10 value: 42.3491 - type: mrr_at_20 value: 42.7479 - type: mrr_at_100 value: 43.1961 - type: mrr_at_1000 value: 43.2373 - type: nauc_ndcg_at_1_max value: 43.9886 - type: nauc_ndcg_at_1_std value: 9.8923 - type: nauc_ndcg_at_1_diff1 value: 50.394000000000005 - type: nauc_ndcg_at_3_max value: 43.074200000000005 - type: nauc_ndcg_at_3_std value: 13.5108 - type: nauc_ndcg_at_3_diff1 value: 47.0674 - type: nauc_ndcg_at_5_max value: 42.810700000000004 - type: nauc_ndcg_at_5_std value: 14.119499999999999 - type: nauc_ndcg_at_5_diff1 value: 46.822 - type: nauc_ndcg_at_10_max value: 43.533699999999996 - type: nauc_ndcg_at_10_std value: 
14.009599999999999 - type: nauc_ndcg_at_10_diff1 value: 47.3163 - type: nauc_ndcg_at_20_max value: 44.4973 - type: nauc_ndcg_at_20_std value: 14.5044 - type: nauc_ndcg_at_20_diff1 value: 47.2833 - type: nauc_ndcg_at_100_max value: 44.7593 - type: nauc_ndcg_at_100_std value: 16.833000000000002 - type: nauc_ndcg_at_100_diff1 value: 47.251599999999996 - type: nauc_ndcg_at_1000_max value: 44.790600000000005 - type: nauc_ndcg_at_1000_std value: 15.987199999999998 - type: nauc_ndcg_at_1000_diff1 value: 47.4071 - type: nauc_map_at_1_max value: 43.4155 - type: nauc_map_at_1_std value: 6.3514 - type: nauc_map_at_1_diff1 value: 54.8257 - type: nauc_map_at_3_max value: 43.1906 - type: nauc_map_at_3_std value: 9.823 - type: nauc_map_at_3_diff1 value: 49.5974 - type: nauc_map_at_5_max value: 43.1564 - type: nauc_map_at_5_std value: 10.3498 - type: nauc_map_at_5_diff1 value: 48.7876 - type: nauc_map_at_10_max value: 43.6805 - type: nauc_map_at_10_std value: 10.844199999999999 - type: nauc_map_at_10_diff1 value: 48.5759 - type: nauc_map_at_20_max value: 44.121700000000004 - type: nauc_map_at_20_std value: 11.6161 - type: nauc_map_at_20_diff1 value: 48.4631 - type: nauc_map_at_100_max value: 44.1124 - type: nauc_map_at_100_std value: 12.439 - type: nauc_map_at_100_diff1 value: 48.4742 - type: nauc_map_at_1000_max value: 44.0146 - type: nauc_map_at_1000_std value: 12.708 - type: nauc_map_at_1000_diff1 value: 48.5587 - type: nauc_recall_at_1_max value: 43.4155 - type: nauc_recall_at_1_std value: 6.3514 - type: nauc_recall_at_1_diff1 value: 54.8257 - type: nauc_recall_at_3_max value: 40.941300000000005 - type: nauc_recall_at_3_std value: 12.864700000000001 - type: nauc_recall_at_3_diff1 value: 44.642900000000004 - type: nauc_recall_at_5_max value: 39.6961 - type: nauc_recall_at_5_std value: 13.6938 - type: nauc_recall_at_5_diff1 value: 42.142 - type: nauc_recall_at_10_max value: 40.2068 - type: nauc_recall_at_10_std value: 14.1258 - type: nauc_recall_at_10_diff1 value: 42.244 - type: nauc_recall_at_20_max value: 42.7956 - type: nauc_recall_at_20_std value: 17.518 - type: nauc_recall_at_20_diff1 value: 42.3104 - type: nauc_recall_at_100_max value: 43.4746 - type: nauc_recall_at_100_std value: 39.7613 - type: nauc_recall_at_100_diff1 value: 40.5005 - type: nauc_recall_at_1000_max value: 58.044 - type: nauc_recall_at_1000_std value: 56.4975 - type: nauc_recall_at_1000_diff1 value: 40.238600000000005 - type: nauc_precision_at_1_max value: 43.9886 - type: nauc_precision_at_1_std value: 9.8923 - type: nauc_precision_at_1_diff1 value: 50.394000000000005 - type: nauc_precision_at_3_max value: 37.436 - type: nauc_precision_at_3_std value: 19.9652 - type: nauc_precision_at_3_diff1 value: 31.1933 - type: nauc_precision_at_5_max value: 32.124900000000004 - type: nauc_precision_at_5_std value: 22.8439 - type: nauc_precision_at_5_diff1 value: 23.325699999999998 - type: nauc_precision_at_10_max value: 26.956200000000003 - type: nauc_precision_at_10_std value: 24.7414 - type: nauc_precision_at_10_diff1 value: 15.1951 - type: nauc_precision_at_20_max value: 20.924799999999998 - type: nauc_precision_at_20_std value: 27.1802 - type: nauc_precision_at_20_diff1 value: 8.575800000000001 - type: nauc_precision_at_100_max value: 3.8554 - type: nauc_precision_at_100_std value: 32.46 - type: nauc_precision_at_100_diff1 value: 1.1094 - type: nauc_precision_at_1000_max value: -4.0572 - type: nauc_precision_at_1000_std value: 29.813499999999998 - type: nauc_precision_at_1000_diff1 value: 0.7384 - type: nauc_mrr_at_1_max value: 43.9886 - 
type: nauc_mrr_at_1_std value: 9.8923 - type: nauc_mrr_at_1_diff1 value: 50.394000000000005 - type: nauc_mrr_at_3_max value: 43.5962 - type: nauc_mrr_at_3_std value: 13.738 - type: nauc_mrr_at_3_diff1 value: 46.9918 - type: nauc_mrr_at_5_max value: 43.6259 - type: nauc_mrr_at_5_std value: 13.3696 - type: nauc_mrr_at_5_diff1 value: 46.7241 - type: nauc_mrr_at_10_max value: 43.7969 - type: nauc_mrr_at_10_std value: 13.477500000000001 - type: nauc_mrr_at_10_diff1 value: 47.125499999999995 - type: nauc_mrr_at_20_max value: 43.8469 - type: nauc_mrr_at_20_std value: 13.5156 - type: nauc_mrr_at_20_diff1 value: 47.088 - type: nauc_mrr_at_100_max value: 43.8068 - type: nauc_mrr_at_100_std value: 13.7051 - type: nauc_mrr_at_100_diff1 value: 47.153600000000004 - type: nauc_mrr_at_1000_max value: 43.8016 - type: nauc_mrr_at_1000_std value: 13.661999999999999 - type: nauc_mrr_at_1000_diff1 value: 47.1571 - type: main_score value: 43.832 - task: type: Retrieval dataset: name: MTEB CQADupstackWordpressRetrieval (default) type: mteb/cqadupstack-wordpress config: default split: test revision: 4ffe81d471b1924886b33c7567bfb200e9eec5c4 metrics: - type: ndcg_at_1 value: 26.247999999999998 - type: ndcg_at_3 value: 31.799 - type: ndcg_at_5 value: 34.563 - type: ndcg_at_10 value: 36.889 - type: ndcg_at_20 value: 39.330999999999996 - type: ndcg_at_100 value: 42.426 - type: ndcg_at_1000 value: 44.745000000000005 - type: map_at_1 value: 24.067 - type: map_at_3 value: 29.492 - type: map_at_5 value: 31.11 - type: map_at_10 value: 32.184000000000005 - type: map_at_20 value: 32.903 - type: map_at_100 value: 33.357 - type: map_at_1000 value: 33.458 - type: recall_at_1 value: 24.067 - type: recall_at_3 value: 36.272 - type: recall_at_5 value: 42.77 - type: recall_at_10 value: 49.344 - type: recall_at_20 value: 58.46 - type: recall_at_100 value: 74.11999999999999 - type: recall_at_1000 value: 91.276 - type: precision_at_1 value: 26.247999999999998 - type: precision_at_3 value: 13.309000000000001 - type: precision_at_5 value: 9.649000000000001 - type: precision_at_10 value: 5.712 - type: precision_at_20 value: 3.466 - type: precision_at_100 value: 0.915 - type: precision_at_1000 value: 0.123 - type: mrr_at_1 value: 26.247700000000002 - type: mrr_at_3 value: 31.638899999999996 - type: mrr_at_5 value: 33.1824 - type: mrr_at_10 value: 34.1493 - type: mrr_at_20 value: 34.7716 - type: mrr_at_100 value: 35.1893 - type: mrr_at_1000 value: 35.2507 - type: nauc_ndcg_at_1_max value: 36.3215 - type: nauc_ndcg_at_1_std value: 0.6172000000000001 - type: nauc_ndcg_at_1_diff1 value: 50.767799999999994 - type: nauc_ndcg_at_3_max value: 32.5903 - type: nauc_ndcg_at_3_std value: 2.5009 - type: nauc_ndcg_at_3_diff1 value: 44.7412 - type: nauc_ndcg_at_5_max value: 32.616499999999995 - type: nauc_ndcg_at_5_std value: 2.2826 - type: nauc_ndcg_at_5_diff1 value: 41.7193 - type: nauc_ndcg_at_10_max value: 32.063399999999994 - type: nauc_ndcg_at_10_std value: 2.7484 - type: nauc_ndcg_at_10_diff1 value: 40.9919 - type: nauc_ndcg_at_20_max value: 32.6337 - type: nauc_ndcg_at_20_std value: 3.6401000000000003 - type: nauc_ndcg_at_20_diff1 value: 39.4371 - type: nauc_ndcg_at_100_max value: 33.4504 - type: nauc_ndcg_at_100_std value: 6.5571 - type: nauc_ndcg_at_100_diff1 value: 40.103899999999996 - type: nauc_ndcg_at_1000_max value: 33.413399999999996 - type: nauc_ndcg_at_1000_std value: 6.1167 - type: nauc_ndcg_at_1000_diff1 value: 40.3296 - type: nauc_map_at_1_max value: 33.9516 - type: nauc_map_at_1_std value: -2.0814 - type: nauc_map_at_1_diff1 value: 
51.6831 - type: nauc_map_at_3_max value: 32.4114 - type: nauc_map_at_3_std value: 0.9002 - type: nauc_map_at_3_diff1 value: 46.3164 - type: nauc_map_at_5_max value: 32.7406 - type: nauc_map_at_5_std value: 0.9598000000000001 - type: nauc_map_at_5_diff1 value: 44.576100000000004 - type: nauc_map_at_10_max value: 32.669 - type: nauc_map_at_10_std value: 1.4043 - type: nauc_map_at_10_diff1 value: 44.1697 - type: nauc_map_at_20_max value: 32.807199999999995 - type: nauc_map_at_20_std value: 1.7632999999999999 - type: nauc_map_at_20_diff1 value: 43.745400000000004 - type: nauc_map_at_100_max value: 32.9749 - type: nauc_map_at_100_std value: 2.1647 - type: nauc_map_at_100_diff1 value: 43.8445 - type: nauc_map_at_1000_max value: 32.9631 - type: nauc_map_at_1000_std value: 2.164 - type: nauc_map_at_1000_diff1 value: 43.8217 - type: nauc_recall_at_1_max value: 33.9516 - type: nauc_recall_at_1_std value: -2.0814 - type: nauc_recall_at_1_diff1 value: 51.6831 - type: nauc_recall_at_3_max value: 30.248199999999997 - type: nauc_recall_at_3_std value: 4.3766 - type: nauc_recall_at_3_diff1 value: 40.7147 - type: nauc_recall_at_5_max value: 29.749799999999997 - type: nauc_recall_at_5_std value: 3.739 - type: nauc_recall_at_5_diff1 value: 33.4515 - type: nauc_recall_at_10_max value: 27.8039 - type: nauc_recall_at_10_std value: 4.3235 - type: nauc_recall_at_10_diff1 value: 31.706200000000003 - type: nauc_recall_at_20_max value: 29.4726 - type: nauc_recall_at_20_std value: 7.2537 - type: nauc_recall_at_20_diff1 value: 24.763099999999998 - type: nauc_recall_at_100_max value: 32.6767 - type: nauc_recall_at_100_std value: 28.704400000000003 - type: nauc_recall_at_100_diff1 value: 23.6186 - type: nauc_recall_at_1000_max value: 35.3748 - type: nauc_recall_at_1000_std value: 49.2642 - type: nauc_recall_at_1000_diff1 value: 15.0664 - type: nauc_precision_at_1_max value: 36.3215 - type: nauc_precision_at_1_std value: 0.6172000000000001 - type: nauc_precision_at_1_diff1 value: 50.767799999999994 - type: nauc_precision_at_3_max value: 32.4313 - type: nauc_precision_at_3_std value: 6.8161 - type: nauc_precision_at_3_diff1 value: 39.4056 - type: nauc_precision_at_5_max value: 32.1058 - type: nauc_precision_at_5_std value: 7.5455 - type: nauc_precision_at_5_diff1 value: 29.119899999999998 - type: nauc_precision_at_10_max value: 29.9078 - type: nauc_precision_at_10_std value: 11.8851 - type: nauc_precision_at_10_diff1 value: 22.5166 - type: nauc_precision_at_20_max value: 29.212300000000003 - type: nauc_precision_at_20_std value: 16.1047 - type: nauc_precision_at_20_diff1 value: 12.209299999999999 - type: nauc_precision_at_100_max value: 24.7982 - type: nauc_precision_at_100_std value: 29.3162 - type: nauc_precision_at_100_diff1 value: 0.8240000000000001 - type: nauc_precision_at_1000_max value: -0.8333 - type: nauc_precision_at_1000_std value: 17.0877 - type: nauc_precision_at_1000_diff1 value: -25.4924 - type: nauc_mrr_at_1_max value: 36.3215 - type: nauc_mrr_at_1_std value: 0.6172000000000001 - type: nauc_mrr_at_1_diff1 value: 50.767799999999994 - type: nauc_mrr_at_3_max value: 34.7464 - type: nauc_mrr_at_3_std value: 2.9025 - type: nauc_mrr_at_3_diff1 value: 45.7566 - type: nauc_mrr_at_5_max value: 34.454 - type: nauc_mrr_at_5_std value: 2.9497 - type: nauc_mrr_at_5_diff1 value: 43.948 - type: nauc_mrr_at_10_max value: 34.1548 - type: nauc_mrr_at_10_std value: 3.0771 - type: nauc_mrr_at_10_diff1 value: 43.626599999999996 - type: nauc_mrr_at_20_max value: 34.3061 - type: nauc_mrr_at_20_std value: 3.2359999999999998 - 
type: nauc_mrr_at_20_diff1 value: 43.2516 - type: nauc_mrr_at_100_max value: 34.3776 - type: nauc_mrr_at_100_std value: 3.5534999999999997 - type: nauc_mrr_at_100_diff1 value: 43.432900000000004 - type: nauc_mrr_at_1000_max value: 34.3807 - type: nauc_mrr_at_1000_std value: 3.5423999999999998 - type: nauc_mrr_at_1000_diff1 value: 43.4448 - type: main_score value: 36.889 - task: type: Retrieval dataset: name: MTEB ClimateFEVER (default) type: mteb/climate-fever config: default split: test revision: 47f2ac6acb640fc46020b02a5b59fdda04d39380 metrics: - type: ndcg_at_1 value: 29.837000000000003 - type: ndcg_at_3 value: 25.392 - type: ndcg_at_5 value: 27.153 - type: ndcg_at_10 value: 30.263 - type: ndcg_at_20 value: 33.073 - type: ndcg_at_100 value: 37.228 - type: ndcg_at_1000 value: 40.677 - type: map_at_1 value: 13.189 - type: map_at_3 value: 18.512999999999998 - type: map_at_5 value: 20.212 - type: map_at_10 value: 21.789 - type: map_at_20 value: 22.787 - type: map_at_100 value: 23.580000000000002 - type: map_at_1000 value: 23.772 - type: recall_at_1 value: 13.189 - type: recall_at_3 value: 23.255 - type: recall_at_5 value: 28.445999999999998 - type: recall_at_10 value: 35.355 - type: recall_at_20 value: 43.187999999999995 - type: recall_at_100 value: 59.255 - type: recall_at_1000 value: 78.637 - type: precision_at_1 value: 29.837000000000003 - type: precision_at_3 value: 18.545 - type: precision_at_5 value: 14.241000000000001 - type: precision_at_10 value: 9.179 - type: precision_at_20 value: 5.808 - type: precision_at_100 value: 1.659 - type: precision_at_1000 value: 0.22999999999999998 - type: mrr_at_1 value: 29.8371 - type: mrr_at_3 value: 38.2845 - type: mrr_at_5 value: 40.300799999999995 - type: mrr_at_10 value: 41.3765 - type: mrr_at_20 value: 41.958400000000005 - type: mrr_at_100 value: 42.281600000000005 - type: mrr_at_1000 value: 42.3193 - type: nauc_ndcg_at_1_max value: 29.676000000000002 - type: nauc_ndcg_at_1_std value: 20.4771 - type: nauc_ndcg_at_1_diff1 value: 22.0866 - type: nauc_ndcg_at_3_max value: 34.3256 - type: nauc_ndcg_at_3_std value: 18.886400000000002 - type: nauc_ndcg_at_3_diff1 value: 19.692999999999998 - type: nauc_ndcg_at_5_max value: 36.709599999999995 - type: nauc_ndcg_at_5_std value: 21.857 - type: nauc_ndcg_at_5_diff1 value: 20.2605 - type: nauc_ndcg_at_10_max value: 36.951699999999995 - type: nauc_ndcg_at_10_std value: 24.1201 - type: nauc_ndcg_at_10_diff1 value: 19.5268 - type: nauc_ndcg_at_20_max value: 37.2598 - type: nauc_ndcg_at_20_std value: 26.072699999999998 - type: nauc_ndcg_at_20_diff1 value: 18.5947 - type: nauc_ndcg_at_100_max value: 37.5131 - type: nauc_ndcg_at_100_std value: 27.3519 - type: nauc_ndcg_at_100_diff1 value: 18.7028 - type: nauc_ndcg_at_1000_max value: 37.4262 - type: nauc_ndcg_at_1000_std value: 27.158700000000003 - type: nauc_ndcg_at_1000_diff1 value: 19.2395 - type: nauc_map_at_1_max value: 32.2132 - type: nauc_map_at_1_std value: 15.244 - type: nauc_map_at_1_diff1 value: 26.2965 - type: nauc_map_at_3_max value: 35.157 - type: nauc_map_at_3_std value: 16.8008 - type: nauc_map_at_3_diff1 value: 21.7011 - type: nauc_map_at_5_max value: 36.0907 - type: nauc_map_at_5_std value: 19.0433 - type: nauc_map_at_5_diff1 value: 21.5595 - type: nauc_map_at_10_max value: 36.1498 - type: nauc_map_at_10_std value: 20.7259 - type: nauc_map_at_10_diff1 value: 20.816599999999998 - type: nauc_map_at_20_max value: 36.365199999999994 - type: nauc_map_at_20_std value: 21.6367 - type: nauc_map_at_20_diff1 value: 20.4563 - type: nauc_map_at_100_max 
value: 36.503600000000006 - type: nauc_map_at_100_std value: 22.020200000000003 - type: nauc_map_at_100_diff1 value: 20.5135 - type: nauc_map_at_1000_max value: 36.4843 - type: nauc_map_at_1000_std value: 22.0155 - type: nauc_map_at_1000_diff1 value: 20.5659 - type: nauc_recall_at_1_max value: 32.2132 - type: nauc_recall_at_1_std value: 15.244 - type: nauc_recall_at_1_diff1 value: 26.2965 - type: nauc_recall_at_3_max value: 34.6294 - type: nauc_recall_at_3_std value: 16.517200000000003 - type: nauc_recall_at_3_diff1 value: 16.6413 - type: nauc_recall_at_5_max value: 35.938700000000004 - type: nauc_recall_at_5_std value: 21.1943 - type: nauc_recall_at_5_diff1 value: 16.702 - type: nauc_recall_at_10_max value: 34.956900000000005 - type: nauc_recall_at_10_std value: 24.6739 - type: nauc_recall_at_10_diff1 value: 14.4465 - type: nauc_recall_at_20_max value: 33.873799999999996 - type: nauc_recall_at_20_std value: 27.9903 - type: nauc_recall_at_20_diff1 value: 11.1114 - type: nauc_recall_at_100_max value: 33.123799999999996 - type: nauc_recall_at_100_std value: 31.4933 - type: nauc_recall_at_100_diff1 value: 10.3246 - type: nauc_recall_at_1000_max value: 32.9304 - type: nauc_recall_at_1000_std value: 33.5144 - type: nauc_recall_at_1000_diff1 value: 10.810699999999999 - type: nauc_precision_at_1_max value: 29.676000000000002 - type: nauc_precision_at_1_std value: 20.4771 - type: nauc_precision_at_1_diff1 value: 22.0866 - type: nauc_precision_at_3_max value: 32.0765 - type: nauc_precision_at_3_std value: 20.6039 - type: nauc_precision_at_3_diff1 value: 13.585700000000001 - type: nauc_precision_at_5_max value: 33.5445 - type: nauc_precision_at_5_std value: 26.567400000000003 - type: nauc_precision_at_5_diff1 value: 14.421700000000001 - type: nauc_precision_at_10_max value: 29.520200000000003 - type: nauc_precision_at_10_std value: 28.8453 - type: nauc_precision_at_10_diff1 value: 11.2529 - type: nauc_precision_at_20_max value: 25.610300000000002 - type: nauc_precision_at_20_std value: 30.6799 - type: nauc_precision_at_20_diff1 value: 6.8877 - type: nauc_precision_at_100_max value: 18.3639 - type: nauc_precision_at_100_std value: 28.2568 - type: nauc_precision_at_100_diff1 value: 3.8568 - type: nauc_precision_at_1000_max value: 6.9706 - type: nauc_precision_at_1000_std value: 18.9339 - type: nauc_precision_at_1000_diff1 value: 0.6999 - type: nauc_mrr_at_1_max value: 29.676000000000002 - type: nauc_mrr_at_1_std value: 20.4771 - type: nauc_mrr_at_1_diff1 value: 22.0866 - type: nauc_mrr_at_3_max value: 32.559900000000006 - type: nauc_mrr_at_3_std value: 22.1817 - type: nauc_mrr_at_3_diff1 value: 19.1362 - type: nauc_mrr_at_5_max value: 33.692299999999996 - type: nauc_mrr_at_5_std value: 23.5179 - type: nauc_mrr_at_5_diff1 value: 19.9908 - type: nauc_mrr_at_10_max value: 33.6748 - type: nauc_mrr_at_10_std value: 23.624200000000002 - type: nauc_mrr_at_10_diff1 value: 19.969 - type: nauc_mrr_at_20_max value: 33.562599999999996 - type: nauc_mrr_at_20_std value: 23.776 - type: nauc_mrr_at_20_diff1 value: 19.8259 - type: nauc_mrr_at_100_max value: 33.4998 - type: nauc_mrr_at_100_std value: 23.7432 - type: nauc_mrr_at_100_diff1 value: 19.8137 - type: nauc_mrr_at_1000_max value: 33.4876 - type: nauc_mrr_at_1000_std value: 23.719199999999997 - type: nauc_mrr_at_1000_diff1 value: 19.817 - type: main_score value: 30.263 - task: type: Retrieval dataset: name: MTEB CodeFeedbackMT (default) type: CoIR-Retrieval/codefeedback-mt config: default split: test revision: b0f12fa0c0dd67f59c95a5c33d02aeeb4c398c5f metrics: - 
type: ndcg_at_1 value: 27.002 - type: ndcg_at_3 value: 33.597 - type: ndcg_at_5 value: 35.75 - type: ndcg_at_10 value: 37.757000000000005 - type: ndcg_at_20 value: 39.36 - type: ndcg_at_100 value: 41.806 - type: ndcg_at_1000 value: 43.675000000000004 - type: map_at_1 value: 27.002 - type: map_at_3 value: 31.964 - type: map_at_5 value: 33.158 - type: map_at_10 value: 33.988 - type: map_at_20 value: 34.43 - type: map_at_100 value: 34.760000000000005 - type: map_at_1000 value: 34.821999999999996 - type: recall_at_1 value: 27.002 - type: recall_at_3 value: 38.329 - type: recall_at_5 value: 43.557 - type: recall_at_10 value: 49.755 - type: recall_at_20 value: 56.082 - type: recall_at_100 value: 69.376 - type: recall_at_1000 value: 84.56 - type: precision_at_1 value: 27.002 - type: precision_at_3 value: 12.776000000000002 - type: precision_at_5 value: 8.711 - type: precision_at_10 value: 4.976 - type: precision_at_20 value: 2.804 - type: precision_at_100 value: 0.694 - type: precision_at_1000 value: 0.08499999999999999 - type: mrr_at_1 value: 27.001599999999996 - type: mrr_at_3 value: 31.9638 - type: mrr_at_5 value: 33.158300000000004 - type: mrr_at_10 value: 33.9877 - type: mrr_at_20 value: 34.429700000000004 - type: mrr_at_100 value: 34.760200000000005 - type: mrr_at_1000 value: 34.822399999999995 - type: nauc_ndcg_at_1_max value: 14.691199999999998 - type: nauc_ndcg_at_1_std value: -18.2481 - type: nauc_ndcg_at_1_diff1 value: 51.82940000000001 - type: nauc_ndcg_at_3_max value: 15.9155 - type: nauc_ndcg_at_3_std value: -18.21 - type: nauc_ndcg_at_3_diff1 value: 46.4667 - type: nauc_ndcg_at_5_max value: 16.2958 - type: nauc_ndcg_at_5_std value: -17.8939 - type: nauc_ndcg_at_5_diff1 value: 45.4591 - type: nauc_ndcg_at_10_max value: 16.6542 - type: nauc_ndcg_at_10_std value: -17.121 - type: nauc_ndcg_at_10_diff1 value: 44.5803 - type: nauc_ndcg_at_20_max value: 17.210800000000003 - type: nauc_ndcg_at_20_std value: -16.3918 - type: nauc_ndcg_at_20_diff1 value: 44.0927 - type: nauc_ndcg_at_100_max value: 17.8597 - type: nauc_ndcg_at_100_std value: -14.35 - type: nauc_ndcg_at_100_diff1 value: 43.561 - type: nauc_ndcg_at_1000_max value: 18.0753 - type: nauc_ndcg_at_1000_std value: -13.827300000000001 - type: nauc_ndcg_at_1000_diff1 value: 43.9433 - type: nauc_map_at_1_max value: 14.691199999999998 - type: nauc_map_at_1_std value: -18.2481 - type: nauc_map_at_1_diff1 value: 51.82940000000001 - type: nauc_map_at_3_max value: 15.657099999999998 - type: nauc_map_at_3_std value: -18.253700000000002 - type: nauc_map_at_3_diff1 value: 47.749399999999994 - type: nauc_map_at_5_max value: 15.8683 - type: nauc_map_at_5_std value: -18.0718 - type: nauc_map_at_5_diff1 value: 47.176899999999996 - type: nauc_map_at_10_max value: 16.0118 - type: nauc_map_at_10_std value: -17.7494 - type: nauc_map_at_10_diff1 value: 46.818799999999996 - type: nauc_map_at_20_max value: 16.1658 - type: nauc_map_at_20_std value: -17.552400000000002 - type: nauc_map_at_20_diff1 value: 46.694 - type: nauc_map_at_100_max value: 16.2407 - type: nauc_map_at_100_std value: -17.289099999999998 - type: nauc_map_at_100_diff1 value: 46.6325 - type: nauc_map_at_1000_max value: 16.2491 - type: nauc_map_at_1000_std value: -17.2655 - type: nauc_map_at_1000_diff1 value: 46.646300000000004 - type: nauc_recall_at_1_max value: 14.691199999999998 - type: nauc_recall_at_1_std value: -18.2481 - type: nauc_recall_at_1_diff1 value: 51.82940000000001 - type: nauc_recall_at_3_max value: 16.6167 - type: nauc_recall_at_3_std value: -18.0762 - type: 
nauc_recall_at_3_diff1 value: 42.9204 - type: nauc_recall_at_5_max value: 17.522299999999998 - type: nauc_recall_at_5_std value: -17.349899999999998 - type: nauc_recall_at_5_diff1 value: 40.5682 - type: nauc_recall_at_10_max value: 18.6573 - type: nauc_recall_at_10_std value: -14.9976 - type: nauc_recall_at_10_diff1 value: 37.7799 - type: nauc_recall_at_20_max value: 21.0226 - type: nauc_recall_at_20_std value: -11.8854 - type: nauc_recall_at_20_diff1 value: 35.3475 - type: nauc_recall_at_100_max value: 26.442300000000003 - type: nauc_recall_at_100_std value: 2.9998 - type: nauc_recall_at_100_diff1 value: 29.618699999999997 - type: nauc_recall_at_1000_max value: 36.3607 - type: nauc_recall_at_1000_std value: 24.0336 - type: nauc_recall_at_1000_diff1 value: 25.6114 - type: nauc_precision_at_1_max value: 14.691199999999998 - type: nauc_precision_at_1_std value: -18.2481 - type: nauc_precision_at_1_diff1 value: 51.82940000000001 - type: nauc_precision_at_3_max value: 16.6167 - type: nauc_precision_at_3_std value: -18.0762 - type: nauc_precision_at_3_diff1 value: 42.9204 - type: nauc_precision_at_5_max value: 17.522299999999998 - type: nauc_precision_at_5_std value: -17.349899999999998 - type: nauc_precision_at_5_diff1 value: 40.5682 - type: nauc_precision_at_10_max value: 18.6573 - type: nauc_precision_at_10_std value: -14.9976 - type: nauc_precision_at_10_diff1 value: 37.7799 - type: nauc_precision_at_20_max value: 21.0226 - type: nauc_precision_at_20_std value: -11.8854 - type: nauc_precision_at_20_diff1 value: 35.3475 - type: nauc_precision_at_100_max value: 26.442300000000003 - type: nauc_precision_at_100_std value: 2.9998 - type: nauc_precision_at_100_diff1 value: 29.618699999999997 - type: nauc_precision_at_1000_max value: 36.3607 - type: nauc_precision_at_1000_std value: 24.0336 - type: nauc_precision_at_1000_diff1 value: 25.6114 - type: nauc_mrr_at_1_max value: 14.691199999999998 - type: nauc_mrr_at_1_std value: -18.2481 - type: nauc_mrr_at_1_diff1 value: 51.82940000000001 - type: nauc_mrr_at_3_max value: 15.657099999999998 - type: nauc_mrr_at_3_std value: -18.253700000000002 - type: nauc_mrr_at_3_diff1 value: 47.749399999999994 - type: nauc_mrr_at_5_max value: 15.8683 - type: nauc_mrr_at_5_std value: -18.0718 - type: nauc_mrr_at_5_diff1 value: 47.176899999999996 - type: nauc_mrr_at_10_max value: 16.0118 - type: nauc_mrr_at_10_std value: -17.7494 - type: nauc_mrr_at_10_diff1 value: 46.818799999999996 - type: nauc_mrr_at_20_max value: 16.1658 - type: nauc_mrr_at_20_std value: -17.552400000000002 - type: nauc_mrr_at_20_diff1 value: 46.694 - type: nauc_mrr_at_100_max value: 16.2407 - type: nauc_mrr_at_100_std value: -17.289099999999998 - type: nauc_mrr_at_100_diff1 value: 46.6325 - type: nauc_mrr_at_1000_max value: 16.2491 - type: nauc_mrr_at_1000_std value: -17.2655 - type: nauc_mrr_at_1000_diff1 value: 46.646300000000004 - type: main_score value: 37.757000000000005 - task: type: Retrieval dataset: name: MTEB CodeFeedbackST (default) type: CoIR-Retrieval/codefeedback-st config: default split: test revision: d213819e87aab9010628da8b73ab4eb337c89340 metrics: - type: ndcg_at_1 value: 53.335 - type: ndcg_at_3 value: 64.78399999999999 - type: ndcg_at_5 value: 67.418 - type: ndcg_at_10 value: 69.425 - type: ndcg_at_20 value: 70.513 - type: ndcg_at_100 value: 71.709 - type: ndcg_at_1000 value: 72.139 - type: map_at_1 value: 53.335 - type: map_at_3 value: 62.0 - type: map_at_5 value: 63.467 - type: map_at_10 value: 64.306 - type: map_at_20 value: 64.608 - type: map_at_100 value: 64.776 - type: 
map_at_1000 value: 64.793 - type: recall_at_1 value: 53.335 - type: recall_at_3 value: 72.82600000000001 - type: recall_at_5 value: 79.199 - type: recall_at_10 value: 85.354 - type: recall_at_20 value: 89.628 - type: recall_at_100 value: 96.039 - type: recall_at_1000 value: 99.368 - type: precision_at_1 value: 53.335 - type: precision_at_3 value: 24.275 - type: precision_at_5 value: 15.840000000000002 - type: precision_at_10 value: 8.535 - type: precision_at_20 value: 4.481 - type: precision_at_100 value: 0.96 - type: precision_at_1000 value: 0.099 - type: mrr_at_1 value: 53.31249999999999 - type: mrr_at_3 value: 62.0217 - type: mrr_at_5 value: 63.489700000000006 - type: mrr_at_10 value: 64.3214 - type: mrr_at_20 value: 64.6232 - type: mrr_at_100 value: 64.7915 - type: mrr_at_1000 value: 64.8086 - type: nauc_ndcg_at_1_max value: 4.5411 - type: nauc_ndcg_at_1_std value: -27.4357 - type: nauc_ndcg_at_1_diff1 value: 70.331 - type: nauc_ndcg_at_3_max value: 9.293899999999999 - type: nauc_ndcg_at_3_std value: -30.4201 - type: nauc_ndcg_at_3_diff1 value: 64.90599999999999 - type: nauc_ndcg_at_5_max value: 9.725 - type: nauc_ndcg_at_5_std value: -30.8448 - type: nauc_ndcg_at_5_diff1 value: 64.2796 - type: nauc_ndcg_at_10_max value: 9.4302 - type: nauc_ndcg_at_10_std value: -30.5425 - type: nauc_ndcg_at_10_diff1 value: 64.5211 - type: nauc_ndcg_at_20_max value: 9.019 - type: nauc_ndcg_at_20_std value: -29.986800000000002 - type: nauc_ndcg_at_20_diff1 value: 64.7995 - type: nauc_ndcg_at_100_max value: 8.780100000000001 - type: nauc_ndcg_at_100_std value: -29.4587 - type: nauc_ndcg_at_100_diff1 value: 65.3485 - type: nauc_ndcg_at_1000_max value: 8.5933 - type: nauc_ndcg_at_1000_std value: -29.462300000000003 - type: nauc_ndcg_at_1000_diff1 value: 65.5513 - type: nauc_map_at_1_max value: 4.5411 - type: nauc_map_at_1_std value: -27.4357 - type: nauc_map_at_1_diff1 value: 70.331 - type: nauc_map_at_3_max value: 7.9982 - type: nauc_map_at_3_std value: -29.5826 - type: nauc_map_at_3_diff1 value: 66.2961 - type: nauc_map_at_5_max value: 8.1756 - type: nauc_map_at_5_std value: -29.765900000000002 - type: nauc_map_at_5_diff1 value: 66.0248 - type: nauc_map_at_10_max value: 8.0296 - type: nauc_map_at_10_std value: -29.6458 - type: nauc_map_at_10_diff1 value: 66.158 - type: nauc_map_at_20_max value: 7.919099999999999 - type: nauc_map_at_20_std value: -29.505799999999997 - type: nauc_map_at_20_diff1 value: 66.24029999999999 - type: nauc_map_at_100_max value: 7.8803 - type: nauc_map_at_100_std value: -29.442600000000002 - type: nauc_map_at_100_diff1 value: 66.3125 - type: nauc_map_at_1000_max value: 7.8752 - type: nauc_map_at_1000_std value: -29.438399999999998 - type: nauc_map_at_1000_diff1 value: 66.3195 - type: nauc_recall_at_1_max value: 4.5411 - type: nauc_recall_at_1_std value: -27.4357 - type: nauc_recall_at_1_diff1 value: 70.331 - type: nauc_recall_at_3_max value: 13.911000000000001 - type: nauc_recall_at_3_std value: -33.4167 - type: nauc_recall_at_3_diff1 value: 59.9986 - type: nauc_recall_at_5_max value: 16.401 - type: nauc_recall_at_5_std value: -35.5473 - type: nauc_recall_at_5_diff1 value: 56.781000000000006 - type: nauc_recall_at_10_max value: 17.2917 - type: nauc_recall_at_10_std value: -35.4908 - type: nauc_recall_at_10_diff1 value: 55.279199999999996 - type: nauc_recall_at_20_max value: 16.4243 - type: nauc_recall_at_20_std value: -32.2776 - type: nauc_recall_at_20_diff1 value: 54.4386 - type: nauc_recall_at_100_max value: 21.5949 - type: nauc_recall_at_100_std value: -19.9444 - type: 
nauc_recall_at_100_diff1 value: 54.3502 - type: nauc_recall_at_1000_max value: 35.8557 - type: nauc_recall_at_1000_std value: 18.242 - type: nauc_recall_at_1000_diff1 value: 50.969699999999996 - type: nauc_precision_at_1_max value: 4.5411 - type: nauc_precision_at_1_std value: -27.4357 - type: nauc_precision_at_1_diff1 value: 70.331 - type: nauc_precision_at_3_max value: 13.911000000000001 - type: nauc_precision_at_3_std value: -33.4167 - type: nauc_precision_at_3_diff1 value: 59.9986 - type: nauc_precision_at_5_max value: 16.401 - type: nauc_precision_at_5_std value: -35.5473 - type: nauc_precision_at_5_diff1 value: 56.781000000000006 - type: nauc_precision_at_10_max value: 17.2917 - type: nauc_precision_at_10_std value: -35.4908 - type: nauc_precision_at_10_diff1 value: 55.279199999999996 - type: nauc_precision_at_20_max value: 16.4243 - type: nauc_precision_at_20_std value: -32.2776 - type: nauc_precision_at_20_diff1 value: 54.4386 - type: nauc_precision_at_100_max value: 21.5949 - type: nauc_precision_at_100_std value: -19.9444 - type: nauc_precision_at_100_diff1 value: 54.3502 - type: nauc_precision_at_1000_max value: 35.8557 - type: nauc_precision_at_1000_std value: 18.242 - type: nauc_precision_at_1000_diff1 value: 50.969699999999996 - type: nauc_mrr_at_1_max value: 4.045 - type: nauc_mrr_at_1_std value: -27.371299999999998 - type: nauc_mrr_at_1_diff1 value: 70.3681 - type: nauc_mrr_at_3_max value: 7.7906 - type: nauc_mrr_at_3_std value: -29.488999999999997 - type: nauc_mrr_at_3_diff1 value: 66.2574 - type: nauc_mrr_at_5_max value: 7.8858999999999995 - type: nauc_mrr_at_5_std value: -29.7336 - type: nauc_mrr_at_5_diff1 value: 66.0274 - type: nauc_mrr_at_10_max value: 7.7456 - type: nauc_mrr_at_10_std value: -29.5912 - type: nauc_mrr_at_10_diff1 value: 66.1546 - type: nauc_mrr_at_20_max value: 7.6305 - type: nauc_mrr_at_20_std value: -29.4551 - type: nauc_mrr_at_20_diff1 value: 66.2342 - type: nauc_mrr_at_100_max value: 7.589799999999999 - type: nauc_mrr_at_100_std value: -29.392400000000002 - type: nauc_mrr_at_100_diff1 value: 66.3072 - type: nauc_mrr_at_1000_max value: 7.584499999999999 - type: nauc_mrr_at_1000_std value: -29.3881 - type: nauc_mrr_at_1000_diff1 value: 66.3142 - type: main_score value: 69.425 - task: type: Retrieval dataset: name: MTEB CodeSearchNetCCRetrieval (python) type: CoIR-Retrieval/CodeSearchNet-ccr config: python split: test revision: 6e1effa2c03723c5fde48ee912b5ee08d4f211e8 metrics: - type: ndcg_at_1 value: 39.395 - type: ndcg_at_3 value: 49.038 - type: ndcg_at_5 value: 51.398999999999994 - type: ndcg_at_10 value: 53.593999999999994 - type: ndcg_at_20 value: 55.013 - type: ndcg_at_100 value: 56.940999999999995 - type: ndcg_at_1000 value: 58.126999999999995 - type: map_at_1 value: 39.395 - type: map_at_3 value: 46.687 - type: map_at_5 value: 48.003 - type: map_at_10 value: 48.911 - type: map_at_20 value: 49.305 - type: map_at_100 value: 49.571 - type: map_at_1000 value: 49.612 - type: recall_at_1 value: 39.395 - type: recall_at_3 value: 55.832 - type: recall_at_5 value: 61.543000000000006 - type: recall_at_10 value: 68.313 - type: recall_at_20 value: 73.897 - type: recall_at_100 value: 84.308 - type: recall_at_1000 value: 93.866 - type: precision_at_1 value: 39.395 - type: precision_at_3 value: 18.611 - type: precision_at_5 value: 12.309000000000001 - type: precision_at_10 value: 6.8309999999999995 - type: precision_at_20 value: 3.695 - type: precision_at_100 value: 0.843 - type: precision_at_1000 value: 0.094 - type: mrr_at_1 value: 39.402100000000004 - 
type: mrr_at_3 value: 46.690799999999996 - type: mrr_at_5 value: 48.0073 - type: mrr_at_10 value: 48.9156 - type: mrr_at_20 value: 49.3097 - type: mrr_at_100 value: 49.5752 - type: mrr_at_1000 value: 49.6159 - type: nauc_ndcg_at_1_max value: 29.945899999999998 - type: nauc_ndcg_at_1_std value: -7.957 - type: nauc_ndcg_at_1_diff1 value: 55.8451 - type: nauc_ndcg_at_3_max value: 31.5415 - type: nauc_ndcg_at_3_std value: -8.2198 - type: nauc_ndcg_at_3_diff1 value: 51.75959999999999 - type: nauc_ndcg_at_5_max value: 31.6664 - type: nauc_ndcg_at_5_std value: -7.1463 - type: nauc_ndcg_at_5_diff1 value: 51.0188 - type: nauc_ndcg_at_10_max value: 31.616 - type: nauc_ndcg_at_10_std value: -6.575699999999999 - type: nauc_ndcg_at_10_diff1 value: 50.7344 - type: nauc_ndcg_at_20_max value: 31.626199999999997 - type: nauc_ndcg_at_20_std value: -6.0725 - type: nauc_ndcg_at_20_diff1 value: 50.77159999999999 - type: nauc_ndcg_at_100_max value: 31.6639 - type: nauc_ndcg_at_100_std value: -5.4948999999999995 - type: nauc_ndcg_at_100_diff1 value: 50.790800000000004 - type: nauc_ndcg_at_1000_max value: 31.5161 - type: nauc_ndcg_at_1000_std value: -5.748600000000001 - type: nauc_ndcg_at_1000_diff1 value: 51.062799999999996 - type: nauc_map_at_1_max value: 29.945899999999998 - type: nauc_map_at_1_std value: -7.957 - type: nauc_map_at_1_diff1 value: 55.8451 - type: nauc_map_at_3_max value: 31.1851 - type: nauc_map_at_3_std value: -8.1706 - type: nauc_map_at_3_diff1 value: 52.7057 - type: nauc_map_at_5_max value: 31.2519 - type: nauc_map_at_5_std value: -7.580299999999999 - type: nauc_map_at_5_diff1 value: 52.3165 - type: nauc_map_at_10_max value: 31.231399999999997 - type: nauc_map_at_10_std value: -7.360800000000001 - type: nauc_map_at_10_diff1 value: 52.23 - type: nauc_map_at_20_max value: 31.2307 - type: nauc_map_at_20_std value: -7.2384 - type: nauc_map_at_20_diff1 value: 52.2532 - type: nauc_map_at_100_max value: 31.2368 - type: nauc_map_at_100_std value: -7.1598 - type: nauc_map_at_100_diff1 value: 52.260600000000004 - type: nauc_map_at_1000_max value: 31.230900000000002 - type: nauc_map_at_1000_std value: -7.1662 - type: nauc_map_at_1000_diff1 value: 52.267300000000006 - type: nauc_recall_at_1_max value: 29.945899999999998 - type: nauc_recall_at_1_std value: -7.957 - type: nauc_recall_at_1_diff1 value: 55.8451 - type: nauc_recall_at_3_max value: 32.6121 - type: nauc_recall_at_3_std value: -8.363 - type: nauc_recall_at_3_diff1 value: 48.9016 - type: nauc_recall_at_5_max value: 33.0025 - type: nauc_recall_at_5_std value: -5.5725 - type: nauc_recall_at_5_diff1 value: 46.7352 - type: nauc_recall_at_10_max value: 32.9683 - type: nauc_recall_at_10_std value: -3.2460999999999998 - type: nauc_recall_at_10_diff1 value: 45.0443 - type: nauc_recall_at_20_max value: 33.2455 - type: nauc_recall_at_20_std value: -0.0093 - type: nauc_recall_at_20_diff1 value: 44.294200000000004 - type: nauc_recall_at_100_max value: 34.4004 - type: nauc_recall_at_100_std value: 8.996500000000001 - type: nauc_recall_at_100_diff1 value: 41.0779 - type: nauc_recall_at_1000_max value: 33.096399999999996 - type: nauc_recall_at_1000_std value: 19.266 - type: nauc_recall_at_1000_diff1 value: 38.2966 - type: nauc_precision_at_1_max value: 29.945899999999998 - type: nauc_precision_at_1_std value: -7.957 - type: nauc_precision_at_1_diff1 value: 55.8451 - type: nauc_precision_at_3_max value: 32.6121 - type: nauc_precision_at_3_std value: -8.363 - type: nauc_precision_at_3_diff1 value: 48.9016 - type: nauc_precision_at_5_max value: 33.0025 - type: 
nauc_precision_at_5_std value: -5.5725 - type: nauc_precision_at_5_diff1 value: 46.7352 - type: nauc_precision_at_10_max value: 32.9683 - type: nauc_precision_at_10_std value: -3.2460999999999998 - type: nauc_precision_at_10_diff1 value: 45.0443 - type: nauc_precision_at_20_max value: 33.2455 - type: nauc_precision_at_20_std value: -0.0093 - type: nauc_precision_at_20_diff1 value: 44.294200000000004 - type: nauc_precision_at_100_max value: 34.4004 - type: nauc_precision_at_100_std value: 8.996500000000001 - type: nauc_precision_at_100_diff1 value: 41.0779 - type: nauc_precision_at_1000_max value: 33.096399999999996 - type: nauc_precision_at_1000_std value: 19.266 - type: nauc_precision_at_1000_diff1 value: 38.2966 - type: nauc_mrr_at_1_max value: 29.9427 - type: nauc_mrr_at_1_std value: -7.9670000000000005 - type: nauc_mrr_at_1_diff1 value: 55.824799999999996 - type: nauc_mrr_at_3_max value: 31.1834 - type: nauc_mrr_at_3_std value: -8.175799999999999 - type: nauc_mrr_at_3_diff1 value: 52.6952 - type: nauc_mrr_at_5_max value: 31.2515 - type: nauc_mrr_at_5_std value: -7.5835 - type: nauc_mrr_at_5_diff1 value: 52.303599999999996 - type: nauc_mrr_at_10_max value: 31.2284 - type: nauc_mrr_at_10_std value: -7.3647 - type: nauc_mrr_at_10_diff1 value: 52.2177 - type: nauc_mrr_at_20_max value: 31.2274 - type: nauc_mrr_at_20_std value: -7.243399999999999 - type: nauc_mrr_at_20_diff1 value: 52.2417 - type: nauc_mrr_at_100_max value: 31.2336 - type: nauc_mrr_at_100_std value: -7.1640999999999995 - type: nauc_mrr_at_100_diff1 value: 52.2482 - type: nauc_mrr_at_1000_max value: 31.227700000000002 - type: nauc_mrr_at_1000_std value: -7.1705000000000005 - type: nauc_mrr_at_1000_diff1 value: 52.254900000000006 - type: main_score value: 53.593999999999994 - task: type: Retrieval dataset: name: MTEB CodeSearchNetCCRetrieval (javascript) type: CoIR-Retrieval/CodeSearchNet-ccr config: javascript split: test revision: 6e1effa2c03723c5fde48ee912b5ee08d4f211e8 metrics: - type: ndcg_at_1 value: 39.593 - type: ndcg_at_3 value: 48.759 - type: ndcg_at_5 value: 51.073 - type: ndcg_at_10 value: 53.1 - type: ndcg_at_20 value: 54.230999999999995 - type: ndcg_at_100 value: 56.289 - type: ndcg_at_1000 value: 57.67400000000001 - type: map_at_1 value: 39.593 - type: map_at_3 value: 46.536 - type: map_at_5 value: 47.826 - type: map_at_10 value: 48.676 - type: map_at_20 value: 48.983 - type: map_at_100 value: 49.268 - type: map_at_1000 value: 49.313 - type: recall_at_1 value: 39.593 - type: recall_at_3 value: 55.181000000000004 - type: recall_at_5 value: 60.772000000000006 - type: recall_at_10 value: 66.971 - type: recall_at_20 value: 71.468 - type: recall_at_100 value: 82.55799999999999 - type: recall_at_1000 value: 93.83200000000001 - type: precision_at_1 value: 39.593 - type: precision_at_3 value: 18.394 - type: precision_at_5 value: 12.154 - type: precision_at_10 value: 6.697 - type: precision_at_20 value: 3.573 - type: precision_at_100 value: 0.826 - type: precision_at_1000 value: 0.094 - type: mrr_at_1 value: 39.5624 - type: mrr_at_3 value: 46.5158 - type: mrr_at_5 value: 47.8056 - type: mrr_at_10 value: 48.654799999999994 - type: mrr_at_20 value: 48.9616 - type: mrr_at_100 value: 49.2469 - type: mrr_at_1000 value: 49.2923 - type: nauc_ndcg_at_1_max value: 26.582099999999997 - type: nauc_ndcg_at_1_std value: -14.751900000000001 - type: nauc_ndcg_at_1_diff1 value: 54.9795 - type: nauc_ndcg_at_3_max value: 30.000700000000002 - type: nauc_ndcg_at_3_std value: -13.107299999999999 - type: nauc_ndcg_at_3_diff1 value: 51.7972 - 
type: nauc_ndcg_at_5_max value: 29.4468 - type: nauc_ndcg_at_5_std value: -13.3189 - type: nauc_ndcg_at_5_diff1 value: 51.0062 - type: nauc_ndcg_at_10_max value: 28.6629 - type: nauc_ndcg_at_10_std value: -13.900000000000002 - type: nauc_ndcg_at_10_diff1 value: 50.4771 - type: nauc_ndcg_at_20_max value: 28.558600000000002 - type: nauc_ndcg_at_20_std value: -13.793 - type: nauc_ndcg_at_20_diff1 value: 50.720299999999995 - type: nauc_ndcg_at_100_max value: 28.7124 - type: nauc_ndcg_at_100_std value: -13.133000000000001 - type: nauc_ndcg_at_100_diff1 value: 50.7983 - type: nauc_ndcg_at_1000_max value: 28.4906 - type: nauc_ndcg_at_1000_std value: -13.5678 - type: nauc_ndcg_at_1000_diff1 value: 51.1172 - type: nauc_map_at_1_max value: 26.582099999999997 - type: nauc_map_at_1_std value: -14.751900000000001 - type: nauc_map_at_1_diff1 value: 54.9795 - type: nauc_map_at_3_max value: 29.191899999999997 - type: nauc_map_at_3_std value: -13.565299999999999 - type: nauc_map_at_3_diff1 value: 52.5372 - type: nauc_map_at_5_max value: 28.865099999999998 - type: nauc_map_at_5_std value: -13.6911 - type: nauc_map_at_5_diff1 value: 52.12520000000001 - type: nauc_map_at_10_max value: 28.5526 - type: nauc_map_at_10_std value: -13.9255 - type: nauc_map_at_10_diff1 value: 51.931400000000004 - type: nauc_map_at_20_max value: 28.520200000000003 - type: nauc_map_at_20_std value: -13.8934 - type: nauc_map_at_20_diff1 value: 51.991299999999995 - type: nauc_map_at_100_max value: 28.5184 - type: nauc_map_at_100_std value: -13.8399 - type: nauc_map_at_100_diff1 value: 52.0024 - type: nauc_map_at_1000_max value: 28.512500000000003 - type: nauc_map_at_1000_std value: -13.851700000000001 - type: nauc_map_at_1000_diff1 value: 52.0139 - type: nauc_recall_at_1_max value: 26.582099999999997 - type: nauc_recall_at_1_std value: -14.751900000000001 - type: nauc_recall_at_1_diff1 value: 54.9795 - type: nauc_recall_at_3_max value: 32.443 - type: nauc_recall_at_3_std value: -11.6927 - type: nauc_recall_at_3_diff1 value: 49.568400000000004 - type: nauc_recall_at_5_max value: 31.2258 - type: nauc_recall_at_5_std value: -12.1296 - type: nauc_recall_at_5_diff1 value: 47.3057 - type: nauc_recall_at_10_max value: 28.561999999999998 - type: nauc_recall_at_10_std value: -14.103499999999999 - type: nauc_recall_at_10_diff1 value: 44.9228 - type: nauc_recall_at_20_max value: 28.0738 - type: nauc_recall_at_20_std value: -13.632 - type: nauc_recall_at_20_diff1 value: 45.6569 - type: nauc_recall_at_100_max value: 29.9618 - type: nauc_recall_at_100_std value: -6.2382 - type: nauc_recall_at_100_diff1 value: 44.1378 - type: nauc_recall_at_1000_max value: 23.4062 - type: nauc_recall_at_1000_std value: -11.6326 - type: nauc_recall_at_1000_diff1 value: 45.130199999999995 - type: nauc_precision_at_1_max value: 26.582099999999997 - type: nauc_precision_at_1_std value: -14.751900000000001 - type: nauc_precision_at_1_diff1 value: 54.9795 - type: nauc_precision_at_3_max value: 32.443 - type: nauc_precision_at_3_std value: -11.6927 - type: nauc_precision_at_3_diff1 value: 49.568400000000004 - type: nauc_precision_at_5_max value: 31.2258 - type: nauc_precision_at_5_std value: -12.1296 - type: nauc_precision_at_5_diff1 value: 47.3057 - type: nauc_precision_at_10_max value: 28.561999999999998 - type: nauc_precision_at_10_std value: -14.103499999999999 - type: nauc_precision_at_10_diff1 value: 44.9228 - type: nauc_precision_at_20_max value: 28.0738 - type: nauc_precision_at_20_std value: -13.632 - type: nauc_precision_at_20_diff1 value: 45.6569 - type: 
nauc_precision_at_100_max value: 29.9618 - type: nauc_precision_at_100_std value: -6.2382 - type: nauc_precision_at_100_diff1 value: 44.1378 - type: nauc_precision_at_1000_max value: 23.4062 - type: nauc_precision_at_1000_std value: -11.6326 - type: nauc_precision_at_1000_diff1 value: 45.130199999999995 - type: nauc_mrr_at_1_max value: 26.571499999999997 - type: nauc_mrr_at_1_std value: -14.9002 - type: nauc_mrr_at_1_diff1 value: 55.071400000000004 - type: nauc_mrr_at_3_max value: 29.1956 - type: nauc_mrr_at_3_std value: -13.6331 - type: nauc_mrr_at_3_diff1 value: 52.59439999999999 - type: nauc_mrr_at_5_max value: 28.8688 - type: nauc_mrr_at_5_std value: -13.7599 - type: nauc_mrr_at_5_diff1 value: 52.1832 - type: nauc_mrr_at_10_max value: 28.556199999999997 - type: nauc_mrr_at_10_std value: -13.9924 - type: nauc_mrr_at_10_diff1 value: 51.9865 - type: nauc_mrr_at_20_max value: 28.523799999999998 - type: nauc_mrr_at_20_std value: -13.960700000000001 - type: nauc_mrr_at_20_diff1 value: 52.0466 - type: nauc_mrr_at_100_max value: 28.522 - type: nauc_mrr_at_100_std value: -13.9076 - type: nauc_mrr_at_100_diff1 value: 52.058099999999996 - type: nauc_mrr_at_1000_max value: 28.5161 - type: nauc_mrr_at_1000_std value: -13.919500000000001 - type: nauc_mrr_at_1000_diff1 value: 52.0697 - type: main_score value: 53.1 - task: type: Retrieval dataset: name: MTEB CodeSearchNetCCRetrieval (go) type: CoIR-Retrieval/CodeSearchNet-ccr config: go split: test revision: 6e1effa2c03723c5fde48ee912b5ee08d4f211e8 metrics: - type: ndcg_at_1 value: 30.459999999999997 - type: ndcg_at_3 value: 37.88 - type: ndcg_at_5 value: 40.11 - type: ndcg_at_10 value: 42.094 - type: ndcg_at_20 value: 43.683 - type: ndcg_at_100 value: 45.998 - type: ndcg_at_1000 value: 47.723 - type: map_at_1 value: 30.459999999999997 - type: map_at_3 value: 36.046 - type: map_at_5 value: 37.285000000000004 - type: map_at_10 value: 38.108 - type: map_at_20 value: 38.546 - type: map_at_100 value: 38.859 - type: map_at_1000 value: 38.917 - type: recall_at_1 value: 30.459999999999997 - type: recall_at_3 value: 43.191 - type: recall_at_5 value: 48.596000000000004 - type: recall_at_10 value: 54.716 - type: recall_at_20 value: 60.983 - type: recall_at_100 value: 73.566 - type: recall_at_1000 value: 87.515 - type: precision_at_1 value: 30.459999999999997 - type: precision_at_3 value: 14.396999999999998 - type: precision_at_5 value: 9.719 - type: precision_at_10 value: 5.4719999999999995 - type: precision_at_20 value: 3.049 - type: precision_at_100 value: 0.736 - type: precision_at_1000 value: 0.08800000000000001 - type: mrr_at_1 value: 30.448199999999996 - type: mrr_at_3 value: 36.042 - type: mrr_at_5 value: 37.2763 - type: mrr_at_10 value: 38.1013 - type: mrr_at_20 value: 38.5373 - type: mrr_at_100 value: 38.8506 - type: mrr_at_1000 value: 38.9093 - type: nauc_ndcg_at_1_max value: 27.284999999999997 - type: nauc_ndcg_at_1_std value: -6.6476999999999995 - type: nauc_ndcg_at_1_diff1 value: 50.871500000000005 - type: nauc_ndcg_at_3_max value: 26.6017 - type: nauc_ndcg_at_3_std value: -7.6026 - type: nauc_ndcg_at_3_diff1 value: 46.768 - type: nauc_ndcg_at_5_max value: 26.2865 - type: nauc_ndcg_at_5_std value: -7.3601 - type: nauc_ndcg_at_5_diff1 value: 45.7969 - type: nauc_ndcg_at_10_max value: 25.746599999999997 - type: nauc_ndcg_at_10_std value: -7.4333 - type: nauc_ndcg_at_10_diff1 value: 45.4115 - type: nauc_ndcg_at_20_max value: 25.5118 - type: nauc_ndcg_at_20_std value: -6.9322 - type: nauc_ndcg_at_20_diff1 value: 45.0598 - type: nauc_ndcg_at_100_max 
value: 25.309900000000003 - type: nauc_ndcg_at_100_std value: -6.0600000000000005 - type: nauc_ndcg_at_100_diff1 value: 44.8825 - type: nauc_ndcg_at_1000_max value: 25.521700000000003 - type: nauc_ndcg_at_1000_std value: -5.9789 - type: nauc_ndcg_at_1000_diff1 value: 45.2513 - type: nauc_map_at_1_max value: 27.284999999999997 - type: nauc_map_at_1_std value: -6.6476999999999995 - type: nauc_map_at_1_diff1 value: 50.871500000000005 - type: nauc_map_at_3_max value: 26.7721 - type: nauc_map_at_3_std value: -7.452300000000001 - type: nauc_map_at_3_diff1 value: 47.7211 - type: nauc_map_at_5_max value: 26.600600000000004 - type: nauc_map_at_5_std value: -7.3378 - type: nauc_map_at_5_diff1 value: 47.1879 - type: nauc_map_at_10_max value: 26.372 - type: nauc_map_at_10_std value: -7.3735 - type: nauc_map_at_10_diff1 value: 47.0298 - type: nauc_map_at_20_max value: 26.3071 - type: nauc_map_at_20_std value: -7.2452000000000005 - type: nauc_map_at_20_diff1 value: 46.9294 - type: nauc_map_at_100_max value: 26.281100000000002 - type: nauc_map_at_100_std value: -7.1155 - type: nauc_map_at_100_diff1 value: 46.9054 - type: nauc_map_at_1000_max value: 26.2903 - type: nauc_map_at_1000_std value: -7.1089 - type: nauc_map_at_1000_diff1 value: 46.9182 - type: nauc_recall_at_1_max value: 27.284999999999997 - type: nauc_recall_at_1_std value: -6.6476999999999995 - type: nauc_recall_at_1_diff1 value: 50.871500000000005 - type: nauc_recall_at_3_max value: 26.1146 - type: nauc_recall_at_3_std value: -7.9985 - type: nauc_recall_at_3_diff1 value: 44.0707 - type: nauc_recall_at_5_max value: 25.3292 - type: nauc_recall_at_5_std value: -7.331799999999999 - type: nauc_recall_at_5_diff1 value: 41.6571 - type: nauc_recall_at_10_max value: 23.6012 - type: nauc_recall_at_10_std value: -7.5294 - type: nauc_recall_at_10_diff1 value: 40.244099999999996 - type: nauc_recall_at_20_max value: 22.453300000000002 - type: nauc_recall_at_20_std value: -5.3024000000000004 - type: nauc_recall_at_20_diff1 value: 38.4242 - type: nauc_recall_at_100_max value: 20.069100000000002 - type: nauc_recall_at_100_std value: 1.4581 - type: nauc_recall_at_100_diff1 value: 35.1775 - type: nauc_recall_at_1000_max value: 19.4385 - type: nauc_recall_at_1000_std value: 9.0112 - type: nauc_recall_at_1000_diff1 value: 34.138000000000005 - type: nauc_precision_at_1_max value: 27.284999999999997 - type: nauc_precision_at_1_std value: -6.6476999999999995 - type: nauc_precision_at_1_diff1 value: 50.871500000000005 - type: nauc_precision_at_3_max value: 26.1146 - type: nauc_precision_at_3_std value: -7.9985 - type: nauc_precision_at_3_diff1 value: 44.0707 - type: nauc_precision_at_5_max value: 25.3292 - type: nauc_precision_at_5_std value: -7.331799999999999 - type: nauc_precision_at_5_diff1 value: 41.6571 - type: nauc_precision_at_10_max value: 23.6012 - type: nauc_precision_at_10_std value: -7.5294 - type: nauc_precision_at_10_diff1 value: 40.244099999999996 - type: nauc_precision_at_20_max value: 22.453300000000002 - type: nauc_precision_at_20_std value: -5.3024000000000004 - type: nauc_precision_at_20_diff1 value: 38.4242 - type: nauc_precision_at_100_max value: 20.069100000000002 - type: nauc_precision_at_100_std value: 1.4581 - type: nauc_precision_at_100_diff1 value: 35.1775 - type: nauc_precision_at_1000_max value: 19.4385 - type: nauc_precision_at_1000_std value: 9.0112 - type: nauc_precision_at_1000_diff1 value: 34.138000000000005 - type: nauc_mrr_at_1_max value: 27.334000000000003 - type: nauc_mrr_at_1_std value: -6.5517 - type: nauc_mrr_at_1_diff1 
value: 50.9102 - type: nauc_mrr_at_3_max value: 26.807199999999998 - type: nauc_mrr_at_3_std value: -7.436800000000001 - type: nauc_mrr_at_3_diff1 value: 47.7425 - type: nauc_mrr_at_5_max value: 26.6194 - type: nauc_mrr_at_5_std value: -7.3031 - type: nauc_mrr_at_5_diff1 value: 47.2053 - type: nauc_mrr_at_10_max value: 26.3924 - type: nauc_mrr_at_10_std value: -7.324700000000001 - type: nauc_mrr_at_10_diff1 value: 47.051500000000004 - type: nauc_mrr_at_20_max value: 26.3274 - type: nauc_mrr_at_20_std value: -7.209899999999999 - type: nauc_mrr_at_20_diff1 value: 46.953 - type: nauc_mrr_at_100_max value: 26.3019 - type: nauc_mrr_at_100_std value: -7.0785 - type: nauc_mrr_at_100_diff1 value: 46.9298 - type: nauc_mrr_at_1000_max value: 26.311 - type: nauc_mrr_at_1000_std value: -7.0719 - type: nauc_mrr_at_1000_diff1 value: 46.942499999999995 - type: main_score value: 42.094 - task: type: Retrieval dataset: name: MTEB CodeSearchNetCCRetrieval (ruby) type: CoIR-Retrieval/CodeSearchNet-ccr config: ruby split: test revision: 6e1effa2c03723c5fde48ee912b5ee08d4f211e8 metrics: - type: ndcg_at_1 value: 37.827 - type: ndcg_at_3 value: 47.599000000000004 - type: ndcg_at_5 value: 49.687 - type: ndcg_at_10 value: 51.686 - type: ndcg_at_20 value: 53.018 - type: ndcg_at_100 value: 54.75600000000001 - type: ndcg_at_1000 value: 56.196 - type: map_at_1 value: 37.827 - type: map_at_3 value: 45.242 - type: map_at_5 value: 46.400000000000006 - type: map_at_10 value: 47.223 - type: map_at_20 value: 47.593 - type: map_at_100 value: 47.824 - type: map_at_1000 value: 47.878 - type: recall_at_1 value: 37.827 - type: recall_at_3 value: 54.400999999999996 - type: recall_at_5 value: 59.477000000000004 - type: recall_at_10 value: 65.66199999999999 - type: recall_at_20 value: 70.896 - type: recall_at_100 value: 80.41199999999999 - type: recall_at_1000 value: 91.753 - type: precision_at_1 value: 37.827 - type: precision_at_3 value: 18.134 - type: precision_at_5 value: 11.895 - type: precision_at_10 value: 6.566 - type: precision_at_20 value: 3.5450000000000004 - type: precision_at_100 value: 0.804 - type: precision_at_1000 value: 0.092 - type: mrr_at_1 value: 37.8271 - type: mrr_at_3 value: 45.2154 - type: mrr_at_5 value: 46.3931 - type: mrr_at_10 value: 47.2166 - type: mrr_at_20 value: 47.5869 - type: mrr_at_100 value: 47.8167 - type: mrr_at_1000 value: 47.8715 - type: nauc_ndcg_at_1_max value: 34.1998 - type: nauc_ndcg_at_1_std value: -15.7415 - type: nauc_ndcg_at_1_diff1 value: 61.8572 - type: nauc_ndcg_at_3_max value: 33.566 - type: nauc_ndcg_at_3_std value: -18.0058 - type: nauc_ndcg_at_3_diff1 value: 54.5929 - type: nauc_ndcg_at_5_max value: 34.0447 - type: nauc_ndcg_at_5_std value: -17.3914 - type: nauc_ndcg_at_5_diff1 value: 53.980399999999996 - type: nauc_ndcg_at_10_max value: 34.0521 - type: nauc_ndcg_at_10_std value: -17.298099999999998 - type: nauc_ndcg_at_10_diff1 value: 53.63830000000001 - type: nauc_ndcg_at_20_max value: 34.076499999999996 - type: nauc_ndcg_at_20_std value: -17.1978 - type: nauc_ndcg_at_20_diff1 value: 53.3739 - type: nauc_ndcg_at_100_max value: 33.9961 - type: nauc_ndcg_at_100_std value: -17.0232 - type: nauc_ndcg_at_100_diff1 value: 53.8714 - type: nauc_ndcg_at_1000_max value: 34.0269 - type: nauc_ndcg_at_1000_std value: -16.6124 - type: nauc_ndcg_at_1000_diff1 value: 54.286199999999994 - type: nauc_map_at_1_max value: 34.1998 - type: nauc_map_at_1_std value: -15.7415 - type: nauc_map_at_1_diff1 value: 61.8572 - type: nauc_map_at_3_max value: 33.8395 - type: nauc_map_at_3_std value: -17.529 
- type: nauc_map_at_3_diff1 value: 56.4065 - type: nauc_map_at_5_max value: 34.1343 - type: nauc_map_at_5_std value: -17.1732 - type: nauc_map_at_5_diff1 value: 56.1246 - type: nauc_map_at_10_max value: 34.1717 - type: nauc_map_at_10_std value: -17.1179 - type: nauc_map_at_10_diff1 value: 56.041399999999996 - type: nauc_map_at_20_max value: 34.1895 - type: nauc_map_at_20_std value: -17.077 - type: nauc_map_at_20_diff1 value: 55.96489999999999 - type: nauc_map_at_100_max value: 34.1922 - type: nauc_map_at_100_std value: -17.0664 - type: nauc_map_at_100_diff1 value: 56.0487 - type: nauc_map_at_1000_max value: 34.186 - type: nauc_map_at_1000_std value: -17.0498 - type: nauc_map_at_1000_diff1 value: 56.0623 - type: nauc_recall_at_1_max value: 34.1998 - type: nauc_recall_at_1_std value: -15.7415 - type: nauc_recall_at_1_diff1 value: 61.8572 - type: nauc_recall_at_3_max value: 32.6911 - type: nauc_recall_at_3_std value: -19.4073 - type: nauc_recall_at_3_diff1 value: 49.1188 - type: nauc_recall_at_5_max value: 33.7416 - type: nauc_recall_at_5_std value: -17.965700000000002 - type: nauc_recall_at_5_diff1 value: 47.0821 - type: nauc_recall_at_10_max value: 33.5209 - type: nauc_recall_at_10_std value: -17.7965 - type: nauc_recall_at_10_diff1 value: 44.8874 - type: nauc_recall_at_20_max value: 33.4757 - type: nauc_recall_at_20_std value: -17.4921 - type: nauc_recall_at_20_diff1 value: 42.747 - type: nauc_recall_at_100_max value: 32.2069 - type: nauc_recall_at_100_std value: -15.6244 - type: nauc_recall_at_100_diff1 value: 43.0441 - type: nauc_recall_at_1000_max value: 32.428000000000004 - type: nauc_recall_at_1000_std value: -2.6172 - type: nauc_recall_at_1000_diff1 value: 42.1384 - type: nauc_precision_at_1_max value: 34.1998 - type: nauc_precision_at_1_std value: -15.7415 - type: nauc_precision_at_1_diff1 value: 61.8572 - type: nauc_precision_at_3_max value: 32.6911 - type: nauc_precision_at_3_std value: -19.4073 - type: nauc_precision_at_3_diff1 value: 49.1188 - type: nauc_precision_at_5_max value: 33.7416 - type: nauc_precision_at_5_std value: -17.965700000000002 - type: nauc_precision_at_5_diff1 value: 47.0821 - type: nauc_precision_at_10_max value: 33.5209 - type: nauc_precision_at_10_std value: -17.7965 - type: nauc_precision_at_10_diff1 value: 44.8874 - type: nauc_precision_at_20_max value: 33.4757 - type: nauc_precision_at_20_std value: -17.4921 - type: nauc_precision_at_20_diff1 value: 42.747 - type: nauc_precision_at_100_max value: 32.2069 - type: nauc_precision_at_100_std value: -15.6244 - type: nauc_precision_at_100_diff1 value: 43.0441 - type: nauc_precision_at_1000_max value: 32.428000000000004 - type: nauc_precision_at_1000_std value: -2.6172 - type: nauc_precision_at_1000_diff1 value: 42.1384 - type: nauc_mrr_at_1_max value: 34.5467 - type: nauc_mrr_at_1_std value: -15.676499999999999 - type: nauc_mrr_at_1_diff1 value: 61.8572 - type: nauc_mrr_at_3_max value: 34.0355 - type: nauc_mrr_at_3_std value: -17.448900000000002 - type: nauc_mrr_at_3_diff1 value: 56.4005 - type: nauc_mrr_at_5_max value: 34.319100000000006 - type: nauc_mrr_at_5_std value: -17.1276 - type: nauc_mrr_at_5_diff1 value: 56.1231 - type: nauc_mrr_at_10_max value: 34.3588 - type: nauc_mrr_at_10_std value: -17.0717 - type: nauc_mrr_at_10_diff1 value: 56.03979999999999 - type: nauc_mrr_at_20_max value: 34.3778 - type: nauc_mrr_at_20_std value: -17.0305 - type: nauc_mrr_at_20_diff1 value: 55.96339999999999 - type: nauc_mrr_at_100_max value: 34.3812 - type: nauc_mrr_at_100_std value: -17.022599999999997 - type: 
nauc_mrr_at_100_diff1 value: 56.0469 - type: nauc_mrr_at_1000_max value: 34.375 - type: nauc_mrr_at_1000_std value: -17.0037 - type: nauc_mrr_at_1000_diff1 value: 56.0608 - type: main_score value: 51.686 - task: type: Retrieval dataset: name: MTEB CodeSearchNetCCRetrieval (java) type: CoIR-Retrieval/CodeSearchNet-ccr config: java split: test revision: 6e1effa2c03723c5fde48ee912b5ee08d4f211e8 metrics: - type: ndcg_at_1 value: 39.744 - type: ndcg_at_3 value: 48.465 - type: ndcg_at_5 value: 50.615 - type: ndcg_at_10 value: 52.544000000000004 - type: ndcg_at_20 value: 53.864999999999995 - type: ndcg_at_100 value: 55.806 - type: ndcg_at_1000 value: 57.082 - type: map_at_1 value: 39.744 - type: map_at_3 value: 46.346 - type: map_at_5 value: 47.538000000000004 - type: map_at_10 value: 48.333999999999996 - type: map_at_20 value: 48.699999999999996 - type: map_at_100 value: 48.97 - type: map_at_1000 value: 49.014 - type: recall_at_1 value: 39.744 - type: recall_at_3 value: 54.586999999999996 - type: recall_at_5 value: 59.80799999999999 - type: recall_at_10 value: 65.778 - type: recall_at_20 value: 70.97200000000001 - type: recall_at_100 value: 81.415 - type: recall_at_1000 value: 91.702 - type: precision_at_1 value: 39.744 - type: precision_at_3 value: 18.196 - type: precision_at_5 value: 11.962 - type: precision_at_10 value: 6.578 - type: precision_at_20 value: 3.549 - type: precision_at_100 value: 0.814 - type: precision_at_1000 value: 0.092 - type: mrr_at_1 value: 39.7901 - type: mrr_at_3 value: 46.367000000000004 - type: mrr_at_5 value: 47.556799999999996 - type: mrr_at_10 value: 48.3531 - type: mrr_at_20 value: 48.7206 - type: mrr_at_100 value: 48.9901 - type: mrr_at_1000 value: 49.034 - type: nauc_ndcg_at_1_max value: 31.1431 - type: nauc_ndcg_at_1_std value: -10.407399999999999 - type: nauc_ndcg_at_1_diff1 value: 56.6466 - type: nauc_ndcg_at_3_max value: 33.022800000000004 - type: nauc_ndcg_at_3_std value: -9.5046 - type: nauc_ndcg_at_3_diff1 value: 52.7916 - type: nauc_ndcg_at_5_max value: 33.1721 - type: nauc_ndcg_at_5_std value: -9.0365 - type: nauc_ndcg_at_5_diff1 value: 52.317400000000006 - type: nauc_ndcg_at_10_max value: 33.1837 - type: nauc_ndcg_at_10_std value: -8.4008 - type: nauc_ndcg_at_10_diff1 value: 52.007999999999996 - type: nauc_ndcg_at_20_max value: 33.024 - type: nauc_ndcg_at_20_std value: -7.9246 - type: nauc_ndcg_at_20_diff1 value: 51.9078 - type: nauc_ndcg_at_100_max value: 32.962599999999995 - type: nauc_ndcg_at_100_std value: -7.4719 - type: nauc_ndcg_at_100_diff1 value: 51.94180000000001 - type: nauc_ndcg_at_1000_max value: 33.1905 - type: nauc_ndcg_at_1000_std value: -7.295599999999999 - type: nauc_ndcg_at_1000_diff1 value: 52.351099999999995 - type: nauc_map_at_1_max value: 31.1431 - type: nauc_map_at_1_std value: -10.407399999999999 - type: nauc_map_at_1_diff1 value: 56.6466 - type: nauc_map_at_3_max value: 32.5713 - type: nauc_map_at_3_std value: -9.734 - type: nauc_map_at_3_diff1 value: 53.703599999999994 - type: nauc_map_at_5_max value: 32.6494 - type: nauc_map_at_5_std value: -9.4813 - type: nauc_map_at_5_diff1 value: 53.4567 - type: nauc_map_at_10_max value: 32.664100000000005 - type: nauc_map_at_10_std value: -9.225999999999999 - type: nauc_map_at_10_diff1 value: 53.3589 - type: nauc_map_at_20_max value: 32.6136 - type: nauc_map_at_20_std value: -9.107899999999999 - type: nauc_map_at_20_diff1 value: 53.337 - type: nauc_map_at_100_max value: 32.6036 - type: nauc_map_at_100_std value: -9.0547 - type: nauc_map_at_100_diff1 value: 53.35339999999999 - type: 
nauc_map_at_1000_max value: 32.610299999999995 - type: nauc_map_at_1000_std value: -9.0493 - type: nauc_map_at_1000_diff1 value: 53.3656 - type: nauc_recall_at_1_max value: 31.1431 - type: nauc_recall_at_1_std value: -10.407399999999999 - type: nauc_recall_at_1_diff1 value: 56.6466 - type: nauc_recall_at_3_max value: 34.3846 - type: nauc_recall_at_3_std value: -8.8071 - type: nauc_recall_at_3_diff1 value: 50.047 - type: nauc_recall_at_5_max value: 34.8431 - type: nauc_recall_at_5_std value: -7.550999999999999 - type: nauc_recall_at_5_diff1 value: 48.6504 - type: nauc_recall_at_10_max value: 34.9686 - type: nauc_recall_at_10_std value: -5.1544 - type: nauc_recall_at_10_diff1 value: 47.0462 - type: nauc_recall_at_20_max value: 34.441300000000005 - type: nauc_recall_at_20_std value: -2.3698 - type: nauc_recall_at_20_diff1 value: 45.9903 - type: nauc_recall_at_100_max value: 34.4855 - type: nauc_recall_at_100_std value: 4.2675 - type: nauc_recall_at_100_diff1 value: 43.5966 - type: nauc_recall_at_1000_max value: 42.692600000000006 - type: nauc_recall_at_1000_std value: 21.8632 - type: nauc_recall_at_1000_diff1 value: 46.5143 - type: nauc_precision_at_1_max value: 31.1431 - type: nauc_precision_at_1_std value: -10.407399999999999 - type: nauc_precision_at_1_diff1 value: 56.6466 - type: nauc_precision_at_3_max value: 34.3846 - type: nauc_precision_at_3_std value: -8.8071 - type: nauc_precision_at_3_diff1 value: 50.047 - type: nauc_precision_at_5_max value: 34.8431 - type: nauc_precision_at_5_std value: -7.550999999999999 - type: nauc_precision_at_5_diff1 value: 48.6504 - type: nauc_precision_at_10_max value: 34.9686 - type: nauc_precision_at_10_std value: -5.1544 - type: nauc_precision_at_10_diff1 value: 47.0462 - type: nauc_precision_at_20_max value: 34.441300000000005 - type: nauc_precision_at_20_std value: -2.3698 - type: nauc_precision_at_20_diff1 value: 45.9903 - type: nauc_precision_at_100_max value: 34.4855 - type: nauc_precision_at_100_std value: 4.2675 - type: nauc_precision_at_100_diff1 value: 43.5966 - type: nauc_precision_at_1000_max value: 42.692600000000006 - type: nauc_precision_at_1000_std value: 21.8632 - type: nauc_precision_at_1000_diff1 value: 46.5143 - type: nauc_mrr_at_1_max value: 31.1816 - type: nauc_mrr_at_1_std value: -10.2945 - type: nauc_mrr_at_1_diff1 value: 56.5084 - type: nauc_mrr_at_3_max value: 32.609300000000005 - type: nauc_mrr_at_3_std value: -9.6538 - type: nauc_mrr_at_3_diff1 value: 53.6187 - type: nauc_mrr_at_5_max value: 32.6863 - type: nauc_mrr_at_5_std value: -9.3972 - type: nauc_mrr_at_5_diff1 value: 53.378400000000006 - type: nauc_mrr_at_10_max value: 32.697700000000005 - type: nauc_mrr_at_10_std value: -9.1456 - type: nauc_mrr_at_10_diff1 value: 53.2796 - type: nauc_mrr_at_20_max value: 32.6496 - type: nauc_mrr_at_20_std value: -9.0244 - type: nauc_mrr_at_20_diff1 value: 53.257600000000004 - type: nauc_mrr_at_100_max value: 32.6402 - type: nauc_mrr_at_100_std value: -8.970799999999999 - type: nauc_mrr_at_100_diff1 value: 53.274100000000004 - type: nauc_mrr_at_1000_max value: 32.647 - type: nauc_mrr_at_1000_std value: -8.9653 - type: nauc_mrr_at_1000_diff1 value: 53.286100000000005 - type: main_score value: 52.544000000000004 - task: type: Retrieval dataset: name: MTEB CodeSearchNetCCRetrieval (php) type: CoIR-Retrieval/CodeSearchNet-ccr config: php split: test revision: 6e1effa2c03723c5fde48ee912b5ee08d4f211e8 metrics: - type: ndcg_at_1 value: 29.685 - type: ndcg_at_3 value: 37.448 - type: ndcg_at_5 value: 39.781 - type: ndcg_at_10 value: 41.814 - 
type: ndcg_at_20 value: 43.333 - type: ndcg_at_100 value: 45.664 - type: ndcg_at_1000 value: 47.536 - type: map_at_1 value: 29.685 - type: map_at_3 value: 35.545 - type: map_at_5 value: 36.839 - type: map_at_10 value: 37.682 - type: map_at_20 value: 38.099 - type: map_at_100 value: 38.415 - type: map_at_1000 value: 38.478 - type: recall_at_1 value: 29.685 - type: recall_at_3 value: 42.95 - type: recall_at_5 value: 48.616 - type: recall_at_10 value: 54.888000000000005 - type: recall_at_20 value: 60.895999999999994 - type: recall_at_100 value: 73.548 - type: recall_at_1000 value: 88.697 - type: precision_at_1 value: 29.685 - type: precision_at_3 value: 14.316999999999998 - type: precision_at_5 value: 9.722999999999999 - type: precision_at_10 value: 5.489 - type: precision_at_20 value: 3.045 - type: precision_at_100 value: 0.735 - type: precision_at_1000 value: 0.089 - type: mrr_at_1 value: 29.6489 - type: mrr_at_3 value: 35.5299 - type: mrr_at_5 value: 36.8133 - type: mrr_at_10 value: 37.6632 - type: mrr_at_20 value: 38.079299999999996 - type: mrr_at_100 value: 38.3951 - type: mrr_at_1000 value: 38.4584 - type: nauc_ndcg_at_1_max value: 23.1966 - type: nauc_ndcg_at_1_std value: -9.4926 - type: nauc_ndcg_at_1_diff1 value: 50.2664 - type: nauc_ndcg_at_3_max value: 22.9114 - type: nauc_ndcg_at_3_std value: -9.3945 - type: nauc_ndcg_at_3_diff1 value: 45.266400000000004 - type: nauc_ndcg_at_5_max value: 22.2736 - type: nauc_ndcg_at_5_std value: -9.1173 - type: nauc_ndcg_at_5_diff1 value: 44.1003 - type: nauc_ndcg_at_10_max value: 22.0212 - type: nauc_ndcg_at_10_std value: -8.5559 - type: nauc_ndcg_at_10_diff1 value: 43.5542 - type: nauc_ndcg_at_20_max value: 21.5977 - type: nauc_ndcg_at_20_std value: -8.236400000000001 - type: nauc_ndcg_at_20_diff1 value: 43.1564 - type: nauc_ndcg_at_100_max value: 21.4543 - type: nauc_ndcg_at_100_std value: -7.5462 - type: nauc_ndcg_at_100_diff1 value: 43.1768 - type: nauc_ndcg_at_1000_max value: 21.6202 - type: nauc_ndcg_at_1000_std value: -7.5571 - type: nauc_ndcg_at_1000_diff1 value: 43.5388 - type: nauc_map_at_1_max value: 23.1966 - type: nauc_map_at_1_std value: -9.4926 - type: nauc_map_at_1_diff1 value: 50.2664 - type: nauc_map_at_3_max value: 23.0018 - type: nauc_map_at_3_std value: -9.4391 - type: nauc_map_at_3_diff1 value: 46.428000000000004 - type: nauc_map_at_5_max value: 22.642300000000002 - type: nauc_map_at_5_std value: -9.2849 - type: nauc_map_at_5_diff1 value: 45.776 - type: nauc_map_at_10_max value: 22.551099999999998 - type: nauc_map_at_10_std value: -9.045300000000001 - type: nauc_map_at_10_diff1 value: 45.5645 - type: nauc_map_at_20_max value: 22.4407 - type: nauc_map_at_20_std value: -8.9542 - type: nauc_map_at_20_diff1 value: 45.4588 - type: nauc_map_at_100_max value: 22.4247 - type: nauc_map_at_100_std value: -8.869299999999999 - type: nauc_map_at_100_diff1 value: 45.467200000000005 - type: nauc_map_at_1000_max value: 22.429299999999998 - type: nauc_map_at_1000_std value: -8.8653 - type: nauc_map_at_1000_diff1 value: 45.479 - type: nauc_recall_at_1_max value: 23.1966 - type: nauc_recall_at_1_std value: -9.4926 - type: nauc_recall_at_1_diff1 value: 50.2664 - type: nauc_recall_at_3_max value: 22.6466 - type: nauc_recall_at_3_std value: -9.259599999999999 - type: nauc_recall_at_3_diff1 value: 41.9917 - type: nauc_recall_at_5_max value: 21.121100000000002 - type: nauc_recall_at_5_std value: -8.5882 - type: nauc_recall_at_5_diff1 value: 39.1445 - type: nauc_recall_at_10_max value: 20.191200000000002 - type: nauc_recall_at_10_std value: -6.824 - 
type: nauc_recall_at_10_diff1 value: 37.107 - type: nauc_recall_at_20_max value: 18.2104 - type: nauc_recall_at_20_std value: -5.3749 - type: nauc_recall_at_20_diff1 value: 34.9673 - type: nauc_recall_at_100_max value: 16.0859 - type: nauc_recall_at_100_std value: 0.7539 - type: nauc_recall_at_100_diff1 value: 32.603500000000004 - type: nauc_recall_at_1000_max value: 14.1642 - type: nauc_recall_at_1000_std value: 8.5463 - type: nauc_recall_at_1000_diff1 value: 29.5927 - type: nauc_precision_at_1_max value: 23.1966 - type: nauc_precision_at_1_std value: -9.4926 - type: nauc_precision_at_1_diff1 value: 50.2664 - type: nauc_precision_at_3_max value: 22.6466 - type: nauc_precision_at_3_std value: -9.259599999999999 - type: nauc_precision_at_3_diff1 value: 41.9917 - type: nauc_precision_at_5_max value: 21.121100000000002 - type: nauc_precision_at_5_std value: -8.5882 - type: nauc_precision_at_5_diff1 value: 39.1445 - type: nauc_precision_at_10_max value: 20.191200000000002 - type: nauc_precision_at_10_std value: -6.824 - type: nauc_precision_at_10_diff1 value: 37.107 - type: nauc_precision_at_20_max value: 18.2104 - type: nauc_precision_at_20_std value: -5.3749 - type: nauc_precision_at_20_diff1 value: 34.9673 - type: nauc_precision_at_100_max value: 16.0859 - type: nauc_precision_at_100_std value: 0.7539 - type: nauc_precision_at_100_diff1 value: 32.603500000000004 - type: nauc_precision_at_1000_max value: 14.1642 - type: nauc_precision_at_1000_std value: 8.5463 - type: nauc_precision_at_1000_diff1 value: 29.5927 - type: nauc_mrr_at_1_max value: 23.2502 - type: nauc_mrr_at_1_std value: -9.507 - type: nauc_mrr_at_1_diff1 value: 50.3997 - type: nauc_mrr_at_3_max value: 23.009 - type: nauc_mrr_at_3_std value: -9.4541 - type: nauc_mrr_at_3_diff1 value: 46.4733 - type: nauc_mrr_at_5_max value: 22.656000000000002 - type: nauc_mrr_at_5_std value: -9.2987 - type: nauc_mrr_at_5_diff1 value: 45.839999999999996 - type: nauc_mrr_at_10_max value: 22.5697 - type: nauc_mrr_at_10_std value: -9.0543 - type: nauc_mrr_at_10_diff1 value: 45.618700000000004 - type: nauc_mrr_at_20_max value: 22.461000000000002 - type: nauc_mrr_at_20_std value: -8.9628 - type: nauc_mrr_at_20_diff1 value: 45.5146 - type: nauc_mrr_at_100_max value: 22.4449 - type: nauc_mrr_at_100_std value: -8.877699999999999 - type: nauc_mrr_at_100_diff1 value: 45.5229 - type: nauc_mrr_at_1000_max value: 22.4498 - type: nauc_mrr_at_1000_std value: -8.873899999999999 - type: nauc_mrr_at_1000_diff1 value: 45.535199999999996 - type: main_score value: 41.814 - task: type: Retrieval dataset: name: MTEB CodeSearchNetRetrieval (python) type: code-search-net/code_search_net config: python split: test revision: fdc6a9e39575768c27eb8a2a5f702bf846eb4759 metrics: - type: ndcg_at_1 value: 73.5 - type: ndcg_at_3 value: 82.35900000000001 - type: ndcg_at_5 value: 83.543 - type: ndcg_at_10 value: 84.357 - type: ndcg_at_20 value: 84.973 - type: ndcg_at_100 value: 85.449 - type: ndcg_at_1000 value: 85.591 - type: map_at_1 value: 73.5 - type: map_at_3 value: 80.2 - type: map_at_5 value: 80.85 - type: map_at_10 value: 81.189 - type: map_at_20 value: 81.364 - type: map_at_100 value: 81.434 - type: map_at_1000 value: 81.44 - type: recall_at_1 value: 73.5 - type: recall_at_3 value: 88.6 - type: recall_at_5 value: 91.5 - type: recall_at_10 value: 94.0 - type: recall_at_20 value: 96.39999999999999 - type: recall_at_100 value: 98.9 - type: recall_at_1000 value: 100.0 - type: precision_at_1 value: 73.5 - type: precision_at_3 value: 29.532999999999998 - type: precision_at_5 
value: 18.3 - type: precision_at_10 value: 9.4 - type: precision_at_20 value: 4.82 - type: precision_at_100 value: 0.989 - type: precision_at_1000 value: 0.1 - type: mrr_at_1 value: 73.5 - type: mrr_at_3 value: 80.2 - type: mrr_at_5 value: 80.85 - type: mrr_at_10 value: 81.1894 - type: mrr_at_20 value: 81.3638 - type: mrr_at_100 value: 81.43430000000001 - type: mrr_at_1000 value: 81.44 - type: nauc_ndcg_at_1_max value: 45.553 - type: nauc_ndcg_at_1_std value: -3.8149 - type: nauc_ndcg_at_1_diff1 value: 72.4638 - type: nauc_ndcg_at_3_max value: 47.8454 - type: nauc_ndcg_at_3_std value: -3.2174 - type: nauc_ndcg_at_3_diff1 value: 69.05059999999999 - type: nauc_ndcg_at_5_max value: 48.105599999999995 - type: nauc_ndcg_at_5_std value: -3.0107 - type: nauc_ndcg_at_5_diff1 value: 70.2436 - type: nauc_ndcg_at_10_max value: 48.871900000000004 - type: nauc_ndcg_at_10_std value: -2.7289 - type: nauc_ndcg_at_10_diff1 value: 70.87440000000001 - type: nauc_ndcg_at_20_max value: 49.1441 - type: nauc_ndcg_at_20_std value: -2.2193 - type: nauc_ndcg_at_20_diff1 value: 70.9602 - type: nauc_ndcg_at_100_max value: 48.2597 - type: nauc_ndcg_at_100_std value: -2.8648 - type: nauc_ndcg_at_100_diff1 value: 70.5487 - type: nauc_ndcg_at_1000_max value: 48.0576 - type: nauc_ndcg_at_1000_std value: -3.0315000000000003 - type: nauc_ndcg_at_1000_diff1 value: 70.8214 - type: nauc_map_at_1_max value: 45.553 - type: nauc_map_at_1_std value: -3.8149 - type: nauc_map_at_1_diff1 value: 72.4638 - type: nauc_map_at_3_max value: 47.143 - type: nauc_map_at_3_std value: -3.4511 - type: nauc_map_at_3_diff1 value: 70.2411 - type: nauc_map_at_5_max value: 47.2524 - type: nauc_map_at_5_std value: -3.3834999999999997 - type: nauc_map_at_5_diff1 value: 70.8691 - type: nauc_map_at_10_max value: 47.5215 - type: nauc_map_at_10_std value: -3.3042000000000002 - type: nauc_map_at_10_diff1 value: 71.1041 - type: nauc_map_at_20_max value: 47.5871 - type: nauc_map_at_20_std value: -3.1888 - type: nauc_map_at_20_diff1 value: 71.1157 - type: nauc_map_at_100_max value: 47.4746 - type: nauc_map_at_100_std value: -3.3092 - type: nauc_map_at_100_diff1 value: 71.0626 - type: nauc_map_at_1000_max value: 47.4686 - type: nauc_map_at_1000_std value: -3.3099000000000003 - type: nauc_map_at_1000_diff1 value: 71.0712 - type: nauc_recall_at_1_max value: 45.553 - type: nauc_recall_at_1_std value: -3.8149 - type: nauc_recall_at_1_diff1 value: 72.4638 - type: nauc_recall_at_3_max value: 51.09590000000001 - type: nauc_recall_at_3_std value: -2.1018 - type: nauc_recall_at_3_diff1 value: 63.4433 - type: nauc_recall_at_5_max value: 53.195499999999996 - type: nauc_recall_at_5_std value: -0.6421 - type: nauc_recall_at_5_diff1 value: 66.7381 - type: nauc_recall_at_10_max value: 60.660599999999995 - type: nauc_recall_at_10_std value: 2.5576000000000003 - type: nauc_recall_at_10_diff1 value: 69.8771 - type: nauc_recall_at_20_max value: 72.0082 - type: nauc_recall_at_20_std value: 13.519300000000001 - type: nauc_recall_at_20_diff1 value: 70.8774 - type: nauc_recall_at_100_max value: 67.6683 - type: nauc_recall_at_100_std value: 16.4757 - type: nauc_recall_at_100_diff1 value: 45.535199999999996 - type: nauc_recall_at_1000_max value: .nan - type: nauc_recall_at_1000_std value: .nan - type: nauc_recall_at_1000_diff1 value: .nan - type: nauc_precision_at_1_max value: 45.553 - type: nauc_precision_at_1_std value: -3.8149 - type: nauc_precision_at_1_diff1 value: 72.4638 - type: nauc_precision_at_3_max value: 51.09590000000001 - type: nauc_precision_at_3_std value: -2.1018 - 
type: nauc_precision_at_3_diff1 value: 63.4433 - type: nauc_precision_at_5_max value: 53.195499999999996 - type: nauc_precision_at_5_std value: -0.6421 - type: nauc_precision_at_5_diff1 value: 66.7381 - type: nauc_precision_at_10_max value: 60.660599999999995 - type: nauc_precision_at_10_std value: 2.5576000000000003 - type: nauc_precision_at_10_diff1 value: 69.8771 - type: nauc_precision_at_20_max value: 72.0082 - type: nauc_precision_at_20_std value: 13.519300000000001 - type: nauc_precision_at_20_diff1 value: 70.8774 - type: nauc_precision_at_100_max value: 67.6683 - type: nauc_precision_at_100_std value: 16.4757 - type: nauc_precision_at_100_diff1 value: 45.535199999999996 - type: nauc_precision_at_1000_max value: .nan - type: nauc_precision_at_1000_std value: .nan - type: nauc_precision_at_1000_diff1 value: .nan - type: nauc_mrr_at_1_max value: 45.553 - type: nauc_mrr_at_1_std value: -3.8149 - type: nauc_mrr_at_1_diff1 value: 72.4638 - type: nauc_mrr_at_3_max value: 47.143 - type: nauc_mrr_at_3_std value: -3.4511 - type: nauc_mrr_at_3_diff1 value: 70.2411 - type: nauc_mrr_at_5_max value: 47.2524 - type: nauc_mrr_at_5_std value: -3.3834999999999997 - type: nauc_mrr_at_5_diff1 value: 70.8691 - type: nauc_mrr_at_10_max value: 47.5215 - type: nauc_mrr_at_10_std value: -3.3042000000000002 - type: nauc_mrr_at_10_diff1 value: 71.1041 - type: nauc_mrr_at_20_max value: 47.5871 - type: nauc_mrr_at_20_std value: -3.1888 - type: nauc_mrr_at_20_diff1 value: 71.1157 - type: nauc_mrr_at_100_max value: 47.4746 - type: nauc_mrr_at_100_std value: -3.3092 - type: nauc_mrr_at_100_diff1 value: 71.0626 - type: nauc_mrr_at_1000_max value: 47.4686 - type: nauc_mrr_at_1000_std value: -3.3099000000000003 - type: nauc_mrr_at_1000_diff1 value: 71.0712 - type: main_score value: 84.357 - task: type: Retrieval dataset: name: MTEB CodeSearchNetRetrieval (javascript) type: code-search-net/code_search_net config: javascript split: test revision: fdc6a9e39575768c27eb8a2a5f702bf846eb4759 metrics: - type: ndcg_at_1 value: 59.4 - type: ndcg_at_3 value: 68.58800000000001 - type: ndcg_at_5 value: 70.0 - type: ndcg_at_10 value: 71.384 - type: ndcg_at_20 value: 72.505 - type: ndcg_at_100 value: 73.532 - type: ndcg_at_1000 value: 74.414 - type: map_at_1 value: 59.4 - type: map_at_3 value: 66.367 - type: map_at_5 value: 67.157 - type: map_at_10 value: 67.72399999999999 - type: map_at_20 value: 68.036 - type: map_at_100 value: 68.182 - type: map_at_1000 value: 68.208 - type: recall_at_1 value: 59.4 - type: recall_at_3 value: 75.0 - type: recall_at_5 value: 78.4 - type: recall_at_10 value: 82.69999999999999 - type: recall_at_20 value: 87.1 - type: recall_at_100 value: 92.60000000000001 - type: recall_at_1000 value: 100.0 - type: precision_at_1 value: 59.4 - type: precision_at_3 value: 25.0 - type: precision_at_5 value: 15.68 - type: precision_at_10 value: 8.27 - type: precision_at_20 value: 4.3549999999999995 - type: precision_at_100 value: 0.9259999999999999 - type: precision_at_1000 value: 0.1 - type: mrr_at_1 value: 59.4 - type: mrr_at_3 value: 66.3667 - type: mrr_at_5 value: 67.1567 - type: mrr_at_10 value: 67.72399999999999 - type: mrr_at_20 value: 68.036 - type: mrr_at_100 value: 68.1821 - type: mrr_at_1000 value: 68.20779999999999 - type: nauc_ndcg_at_1_max value: 55.2077 - type: nauc_ndcg_at_1_std value: 23.8385 - type: nauc_ndcg_at_1_diff1 value: 72.8827 - type: nauc_ndcg_at_3_max value: 62.495 - type: nauc_ndcg_at_3_std value: 31.867800000000003 - type: nauc_ndcg_at_3_diff1 value: 69.8148 - type: nauc_ndcg_at_5_max 
value: 63.132999999999996 - type: nauc_ndcg_at_5_std value: 33.3486 - type: nauc_ndcg_at_5_diff1 value: 69.8501 - type: nauc_ndcg_at_10_max value: 64.3507 - type: nauc_ndcg_at_10_std value: 36.4767 - type: nauc_ndcg_at_10_diff1 value: 69.5995 - type: nauc_ndcg_at_20_max value: 63.930299999999995 - type: nauc_ndcg_at_20_std value: 36.8457 - type: nauc_ndcg_at_20_diff1 value: 70.0822 - type: nauc_ndcg_at_100_max value: 63.10249999999999 - type: nauc_ndcg_at_100_std value: 36.4228 - type: nauc_ndcg_at_100_diff1 value: 70.0219 - type: nauc_ndcg_at_1000_max value: 62.3826 - type: nauc_ndcg_at_1000_std value: 34.2464 - type: nauc_ndcg_at_1000_diff1 value: 70.2371 - type: nauc_map_at_1_max value: 55.2077 - type: nauc_map_at_1_std value: 23.8385 - type: nauc_map_at_1_diff1 value: 72.8827 - type: nauc_map_at_3_max value: 60.4208 - type: nauc_map_at_3_std value: 29.6445 - type: nauc_map_at_3_diff1 value: 70.58630000000001 - type: nauc_map_at_5_max value: 60.709900000000005 - type: nauc_map_at_5_std value: 30.400899999999996 - type: nauc_map_at_5_diff1 value: 70.6255 - type: nauc_map_at_10_max value: 61.152499999999996 - type: nauc_map_at_10_std value: 31.550800000000002 - type: nauc_map_at_10_diff1 value: 70.56099999999999 - type: nauc_map_at_20_max value: 61.0075 - type: nauc_map_at_20_std value: 31.585600000000003 - type: nauc_map_at_20_diff1 value: 70.6649 - type: nauc_map_at_100_max value: 60.90370000000001 - type: nauc_map_at_100_std value: 31.510700000000003 - type: nauc_map_at_100_diff1 value: 70.66839999999999 - type: nauc_map_at_1000_max value: 60.8865 - type: nauc_map_at_1000_std value: 31.4572 - type: nauc_map_at_1000_diff1 value: 70.6705 - type: nauc_recall_at_1_max value: 55.2077 - type: nauc_recall_at_1_std value: 23.8385 - type: nauc_recall_at_1_diff1 value: 72.8827 - type: nauc_recall_at_3_max value: 69.92819999999999 - type: nauc_recall_at_3_std value: 39.8045 - type: nauc_recall_at_3_diff1 value: 67.10040000000001 - type: nauc_recall_at_5_max value: 72.8013 - type: nauc_recall_at_5_std value: 45.1476 - type: nauc_recall_at_5_diff1 value: 66.84790000000001 - type: nauc_recall_at_10_max value: 80.1828 - type: nauc_recall_at_10_std value: 61.6781 - type: nauc_recall_at_10_diff1 value: 64.9272 - type: nauc_recall_at_20_max value: 82.11840000000001 - type: nauc_recall_at_20_std value: 72.1146 - type: nauc_recall_at_20_diff1 value: 67.3756 - type: nauc_recall_at_100_max value: 80.8836 - type: nauc_recall_at_100_std value: 89.47810000000001 - type: nauc_recall_at_100_diff1 value: 64.169 - type: nauc_recall_at_1000_max value: .nan - type: nauc_recall_at_1000_std value: .nan - type: nauc_recall_at_1000_diff1 value: .nan - type: nauc_precision_at_1_max value: 55.2077 - type: nauc_precision_at_1_std value: 23.8385 - type: nauc_precision_at_1_diff1 value: 72.8827 - type: nauc_precision_at_3_max value: 69.92819999999999 - type: nauc_precision_at_3_std value: 39.8045 - type: nauc_precision_at_3_diff1 value: 67.10040000000001 - type: nauc_precision_at_5_max value: 72.8013 - type: nauc_precision_at_5_std value: 45.1476 - type: nauc_precision_at_5_diff1 value: 66.84790000000001 - type: nauc_precision_at_10_max value: 80.1828 - type: nauc_precision_at_10_std value: 61.6781 - type: nauc_precision_at_10_diff1 value: 64.9272 - type: nauc_precision_at_20_max value: 82.11840000000001 - type: nauc_precision_at_20_std value: 72.1146 - type: nauc_precision_at_20_diff1 value: 67.3756 - type: nauc_precision_at_100_max value: 80.8836 - type: nauc_precision_at_100_std value: 89.47810000000001 - type: 
nauc_precision_at_100_diff1 value: 64.169 - type: nauc_precision_at_1000_max value: .nan - type: nauc_precision_at_1000_std value: .nan - type: nauc_precision_at_1000_diff1 value: .nan - type: nauc_mrr_at_1_max value: 55.2077 - type: nauc_mrr_at_1_std value: 23.8385 - type: nauc_mrr_at_1_diff1 value: 72.8827 - type: nauc_mrr_at_3_max value: 60.4208 - type: nauc_mrr_at_3_std value: 29.6445 - type: nauc_mrr_at_3_diff1 value: 70.58630000000001 - type: nauc_mrr_at_5_max value: 60.709900000000005 - type: nauc_mrr_at_5_std value: 30.400899999999996 - type: nauc_mrr_at_5_diff1 value: 70.6255 - type: nauc_mrr_at_10_max value: 61.152499999999996 - type: nauc_mrr_at_10_std value: 31.550800000000002 - type: nauc_mrr_at_10_diff1 value: 70.56099999999999 - type: nauc_mrr_at_20_max value: 61.0075 - type: nauc_mrr_at_20_std value: 31.585600000000003 - type: nauc_mrr_at_20_diff1 value: 70.6649 - type: nauc_mrr_at_100_max value: 60.90370000000001 - type: nauc_mrr_at_100_std value: 31.510700000000003 - type: nauc_mrr_at_100_diff1 value: 70.66839999999999 - type: nauc_mrr_at_1000_max value: 60.8865 - type: nauc_mrr_at_1000_std value: 31.4572 - type: nauc_mrr_at_1000_diff1 value: 70.6705 - type: main_score value: 71.384 - task: type: Retrieval dataset: name: MTEB CodeSearchNetRetrieval (go) type: code-search-net/code_search_net config: go split: test revision: fdc6a9e39575768c27eb8a2a5f702bf846eb4759 metrics: - type: ndcg_at_1 value: 71.39999999999999 - type: ndcg_at_3 value: 82.32000000000001 - type: ndcg_at_5 value: 84.22699999999999 - type: ndcg_at_10 value: 84.922 - type: ndcg_at_20 value: 85.226 - type: ndcg_at_100 value: 85.563 - type: ndcg_at_1000 value: 85.66 - type: map_at_1 value: 71.39999999999999 - type: map_at_3 value: 79.783 - type: map_at_5 value: 80.848 - type: map_at_10 value: 81.145 - type: map_at_20 value: 81.229 - type: map_at_100 value: 81.284 - type: map_at_1000 value: 81.286 - type: recall_at_1 value: 71.39999999999999 - type: recall_at_3 value: 89.60000000000001 - type: recall_at_5 value: 94.19999999999999 - type: recall_at_10 value: 96.3 - type: recall_at_20 value: 97.5 - type: recall_at_100 value: 99.2 - type: recall_at_1000 value: 100.0 - type: precision_at_1 value: 71.39999999999999 - type: precision_at_3 value: 29.866999999999997 - type: precision_at_5 value: 18.84 - type: precision_at_10 value: 9.629999999999999 - type: precision_at_20 value: 4.875 - type: precision_at_100 value: 0.992 - type: precision_at_1000 value: 0.1 - type: mrr_at_1 value: 71.39999999999999 - type: mrr_at_3 value: 79.7833 - type: mrr_at_5 value: 80.8483 - type: mrr_at_10 value: 81.14489999999999 - type: mrr_at_20 value: 81.22890000000001 - type: mrr_at_100 value: 81.2836 - type: mrr_at_1000 value: 81.28649999999999 - type: nauc_ndcg_at_1_max value: 46.2744 - type: nauc_ndcg_at_1_std value: -2.9863 - type: nauc_ndcg_at_1_diff1 value: 74.0857 - type: nauc_ndcg_at_3_max value: 54.4012 - type: nauc_ndcg_at_3_std value: -3.3299000000000003 - type: nauc_ndcg_at_3_diff1 value: 70.891 - type: nauc_ndcg_at_5_max value: 54.3223 - type: nauc_ndcg_at_5_std value: -1.6239 - type: nauc_ndcg_at_5_diff1 value: 71.7397 - type: nauc_ndcg_at_10_max value: 53.629099999999994 - type: nauc_ndcg_at_10_std value: -1.8041999999999998 - type: nauc_ndcg_at_10_diff1 value: 72.8108 - type: nauc_ndcg_at_20_max value: 52.8247 - type: nauc_ndcg_at_20_std value: -2.6823 - type: nauc_ndcg_at_20_diff1 value: 72.7573 - type: nauc_ndcg_at_100_max value: 52.359 - type: nauc_ndcg_at_100_std value: -2.8805 - type: nauc_ndcg_at_100_diff1 value: 
72.8282 - type: nauc_ndcg_at_1000_max value: 52.1323 - type: nauc_ndcg_at_1000_std value: -2.8353 - type: nauc_ndcg_at_1000_diff1 value: 72.6771 - type: nauc_map_at_1_max value: 46.2744 - type: nauc_map_at_1_std value: -2.9863 - type: nauc_map_at_1_diff1 value: 74.0857 - type: nauc_map_at_3_max value: 52.0957 - type: nauc_map_at_3_std value: -3.5077999999999996 - type: nauc_map_at_3_diff1 value: 71.90530000000001 - type: nauc_map_at_5_max value: 51.9209 - type: nauc_map_at_5_std value: -2.7184 - type: nauc_map_at_5_diff1 value: 72.3474 - type: nauc_map_at_10_max value: 51.642900000000004 - type: nauc_map_at_10_std value: -2.8069 - type: nauc_map_at_10_diff1 value: 72.74589999999999 - type: nauc_map_at_20_max value: 51.451800000000006 - type: nauc_map_at_20_std value: -2.9922 - type: nauc_map_at_20_diff1 value: 72.7222 - type: nauc_map_at_100_max value: 51.3795 - type: nauc_map_at_100_std value: -3.0112 - type: nauc_map_at_100_diff1 value: 72.723 - type: nauc_map_at_1000_max value: 51.3724 - type: nauc_map_at_1000_std value: -3.009 - type: nauc_map_at_1000_diff1 value: 72.7192 - type: nauc_recall_at_1_max value: 46.2744 - type: nauc_recall_at_1_std value: -2.9863 - type: nauc_recall_at_1_diff1 value: 74.0857 - type: nauc_recall_at_3_max value: 65.8657 - type: nauc_recall_at_3_std value: -2.2125 - type: nauc_recall_at_3_diff1 value: 65.75649999999999 - type: nauc_recall_at_5_max value: 74.348 - type: nauc_recall_at_5_std value: 8.7503 - type: nauc_recall_at_5_diff1 value: 66.9693 - type: nauc_recall_at_10_max value: 77.9494 - type: nauc_recall_at_10_std value: 12.8688 - type: nauc_recall_at_10_diff1 value: 75.7287 - type: nauc_recall_at_20_max value: 72.9655 - type: nauc_recall_at_20_std value: 0.8702 - type: nauc_recall_at_20_diff1 value: 76.5864 - type: nauc_recall_at_100_max value: 80.4563 - type: nauc_recall_at_100_std value: -9.278699999999999 - type: nauc_recall_at_100_diff1 value: 92.793 - type: nauc_recall_at_1000_max value: .nan - type: nauc_recall_at_1000_std value: .nan - type: nauc_recall_at_1000_diff1 value: .nan - type: nauc_precision_at_1_max value: 46.2744 - type: nauc_precision_at_1_std value: -2.9863 - type: nauc_precision_at_1_diff1 value: 74.0857 - type: nauc_precision_at_3_max value: 65.8657 - type: nauc_precision_at_3_std value: -2.2125 - type: nauc_precision_at_3_diff1 value: 65.75649999999999 - type: nauc_precision_at_5_max value: 74.348 - type: nauc_precision_at_5_std value: 8.7503 - type: nauc_precision_at_5_diff1 value: 66.9693 - type: nauc_precision_at_10_max value: 77.9494 - type: nauc_precision_at_10_std value: 12.8688 - type: nauc_precision_at_10_diff1 value: 75.7287 - type: nauc_precision_at_20_max value: 72.9655 - type: nauc_precision_at_20_std value: 0.8702 - type: nauc_precision_at_20_diff1 value: 76.5864 - type: nauc_precision_at_100_max value: 80.4563 - type: nauc_precision_at_100_std value: -9.278699999999999 - type: nauc_precision_at_100_diff1 value: 92.793 - type: nauc_precision_at_1000_max value: .nan - type: nauc_precision_at_1000_std value: .nan - type: nauc_precision_at_1000_diff1 value: .nan - type: nauc_mrr_at_1_max value: 46.2744 - type: nauc_mrr_at_1_std value: -2.9863 - type: nauc_mrr_at_1_diff1 value: 74.0857 - type: nauc_mrr_at_3_max value: 52.0957 - type: nauc_mrr_at_3_std value: -3.5077999999999996 - type: nauc_mrr_at_3_diff1 value: 71.90530000000001 - type: nauc_mrr_at_5_max value: 51.9209 - type: nauc_mrr_at_5_std value: -2.7184 - type: nauc_mrr_at_5_diff1 value: 72.3474 - type: nauc_mrr_at_10_max value: 51.642900000000004 - type: 
nauc_mrr_at_10_std value: -2.8069 - type: nauc_mrr_at_10_diff1 value: 72.74589999999999 - type: nauc_mrr_at_20_max value: 51.451800000000006 - type: nauc_mrr_at_20_std value: -2.9922 - type: nauc_mrr_at_20_diff1 value: 72.7222 - type: nauc_mrr_at_100_max value: 51.3795 - type: nauc_mrr_at_100_std value: -3.0112 - type: nauc_mrr_at_100_diff1 value: 72.723 - type: nauc_mrr_at_1000_max value: 51.3724 - type: nauc_mrr_at_1000_std value: -3.009 - type: nauc_mrr_at_1000_diff1 value: 72.7192 - type: main_score value: 84.922 - task: type: Retrieval dataset: name: MTEB CodeSearchNetRetrieval (ruby) type: code-search-net/code_search_net config: ruby split: test revision: fdc6a9e39575768c27eb8a2a5f702bf846eb4759 metrics: - type: ndcg_at_1 value: 61.9 - type: ndcg_at_3 value: 71.91 - type: ndcg_at_5 value: 74.11 - type: ndcg_at_10 value: 75.274 - type: ndcg_at_20 value: 75.97 - type: ndcg_at_100 value: 77.021 - type: ndcg_at_1000 value: 77.511 - type: map_at_1 value: 61.9 - type: map_at_3 value: 69.55 - type: map_at_5 value: 70.78 - type: map_at_10 value: 71.26 - type: map_at_20 value: 71.45899999999999 - type: map_at_100 value: 71.609 - type: map_at_1000 value: 71.624 - type: recall_at_1 value: 61.9 - type: recall_at_3 value: 78.7 - type: recall_at_5 value: 84.0 - type: recall_at_10 value: 87.6 - type: recall_at_20 value: 90.3 - type: recall_at_100 value: 95.89999999999999 - type: recall_at_1000 value: 100.0 - type: precision_at_1 value: 61.9 - type: precision_at_3 value: 26.233 - type: precision_at_5 value: 16.8 - type: precision_at_10 value: 8.76 - type: precision_at_20 value: 4.515000000000001 - type: precision_at_100 value: 0.959 - type: precision_at_1000 value: 0.1 - type: mrr_at_1 value: 61.9 - type: mrr_at_3 value: 69.55 - type: mrr_at_5 value: 70.78 - type: mrr_at_10 value: 71.2604 - type: mrr_at_20 value: 71.4589 - type: mrr_at_100 value: 71.609 - type: mrr_at_1000 value: 71.6242 - type: nauc_ndcg_at_1_max value: 51.8333 - type: nauc_ndcg_at_1_std value: 8.4163 - type: nauc_ndcg_at_1_diff1 value: 72.37700000000001 - type: nauc_ndcg_at_3_max value: 56.0395 - type: nauc_ndcg_at_3_std value: 12.583 - type: nauc_ndcg_at_3_diff1 value: 67.5758 - type: nauc_ndcg_at_5_max value: 56.35289999999999 - type: nauc_ndcg_at_5_std value: 13.9102 - type: nauc_ndcg_at_5_diff1 value: 68.36179999999999 - type: nauc_ndcg_at_10_max value: 55.954499999999996 - type: nauc_ndcg_at_10_std value: 14.8003 - type: nauc_ndcg_at_10_diff1 value: 68.3755 - type: nauc_ndcg_at_20_max value: 56.2808 - type: nauc_ndcg_at_20_std value: 16.0875 - type: nauc_ndcg_at_20_diff1 value: 68.3962 - type: nauc_ndcg_at_100_max value: 56.3164 - type: nauc_ndcg_at_100_std value: 15.8916 - type: nauc_ndcg_at_100_diff1 value: 69.00699999999999 - type: nauc_ndcg_at_1000_max value: 55.785700000000006 - type: nauc_ndcg_at_1000_std value: 14.3348 - type: nauc_ndcg_at_1000_diff1 value: 69.0698 - type: nauc_map_at_1_max value: 51.8333 - type: nauc_map_at_1_std value: 8.4163 - type: nauc_map_at_1_diff1 value: 72.37700000000001 - type: nauc_map_at_3_max value: 54.942800000000005 - type: nauc_map_at_3_std value: 11.2973 - type: nauc_map_at_3_diff1 value: 68.9311 - type: nauc_map_at_5_max value: 55.0587 - type: nauc_map_at_5_std value: 11.9547 - type: nauc_map_at_5_diff1 value: 69.3713 - type: nauc_map_at_10_max value: 54.9098 - type: nauc_map_at_10_std value: 12.2453 - type: nauc_map_at_10_diff1 value: 69.3958 - type: nauc_map_at_20_max value: 54.9689 - type: nauc_map_at_20_std value: 12.524799999999999 - type: nauc_map_at_20_diff1 value: 69.4109 - 
type: nauc_map_at_100_max value: 54.9906 - type: nauc_map_at_100_std value: 12.500300000000001 - type: nauc_map_at_100_diff1 value: 69.50319999999999 - type: nauc_map_at_1000_max value: 54.97840000000001 - type: nauc_map_at_1000_std value: 12.4639 - type: nauc_map_at_1000_diff1 value: 69.50460000000001 - type: nauc_recall_at_1_max value: 51.8333 - type: nauc_recall_at_1_std value: 8.4163 - type: nauc_recall_at_1_diff1 value: 72.37700000000001 - type: nauc_recall_at_3_max value: 60.100699999999996 - type: nauc_recall_at_3_std value: 17.4623 - type: nauc_recall_at_3_diff1 value: 62.495599999999996 - type: nauc_recall_at_5_max value: 62.3622 - type: nauc_recall_at_5_std value: 23.282700000000002 - type: nauc_recall_at_5_diff1 value: 63.8786 - type: nauc_recall_at_10_max value: 61.567899999999995 - type: nauc_recall_at_10_std value: 30.543300000000002 - type: nauc_recall_at_10_diff1 value: 62.765800000000006 - type: nauc_recall_at_20_max value: 65.8648 - type: nauc_recall_at_20_std value: 45.2891 - type: nauc_recall_at_20_diff1 value: 61.5048 - type: nauc_recall_at_100_max value: 77.73790000000001 - type: nauc_recall_at_100_std value: 78.3004 - type: nauc_recall_at_100_diff1 value: 66.54820000000001 - type: nauc_recall_at_1000_max value: .nan - type: nauc_recall_at_1000_std value: .nan - type: nauc_recall_at_1000_diff1 value: .nan - type: nauc_precision_at_1_max value: 51.8333 - type: nauc_precision_at_1_std value: 8.4163 - type: nauc_precision_at_1_diff1 value: 72.37700000000001 - type: nauc_precision_at_3_max value: 60.100699999999996 - type: nauc_precision_at_3_std value: 17.4623 - type: nauc_precision_at_3_diff1 value: 62.495599999999996 - type: nauc_precision_at_5_max value: 62.3622 - type: nauc_precision_at_5_std value: 23.282700000000002 - type: nauc_precision_at_5_diff1 value: 63.8786 - type: nauc_precision_at_10_max value: 61.567899999999995 - type: nauc_precision_at_10_std value: 30.543300000000002 - type: nauc_precision_at_10_diff1 value: 62.765800000000006 - type: nauc_precision_at_20_max value: 65.8648 - type: nauc_precision_at_20_std value: 45.2891 - type: nauc_precision_at_20_diff1 value: 61.5048 - type: nauc_precision_at_100_max value: 77.73790000000001 - type: nauc_precision_at_100_std value: 78.3004 - type: nauc_precision_at_100_diff1 value: 66.54820000000001 - type: nauc_precision_at_1000_max value: .nan - type: nauc_precision_at_1000_std value: .nan - type: nauc_precision_at_1000_diff1 value: .nan - type: nauc_mrr_at_1_max value: 51.8333 - type: nauc_mrr_at_1_std value: 8.4163 - type: nauc_mrr_at_1_diff1 value: 72.37700000000001 - type: nauc_mrr_at_3_max value: 54.942800000000005 - type: nauc_mrr_at_3_std value: 11.2973 - type: nauc_mrr_at_3_diff1 value: 68.9311 - type: nauc_mrr_at_5_max value: 55.0587 - type: nauc_mrr_at_5_std value: 11.9547 - type: nauc_mrr_at_5_diff1 value: 69.3713 - type: nauc_mrr_at_10_max value: 54.9098 - type: nauc_mrr_at_10_std value: 12.2453 - type: nauc_mrr_at_10_diff1 value: 69.3958 - type: nauc_mrr_at_20_max value: 54.9689 - type: nauc_mrr_at_20_std value: 12.524799999999999 - type: nauc_mrr_at_20_diff1 value: 69.4109 - type: nauc_mrr_at_100_max value: 54.9906 - type: nauc_mrr_at_100_std value: 12.500300000000001 - type: nauc_mrr_at_100_diff1 value: 69.50319999999999 - type: nauc_mrr_at_1000_max value: 54.97840000000001 - type: nauc_mrr_at_1000_std value: 12.4639 - type: nauc_mrr_at_1000_diff1 value: 69.50460000000001 - type: main_score value: 75.274 - task: type: Retrieval dataset: name: MTEB CodeSearchNetRetrieval (java) type: 
code-search-net/code_search_net config: java split: test revision: fdc6a9e39575768c27eb8a2a5f702bf846eb4759 metrics: - type: ndcg_at_1 value: 52.6 - type: ndcg_at_3 value: 64.044 - type: ndcg_at_5 value: 67.202 - type: ndcg_at_10 value: 69.447 - type: ndcg_at_20 value: 70.488 - type: ndcg_at_100 value: 71.481 - type: ndcg_at_1000 value: 71.995 - type: map_at_1 value: 52.6 - type: map_at_3 value: 61.317 - type: map_at_5 value: 63.062 - type: map_at_10 value: 64.01400000000001 - type: map_at_20 value: 64.302 - type: map_at_100 value: 64.443 - type: map_at_1000 value: 64.459 - type: recall_at_1 value: 52.6 - type: recall_at_3 value: 71.89999999999999 - type: recall_at_5 value: 79.60000000000001 - type: recall_at_10 value: 86.4 - type: recall_at_20 value: 90.5 - type: recall_at_100 value: 95.8 - type: recall_at_1000 value: 100.0 - type: precision_at_1 value: 52.6 - type: precision_at_3 value: 23.967 - type: precision_at_5 value: 15.920000000000002 - type: precision_at_10 value: 8.64 - type: precision_at_20 value: 4.5249999999999995 - type: precision_at_100 value: 0.958 - type: precision_at_1000 value: 0.1 - type: mrr_at_1 value: 52.6 - type: mrr_at_3 value: 61.316700000000004 - type: mrr_at_5 value: 63.0617 - type: mrr_at_10 value: 64.01400000000001 - type: mrr_at_20 value: 64.3022 - type: mrr_at_100 value: 64.443 - type: mrr_at_1000 value: 64.4595 - type: nauc_ndcg_at_1_max value: 38.4317 - type: nauc_ndcg_at_1_std value: -18.9677 - type: nauc_ndcg_at_1_diff1 value: 62.74570000000001 - type: nauc_ndcg_at_3_max value: 43.612 - type: nauc_ndcg_at_3_std value: -14.6587 - type: nauc_ndcg_at_3_diff1 value: 56.92230000000001 - type: nauc_ndcg_at_5_max value: 44.840999999999994 - type: nauc_ndcg_at_5_std value: -12.328600000000002 - type: nauc_ndcg_at_5_diff1 value: 56.998000000000005 - type: nauc_ndcg_at_10_max value: 45.5768 - type: nauc_ndcg_at_10_std value: -10.871 - type: nauc_ndcg_at_10_diff1 value: 57.36130000000001 - type: nauc_ndcg_at_20_max value: 45.1125 - type: nauc_ndcg_at_20_std value: -10.575 - type: nauc_ndcg_at_20_diff1 value: 57.2132 - type: nauc_ndcg_at_100_max value: 45.4087 - type: nauc_ndcg_at_100_std value: -10.356300000000001 - type: nauc_ndcg_at_100_diff1 value: 57.607 - type: nauc_ndcg_at_1000_max value: 44.2686 - type: nauc_ndcg_at_1000_std value: -12.2661 - type: nauc_ndcg_at_1000_diff1 value: 58.0082 - type: nauc_map_at_1_max value: 38.4317 - type: nauc_map_at_1_std value: -18.9677 - type: nauc_map_at_1_diff1 value: 62.74570000000001 - type: nauc_map_at_3_max value: 42.278 - type: nauc_map_at_3_std value: -15.937499999999998 - type: nauc_map_at_3_diff1 value: 58.4671 - type: nauc_map_at_5_max value: 42.8414 - type: nauc_map_at_5_std value: -14.7742 - type: nauc_map_at_5_diff1 value: 58.582100000000004 - type: nauc_map_at_10_max value: 43.0236 - type: nauc_map_at_10_std value: -14.3595 - type: nauc_map_at_10_diff1 value: 58.765100000000004 - type: nauc_map_at_20_max value: 42.8918 - type: nauc_map_at_20_std value: -14.335500000000001 - type: nauc_map_at_20_diff1 value: 58.746500000000005 - type: nauc_map_at_100_max value: 42.9383 - type: nauc_map_at_100_std value: -14.296600000000002 - type: nauc_map_at_100_diff1 value: 58.796099999999996 - type: nauc_map_at_1000_max value: 42.9079 - type: nauc_map_at_1000_std value: -14.3452 - type: nauc_map_at_1000_diff1 value: 58.8048 - type: nauc_recall_at_1_max value: 38.4317 - type: nauc_recall_at_1_std value: -18.9677 - type: nauc_recall_at_1_diff1 value: 62.74570000000001 - type: nauc_recall_at_3_max value: 48.255199999999995 - 
type: nauc_recall_at_3_std value: -10.116999999999999 - type: nauc_recall_at_3_diff1 value: 51.5211 - type: nauc_recall_at_5_max value: 53.7581 - type: nauc_recall_at_5_std value: -1.1828 - type: nauc_recall_at_5_diff1 value: 50.139199999999995 - type: nauc_recall_at_10_max value: 62.2138 - type: nauc_recall_at_10_std value: 12.5761 - type: nauc_recall_at_10_diff1 value: 49.091499999999996 - type: nauc_recall_at_20_max value: 64.05619999999999 - type: nauc_recall_at_20_std value: 24.6892 - type: nauc_recall_at_20_diff1 value: 44.4292 - type: nauc_recall_at_100_max value: 94.1543 - type: nauc_recall_at_100_std value: 72.2889 - type: nauc_recall_at_100_diff1 value: 39.8115 - type: nauc_recall_at_1000_max value: .nan - type: nauc_recall_at_1000_std value: .nan - type: nauc_recall_at_1000_diff1 value: .nan - type: nauc_precision_at_1_max value: 38.4317 - type: nauc_precision_at_1_std value: -18.9677 - type: nauc_precision_at_1_diff1 value: 62.74570000000001 - type: nauc_precision_at_3_max value: 48.255199999999995 - type: nauc_precision_at_3_std value: -10.116999999999999 - type: nauc_precision_at_3_diff1 value: 51.5211 - type: nauc_precision_at_5_max value: 53.7581 - type: nauc_precision_at_5_std value: -1.1828 - type: nauc_precision_at_5_diff1 value: 50.139199999999995 - type: nauc_precision_at_10_max value: 62.2138 - type: nauc_precision_at_10_std value: 12.5761 - type: nauc_precision_at_10_diff1 value: 49.091499999999996 - type: nauc_precision_at_20_max value: 64.05619999999999 - type: nauc_precision_at_20_std value: 24.6892 - type: nauc_precision_at_20_diff1 value: 44.4292 - type: nauc_precision_at_100_max value: 94.1543 - type: nauc_precision_at_100_std value: 72.2889 - type: nauc_precision_at_100_diff1 value: 39.8115 - type: nauc_precision_at_1000_max value: .nan - type: nauc_precision_at_1000_std value: .nan - type: nauc_precision_at_1000_diff1 value: .nan - type: nauc_mrr_at_1_max value: 38.4317 - type: nauc_mrr_at_1_std value: -18.9677 - type: nauc_mrr_at_1_diff1 value: 62.74570000000001 - type: nauc_mrr_at_3_max value: 42.278 - type: nauc_mrr_at_3_std value: -15.937499999999998 - type: nauc_mrr_at_3_diff1 value: 58.4671 - type: nauc_mrr_at_5_max value: 42.8414 - type: nauc_mrr_at_5_std value: -14.7742 - type: nauc_mrr_at_5_diff1 value: 58.582100000000004 - type: nauc_mrr_at_10_max value: 43.0236 - type: nauc_mrr_at_10_std value: -14.3595 - type: nauc_mrr_at_10_diff1 value: 58.765100000000004 - type: nauc_mrr_at_20_max value: 42.8918 - type: nauc_mrr_at_20_std value: -14.335500000000001 - type: nauc_mrr_at_20_diff1 value: 58.746500000000005 - type: nauc_mrr_at_100_max value: 42.9383 - type: nauc_mrr_at_100_std value: -14.296600000000002 - type: nauc_mrr_at_100_diff1 value: 58.796099999999996 - type: nauc_mrr_at_1000_max value: 42.9079 - type: nauc_mrr_at_1000_std value: -14.3452 - type: nauc_mrr_at_1000_diff1 value: 58.8048 - type: main_score value: 69.447 - task: type: Retrieval dataset: name: MTEB CodeSearchNetRetrieval (php) type: code-search-net/code_search_net config: php split: test revision: fdc6a9e39575768c27eb8a2a5f702bf846eb4759 metrics: - type: ndcg_at_1 value: 57.699999999999996 - type: ndcg_at_3 value: 69.071 - type: ndcg_at_5 value: 71.331 - type: ndcg_at_10 value: 73.455 - type: ndcg_at_20 value: 74.298 - type: ndcg_at_100 value: 74.842 - type: ndcg_at_1000 value: 75.411 - type: map_at_1 value: 57.699999999999996 - type: map_at_3 value: 66.233 - type: map_at_5 value: 67.508 - type: map_at_10 value: 68.398 - type: map_at_20 value: 68.634 - type: map_at_100 value: 68.718 
- type: map_at_1000 value: 68.735 - type: recall_at_1 value: 57.699999999999996 - type: recall_at_3 value: 77.3 - type: recall_at_5 value: 82.69999999999999 - type: recall_at_10 value: 89.2 - type: recall_at_20 value: 92.5 - type: recall_at_100 value: 95.3 - type: recall_at_1000 value: 100.0 - type: precision_at_1 value: 57.699999999999996 - type: precision_at_3 value: 25.767 - type: precision_at_5 value: 16.54 - type: precision_at_10 value: 8.92 - type: precision_at_20 value: 4.625 - type: precision_at_100 value: 0.9530000000000001 - type: precision_at_1000 value: 0.1 - type: mrr_at_1 value: 57.699999999999996 - type: mrr_at_3 value: 66.2333 - type: mrr_at_5 value: 67.5083 - type: mrr_at_10 value: 68.398 - type: mrr_at_20 value: 68.6345 - type: mrr_at_100 value: 68.71770000000001 - type: mrr_at_1000 value: 68.7351 - type: nauc_ndcg_at_1_max value: 47.0017 - type: nauc_ndcg_at_1_std value: 7.702000000000001 - type: nauc_ndcg_at_1_diff1 value: 65.5265 - type: nauc_ndcg_at_3_max value: 53.1223 - type: nauc_ndcg_at_3_std value: 14.5277 - type: nauc_ndcg_at_3_diff1 value: 60.5267 - type: nauc_ndcg_at_5_max value: 55.99570000000001 - type: nauc_ndcg_at_5_std value: 17.467 - type: nauc_ndcg_at_5_diff1 value: 63.1188 - type: nauc_ndcg_at_10_max value: 55.7826 - type: nauc_ndcg_at_10_std value: 19.1279 - type: nauc_ndcg_at_10_diff1 value: 63.463 - type: nauc_ndcg_at_20_max value: 55.2338 - type: nauc_ndcg_at_20_std value: 19.5684 - type: nauc_ndcg_at_20_diff1 value: 63.7312 - type: nauc_ndcg_at_100_max value: 54.898199999999996 - type: nauc_ndcg_at_100_std value: 19.1172 - type: nauc_ndcg_at_100_diff1 value: 63.7935 - type: nauc_ndcg_at_1000_max value: 53.9486 - type: nauc_ndcg_at_1000_std value: 17.0841 - type: nauc_ndcg_at_1000_diff1 value: 63.5189 - type: nauc_map_at_1_max value: 47.0017 - type: nauc_map_at_1_std value: 7.702000000000001 - type: nauc_map_at_1_diff1 value: 65.5265 - type: nauc_map_at_3_max value: 51.3811 - type: nauc_map_at_3_std value: 12.6201 - type: nauc_map_at_3_diff1 value: 61.781299999999995 - type: nauc_map_at_5_max value: 52.788599999999995 - type: nauc_map_at_5_std value: 13.9926 - type: nauc_map_at_5_diff1 value: 63.155300000000004 - type: nauc_map_at_10_max value: 52.630900000000004 - type: nauc_map_at_10_std value: 14.5419 - type: nauc_map_at_10_diff1 value: 63.299499999999995 - type: nauc_map_at_20_max value: 52.4779 - type: nauc_map_at_20_std value: 14.615300000000001 - type: nauc_map_at_20_diff1 value: 63.360099999999996 - type: nauc_map_at_100_max value: 52.434999999999995 - type: nauc_map_at_100_std value: 14.5613 - type: nauc_map_at_100_diff1 value: 63.362700000000004 - type: nauc_map_at_1000_max value: 52.412000000000006 - type: nauc_map_at_1000_std value: 14.5121 - type: nauc_map_at_1000_diff1 value: 63.361000000000004 - type: nauc_recall_at_1_max value: 47.0017 - type: nauc_recall_at_1_std value: 7.702000000000001 - type: nauc_recall_at_1_diff1 value: 65.5265 - type: nauc_recall_at_3_max value: 59.7842 - type: nauc_recall_at_3_std value: 21.8077 - type: nauc_recall_at_3_diff1 value: 55.81850000000001 - type: nauc_recall_at_5_max value: 71.5097 - type: nauc_recall_at_5_std value: 34.341899999999995 - type: nauc_recall_at_5_diff1 value: 63.604000000000006 - type: nauc_recall_at_10_max value: 78.1568 - type: nauc_recall_at_10_std value: 53.016600000000004 - type: nauc_recall_at_10_diff1 value: 65.779 - type: nauc_recall_at_20_max value: 81.5145 - type: nauc_recall_at_20_std value: 72.038 - type: nauc_recall_at_20_diff1 value: 69.7603 - type: 
nauc_recall_at_100_max value: 89.0587 - type: nauc_recall_at_100_std value: 91.89070000000001 - type: nauc_recall_at_100_diff1 value: 75.1088 - type: nauc_recall_at_1000_max value: .nan - type: nauc_recall_at_1000_std value: .nan - type: nauc_recall_at_1000_diff1 value: .nan - type: nauc_precision_at_1_max value: 47.0017 - type: nauc_precision_at_1_std value: 7.702000000000001 - type: nauc_precision_at_1_diff1 value: 65.5265 - type: nauc_precision_at_3_max value: 59.7842 - type: nauc_precision_at_3_std value: 21.8077 - type: nauc_precision_at_3_diff1 value: 55.81850000000001 - type: nauc_precision_at_5_max value: 71.5097 - type: nauc_precision_at_5_std value: 34.341899999999995 - type: nauc_precision_at_5_diff1 value: 63.604000000000006 - type: nauc_precision_at_10_max value: 78.1568 - type: nauc_precision_at_10_std value: 53.016600000000004 - type: nauc_precision_at_10_diff1 value: 65.779 - type: nauc_precision_at_20_max value: 81.5145 - type: nauc_precision_at_20_std value: 72.038 - type: nauc_precision_at_20_diff1 value: 69.7603 - type: nauc_precision_at_100_max value: 89.0587 - type: nauc_precision_at_100_std value: 91.89070000000001 - type: nauc_precision_at_100_diff1 value: 75.1088 - type: nauc_precision_at_1000_max value: .nan - type: nauc_precision_at_1000_std value: .nan - type: nauc_precision_at_1000_diff1 value: .nan - type: nauc_mrr_at_1_max value: 47.0017 - type: nauc_mrr_at_1_std value: 7.702000000000001 - type: nauc_mrr_at_1_diff1 value: 65.5265 - type: nauc_mrr_at_3_max value: 51.3811 - type: nauc_mrr_at_3_std value: 12.6201 - type: nauc_mrr_at_3_diff1 value: 61.781299999999995 - type: nauc_mrr_at_5_max value: 52.788599999999995 - type: nauc_mrr_at_5_std value: 13.9926 - type: nauc_mrr_at_5_diff1 value: 63.155300000000004 - type: nauc_mrr_at_10_max value: 52.630900000000004 - type: nauc_mrr_at_10_std value: 14.5419 - type: nauc_mrr_at_10_diff1 value: 63.299499999999995 - type: nauc_mrr_at_20_max value: 52.4779 - type: nauc_mrr_at_20_std value: 14.615300000000001 - type: nauc_mrr_at_20_diff1 value: 63.360099999999996 - type: nauc_mrr_at_100_max value: 52.434999999999995 - type: nauc_mrr_at_100_std value: 14.5613 - type: nauc_mrr_at_100_diff1 value: 63.362700000000004 - type: nauc_mrr_at_1000_max value: 52.412000000000006 - type: nauc_mrr_at_1000_std value: 14.5121 - type: nauc_mrr_at_1000_diff1 value: 63.361000000000004 - type: main_score value: 73.455 - task: type: Retrieval dataset: name: MTEB CodeTransOceanContest (default) type: CoIR-Retrieval/codetrans-contest config: default split: test revision: 20da4eb20a4b17300c0986ee148c90867a7f2a4d metrics: - type: ndcg_at_1 value: 46.154 - type: ndcg_at_3 value: 52.019999999999996 - type: ndcg_at_5 value: 53.929 - type: ndcg_at_10 value: 57.475 - type: ndcg_at_20 value: 59.861 - type: ndcg_at_100 value: 61.577000000000005 - type: ndcg_at_1000 value: 62.755 - type: map_at_1 value: 46.154 - type: map_at_3 value: 50.602999999999994 - type: map_at_5 value: 51.68899999999999 - type: map_at_10 value: 53.174 - type: map_at_20 value: 53.818 - type: map_at_100 value: 54.041 - type: map_at_1000 value: 54.081 - type: recall_at_1 value: 46.154 - type: recall_at_3 value: 56.108999999999995 - type: recall_at_5 value: 60.633 - type: recall_at_10 value: 71.493 - type: recall_at_20 value: 80.99499999999999 - type: recall_at_100 value: 90.498 - type: recall_at_1000 value: 100.0 - type: precision_at_1 value: 46.154 - type: precision_at_3 value: 18.703 - type: precision_at_5 value: 12.127 - type: precision_at_10 value: 7.149 - type: precision_at_20 
value: 4.05 - type: precision_at_100 value: 0.905 - type: precision_at_1000 value: 0.1 - type: mrr_at_1 value: 46.153800000000004 - type: mrr_at_3 value: 50.6033 - type: mrr_at_5 value: 51.6893 - type: mrr_at_10 value: 53.173899999999996 - type: mrr_at_20 value: 53.8181 - type: mrr_at_100 value: 54.0405 - type: mrr_at_1000 value: 54.081199999999995 - type: nauc_ndcg_at_1_max value: 59.032 - type: nauc_ndcg_at_1_std value: 8.2815 - type: nauc_ndcg_at_1_diff1 value: 80.5428 - type: nauc_ndcg_at_3_max value: 55.47410000000001 - type: nauc_ndcg_at_3_std value: 4.4284 - type: nauc_ndcg_at_3_diff1 value: 77.2405 - type: nauc_ndcg_at_5_max value: 54.6337 - type: nauc_ndcg_at_5_std value: 5.3048 - type: nauc_ndcg_at_5_diff1 value: 76.5969 - type: nauc_ndcg_at_10_max value: 51.8584 - type: nauc_ndcg_at_10_std value: 3.5628 - type: nauc_ndcg_at_10_diff1 value: 74.6966 - type: nauc_ndcg_at_20_max value: 54.3478 - type: nauc_ndcg_at_20_std value: 4.3697 - type: nauc_ndcg_at_20_diff1 value: 75.6032 - type: nauc_ndcg_at_100_max value: 55.488400000000006 - type: nauc_ndcg_at_100_std value: 6.101 - type: nauc_ndcg_at_100_diff1 value: 76.0249 - type: nauc_ndcg_at_1000_max value: 55.1091 - type: nauc_ndcg_at_1000_std value: 5.5951 - type: nauc_ndcg_at_1000_diff1 value: 76.3907 - type: nauc_map_at_1_max value: 59.032 - type: nauc_map_at_1_std value: 8.2815 - type: nauc_map_at_1_diff1 value: 80.5428 - type: nauc_map_at_3_max value: 56.261700000000005 - type: nauc_map_at_3_std value: 5.3123 - type: nauc_map_at_3_diff1 value: 77.823 - type: nauc_map_at_5_max value: 55.7926 - type: nauc_map_at_5_std value: 5.8055 - type: nauc_map_at_5_diff1 value: 77.4779 - type: nauc_map_at_10_max value: 54.77459999999999 - type: nauc_map_at_10_std value: 5.1733 - type: nauc_map_at_10_diff1 value: 76.79249999999999 - type: nauc_map_at_20_max value: 55.4426 - type: nauc_map_at_20_std value: 5.4346 - type: nauc_map_at_20_diff1 value: 77.0378 - type: nauc_map_at_100_max value: 55.6049 - type: nauc_map_at_100_std value: 5.7131 - type: nauc_map_at_100_diff1 value: 77.0756 - type: nauc_map_at_1000_max value: 55.5915 - type: nauc_map_at_1000_std value: 5.7007 - type: nauc_map_at_1000_diff1 value: 77.0939 - type: nauc_recall_at_1_max value: 59.032 - type: nauc_recall_at_1_std value: 8.2815 - type: nauc_recall_at_1_diff1 value: 80.5428 - type: nauc_recall_at_3_max value: 53.1398 - type: nauc_recall_at_3_std value: 1.7934999999999999 - type: nauc_recall_at_3_diff1 value: 75.5862 - type: nauc_recall_at_5_max value: 50.9304 - type: nauc_recall_at_5_std value: 3.8924 - type: nauc_recall_at_5_diff1 value: 73.8369 - type: nauc_recall_at_10_max value: 38.9905 - type: nauc_recall_at_10_std value: -3.4564999999999997 - type: nauc_recall_at_10_diff1 value: 65.5567 - type: nauc_recall_at_20_max value: 50.0429 - type: nauc_recall_at_20_std value: -1.4551 - type: nauc_recall_at_20_diff1 value: 67.9871 - type: nauc_recall_at_100_max value: 63.44030000000001 - type: nauc_recall_at_100_std value: 17.8876 - type: nauc_recall_at_100_diff1 value: 68.9388 - type: nauc_recall_at_1000_max value: .nan - type: nauc_recall_at_1000_std value: .nan - type: nauc_recall_at_1000_diff1 value: .nan - type: nauc_precision_at_1_max value: 59.032 - type: nauc_precision_at_1_std value: 8.2815 - type: nauc_precision_at_1_diff1 value: 80.5428 - type: nauc_precision_at_3_max value: 53.1398 - type: nauc_precision_at_3_std value: 1.7934999999999999 - type: nauc_precision_at_3_diff1 value: 75.5862 - type: nauc_precision_at_5_max value: 50.9304 - type: nauc_precision_at_5_std 
value: 3.8924 - type: nauc_precision_at_5_diff1 value: 73.8369 - type: nauc_precision_at_10_max value: 38.9905 - type: nauc_precision_at_10_std value: -3.4564999999999997 - type: nauc_precision_at_10_diff1 value: 65.5567 - type: nauc_precision_at_20_max value: 50.0429 - type: nauc_precision_at_20_std value: -1.4551 - type: nauc_precision_at_20_diff1 value: 67.9871 - type: nauc_precision_at_100_max value: 63.44030000000001 - type: nauc_precision_at_100_std value: 17.8876 - type: nauc_precision_at_100_diff1 value: 68.9388 - type: nauc_precision_at_1000_max value: 100.0 - type: nauc_precision_at_1000_std value: 100.0 - type: nauc_precision_at_1000_diff1 value: 100.0 - type: nauc_mrr_at_1_max value: 59.032 - type: nauc_mrr_at_1_std value: 8.2815 - type: nauc_mrr_at_1_diff1 value: 80.5428 - type: nauc_mrr_at_3_max value: 56.261700000000005 - type: nauc_mrr_at_3_std value: 5.3123 - type: nauc_mrr_at_3_diff1 value: 77.823 - type: nauc_mrr_at_5_max value: 55.7926 - type: nauc_mrr_at_5_std value: 5.8055 - type: nauc_mrr_at_5_diff1 value: 77.4779 - type: nauc_mrr_at_10_max value: 54.77459999999999 - type: nauc_mrr_at_10_std value: 5.1733 - type: nauc_mrr_at_10_diff1 value: 76.79249999999999 - type: nauc_mrr_at_20_max value: 55.4426 - type: nauc_mrr_at_20_std value: 5.4346 - type: nauc_mrr_at_20_diff1 value: 77.0378 - type: nauc_mrr_at_100_max value: 55.6049 - type: nauc_mrr_at_100_std value: 5.7131 - type: nauc_mrr_at_100_diff1 value: 77.0756 - type: nauc_mrr_at_1000_max value: 55.5915 - type: nauc_mrr_at_1000_std value: 5.7007 - type: nauc_mrr_at_1000_diff1 value: 77.0939 - type: main_score value: 57.475 - task: type: Retrieval dataset: name: MTEB CodeTransOceanDL (default) type: CoIR-Retrieval/codetrans-dl config: default split: test revision: 281562cb8a1265ab5c0824bfa6ddcd9b0a15618f metrics: - type: ndcg_at_1 value: 8.889 - type: ndcg_at_3 value: 10.700999999999999 - type: ndcg_at_5 value: 16.082 - type: ndcg_at_10 value: 26.888 - type: ndcg_at_20 value: 35.608000000000004 - type: ndcg_at_100 value: 36.459 - type: ndcg_at_1000 value: 36.775999999999996 - type: map_at_1 value: 8.889 - type: map_at_3 value: 10.184999999999999 - type: map_at_5 value: 13.241 - type: map_at_10 value: 17.502000000000002 - type: map_at_20 value: 19.978 - type: map_at_100 value: 20.108 - type: map_at_1000 value: 20.125 - type: recall_at_1 value: 8.889 - type: recall_at_3 value: 12.222 - type: recall_at_5 value: 25.0 - type: recall_at_10 value: 59.443999999999996 - type: recall_at_20 value: 93.333 - type: recall_at_100 value: 97.77799999999999 - type: recall_at_1000 value: 100.0 - type: precision_at_1 value: 8.889 - type: precision_at_3 value: 4.074 - type: precision_at_5 value: 5.0 - type: precision_at_10 value: 5.944 - type: precision_at_20 value: 4.667000000000001 - type: precision_at_100 value: 0.9780000000000001 - type: precision_at_1000 value: 0.1 - type: mrr_at_1 value: 3.8889 - type: mrr_at_3 value: 8.9815 - type: mrr_at_5 value: 10.2593 - type: mrr_at_10 value: 15.263399999999999 - type: mrr_at_20 value: 17.711 - type: mrr_at_100 value: 17.8421 - type: mrr_at_1000 value: 17.8596 - type: nauc_ndcg_at_1_max value: -40.8791 - type: nauc_ndcg_at_1_std value: -22.7629 - type: nauc_ndcg_at_1_diff1 value: -23.105 - type: nauc_ndcg_at_3_max value: -43.187599999999996 - type: nauc_ndcg_at_3_std value: -26.9994 - type: nauc_ndcg_at_3_diff1 value: -15.4181 - type: nauc_ndcg_at_5_max value: -37.2549 - type: nauc_ndcg_at_5_std value: -24.4115 - type: nauc_ndcg_at_5_diff1 value: -5.7322999999999995 - type: nauc_ndcg_at_10_max 
value: -36.3471 - type: nauc_ndcg_at_10_std value: -22.8065 - type: nauc_ndcg_at_10_diff1 value: -5.3767000000000005 - type: nauc_ndcg_at_20_max value: -35.829100000000004 - type: nauc_ndcg_at_20_std value: -20.787300000000002 - type: nauc_ndcg_at_20_diff1 value: -9.6038 - type: nauc_ndcg_at_100_max value: -36.5805 - type: nauc_ndcg_at_100_std value: -20.1283 - type: nauc_ndcg_at_100_diff1 value: -8.9448 - type: nauc_ndcg_at_1000_max value: -38.1158 - type: nauc_ndcg_at_1000_std value: -22.2744 - type: nauc_ndcg_at_1000_diff1 value: -9.8704 - type: nauc_map_at_1_max value: -40.8791 - type: nauc_map_at_1_std value: -22.7629 - type: nauc_map_at_1_diff1 value: -23.105 - type: nauc_map_at_3_max value: -42.559200000000004 - type: nauc_map_at_3_std value: -25.8594 - type: nauc_map_at_3_diff1 value: -17.2362 - type: nauc_map_at_5_max value: -38.595800000000004 - type: nauc_map_at_5_std value: -24.1339 - type: nauc_map_at_5_diff1 value: -10.4452 - type: nauc_map_at_10_max value: -38.2389 - type: nauc_map_at_10_std value: -23.453599999999998 - type: nauc_map_at_10_diff1 value: -10.2748 - type: nauc_map_at_20_max value: -38.8856 - type: nauc_map_at_20_std value: -23.095499999999998 - type: nauc_map_at_20_diff1 value: -11.695500000000001 - type: nauc_map_at_100_max value: -38.9696 - type: nauc_map_at_100_std value: -23.0057 - type: nauc_map_at_100_diff1 value: -11.635900000000001 - type: nauc_map_at_1000_max value: -39.035399999999996 - type: nauc_map_at_1000_std value: -23.1075 - type: nauc_map_at_1000_diff1 value: -11.6855 - type: nauc_recall_at_1_max value: -40.8791 - type: nauc_recall_at_1_std value: -22.7629 - type: nauc_recall_at_1_diff1 value: -23.105 - type: nauc_recall_at_3_max value: -44.8047 - type: nauc_recall_at_3_std value: -29.9296 - type: nauc_recall_at_3_diff1 value: -10.8169 - type: nauc_recall_at_5_max value: -34.5699 - type: nauc_recall_at_5_std value: -24.9544 - type: nauc_recall_at_5_diff1 value: 3.4269000000000003 - type: nauc_recall_at_10_max value: -32.149699999999996 - type: nauc_recall_at_10_std value: -21.0142 - type: nauc_recall_at_10_diff1 value: 4.358 - type: nauc_recall_at_20_max value: 0.7547 - type: nauc_recall_at_20_std value: 7.1739999999999995 - type: nauc_recall_at_20_diff1 value: -3.2252 - type: nauc_recall_at_100_max value: 41.4332 - type: nauc_recall_at_100_std value: 86.1111 - type: nauc_recall_at_100_diff1 value: 35.7143 - type: nauc_recall_at_1000_max value: .nan - type: nauc_recall_at_1000_std value: .nan - type: nauc_recall_at_1000_diff1 value: .nan - type: nauc_precision_at_1_max value: -40.8791 - type: nauc_precision_at_1_std value: -22.7629 - type: nauc_precision_at_1_diff1 value: -23.105 - type: nauc_precision_at_3_max value: -44.8047 - type: nauc_precision_at_3_std value: -29.9296 - type: nauc_precision_at_3_diff1 value: -10.8169 - type: nauc_precision_at_5_max value: -34.5699 - type: nauc_precision_at_5_std value: -24.9544 - type: nauc_precision_at_5_diff1 value: 3.4269000000000003 - type: nauc_precision_at_10_max value: -32.149699999999996 - type: nauc_precision_at_10_std value: -21.0142 - type: nauc_precision_at_10_diff1 value: 4.358 - type: nauc_precision_at_20_max value: 0.7547 - type: nauc_precision_at_20_std value: 7.1739999999999995 - type: nauc_precision_at_20_diff1 value: -3.2252 - type: nauc_precision_at_100_max value: 41.4332 - type: nauc_precision_at_100_std value: 86.1111 - type: nauc_precision_at_100_diff1 value: 35.7143 - type: nauc_precision_at_1000_max value: 100.0 - type: nauc_precision_at_1000_std value: 100.0 - type: 
nauc_precision_at_1000_diff1 value: 100.0 - type: nauc_mrr_at_1_max value: -42.7345 - type: nauc_mrr_at_1_std value: -35.9194 - type: nauc_mrr_at_1_diff1 value: -3.8369 - type: nauc_mrr_at_3_max value: -35.497099999999996 - type: nauc_mrr_at_3_std value: -28.1283 - type: nauc_mrr_at_3_diff1 value: 22.5336 - type: nauc_mrr_at_5_max value: -34.9895 - type: nauc_mrr_at_5_std value: -26.9499 - type: nauc_mrr_at_5_diff1 value: 16.9652 - type: nauc_mrr_at_10_max value: -36.7778 - type: nauc_mrr_at_10_std value: -28.069 - type: nauc_mrr_at_10_diff1 value: 18.806700000000003 - type: nauc_mrr_at_20_max value: -36.2726 - type: nauc_mrr_at_20_std value: -26.359500000000004 - type: nauc_mrr_at_20_diff1 value: 18.1655 - type: nauc_mrr_at_100_max value: -36.361 - type: nauc_mrr_at_100_std value: -26.280900000000003 - type: nauc_mrr_at_100_diff1 value: 18.5228 - type: nauc_mrr_at_1000_max value: -36.4424 - type: nauc_mrr_at_1000_std value: -26.415699999999998 - type: nauc_mrr_at_1000_diff1 value: 18.496499999999997 - type: main_score value: 26.888 - task: type: Retrieval dataset: name: MTEB CosQA (default) type: CoIR-Retrieval/cosqa config: default split: test revision: bc5efb7e9d437246ce393ed19d772e08e4a79535 metrics: - type: ndcg_at_1 value: 15.4 - type: ndcg_at_3 value: 23.59 - type: ndcg_at_5 value: 29.779 - type: ndcg_at_10 value: 35.449999999999996 - type: ndcg_at_20 value: 38.309 - type: ndcg_at_100 value: 41.980000000000004 - type: ndcg_at_1000 value: 42.917 - type: map_at_1 value: 15.4 - type: map_at_3 value: 21.4 - type: map_at_5 value: 24.84 - type: map_at_10 value: 27.245 - type: map_at_20 value: 28.043000000000003 - type: map_at_100 value: 28.592000000000002 - type: map_at_1000 value: 28.63 - type: recall_at_1 value: 15.4 - type: recall_at_3 value: 30.0 - type: recall_at_5 value: 45.0 - type: recall_at_10 value: 62.2 - type: recall_at_20 value: 73.4 - type: recall_at_100 value: 92.60000000000001 - type: recall_at_1000 value: 99.8 - type: precision_at_1 value: 15.4 - type: precision_at_3 value: 10.0 - type: precision_at_5 value: 9.0 - type: precision_at_10 value: 6.22 - type: precision_at_20 value: 3.6700000000000004 - type: precision_at_100 value: 0.9259999999999999 - type: precision_at_1000 value: 0.1 - type: mrr_at_1 value: 13.600000000000001 - type: mrr_at_3 value: 19.666700000000002 - type: mrr_at_5 value: 22.0867 - type: mrr_at_10 value: 25.020799999999998 - type: mrr_at_20 value: 25.8896 - type: mrr_at_100 value: 26.434400000000004 - type: mrr_at_1000 value: 26.4729 - type: nauc_ndcg_at_1_max value: 7.9282 - type: nauc_ndcg_at_1_std value: -14.053299999999998 - type: nauc_ndcg_at_1_diff1 value: 36.687799999999996 - type: nauc_ndcg_at_3_max value: 11.969899999999999 - type: nauc_ndcg_at_3_std value: -13.7404 - type: nauc_ndcg_at_3_diff1 value: 22.2386 - type: nauc_ndcg_at_5_max value: 13.4812 - type: nauc_ndcg_at_5_std value: -13.2079 - type: nauc_ndcg_at_5_diff1 value: 15.8384 - type: nauc_ndcg_at_10_max value: 12.061399999999999 - type: nauc_ndcg_at_10_std value: -15.1337 - type: nauc_ndcg_at_10_diff1 value: 18.804399999999998 - type: nauc_ndcg_at_20_max value: 14.027000000000001 - type: nauc_ndcg_at_20_std value: -13.123899999999999 - type: nauc_ndcg_at_20_diff1 value: 18.546499999999998 - type: nauc_ndcg_at_100_max value: 15.4228 - type: nauc_ndcg_at_100_std value: -9.7982 - type: nauc_ndcg_at_100_diff1 value: 20.637900000000002 - type: nauc_ndcg_at_1000_max value: 13.3878 - type: nauc_ndcg_at_1000_std value: -12.3766 - type: nauc_ndcg_at_1000_diff1 value: 21.2979 - type: 
nauc_map_at_1_max value: 7.9282 - type: nauc_map_at_1_std value: -14.053299999999998 - type: nauc_map_at_1_diff1 value: 36.687799999999996 - type: nauc_map_at_3_max value: 11.2376 - type: nauc_map_at_3_std value: -13.882800000000001 - type: nauc_map_at_3_diff1 value: 25.4638 - type: nauc_map_at_5_max value: 12.0973 - type: nauc_map_at_5_std value: -13.581399999999999 - type: nauc_map_at_5_diff1 value: 21.6642 - type: nauc_map_at_10_max value: 11.4818 - type: nauc_map_at_10_std value: -14.3841 - type: nauc_map_at_10_diff1 value: 23.0484 - type: nauc_map_at_20_max value: 11.9802 - type: nauc_map_at_20_std value: -13.8687 - type: nauc_map_at_20_diff1 value: 23.0349 - type: nauc_map_at_100_max value: 12.112 - type: nauc_map_at_100_std value: -13.423099999999998 - type: nauc_map_at_100_diff1 value: 23.385 - type: nauc_map_at_1000_max value: 12.034 - type: nauc_map_at_1000_std value: -13.5156 - type: nauc_map_at_1000_diff1 value: 23.4084 - type: nauc_recall_at_1_max value: 7.9282 - type: nauc_recall_at_1_std value: -14.053299999999998 - type: nauc_recall_at_1_diff1 value: 36.687799999999996 - type: nauc_recall_at_3_max value: 13.6773 - type: nauc_recall_at_3_std value: -13.376299999999999 - type: nauc_recall_at_3_diff1 value: 14.4918 - type: nauc_recall_at_5_max value: 16.8852 - type: nauc_recall_at_5_std value: -12.237499999999999 - type: nauc_recall_at_5_diff1 value: 1.4449 - type: nauc_recall_at_10_max value: 13.234499999999999 - type: nauc_recall_at_10_std value: -17.8241 - type: nauc_recall_at_10_diff1 value: 7.6404 - type: nauc_recall_at_20_max value: 22.708000000000002 - type: nauc_recall_at_20_std value: -9.111600000000001 - type: nauc_recall_at_20_diff1 value: 3.4109 - type: nauc_recall_at_100_max value: 66.1165 - type: nauc_recall_at_100_std value: 55.2477 - type: nauc_recall_at_100_diff1 value: 5.7612 - type: nauc_recall_at_1000_max value: 100.0 - type: nauc_recall_at_1000_std value: 86.9281 - type: nauc_recall_at_1000_diff1 value: 72.2222 - type: nauc_precision_at_1_max value: 7.9282 - type: nauc_precision_at_1_std value: -14.053299999999998 - type: nauc_precision_at_1_diff1 value: 36.687799999999996 - type: nauc_precision_at_3_max value: 13.6773 - type: nauc_precision_at_3_std value: -13.376299999999999 - type: nauc_precision_at_3_diff1 value: 14.4918 - type: nauc_precision_at_5_max value: 16.8852 - type: nauc_precision_at_5_std value: -12.237499999999999 - type: nauc_precision_at_5_diff1 value: 1.4449 - type: nauc_precision_at_10_max value: 13.234499999999999 - type: nauc_precision_at_10_std value: -17.8241 - type: nauc_precision_at_10_diff1 value: 7.6404 - type: nauc_precision_at_20_max value: 22.708000000000002 - type: nauc_precision_at_20_std value: -9.111600000000001 - type: nauc_precision_at_20_diff1 value: 3.4109 - type: nauc_precision_at_100_max value: 66.1165 - type: nauc_precision_at_100_std value: 55.2477 - type: nauc_precision_at_100_diff1 value: 5.7612 - type: nauc_precision_at_1000_max value: 100.0 - type: nauc_precision_at_1000_std value: 86.9281 - type: nauc_precision_at_1000_diff1 value: 72.2222 - type: nauc_mrr_at_1_max value: 13.238199999999999 - type: nauc_mrr_at_1_std value: -21.1942 - type: nauc_mrr_at_1_diff1 value: 47.1481 - type: nauc_mrr_at_3_max value: 13.370999999999999 - type: nauc_mrr_at_3_std value: -18.0171 - type: nauc_mrr_at_3_diff1 value: 31.3232 - type: nauc_mrr_at_5_max value: 12.646099999999999 - type: nauc_mrr_at_5_std value: -18.5601 - type: nauc_mrr_at_5_diff1 value: 28.8561 - type: nauc_mrr_at_10_max value: 13.1101 - type: nauc_mrr_at_10_std 
value: -18.915000000000003 - type: nauc_mrr_at_10_diff1 value: 28.9512 - type: nauc_mrr_at_20_max value: 13.0191 - type: nauc_mrr_at_20_std value: -18.501 - type: nauc_mrr_at_20_diff1 value: 29.102299999999996 - type: nauc_mrr_at_100_max value: 13.475699999999998 - type: nauc_mrr_at_100_std value: -17.9907 - type: nauc_mrr_at_100_diff1 value: 29.549999999999997 - type: nauc_mrr_at_1000_max value: 13.3963 - type: nauc_mrr_at_1000_std value: -18.093999999999998 - type: nauc_mrr_at_1000_diff1 value: 29.583 - type: main_score value: 35.449999999999996 - task: type: Retrieval dataset: name: MTEB DBPedia (default) type: mteb/dbpedia config: default split: test revision: c0f706b76e590d620bd6618b3ca8efdd34e2d659 metrics: - type: ndcg_at_1 value: 51.37500000000001 - type: ndcg_at_3 value: 41.275 - type: ndcg_at_5 value: 38.297 - type: ndcg_at_10 value: 35.96 - type: ndcg_at_20 value: 35.117 - type: ndcg_at_100 value: 39.878 - type: ndcg_at_1000 value: 47.931000000000004 - type: map_at_1 value: 8.651 - type: map_at_3 value: 13.51 - type: map_at_5 value: 15.468000000000002 - type: map_at_10 value: 17.628 - type: map_at_20 value: 19.786 - type: map_at_100 value: 23.354 - type: map_at_1000 value: 24.826 - type: recall_at_1 value: 8.651 - type: recall_at_3 value: 14.847 - type: recall_at_5 value: 18.04 - type: recall_at_10 value: 22.416 - type: recall_at_20 value: 28.136 - type: recall_at_100 value: 46.381 - type: recall_at_1000 value: 71.557 - type: precision_at_1 value: 64.5 - type: precision_at_3 value: 44.417 - type: precision_at_5 value: 36.6 - type: precision_at_10 value: 27.450000000000003 - type: precision_at_20 value: 19.811999999999998 - type: precision_at_100 value: 8.405 - type: precision_at_1000 value: 1.923 - type: mrr_at_1 value: 64.5 - type: mrr_at_3 value: 70.25 - type: mrr_at_5 value: 71.275 - type: mrr_at_10 value: 71.9889 - type: mrr_at_20 value: 72.207 - type: mrr_at_100 value: 72.33239999999999 - type: mrr_at_1000 value: 72.3461 - type: nauc_ndcg_at_1_max value: 31.932100000000002 - type: nauc_ndcg_at_1_std value: 10.2841 - type: nauc_ndcg_at_1_diff1 value: 36.07 - type: nauc_ndcg_at_3_max value: 29.2531 - type: nauc_ndcg_at_3_std value: 11.178799999999999 - type: nauc_ndcg_at_3_diff1 value: 25.764799999999997 - type: nauc_ndcg_at_5_max value: 27.1826 - type: nauc_ndcg_at_5_std value: 12.5 - type: nauc_ndcg_at_5_diff1 value: 24.9511 - type: nauc_ndcg_at_10_max value: 24.1388 - type: nauc_ndcg_at_10_std value: 11.350200000000001 - type: nauc_ndcg_at_10_diff1 value: 23.7319 - type: nauc_ndcg_at_20_max value: 19.1396 - type: nauc_ndcg_at_20_std value: 9.464699999999999 - type: nauc_ndcg_at_20_diff1 value: 20.9192 - type: nauc_ndcg_at_100_max value: 20.1158 - type: nauc_ndcg_at_100_std value: 13.2815 - type: nauc_ndcg_at_100_diff1 value: 21.221400000000003 - type: nauc_ndcg_at_1000_max value: 26.648899999999998 - type: nauc_ndcg_at_1000_std value: 22.5347 - type: nauc_ndcg_at_1000_diff1 value: 19.6168 - type: nauc_map_at_1_max value: -4.3177 - type: nauc_map_at_1_std value: -24.5562 - type: nauc_map_at_1_diff1 value: 29.4423 - type: nauc_map_at_3_max value: -3.3966000000000003 - type: nauc_map_at_3_std value: -21.9222 - type: nauc_map_at_3_diff1 value: 21.2481 - type: nauc_map_at_5_max value: -1.1166 - type: nauc_map_at_5_std value: -17.1077 - type: nauc_map_at_5_diff1 value: 19.9608 - type: nauc_map_at_10_max value: 2.8669000000000002 - type: nauc_map_at_10_std value: -11.6119 - type: nauc_map_at_10_diff1 value: 19.6247 - type: nauc_map_at_20_max value: 6.4855 - type: 
nauc_map_at_20_std value: -4.1277 - type: nauc_map_at_20_diff1 value: 18.1824 - type: nauc_map_at_100_max value: 12.971499999999999 - type: nauc_map_at_100_std value: 7.603400000000001 - type: nauc_map_at_100_diff1 value: 17.5644 - type: nauc_map_at_1000_max value: 15.277299999999999 - type: nauc_map_at_1000_std value: 10.5578 - type: nauc_map_at_1000_diff1 value: 17.1155 - type: nauc_recall_at_1_max value: -4.3177 - type: nauc_recall_at_1_std value: -24.5562 - type: nauc_recall_at_1_diff1 value: 29.4423 - type: nauc_recall_at_3_max value: -6.2376000000000005 - type: nauc_recall_at_3_std value: -23.4233 - type: nauc_recall_at_3_diff1 value: 17.329800000000002 - type: nauc_recall_at_5_max value: -3.4825000000000004 - type: nauc_recall_at_5_std value: -17.4895 - type: nauc_recall_at_5_diff1 value: 16.2379 - type: nauc_recall_at_10_max value: 0.9988 - type: nauc_recall_at_10_std value: -11.1992 - type: nauc_recall_at_10_diff1 value: 16.225 - type: nauc_recall_at_20_max value: 4.693300000000001 - type: nauc_recall_at_20_std value: -1.8259999999999998 - type: nauc_recall_at_20_diff1 value: 12.612400000000001 - type: nauc_recall_at_100_max value: 13.420599999999999 - type: nauc_recall_at_100_std value: 14.4476 - type: nauc_recall_at_100_diff1 value: 14.5736 - type: nauc_recall_at_1000_max value: 18.4052 - type: nauc_recall_at_1000_std value: 32.6262 - type: nauc_recall_at_1000_diff1 value: 6.2448 - type: nauc_precision_at_1_max value: 44.2395 - type: nauc_precision_at_1_std value: 16.9766 - type: nauc_precision_at_1_diff1 value: 42.981 - type: nauc_precision_at_3_max value: 37.5078 - type: nauc_precision_at_3_std value: 24.46 - type: nauc_precision_at_3_diff1 value: 16.700799999999997 - type: nauc_precision_at_5_max value: 39.9766 - type: nauc_precision_at_5_std value: 35.1485 - type: nauc_precision_at_5_diff1 value: 13.0716 - type: nauc_precision_at_10_max value: 39.642500000000005 - type: nauc_precision_at_10_std value: 41.8067 - type: nauc_precision_at_10_diff1 value: 8.864700000000001 - type: nauc_precision_at_20_max value: 36.7342 - type: nauc_precision_at_20_std value: 47.144200000000005 - type: nauc_precision_at_20_diff1 value: 3.6226000000000003 - type: nauc_precision_at_100_max value: 35.3062 - type: nauc_precision_at_100_std value: 47.2687 - type: nauc_precision_at_100_diff1 value: 0.0039 - type: nauc_precision_at_1000_max value: 27.387099999999997 - type: nauc_precision_at_1000_std value: 24.4162 - type: nauc_precision_at_1000_diff1 value: -13.5 - type: nauc_mrr_at_1_max value: 44.2395 - type: nauc_mrr_at_1_std value: 16.9766 - type: nauc_mrr_at_1_diff1 value: 42.981 - type: nauc_mrr_at_3_max value: 45.9027 - type: nauc_mrr_at_3_std value: 16.3998 - type: nauc_mrr_at_3_diff1 value: 42.7201 - type: nauc_mrr_at_5_max value: 46.7905 - type: nauc_mrr_at_5_std value: 17.921599999999998 - type: nauc_mrr_at_5_diff1 value: 42.4334 - type: nauc_mrr_at_10_max value: 46.775 - type: nauc_mrr_at_10_std value: 18.282899999999998 - type: nauc_mrr_at_10_diff1 value: 42.4501 - type: nauc_mrr_at_20_max value: 46.671600000000005 - type: nauc_mrr_at_20_std value: 18.064700000000002 - type: nauc_mrr_at_20_diff1 value: 42.4331 - type: nauc_mrr_at_100_max value: 46.7118 - type: nauc_mrr_at_100_std value: 18.2135 - type: nauc_mrr_at_100_diff1 value: 42.4809 - type: nauc_mrr_at_1000_max value: 46.6966 - type: nauc_mrr_at_1000_std value: 18.185200000000002 - type: nauc_mrr_at_1000_diff1 value: 42.4844 - type: main_score value: 35.96 - task: type: Classification dataset: name: MTEB EmotionClassification 
(default) type: mteb/emotion config: default split: test revision: 4f58c6b202a23cf9a4da393831edf4f9183cad37 metrics: - type: accuracy value: 38.795 - type: f1 value: 35.2399 - type: f1_weighted value: 40.7945 - type: main_score value: 38.795 - task: type: Retrieval dataset: name: MTEB FEVER (default) type: mteb/fever config: default split: test revision: bea83ef9e8fb933d90a2f1d5515737465d613e12 metrics: - type: ndcg_at_1 value: 79.08800000000001 - type: ndcg_at_3 value: 83.943 - type: ndcg_at_5 value: 84.878 - type: ndcg_at_10 value: 85.528 - type: ndcg_at_20 value: 85.842 - type: ndcg_at_100 value: 86.134 - type: ndcg_at_1000 value: 86.367 - type: map_at_1 value: 73.211 - type: map_at_3 value: 80.5 - type: map_at_5 value: 81.134 - type: map_at_10 value: 81.463 - type: map_at_20 value: 81.566 - type: map_at_100 value: 81.622 - type: map_at_1000 value: 81.634 - type: recall_at_1 value: 73.211 - type: recall_at_3 value: 88.32799999999999 - type: recall_at_5 value: 90.821 - type: recall_at_10 value: 92.797 - type: recall_at_20 value: 93.932 - type: recall_at_100 value: 95.26299999999999 - type: recall_at_1000 value: 96.738 - type: precision_at_1 value: 79.08800000000001 - type: precision_at_3 value: 31.963 - type: precision_at_5 value: 19.769000000000002 - type: precision_at_10 value: 10.132 - type: precision_at_20 value: 5.149 - type: precision_at_100 value: 1.055 - type: precision_at_1000 value: 0.109 - type: mrr_at_1 value: 79.0879 - type: mrr_at_3 value: 86.1536 - type: mrr_at_5 value: 86.7004 - type: mrr_at_10 value: 86.9425 - type: mrr_at_20 value: 87.00099999999999 - type: mrr_at_100 value: 87.01719999999999 - type: mrr_at_1000 value: 87.01769999999999 - type: nauc_ndcg_at_1_max value: 28.2184 - type: nauc_ndcg_at_1_std value: -20.374200000000002 - type: nauc_ndcg_at_1_diff1 value: 64.4185 - type: nauc_ndcg_at_3_max value: 22.014 - type: nauc_ndcg_at_3_std value: -15.221699999999998 - type: nauc_ndcg_at_3_diff1 value: 47.511700000000005 - type: nauc_ndcg_at_5_max value: 21.381700000000002 - type: nauc_ndcg_at_5_std value: -14.3711 - type: nauc_ndcg_at_5_diff1 value: 46.6271 - type: nauc_ndcg_at_10_max value: 20.4251 - type: nauc_ndcg_at_10_std value: -13.3096 - type: nauc_ndcg_at_10_diff1 value: 46.1205 - type: nauc_ndcg_at_20_max value: 20.686 - type: nauc_ndcg_at_20_std value: -12.6058 - type: nauc_ndcg_at_20_diff1 value: 46.14 - type: nauc_ndcg_at_100_max value: 20.657700000000002 - type: nauc_ndcg_at_100_std value: -12.5531 - type: nauc_ndcg_at_100_diff1 value: 46.3788 - type: nauc_ndcg_at_1000_max value: 21.0177 - type: nauc_ndcg_at_1000_std value: -12.8318 - type: nauc_ndcg_at_1000_diff1 value: 46.8648 - type: nauc_map_at_1_max value: 21.4975 - type: nauc_map_at_1_std value: -14.5207 - type: nauc_map_at_1_diff1 value: 51.53959999999999 - type: nauc_map_at_3_max value: 20.322699999999998 - type: nauc_map_at_3_std value: -13.8986 - type: nauc_map_at_3_diff1 value: 46.3932 - type: nauc_map_at_5_max value: 20.3296 - type: nauc_map_at_5_std value: -13.5416 - type: nauc_map_at_5_diff1 value: 46.1518 - type: nauc_map_at_10_max value: 20.0385 - type: nauc_map_at_10_std value: -13.239999999999998 - type: nauc_map_at_10_diff1 value: 46.061800000000005 - type: nauc_map_at_20_max value: 20.113300000000002 - type: nauc_map_at_20_std value: -13.0931 - type: nauc_map_at_20_diff1 value: 46.091 - type: nauc_map_at_100_max value: 20.1262 - type: nauc_map_at_100_std value: -13.0646 - type: nauc_map_at_100_diff1 value: 46.1321 - type: nauc_map_at_1000_max value: 20.1391 - type: nauc_map_at_1000_std 
value: -13.069600000000001 - type: nauc_map_at_1000_diff1 value: 46.1501 - type: nauc_recall_at_1_max value: 21.4975 - type: nauc_recall_at_1_std value: -14.5207 - type: nauc_recall_at_1_diff1 value: 51.53959999999999 - type: nauc_recall_at_3_max value: 15.379399999999999 - type: nauc_recall_at_3_std value: -9.9735 - type: nauc_recall_at_3_diff1 value: 30.6769 - type: nauc_recall_at_5_max value: 13.104099999999999 - type: nauc_recall_at_5_std value: -6.2273000000000005 - type: nauc_recall_at_5_diff1 value: 24.4602 - type: nauc_recall_at_10_max value: 6.4093 - type: nauc_recall_at_10_std value: 0.9238 - type: nauc_recall_at_10_diff1 value: 16.2715 - type: nauc_recall_at_20_max value: 5.5285 - type: nauc_recall_at_20_std value: 9.1474 - type: nauc_recall_at_20_diff1 value: 10.8034 - type: nauc_recall_at_100_max value: -0.116 - type: nauc_recall_at_100_std value: 14.4612 - type: nauc_recall_at_100_diff1 value: 4.6372 - type: nauc_recall_at_1000_max value: -1.595 - type: nauc_recall_at_1000_std value: 18.1495 - type: nauc_recall_at_1000_diff1 value: -0.022000000000000002 - type: nauc_precision_at_1_max value: 28.2184 - type: nauc_precision_at_1_std value: -20.374200000000002 - type: nauc_precision_at_1_diff1 value: 64.4185 - type: nauc_precision_at_3_max value: 24.238799999999998 - type: nauc_precision_at_3_std value: -19.7064 - type: nauc_precision_at_3_diff1 value: 37.7498 - type: nauc_precision_at_5_max value: 20.8308 - type: nauc_precision_at_5_std value: -13.6486 - type: nauc_precision_at_5_diff1 value: 23.3404 - type: nauc_precision_at_10_max value: 9.4386 - type: nauc_precision_at_10_std value: -4.8239 - type: nauc_precision_at_10_diff1 value: 6.8594 - type: nauc_precision_at_20_max value: 9.0063 - type: nauc_precision_at_20_std value: 4.0311 - type: nauc_precision_at_20_diff1 value: -2.9298 - type: nauc_precision_at_100_max value: 5.1057 - type: nauc_precision_at_100_std value: 7.3903 - type: nauc_precision_at_100_diff1 value: -8.7148 - type: nauc_precision_at_1000_max value: 6.3359 - type: nauc_precision_at_1000_std value: 3.9797 - type: nauc_precision_at_1000_diff1 value: -8.3131 - type: nauc_mrr_at_1_max value: 28.2184 - type: nauc_mrr_at_1_std value: -20.374200000000002 - type: nauc_mrr_at_1_diff1 value: 64.4185 - type: nauc_mrr_at_3_max value: 29.7481 - type: nauc_mrr_at_3_std value: -21.9924 - type: nauc_mrr_at_3_diff1 value: 62.5737 - type: nauc_mrr_at_5_max value: 29.8062 - type: nauc_mrr_at_5_std value: -22.078 - type: nauc_mrr_at_5_diff1 value: 62.9 - type: nauc_mrr_at_10_max value: 29.641000000000002 - type: nauc_mrr_at_10_std value: -21.6827 - type: nauc_mrr_at_10_diff1 value: 62.944599999999994 - type: nauc_mrr_at_20_max value: 29.6535 - type: nauc_mrr_at_20_std value: -21.520400000000002 - type: nauc_mrr_at_20_diff1 value: 62.9583 - type: nauc_mrr_at_100_max value: 29.622799999999998 - type: nauc_mrr_at_100_std value: -21.5393 - type: nauc_mrr_at_100_diff1 value: 62.9658 - type: nauc_mrr_at_1000_max value: 29.619400000000002 - type: nauc_mrr_at_1000_std value: -21.5417 - type: nauc_mrr_at_1000_diff1 value: 62.96469999999999 - type: main_score value: 85.528 - task: type: Retrieval dataset: name: MTEB FiQA2018 (default) type: mteb/fiqa config: default split: test revision: 27a168819829fe9bcd655c2df245fb19452e8e06 metrics: - type: ndcg_at_1 value: 35.494 - type: ndcg_at_3 value: 32.305 - type: ndcg_at_5 value: 34.332 - type: ndcg_at_10 value: 36.851 - type: ndcg_at_20 value: 39.31 - type: ndcg_at_100 value: 43.462 - type: ndcg_at_1000 value: 46.766000000000005 - type: 
map_at_1 value: 18.311 - type: map_at_3 value: 24.778 - type: map_at_5 value: 27.453 - type: map_at_10 value: 29.198 - type: map_at_20 value: 30.118000000000002 - type: map_at_100 value: 30.930000000000003 - type: map_at_1000 value: 31.115 - type: recall_at_1 value: 18.311 - type: recall_at_3 value: 28.823999999999998 - type: recall_at_5 value: 36.178 - type: recall_at_10 value: 43.842 - type: recall_at_20 value: 51.370000000000005 - type: recall_at_100 value: 68.593 - type: recall_at_1000 value: 88.55 - type: precision_at_1 value: 35.494 - type: precision_at_3 value: 21.142 - type: precision_at_5 value: 16.326999999999998 - type: precision_at_10 value: 10.309 - type: precision_at_20 value: 6.211 - type: precision_at_100 value: 1.7069999999999999 - type: precision_at_1000 value: 0.22899999999999998 - type: mrr_at_1 value: 35.4938 - type: mrr_at_3 value: 41.6667 - type: mrr_at_5 value: 43.4182 - type: mrr_at_10 value: 44.4732 - type: mrr_at_20 value: 44.969 - type: mrr_at_100 value: 45.318599999999996 - type: mrr_at_1000 value: 45.3674 - type: nauc_ndcg_at_1_max value: 33.946799999999996 - type: nauc_ndcg_at_1_std value: -5.282 - type: nauc_ndcg_at_1_diff1 value: 47.413 - type: nauc_ndcg_at_3_max value: 30.9073 - type: nauc_ndcg_at_3_std value: -2.2498 - type: nauc_ndcg_at_3_diff1 value: 38.548500000000004 - type: nauc_ndcg_at_5_max value: 30.2537 - type: nauc_ndcg_at_5_std value: -0.9919000000000001 - type: nauc_ndcg_at_5_diff1 value: 37.988499999999995 - type: nauc_ndcg_at_10_max value: 30.5224 - type: nauc_ndcg_at_10_std value: 0.0762 - type: nauc_ndcg_at_10_diff1 value: 38.2531 - type: nauc_ndcg_at_20_max value: 32.173 - type: nauc_ndcg_at_20_std value: 3.3266999999999998 - type: nauc_ndcg_at_20_diff1 value: 37.5071 - type: nauc_ndcg_at_100_max value: 33.551700000000004 - type: nauc_ndcg_at_100_std value: 5.8902 - type: nauc_ndcg_at_100_diff1 value: 37.3363 - type: nauc_ndcg_at_1000_max value: 34.1671 - type: nauc_ndcg_at_1000_std value: 5.4682 - type: nauc_ndcg_at_1000_diff1 value: 37.5779 - type: nauc_map_at_1_max value: 20.0425 - type: nauc_map_at_1_std value: -7.41 - type: nauc_map_at_1_diff1 value: 40.725699999999996 - type: nauc_map_at_3_max value: 25.380799999999997 - type: nauc_map_at_3_std value: -4.5524000000000004 - type: nauc_map_at_3_diff1 value: 38.960699999999996 - type: nauc_map_at_5_max value: 27.208900000000003 - type: nauc_map_at_5_std value: -3.034 - type: nauc_map_at_5_diff1 value: 38.475500000000004 - type: nauc_map_at_10_max value: 28.6066 - type: nauc_map_at_10_std value: -2.1042 - type: nauc_map_at_10_diff1 value: 38.4411 - type: nauc_map_at_20_max value: 29.3931 - type: nauc_map_at_20_std value: -0.8289 - type: nauc_map_at_20_diff1 value: 38.137 - type: nauc_map_at_100_max value: 29.8041 - type: nauc_map_at_100_std value: -0.1992 - type: nauc_map_at_100_diff1 value: 38.0546 - type: nauc_map_at_1000_max value: 29.886400000000002 - type: nauc_map_at_1000_std value: -0.1638 - type: nauc_map_at_1000_diff1 value: 38.0646 - type: nauc_recall_at_1_max value: 20.0425 - type: nauc_recall_at_1_std value: -7.41 - type: nauc_recall_at_1_diff1 value: 40.725699999999996 - type: nauc_recall_at_3_max value: 20.8038 - type: nauc_recall_at_3_std value: -4.1075 - type: nauc_recall_at_3_diff1 value: 33.0009 - type: nauc_recall_at_5_max value: 23.1816 - type: nauc_recall_at_5_std value: 0.2681 - type: nauc_recall_at_5_diff1 value: 30.1663 - type: nauc_recall_at_10_max value: 23.754 - type: nauc_recall_at_10_std value: 2.4185000000000003 - type: nauc_recall_at_10_diff1 value: 
28.475499999999997 - type: nauc_recall_at_20_max value: 27.711599999999997 - type: nauc_recall_at_20_std value: 12.509700000000002 - type: nauc_recall_at_20_diff1 value: 25.172299999999996 - type: nauc_recall_at_100_max value: 29.3806 - type: nauc_recall_at_100_std value: 25.1963 - type: nauc_recall_at_100_diff1 value: 21.849 - type: nauc_recall_at_1000_max value: 34.1492 - type: nauc_recall_at_1000_std value: 40.4872 - type: nauc_recall_at_1000_diff1 value: 17.0167 - type: nauc_precision_at_1_max value: 33.946799999999996 - type: nauc_precision_at_1_std value: -5.282 - type: nauc_precision_at_1_diff1 value: 47.413 - type: nauc_precision_at_3_max value: 36.6837 - type: nauc_precision_at_3_std value: 3.7282 - type: nauc_precision_at_3_diff1 value: 31.0152 - type: nauc_precision_at_5_max value: 37.6087 - type: nauc_precision_at_5_std value: 7.3439000000000005 - type: nauc_precision_at_5_diff1 value: 27.2321 - type: nauc_precision_at_10_max value: 38.2792 - type: nauc_precision_at_10_std value: 11.3814 - type: nauc_precision_at_10_diff1 value: 22.6494 - type: nauc_precision_at_20_max value: 38.455 - type: nauc_precision_at_20_std value: 17.4053 - type: nauc_precision_at_20_diff1 value: 16.8265 - type: nauc_precision_at_100_max value: 36.203 - type: nauc_precision_at_100_std value: 22.2758 - type: nauc_precision_at_100_diff1 value: 8.3908 - type: nauc_precision_at_1000_max value: 29.599700000000002 - type: nauc_precision_at_1000_std value: 17.186899999999998 - type: nauc_precision_at_1000_diff1 value: 0.0332 - type: nauc_mrr_at_1_max value: 33.946799999999996 - type: nauc_mrr_at_1_std value: -5.282 - type: nauc_mrr_at_1_diff1 value: 47.413 - type: nauc_mrr_at_3_max value: 34.0785 - type: nauc_mrr_at_3_std value: -2.1323000000000003 - type: nauc_mrr_at_3_diff1 value: 43.8661 - type: nauc_mrr_at_5_max value: 34.244 - type: nauc_mrr_at_5_std value: -1.5425 - type: nauc_mrr_at_5_diff1 value: 43.7631 - type: nauc_mrr_at_10_max value: 34.265299999999996 - type: nauc_mrr_at_10_std value: -1.1494 - type: nauc_mrr_at_10_diff1 value: 43.639 - type: nauc_mrr_at_20_max value: 34.5648 - type: nauc_mrr_at_20_std value: -0.6076 - type: nauc_mrr_at_20_diff1 value: 43.431 - type: nauc_mrr_at_100_max value: 34.571400000000004 - type: nauc_mrr_at_100_std value: -0.5074000000000001 - type: nauc_mrr_at_100_diff1 value: 43.4003 - type: nauc_mrr_at_1000_max value: 34.5576 - type: nauc_mrr_at_1000_std value: -0.534 - type: nauc_mrr_at_1000_diff1 value: 43.4086 - type: main_score value: 36.851 - task: type: Retrieval dataset: name: MTEB HotpotQA (default) type: mteb/hotpotqa config: default split: test revision: ab518f4d6fcca38d87c25209f94beba119d02014 metrics: - type: ndcg_at_1 value: 73.531 - type: ndcg_at_3 value: 58.24700000000001 - type: ndcg_at_5 value: 60.905 - type: ndcg_at_10 value: 62.918 - type: ndcg_at_20 value: 64.297 - type: ndcg_at_100 value: 66.056 - type: ndcg_at_1000 value: 67.554 - type: map_at_1 value: 36.766 - type: map_at_3 value: 50.427 - type: map_at_5 value: 52.449999999999996 - type: map_at_10 value: 53.639 - type: map_at_20 value: 54.17999999999999 - type: map_at_100 value: 54.532000000000004 - type: map_at_1000 value: 54.608000000000004 - type: recall_at_1 value: 36.766 - type: recall_at_3 value: 54.835 - type: recall_at_5 value: 60.080999999999996 - type: recall_at_10 value: 65.098 - type: recall_at_20 value: 69.541 - type: recall_at_100 value: 77.306 - type: recall_at_1000 value: 87.252 - type: precision_at_1 value: 73.531 - type: precision_at_3 value: 36.556 - type: precision_at_5 value: 
24.032 - type: precision_at_10 value: 13.020000000000001 - type: precision_at_20 value: 6.954000000000001 - type: precision_at_100 value: 1.546 - type: precision_at_1000 value: 0.17500000000000002 - type: mrr_at_1 value: 73.5314 - type: mrr_at_3 value: 78.9489 - type: mrr_at_5 value: 79.7288 - type: mrr_at_10 value: 80.1036 - type: mrr_at_20 value: 80.2602 - type: mrr_at_100 value: 80.3412 - type: mrr_at_1000 value: 80.3512 - type: nauc_ndcg_at_1_max value: 49.4087 - type: nauc_ndcg_at_1_std value: -8.233 - type: nauc_ndcg_at_1_diff1 value: 69.19380000000001 - type: nauc_ndcg_at_3_max value: 29.407899999999998 - type: nauc_ndcg_at_3_std value: -2.1144 - type: nauc_ndcg_at_3_diff1 value: 27.245599999999996 - type: nauc_ndcg_at_5_max value: 27.483 - type: nauc_ndcg_at_5_std value: -0.7036 - type: nauc_ndcg_at_5_diff1 value: 24.2534 - type: nauc_ndcg_at_10_max value: 26.766499999999997 - type: nauc_ndcg_at_10_std value: 0.5583 - type: nauc_ndcg_at_10_diff1 value: 22.822300000000002 - type: nauc_ndcg_at_20_max value: 26.339800000000004 - type: nauc_ndcg_at_20_std value: 1.3486 - type: nauc_ndcg_at_20_diff1 value: 22.3499 - type: nauc_ndcg_at_100_max value: 26.436799999999998 - type: nauc_ndcg_at_100_std value: 2.5304 - type: nauc_ndcg_at_100_diff1 value: 22.372700000000002 - type: nauc_ndcg_at_1000_max value: 26.9472 - type: nauc_ndcg_at_1000_std value: 2.3277 - type: nauc_ndcg_at_1000_diff1 value: 23.3345 - type: nauc_map_at_1_max value: 49.4087 - type: nauc_map_at_1_std value: -8.233 - type: nauc_map_at_1_diff1 value: 69.19380000000001 - type: nauc_map_at_3_max value: 25.2676 - type: nauc_map_at_3_std value: -1.8659999999999999 - type: nauc_map_at_3_diff1 value: 21.0961 - type: nauc_map_at_5_max value: 24.0651 - type: nauc_map_at_5_std value: -0.8111 - type: nauc_map_at_5_diff1 value: 19.237099999999998 - type: nauc_map_at_10_max value: 23.785 - type: nauc_map_at_10_std value: -0.1037 - type: nauc_map_at_10_diff1 value: 18.5973 - type: nauc_map_at_20_max value: 23.6813 - type: nauc_map_at_20_std value: 0.1708 - type: nauc_map_at_20_diff1 value: 18.499299999999998 - type: nauc_map_at_100_max value: 23.7276 - type: nauc_map_at_100_std value: 0.3879 - type: nauc_map_at_100_diff1 value: 18.5423 - type: nauc_map_at_1000_max value: 23.7501 - type: nauc_map_at_1000_std value: 0.3886 - type: nauc_map_at_1000_diff1 value: 18.578500000000002 - type: nauc_recall_at_1_max value: 49.4087 - type: nauc_recall_at_1_std value: -8.233 - type: nauc_recall_at_1_diff1 value: 69.19380000000001 - type: nauc_recall_at_3_max value: 21.7043 - type: nauc_recall_at_3_std value: 0.24320000000000003 - type: nauc_recall_at_3_diff1 value: 12.102599999999999 - type: nauc_recall_at_5_max value: 16.923 - type: nauc_recall_at_5_std value: 2.9763 - type: nauc_recall_at_5_diff1 value: 5.5262 - type: nauc_recall_at_10_max value: 13.8286 - type: nauc_recall_at_10_std value: 6.1254 - type: nauc_recall_at_10_diff1 value: 0.6326 - type: nauc_recall_at_20_max value: 11.307300000000001 - type: nauc_recall_at_20_std value: 8.9861 - type: nauc_recall_at_20_diff1 value: -2.5909 - type: nauc_recall_at_100_max value: 8.2009 - type: nauc_recall_at_100_std value: 16.051199999999998 - type: nauc_recall_at_100_diff1 value: -7.757699999999999 - type: nauc_recall_at_1000_max value: 5.4062 - type: nauc_recall_at_1000_std value: 20.6122 - type: nauc_recall_at_1000_diff1 value: -11.931700000000001 - type: nauc_precision_at_1_max value: 49.4087 - type: nauc_precision_at_1_std value: -8.233 - type: nauc_precision_at_1_diff1 value: 69.19380000000001 - 
type: nauc_precision_at_3_max value: 21.7043 - type: nauc_precision_at_3_std value: 0.24320000000000003 - type: nauc_precision_at_3_diff1 value: 12.102599999999999 - type: nauc_precision_at_5_max value: 16.923 - type: nauc_precision_at_5_std value: 2.9763 - type: nauc_precision_at_5_diff1 value: 5.5262 - type: nauc_precision_at_10_max value: 13.8286 - type: nauc_precision_at_10_std value: 6.1254 - type: nauc_precision_at_10_diff1 value: 0.6326 - type: nauc_precision_at_20_max value: 11.307300000000001 - type: nauc_precision_at_20_std value: 8.9861 - type: nauc_precision_at_20_diff1 value: -2.5909 - type: nauc_precision_at_100_max value: 8.2009 - type: nauc_precision_at_100_std value: 16.051199999999998 - type: nauc_precision_at_100_diff1 value: -7.757699999999999 - type: nauc_precision_at_1000_max value: 5.4062 - type: nauc_precision_at_1000_std value: 20.6122 - type: nauc_precision_at_1000_diff1 value: -11.931700000000001 - type: nauc_mrr_at_1_max value: 49.4087 - type: nauc_mrr_at_1_std value: -8.233 - type: nauc_mrr_at_1_diff1 value: 69.19380000000001 - type: nauc_mrr_at_3_max value: 51.004099999999994 - type: nauc_mrr_at_3_std value: -6.4677 - type: nauc_mrr_at_3_diff1 value: 66.1969 - type: nauc_mrr_at_5_max value: 50.880199999999995 - type: nauc_mrr_at_5_std value: -6.3541 - type: nauc_mrr_at_5_diff1 value: 66.0764 - type: nauc_mrr_at_10_max value: 50.924899999999994 - type: nauc_mrr_at_10_std value: -6.2945 - type: nauc_mrr_at_10_diff1 value: 66.2079 - type: nauc_mrr_at_20_max value: 50.907199999999996 - type: nauc_mrr_at_20_std value: -6.253 - type: nauc_mrr_at_20_diff1 value: 66.28450000000001 - type: nauc_mrr_at_100_max value: 50.8991 - type: nauc_mrr_at_100_std value: -6.2459 - type: nauc_mrr_at_100_diff1 value: 66.3257 - type: nauc_mrr_at_1000_max value: 50.8934 - type: nauc_mrr_at_1000_std value: -6.2602 - type: nauc_mrr_at_1000_diff1 value: 66.328 - type: main_score value: 62.918 - task: type: Classification dataset: name: MTEB ImdbClassification (default) type: mteb/imdb config: default split: test revision: 3d86128a09e091d6018b6d26cad27f2739fc2db7 metrics: - type: accuracy value: 62.2348 - type: f1 value: 62.0977 - type: f1_weighted value: 62.0977 - type: ap value: 57.750800000000005 - type: ap_weighted value: 57.750800000000005 - type: main_score value: 62.2348 - task: type: Retrieval dataset: name: MTEB MSMARCO (default) type: mteb/msmarco config: default split: dev revision: c5a29a104738b98a9e76336939199e264163d4a0 metrics: - type: ndcg_at_1 value: 15.085999999999999 - type: ndcg_at_3 value: 23.567 - type: ndcg_at_5 value: 27.066000000000003 - type: ndcg_at_10 value: 30.711 - type: ndcg_at_20 value: 33.251999999999995 - type: ndcg_at_100 value: 37.221 - type: ndcg_at_1000 value: 39.133 - type: map_at_1 value: 14.654 - type: map_at_3 value: 21.234 - type: map_at_5 value: 23.189999999999998 - type: map_at_10 value: 24.72 - type: map_at_20 value: 25.433 - type: map_at_100 value: 25.994 - type: map_at_1000 value: 26.067 - type: recall_at_1 value: 14.654 - type: recall_at_3 value: 29.862 - type: recall_at_5 value: 38.274 - type: recall_at_10 value: 49.341 - type: recall_at_20 value: 59.206 - type: recall_at_100 value: 80.22399999999999 - type: recall_at_1000 value: 95.037 - type: precision_at_1 value: 15.085999999999999 - type: precision_at_3 value: 10.277 - type: precision_at_5 value: 7.922999999999999 - type: precision_at_10 value: 5.132 - type: precision_at_20 value: 3.0949999999999998 - type: precision_at_100 value: 0.845 - type: precision_at_1000 value: 0.101 - type: 
mrr_at_1 value: 15.085999999999999 - type: mrr_at_3 value: 21.7311 - type: mrr_at_5 value: 23.6738 - type: mrr_at_10 value: 25.184099999999997 - type: mrr_at_20 value: 25.878899999999998 - type: mrr_at_100 value: 26.4216 - type: mrr_at_1000 value: 26.4886 - type: nauc_ndcg_at_1_max value: 3.3686000000000003 - type: nauc_ndcg_at_1_std value: -14.960799999999999 - type: nauc_ndcg_at_1_diff1 value: 30.0257 - type: nauc_ndcg_at_3_max value: 4.3222 - type: nauc_ndcg_at_3_std value: -15.8473 - type: nauc_ndcg_at_3_diff1 value: 26.935399999999998 - type: nauc_ndcg_at_5_max value: 4.8392 - type: nauc_ndcg_at_5_std value: -15.7197 - type: nauc_ndcg_at_5_diff1 value: 26.1067 - type: nauc_ndcg_at_10_max value: 4.8289 - type: nauc_ndcg_at_10_std value: -14.713300000000002 - type: nauc_ndcg_at_10_diff1 value: 25.3576 - type: nauc_ndcg_at_20_max value: 5.2264 - type: nauc_ndcg_at_20_std value: -13.5723 - type: nauc_ndcg_at_20_diff1 value: 25.7189 - type: nauc_ndcg_at_100_max value: 6.2197000000000005 - type: nauc_ndcg_at_100_std value: -10.5613 - type: nauc_ndcg_at_100_diff1 value: 25.407200000000003 - type: nauc_ndcg_at_1000_max value: 6.336899999999999 - type: nauc_ndcg_at_1000_std value: -11.2538 - type: nauc_ndcg_at_1000_diff1 value: 25.8353 - type: nauc_map_at_1_max value: 3.4762 - type: nauc_map_at_1_std value: -14.829899999999999 - type: nauc_map_at_1_diff1 value: 30.220200000000002 - type: nauc_map_at_3_max value: 4.1498 - type: nauc_map_at_3_std value: -15.659699999999999 - type: nauc_map_at_3_diff1 value: 27.6738 - type: nauc_map_at_5_max value: 4.457599999999999 - type: nauc_map_at_5_std value: -15.593599999999999 - type: nauc_map_at_5_diff1 value: 27.147399999999998 - type: nauc_map_at_10_max value: 4.4191 - type: nauc_map_at_10_std value: -15.199599999999998 - type: nauc_map_at_10_diff1 value: 26.8024 - type: nauc_map_at_20_max value: 4.559699999999999 - type: nauc_map_at_20_std value: -14.8687 - type: nauc_map_at_20_diff1 value: 26.929799999999997 - type: nauc_map_at_100_max value: 4.709300000000001 - type: nauc_map_at_100_std value: -14.430599999999998 - type: nauc_map_at_100_diff1 value: 26.895200000000003 - type: nauc_map_at_1000_max value: 4.7146 - type: nauc_map_at_1000_std value: -14.4381 - type: nauc_map_at_1000_diff1 value: 26.9071 - type: nauc_recall_at_1_max value: 3.4762 - type: nauc_recall_at_1_std value: -14.829899999999999 - type: nauc_recall_at_1_diff1 value: 30.220200000000002 - type: nauc_recall_at_3_max value: 4.8518 - type: nauc_recall_at_3_std value: -16.215 - type: nauc_recall_at_3_diff1 value: 25.1628 - type: nauc_recall_at_5_max value: 5.8279 - type: nauc_recall_at_5_std value: -15.9303 - type: nauc_recall_at_5_diff1 value: 23.544999999999998 - type: nauc_recall_at_10_max value: 5.7948 - type: nauc_recall_at_10_std value: -13.1624 - type: nauc_recall_at_10_diff1 value: 21.5447 - type: nauc_recall_at_20_max value: 7.0539000000000005 - type: nauc_recall_at_20_std value: -8.9408 - type: nauc_recall_at_20_diff1 value: 22.4027 - type: nauc_recall_at_100_max value: 15.1651 - type: nauc_recall_at_100_std value: 16.419 - type: nauc_recall_at_100_diff1 value: 17.897299999999998 - type: nauc_recall_at_1000_max value: 41.646300000000004 - type: nauc_recall_at_1000_std value: 54.791000000000004 - type: nauc_recall_at_1000_diff1 value: 16.4922 - type: nauc_precision_at_1_max value: 3.3686000000000003 - type: nauc_precision_at_1_std value: -14.960799999999999 - type: nauc_precision_at_1_diff1 value: 30.0257 - type: nauc_precision_at_3_max value: 4.8638 - type: 
nauc_precision_at_3_std value: -16.3 - type: nauc_precision_at_3_diff1 value: 25.1213 - type: nauc_precision_at_5_max value: 5.8399 - type: nauc_precision_at_5_std value: -16.1007 - type: nauc_precision_at_5_diff1 value: 23.4288 - type: nauc_precision_at_10_max value: 6.042 - type: nauc_precision_at_10_std value: -13.0782 - type: nauc_precision_at_10_diff1 value: 20.8509 - type: nauc_precision_at_20_max value: 7.9528 - type: nauc_precision_at_20_std value: -8.2321 - type: nauc_precision_at_20_diff1 value: 21.0746 - type: nauc_precision_at_100_max value: 16.026699999999998 - type: nauc_precision_at_100_std value: 15.112200000000001 - type: nauc_precision_at_100_diff1 value: 13.2433 - type: nauc_precision_at_1000_max value: 24.8965 - type: nauc_precision_at_1000_std value: 24.741 - type: nauc_precision_at_1000_diff1 value: 2.8078 - type: nauc_mrr_at_1_max value: 3.3686000000000003 - type: nauc_mrr_at_1_std value: -14.960799999999999 - type: nauc_mrr_at_1_diff1 value: 30.0257 - type: nauc_mrr_at_3_max value: 3.9521 - type: nauc_mrr_at_3_std value: -15.6591 - type: nauc_mrr_at_3_diff1 value: 27.511799999999997 - type: nauc_mrr_at_5_max value: 4.3118 - type: nauc_mrr_at_5_std value: -15.5244 - type: nauc_mrr_at_5_diff1 value: 27.024199999999997 - type: nauc_mrr_at_10_max value: 4.3529 - type: nauc_mrr_at_10_std value: -15.065100000000001 - type: nauc_mrr_at_10_diff1 value: 26.7106 - type: nauc_mrr_at_20_max value: 4.4593 - type: nauc_mrr_at_20_std value: -14.7683 - type: nauc_mrr_at_20_diff1 value: 26.815099999999997 - type: nauc_mrr_at_100_max value: 4.5908999999999995 - type: nauc_mrr_at_100_std value: -14.361099999999999 - type: nauc_mrr_at_100_diff1 value: 26.7866 - type: nauc_mrr_at_1000_max value: 4.5903 - type: nauc_mrr_at_1000_std value: -14.3764 - type: nauc_mrr_at_1000_diff1 value: 26.801000000000002 - type: main_score value: 30.711 - task: type: Classification dataset: name: MTEB MTOPDomainClassification (en) type: mteb/mtop_domain config: en split: test revision: d80d48c1eb48d3562165c59d59d0034df9fff0bf metrics: - type: accuracy value: 89.4505 - type: f1 value: 89.00200000000001 - type: f1_weighted value: 89.442 - type: main_score value: 89.4505 - task: type: Classification dataset: name: MTEB MTOPIntentClassification (en) type: mteb/mtop_intent config: en split: test revision: ae001d0e6b1228650b7bd1c2c65fb50ad11a8aba metrics: - type: accuracy value: 56.846799999999995 - type: f1 value: 39.2152 - type: f1_weighted value: 58.797999999999995 - type: main_score value: 56.846799999999995 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (en) type: mteb/amazon_massive_intent config: en split: test revision: 4672e20407010da34463acc759c162ca9734bca6 metrics: - type: accuracy value: 64.768 - type: f1 value: 61.9285 - type: f1_weighted value: 63.67 - type: main_score value: 64.768 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (en) type: mteb/amazon_massive_scenario config: en split: test revision: fad2c6e8459f9e1c45d9315f4953d921437d70f8 metrics: - type: accuracy value: 71.3416 - type: f1 value: 69.9576 - type: f1_weighted value: 71.19680000000001 - type: main_score value: 71.3416 - task: type: Clustering dataset: name: MTEB MedrxivClusteringP2P (default) type: mteb/medrxiv-clustering-p2p config: default split: test revision: e7a26af6f3ae46b30dde8737f02c07b1505bcc73 metrics: - type: v_measure value: 32.5684 - type: v_measure_std value: 1.6362999999999999 - type: main_score value: 32.5684 - task: type: Clustering dataset: name: MTEB 
MedrxivClusteringS2S (default) type: mteb/medrxiv-clustering-s2s config: default split: test revision: 35191c8c0dca72d8ff3efcd72aa802307d469663 metrics: - type: v_measure value: 31.551299999999998 - type: v_measure_std value: 1.7208999999999999 - type: main_score value: 31.551299999999998 - task: type: Reranking dataset: name: MTEB MindSmallReranking (default) type: mteb/mind_small config: default split: test revision: 59042f120c80e8afa9cdbb224f67076cec0fc9a7 metrics: - type: map value: 30.883 - type: mrr value: 31.923299999999998 - type: nAUC_map_max value: -20.072000000000003 - type: nAUC_map_std value: -4.8503 - type: nAUC_map_diff1 value: 14.178099999999999 - type: nAUC_mrr_max value: -14.7901 - type: nAUC_mrr_std value: -2.8666 - type: nAUC_mrr_diff1 value: 13.2767 - type: main_score value: 30.883 - task: type: Retrieval dataset: name: MTEB NFCorpus (default) type: mteb/nfcorpus config: default split: test revision: ec0fa4fe99da2ff19ca1214b7966684033a58814 metrics: - type: ndcg_at_1 value: 41.486000000000004 - type: ndcg_at_3 value: 39.324 - type: ndcg_at_5 value: 36.949 - type: ndcg_at_10 value: 33.737 - type: ndcg_at_20 value: 31.320999999999998 - type: ndcg_at_100 value: 30.886000000000003 - type: ndcg_at_1000 value: 40.018 - type: map_at_1 value: 5.452 - type: map_at_3 value: 9.45 - type: map_at_5 value: 10.92 - type: map_at_10 value: 12.758 - type: map_at_20 value: 14.036999999999999 - type: map_at_100 value: 15.93 - type: map_at_1000 value: 17.422 - type: recall_at_1 value: 5.452 - type: recall_at_3 value: 10.732999999999999 - type: recall_at_5 value: 13.553 - type: recall_at_10 value: 17.119999999999997 - type: recall_at_20 value: 20.459 - type: recall_at_100 value: 30.719 - type: recall_at_1000 value: 62.766 - type: precision_at_1 value: 43.344 - type: precision_at_3 value: 37.152 - type: precision_at_5 value: 31.703 - type: precision_at_10 value: 24.799 - type: precision_at_20 value: 18.142 - type: precision_at_100 value: 7.8950000000000005 - type: precision_at_1000 value: 2.091 - type: mrr_at_1 value: 43.3437 - type: mrr_at_3 value: 51.135200000000005 - type: mrr_at_5 value: 52.15689999999999 - type: mrr_at_10 value: 52.9277 - type: mrr_at_20 value: 53.2931 - type: mrr_at_100 value: 53.467200000000005 - type: mrr_at_1000 value: 53.5122 - type: nauc_ndcg_at_1_max value: 33.6844 - type: nauc_ndcg_at_1_std value: 17.6117 - type: nauc_ndcg_at_1_diff1 value: 37.641999999999996 - type: nauc_ndcg_at_3_max value: 36.6302 - type: nauc_ndcg_at_3_std value: 25.738 - type: nauc_ndcg_at_3_diff1 value: 29.8566 - type: nauc_ndcg_at_5_max value: 39.043099999999995 - type: nauc_ndcg_at_5_std value: 28.904999999999998 - type: nauc_ndcg_at_5_diff1 value: 26.129400000000004 - type: nauc_ndcg_at_10_max value: 38.935199999999995 - type: nauc_ndcg_at_10_std value: 30.338700000000003 - type: nauc_ndcg_at_10_diff1 value: 23.594 - type: nauc_ndcg_at_20_max value: 38.2138 - type: nauc_ndcg_at_20_std value: 31.8994 - type: nauc_ndcg_at_20_diff1 value: 21.583 - type: nauc_ndcg_at_100_max value: 39.869 - type: nauc_ndcg_at_100_std value: 33.591300000000004 - type: nauc_ndcg_at_100_diff1 value: 23.0398 - type: nauc_ndcg_at_1000_max value: 44.9572 - type: nauc_ndcg_at_1000_std value: 38.222 - type: nauc_ndcg_at_1000_diff1 value: 23.7314 - type: nauc_map_at_1_max value: 8.0309 - type: nauc_map_at_1_std value: -12.6861 - type: nauc_map_at_1_diff1 value: 45.5924 - type: nauc_map_at_3_max value: 11.8264 - type: nauc_map_at_3_std value: -7.3325000000000005 - type: nauc_map_at_3_diff1 value: 35.5714 - type: 
nauc_map_at_5_max value: 15.7483 - type: nauc_map_at_5_std value: -2.9122 - type: nauc_map_at_5_diff1 value: 32.2211 - type: nauc_map_at_10_max value: 19.9795 - type: nauc_map_at_10_std value: 2.6611 - type: nauc_map_at_10_diff1 value: 29.047099999999997 - type: nauc_map_at_20_max value: 23.1754 - type: nauc_map_at_20_std value: 8.0668 - type: nauc_map_at_20_diff1 value: 27.7477 - type: nauc_map_at_100_max value: 26.4818 - type: nauc_map_at_100_std value: 15.723 - type: nauc_map_at_100_diff1 value: 26.5443 - type: nauc_map_at_1000_max value: 27.929100000000002 - type: nauc_map_at_1000_std value: 19.81 - type: nauc_map_at_1000_diff1 value: 25.0603 - type: nauc_recall_at_1_max value: 8.0309 - type: nauc_recall_at_1_std value: -12.6861 - type: nauc_recall_at_1_diff1 value: 45.5924 - type: nauc_recall_at_3_max value: 10.9894 - type: nauc_recall_at_3_std value: -7.4279 - type: nauc_recall_at_3_diff1 value: 29.917899999999996 - type: nauc_recall_at_5_max value: 15.7163 - type: nauc_recall_at_5_std value: -0.8366 - type: nauc_recall_at_5_diff1 value: 22.8634 - type: nauc_recall_at_10_max value: 19.5902 - type: nauc_recall_at_10_std value: 5.3492 - type: nauc_recall_at_10_diff1 value: 19.4157 - type: nauc_recall_at_20_max value: 23.1894 - type: nauc_recall_at_20_std value: 12.8919 - type: nauc_recall_at_20_diff1 value: 17.8387 - type: nauc_recall_at_100_max value: 30.150399999999998 - type: nauc_recall_at_100_std value: 27.5036 - type: nauc_recall_at_100_diff1 value: 15.4935 - type: nauc_recall_at_1000_max value: 32.404500000000006 - type: nauc_recall_at_1000_std value: 30.7325 - type: nauc_recall_at_1000_diff1 value: 13.9299 - type: nauc_precision_at_1_max value: 34.747699999999995 - type: nauc_precision_at_1_std value: 17.5475 - type: nauc_precision_at_1_diff1 value: 36.0582 - type: nauc_precision_at_3_max value: 39.8251 - type: nauc_precision_at_3_std value: 34.3835 - type: nauc_precision_at_3_diff1 value: 19.651699999999998 - type: nauc_precision_at_5_max value: 42.796800000000005 - type: nauc_precision_at_5_std value: 40.083999999999996 - type: nauc_precision_at_5_diff1 value: 12.4069 - type: nauc_precision_at_10_max value: 41.562599999999996 - type: nauc_precision_at_10_std value: 44.7888 - type: nauc_precision_at_10_diff1 value: 5.587000000000001 - type: nauc_precision_at_20_max value: 37.000499999999995 - type: nauc_precision_at_20_std value: 50.4486 - type: nauc_precision_at_20_diff1 value: -0.1011 - type: nauc_precision_at_100_max value: 24.7635 - type: nauc_precision_at_100_std value: 51.001200000000004 - type: nauc_precision_at_100_diff1 value: -7.7414 - type: nauc_precision_at_1000_max value: 10.837900000000001 - type: nauc_precision_at_1000_std value: 37.2421 - type: nauc_precision_at_1000_diff1 value: -14.086599999999999 - type: nauc_mrr_at_1_max value: 34.747699999999995 - type: nauc_mrr_at_1_std value: 17.5475 - type: nauc_mrr_at_1_diff1 value: 36.0582 - type: nauc_mrr_at_3_max value: 40.8392 - type: nauc_mrr_at_3_std value: 24.9403 - type: nauc_mrr_at_3_diff1 value: 33.9575 - type: nauc_mrr_at_5_max value: 42.2108 - type: nauc_mrr_at_5_std value: 26.374799999999997 - type: nauc_mrr_at_5_diff1 value: 33.8034 - type: nauc_mrr_at_10_max value: 42.180800000000005 - type: nauc_mrr_at_10_std value: 26.6843 - type: nauc_mrr_at_10_diff1 value: 33.151 - type: nauc_mrr_at_20_max value: 42.4685 - type: nauc_mrr_at_20_std value: 27.1065 - type: nauc_mrr_at_20_diff1 value: 33.0052 - type: nauc_mrr_at_100_max value: 42.417 - type: nauc_mrr_at_100_std value: 27.069300000000002 - type: 
nauc_mrr_at_100_diff1 value: 33.1211 - type: nauc_mrr_at_1000_max value: 42.3902 - type: nauc_mrr_at_1000_std value: 27.019 - type: nauc_mrr_at_1000_diff1 value: 33.1177 - type: main_score value: 33.737 - task: type: Retrieval dataset: name: MTEB NQ (default) type: mteb/nq config: default split: test revision: b774495ed302d8c44a3a7ea25c90dbce03968f31 metrics: - type: ndcg_at_1 value: 32.793 - type: ndcg_at_3 value: 42.782 - type: ndcg_at_5 value: 47.554 - type: ndcg_at_10 value: 51.63100000000001 - type: ndcg_at_20 value: 54.005 - type: ndcg_at_100 value: 56.287 - type: ndcg_at_1000 value: 56.949000000000005 - type: map_at_1 value: 29.022 - type: map_at_3 value: 39.045 - type: map_at_5 value: 41.86 - type: map_at_10 value: 43.730000000000004 - type: map_at_20 value: 44.478 - type: map_at_100 value: 44.849 - type: map_at_1000 value: 44.877 - type: recall_at_1 value: 29.022 - type: recall_at_3 value: 50.40599999999999 - type: recall_at_5 value: 61.45 - type: recall_at_10 value: 73.32499999999999 - type: recall_at_20 value: 82.06099999999999 - type: recall_at_100 value: 93.455 - type: recall_at_1000 value: 98.414 - type: precision_at_1 value: 32.793 - type: precision_at_3 value: 19.583000000000002 - type: precision_at_5 value: 14.484 - type: precision_at_10 value: 8.737 - type: precision_at_20 value: 4.928 - type: precision_at_100 value: 1.134 - type: precision_at_1000 value: 0.12 - type: mrr_at_1 value: 32.821600000000004 - type: mrr_at_3 value: 42.275 - type: mrr_at_5 value: 44.7895 - type: mrr_at_10 value: 46.2574 - type: mrr_at_20 value: 46.8249 - type: mrr_at_100 value: 47.0971 - type: mrr_at_1000 value: 47.1157 - type: nauc_ndcg_at_1_max value: 23.167299999999997 - type: nauc_ndcg_at_1_std value: -4.5794 - type: nauc_ndcg_at_1_diff1 value: 31.1021 - type: nauc_ndcg_at_3_max value: 27.1071 - type: nauc_ndcg_at_3_std value: -4.8229 - type: nauc_ndcg_at_3_diff1 value: 26.442 - type: nauc_ndcg_at_5_max value: 29.579 - type: nauc_ndcg_at_5_std value: -3.9125 - type: nauc_ndcg_at_5_diff1 value: 26.1946 - type: nauc_ndcg_at_10_max value: 30.6847 - type: nauc_ndcg_at_10_std value: -2.3781 - type: nauc_ndcg_at_10_diff1 value: 25.9597 - type: nauc_ndcg_at_20_max value: 31.4414 - type: nauc_ndcg_at_20_std value: -0.6708000000000001 - type: nauc_ndcg_at_20_diff1 value: 25.886300000000002 - type: nauc_ndcg_at_100_max value: 30.5333 - type: nauc_ndcg_at_100_std value: -0.605 - type: nauc_ndcg_at_100_diff1 value: 26.3173 - type: nauc_ndcg_at_1000_max value: 29.6714 - type: nauc_ndcg_at_1000_std value: -1.4797 - type: nauc_ndcg_at_1000_diff1 value: 26.4662 - type: nauc_map_at_1_max value: 22.0826 - type: nauc_map_at_1_std value: -7.1051 - type: nauc_map_at_1_diff1 value: 31.398 - type: nauc_map_at_3_max value: 26.0631 - type: nauc_map_at_3_std value: -5.564100000000001 - type: nauc_map_at_3_diff1 value: 27.4542 - type: nauc_map_at_5_max value: 27.4859 - type: nauc_map_at_5_std value: -5.1595 - type: nauc_map_at_5_diff1 value: 27.4557 - type: nauc_map_at_10_max value: 27.9754 - type: nauc_map_at_10_std value: -4.4186000000000005 - type: nauc_map_at_10_diff1 value: 27.3476 - type: nauc_map_at_20_max value: 28.168 - type: nauc_map_at_20_std value: -3.8931 - type: nauc_map_at_20_diff1 value: 27.333800000000004 - type: nauc_map_at_100_max value: 28.020899999999997 - type: nauc_map_at_100_std value: -3.8826 - type: nauc_map_at_100_diff1 value: 27.411099999999998 - type: nauc_map_at_1000_max value: 27.9917 - type: nauc_map_at_1000_std value: -3.9068 - type: nauc_map_at_1000_diff1 value: 27.4158 - type: 
nauc_recall_at_1_max value: 22.0826 - type: nauc_recall_at_1_std value: -7.1051 - type: nauc_recall_at_1_diff1 value: 31.398 - type: nauc_recall_at_3_max value: 29.145500000000002 - type: nauc_recall_at_3_std value: -4.3699 - type: nauc_recall_at_3_diff1 value: 22.868 - type: nauc_recall_at_5_max value: 35.4075 - type: nauc_recall_at_5_std value: -2.0428 - type: nauc_recall_at_5_diff1 value: 21.4863 - type: nauc_recall_at_10_max value: 41.0673 - type: nauc_recall_at_10_std value: 3.6994 - type: nauc_recall_at_10_diff1 value: 19.2556 - type: nauc_recall_at_20_max value: 50.6702 - type: nauc_recall_at_20_std value: 16.162399999999998 - type: nauc_recall_at_20_diff1 value: 16.9676 - type: nauc_recall_at_100_max value: 64.5925 - type: nauc_recall_at_100_std value: 42.2234 - type: nauc_recall_at_100_diff1 value: 12.741 - type: nauc_recall_at_1000_max value: 66.29310000000001 - type: nauc_recall_at_1000_std value: 61.5236 - type: nauc_recall_at_1000_diff1 value: -6.1148 - type: nauc_precision_at_1_max value: 23.167299999999997 - type: nauc_precision_at_1_std value: -4.5794 - type: nauc_precision_at_1_diff1 value: 31.1021 - type: nauc_precision_at_3_max value: 28.3464 - type: nauc_precision_at_3_std value: -0.0571 - type: nauc_precision_at_3_diff1 value: 18.987399999999997 - type: nauc_precision_at_5_max value: 30.9637 - type: nauc_precision_at_5_std value: 2.3625 - type: nauc_precision_at_5_diff1 value: 15.912299999999998 - type: nauc_precision_at_10_max value: 28.3203 - type: nauc_precision_at_10_std value: 8.2947 - type: nauc_precision_at_10_diff1 value: 10.066899999999999 - type: nauc_precision_at_20_max value: 26.2198 - type: nauc_precision_at_20_std value: 15.4182 - type: nauc_precision_at_20_diff1 value: 5.0011 - type: nauc_precision_at_100_max value: 12.721599999999999 - type: nauc_precision_at_100_std value: 18.2616 - type: nauc_precision_at_100_diff1 value: -1.5249000000000001 - type: nauc_precision_at_1000_max value: 1.514 - type: nauc_precision_at_1000_std value: 12.6332 - type: nauc_precision_at_1000_diff1 value: -4.8346 - type: nauc_mrr_at_1_max value: 23.3079 - type: nauc_mrr_at_1_std value: -4.6507 - type: nauc_mrr_at_1_diff1 value: 31.014999999999997 - type: nauc_mrr_at_3_max value: 26.371299999999998 - type: nauc_mrr_at_3_std value: -3.6183 - type: nauc_mrr_at_3_diff1 value: 27.5342 - type: nauc_mrr_at_5_max value: 27.4604 - type: nauc_mrr_at_5_std value: -2.9482 - type: nauc_mrr_at_5_diff1 value: 27.308100000000003 - type: nauc_mrr_at_10_max value: 27.6781 - type: nauc_mrr_at_10_std value: -2.5515 - type: nauc_mrr_at_10_diff1 value: 27.338 - type: nauc_mrr_at_20_max value: 27.760099999999998 - type: nauc_mrr_at_20_std value: -2.2787 - type: nauc_mrr_at_20_diff1 value: 27.372200000000003 - type: nauc_mrr_at_100_max value: 27.6611 - type: nauc_mrr_at_100_std value: -2.3218 - type: nauc_mrr_at_100_diff1 value: 27.444000000000003 - type: nauc_mrr_at_1000_max value: 27.6393 - type: nauc_mrr_at_1000_std value: -2.3404000000000003 - type: nauc_mrr_at_1000_diff1 value: 27.4444 - type: main_score value: 51.63100000000001 - task: type: Retrieval dataset: name: MTEB QuoraRetrieval (default) type: mteb/quora config: default split: test revision: e4e08e0b7dbe3c8700f0daef558ff32256715259 metrics: - type: ndcg_at_1 value: 79.36999999999999 - type: ndcg_at_3 value: 83.545 - type: ndcg_at_5 value: 85.32 - type: ndcg_at_10 value: 86.696 - type: ndcg_at_20 value: 87.46199999999999 - type: ndcg_at_100 value: 88.103 - type: ndcg_at_1000 value: 88.252 - type: map_at_1 value: 68.961 - type: map_at_3 
value: 79.616 - type: map_at_5 value: 81.54 - type: map_at_10 value: 82.65400000000001 - type: map_at_20 value: 83.098 - type: map_at_100 value: 83.33 - type: map_at_1000 value: 83.34899999999999 - type: recall_at_1 value: 68.961 - type: recall_at_3 value: 85.501 - type: recall_at_5 value: 90.379 - type: recall_at_10 value: 94.407 - type: recall_at_20 value: 96.86399999999999 - type: recall_at_100 value: 99.226 - type: recall_at_1000 value: 99.958 - type: precision_at_1 value: 79.36999999999999 - type: precision_at_3 value: 36.35 - type: precision_at_5 value: 24.048 - type: precision_at_10 value: 13.145000000000001 - type: precision_at_20 value: 7.007 - type: precision_at_100 value: 1.517 - type: precision_at_1000 value: 0.156 - type: mrr_at_1 value: 79.3 - type: mrr_at_3 value: 84.82169999999999 - type: mrr_at_5 value: 85.6047 - type: mrr_at_10 value: 85.94500000000001 - type: mrr_at_20 value: 86.0381 - type: mrr_at_100 value: 86.0694 - type: mrr_at_1000 value: 86.0712 - type: nauc_ndcg_at_1_max value: 37.962 - type: nauc_ndcg_at_1_std value: -32.129999999999995 - type: nauc_ndcg_at_1_diff1 value: 76.2543 - type: nauc_ndcg_at_3_max value: 36.5568 - type: nauc_ndcg_at_3_std value: -36.9639 - type: nauc_ndcg_at_3_diff1 value: 74.33229999999999 - type: nauc_ndcg_at_5_max value: 36.6236 - type: nauc_ndcg_at_5_std value: -38.3823 - type: nauc_ndcg_at_5_diff1 value: 74.8725 - type: nauc_ndcg_at_10_max value: 37.2726 - type: nauc_ndcg_at_10_std value: -37.6889 - type: nauc_ndcg_at_10_diff1 value: 75.437 - type: nauc_ndcg_at_20_max value: 37.3643 - type: nauc_ndcg_at_20_std value: -36.4545 - type: nauc_ndcg_at_20_diff1 value: 75.3032 - type: nauc_ndcg_at_100_max value: 37.701 - type: nauc_ndcg_at_100_std value: -34.6794 - type: nauc_ndcg_at_100_diff1 value: 75.1545 - type: nauc_ndcg_at_1000_max value: 37.7386 - type: nauc_ndcg_at_1000_std value: -34.659099999999995 - type: nauc_ndcg_at_1000_diff1 value: 75.1303 - type: nauc_map_at_1_max value: 28.3786 - type: nauc_map_at_1_std value: -34.4402 - type: nauc_map_at_1_diff1 value: 78.58579999999999 - type: nauc_map_at_3_max value: 34.1617 - type: nauc_map_at_3_std value: -39.0191 - type: nauc_map_at_3_diff1 value: 75.551 - type: nauc_map_at_5_max value: 35.2348 - type: nauc_map_at_5_std value: -39.352399999999996 - type: nauc_map_at_5_diff1 value: 75.45530000000001 - type: nauc_map_at_10_max value: 36.0009 - type: nauc_map_at_10_std value: -38.389 - type: nauc_map_at_10_diff1 value: 75.523 - type: nauc_map_at_20_max value: 36.167300000000004 - type: nauc_map_at_20_std value: -37.5191 - type: nauc_map_at_20_diff1 value: 75.3798 - type: nauc_map_at_100_max value: 36.2928 - type: nauc_map_at_100_std value: -36.8001 - type: nauc_map_at_100_diff1 value: 75.2957 - type: nauc_map_at_1000_max value: 36.3027 - type: nauc_map_at_1000_std value: -36.7641 - type: nauc_map_at_1000_diff1 value: 75.29090000000001 - type: nauc_recall_at_1_max value: 28.3786 - type: nauc_recall_at_1_std value: -34.4402 - type: nauc_recall_at_1_diff1 value: 78.58579999999999 - type: nauc_recall_at_3_max value: 32.1082 - type: nauc_recall_at_3_std value: -43.2936 - type: nauc_recall_at_3_diff1 value: 71.4939 - type: nauc_recall_at_5_max value: 32.590599999999995 - type: nauc_recall_at_5_std value: -48.7416 - type: nauc_recall_at_5_diff1 value: 70.7945 - type: nauc_recall_at_10_max value: 34.755 - type: nauc_recall_at_10_std value: -49.398599999999995 - type: nauc_recall_at_10_diff1 value: 71.87219999999999 - type: nauc_recall_at_20_max value: 33.879999999999995 - type: 
nauc_recall_at_20_std value: -45.1325 - type: nauc_recall_at_20_diff1 value: 71.3805 - type: nauc_recall_at_100_max value: 37.4684 - type: nauc_recall_at_100_std value: -13.0134 - type: nauc_recall_at_100_diff1 value: 69.963 - type: nauc_recall_at_1000_max value: 31.6199 - type: nauc_recall_at_1000_std value: 59.0228 - type: nauc_recall_at_1000_diff1 value: 60.9687 - type: nauc_precision_at_1_max value: 37.962 - type: nauc_precision_at_1_std value: -32.129999999999995 - type: nauc_precision_at_1_diff1 value: 76.2543 - type: nauc_precision_at_3_max value: 11.419799999999999 - type: nauc_precision_at_3_std value: 2.5604999999999998 - type: nauc_precision_at_3_diff1 value: -11.505799999999999 - type: nauc_precision_at_5_max value: 4.454700000000001 - type: nauc_precision_at_5_std value: 11.6986 - type: nauc_precision_at_5_diff1 value: -26.2868 - type: nauc_precision_at_10_max value: -0.4261 - type: nauc_precision_at_10_std value: 20.7877 - type: nauc_precision_at_10_diff1 value: -34.5624 - type: nauc_precision_at_20_max value: -3.7817000000000003 - type: nauc_precision_at_20_std value: 27.056599999999996 - type: nauc_precision_at_20_diff1 value: -39.0052 - type: nauc_precision_at_100_max value: -6.4321 - type: nauc_precision_at_100_std value: 33.1245 - type: nauc_precision_at_100_diff1 value: -41.9135 - type: nauc_precision_at_1000_max value: -7.100199999999999 - type: nauc_precision_at_1000_std value: 34.0081 - type: nauc_precision_at_1000_diff1 value: -42.556 - type: nauc_mrr_at_1_max value: 37.754 - type: nauc_mrr_at_1_std value: -32.2644 - type: nauc_mrr_at_1_diff1 value: 76.4182 - type: nauc_mrr_at_3_max value: 38.7583 - type: nauc_mrr_at_3_std value: -33.631699999999995 - type: nauc_mrr_at_3_diff1 value: 75.30369999999999 - type: nauc_mrr_at_5_max value: 38.675399999999996 - type: nauc_mrr_at_5_std value: -33.873 - type: nauc_mrr_at_5_diff1 value: 75.58890000000001 - type: nauc_mrr_at_10_max value: 38.7962 - type: nauc_mrr_at_10_std value: -33.5451 - type: nauc_mrr_at_10_diff1 value: 75.7153 - type: nauc_mrr_at_20_max value: 38.7213 - type: nauc_mrr_at_20_std value: -33.433600000000006 - type: nauc_mrr_at_20_diff1 value: 75.6934 - type: nauc_mrr_at_100_max value: 38.6943 - type: nauc_mrr_at_100_std value: -33.4013 - type: nauc_mrr_at_100_diff1 value: 75.6932 - type: nauc_mrr_at_1000_max value: 38.6928 - type: nauc_mrr_at_1000_std value: -33.4051 - type: nauc_mrr_at_1000_diff1 value: 75.69369999999999 - type: main_score value: 86.696 - task: type: Clustering dataset: name: MTEB RedditClustering (default) type: mteb/reddit-clustering config: default split: test revision: 24640382cdbf8abc73003fb0fa6d111a705499eb metrics: - type: v_measure value: 50.019999999999996 - type: v_measure_std value: 4.5914 - type: main_score value: 50.019999999999996 - task: type: Clustering dataset: name: MTEB RedditClusteringP2P (default) type: mteb/reddit-clustering-p2p config: default split: test revision: 385e3cb46b4cfa89021f56c4380204149d0efe33 metrics: - type: v_measure value: 53.9756 - type: v_measure_std value: 11.6573 - type: main_score value: 53.9756 - task: type: Retrieval dataset: name: MTEB SCIDOCS (default) type: mteb/scidocs config: default split: test revision: f8c2fcf00f625baaa80f62ec5bd9e1fff3b8ae88 metrics: - type: ndcg_at_1 value: 24.6 - type: ndcg_at_3 value: 20.896 - type: ndcg_at_5 value: 18.497 - type: ndcg_at_10 value: 22.542 - type: ndcg_at_20 value: 25.812 - type: ndcg_at_100 value: 32.326 - type: ndcg_at_1000 value: 38.279999999999994 - type: map_at_1 value: 4.988 - type: map_at_3 
value: 9.439 - type: map_at_5 value: 11.459999999999999 - type: map_at_10 value: 13.553 - type: map_at_20 value: 14.767 - type: map_at_100 value: 16.136 - type: map_at_1000 value: 16.512 - type: recall_at_1 value: 4.988 - type: recall_at_3 value: 12.046999999999999 - type: recall_at_5 value: 16.777 - type: recall_at_10 value: 24.212 - type: recall_at_20 value: 31.885 - type: recall_at_100 value: 53.105000000000004 - type: recall_at_1000 value: 82.02199999999999 - type: precision_at_1 value: 24.6 - type: precision_at_3 value: 19.8 - type: precision_at_5 value: 16.54 - type: precision_at_10 value: 11.940000000000001 - type: precision_at_20 value: 7.865 - type: precision_at_100 value: 2.616 - type: precision_at_1000 value: 0.404 - type: mrr_at_1 value: 24.6 - type: mrr_at_3 value: 33.1167 - type: mrr_at_5 value: 35.1717 - type: mrr_at_10 value: 36.7925 - type: mrr_at_20 value: 37.5284 - type: mrr_at_100 value: 37.9725 - type: mrr_at_1000 value: 38.0112 - type: nauc_ndcg_at_1_max value: 17.8923 - type: nauc_ndcg_at_1_std value: 9.1225 - type: nauc_ndcg_at_1_diff1 value: 22.665399999999998 - type: nauc_ndcg_at_3_max value: 23.6866 - type: nauc_ndcg_at_3_std value: 15.3093 - type: nauc_ndcg_at_3_diff1 value: 17.589299999999998 - type: nauc_ndcg_at_5_max value: 25.3398 - type: nauc_ndcg_at_5_std value: 18.002299999999998 - type: nauc_ndcg_at_5_diff1 value: 16.8155 - type: nauc_ndcg_at_10_max value: 28.057399999999998 - type: nauc_ndcg_at_10_std value: 22.7388 - type: nauc_ndcg_at_10_diff1 value: 16.0553 - type: nauc_ndcg_at_20_max value: 28.9134 - type: nauc_ndcg_at_20_std value: 25.389 - type: nauc_ndcg_at_20_diff1 value: 15.7728 - type: nauc_ndcg_at_100_max value: 29.9553 - type: nauc_ndcg_at_100_std value: 29.8607 - type: nauc_ndcg_at_100_diff1 value: 15.526100000000001 - type: nauc_ndcg_at_1000_max value: 29.088399999999996 - type: nauc_ndcg_at_1000_std value: 29.2896 - type: nauc_ndcg_at_1000_diff1 value: 15.2143 - type: nauc_map_at_1_max value: 17.9628 - type: nauc_map_at_1_std value: 8.9923 - type: nauc_map_at_1_diff1 value: 22.7227 - type: nauc_map_at_3_max value: 24.012700000000002 - type: nauc_map_at_3_std value: 15.1908 - type: nauc_map_at_3_diff1 value: 17.7637 - type: nauc_map_at_5_max value: 25.0497 - type: nauc_map_at_5_std value: 17.366300000000003 - type: nauc_map_at_5_diff1 value: 16.1512 - type: nauc_map_at_10_max value: 26.777299999999997 - type: nauc_map_at_10_std value: 21.0365 - type: nauc_map_at_10_diff1 value: 15.0999 - type: nauc_map_at_20_max value: 27.6561 - type: nauc_map_at_20_std value: 23.031399999999998 - type: nauc_map_at_20_diff1 value: 14.935300000000002 - type: nauc_map_at_100_max value: 28.015800000000002 - type: nauc_map_at_100_std value: 24.840899999999998 - type: nauc_map_at_100_diff1 value: 14.9355 - type: nauc_map_at_1000_max value: 27.9646 - type: nauc_map_at_1000_std value: 24.9601 - type: nauc_map_at_1000_diff1 value: 14.886 - type: nauc_recall_at_1_max value: 17.9628 - type: nauc_recall_at_1_std value: 8.9923 - type: nauc_recall_at_1_diff1 value: 22.7227 - type: nauc_recall_at_3_max value: 25.008399999999998 - type: nauc_recall_at_3_std value: 17.1697 - type: nauc_recall_at_3_diff1 value: 15.1082 - type: nauc_recall_at_5_max value: 26.4345 - type: nauc_recall_at_5_std value: 20.7923 - type: nauc_recall_at_5_diff1 value: 13.58 - type: nauc_recall_at_10_max value: 29.5057 - type: nauc_recall_at_10_std value: 27.8646 - type: nauc_recall_at_10_diff1 value: 11.8098 - type: nauc_recall_at_20_max value: 29.3419 - type: nauc_recall_at_20_std value: 31.6086 - 
type: nauc_recall_at_20_diff1 value: 10.6491 - type: nauc_recall_at_100_max value: 28.8421 - type: nauc_recall_at_100_std value: 40.2696 - type: nauc_recall_at_100_diff1 value: 8.1461 - type: nauc_recall_at_1000_max value: 22.8234 - type: nauc_recall_at_1000_std value: 41.6117 - type: nauc_recall_at_1000_diff1 value: 1.8689999999999998 - type: nauc_precision_at_1_max value: 17.8923 - type: nauc_precision_at_1_std value: 9.1225 - type: nauc_precision_at_1_diff1 value: 22.665399999999998 - type: nauc_precision_at_3_max value: 25.1067 - type: nauc_precision_at_3_std value: 17.4066 - type: nauc_precision_at_3_diff1 value: 15.0583 - type: nauc_precision_at_5_max value: 26.6005 - type: nauc_precision_at_5_std value: 20.9158 - type: nauc_precision_at_5_diff1 value: 13.591700000000001 - type: nauc_precision_at_10_max value: 29.8091 - type: nauc_precision_at_10_std value: 28.0069 - type: nauc_precision_at_10_diff1 value: 11.675699999999999 - type: nauc_precision_at_20_max value: 29.5651 - type: nauc_precision_at_20_std value: 31.439899999999998 - type: nauc_precision_at_20_diff1 value: 10.4784 - type: nauc_precision_at_100_max value: 28.853299999999997 - type: nauc_precision_at_100_std value: 39.3115 - type: nauc_precision_at_100_diff1 value: 7.6562 - type: nauc_precision_at_1000_max value: 23.025599999999997 - type: nauc_precision_at_1000_std value: 38.554300000000005 - type: nauc_precision_at_1000_diff1 value: 1.3502999999999998 - type: nauc_mrr_at_1_max value: 17.8923 - type: nauc_mrr_at_1_std value: 9.1225 - type: nauc_mrr_at_1_diff1 value: 22.665399999999998 - type: nauc_mrr_at_3_max value: 21.2588 - type: nauc_mrr_at_3_std value: 12.7528 - type: nauc_mrr_at_3_diff1 value: 19.808999999999997 - type: nauc_mrr_at_5_max value: 22.572200000000002 - type: nauc_mrr_at_5_std value: 14.210500000000001 - type: nauc_mrr_at_5_diff1 value: 20.502000000000002 - type: nauc_mrr_at_10_max value: 23.372799999999998 - type: nauc_mrr_at_10_std value: 15.1215 - type: nauc_mrr_at_10_diff1 value: 20.8449 - type: nauc_mrr_at_20_max value: 23.017599999999998 - type: nauc_mrr_at_20_std value: 15.0391 - type: nauc_mrr_at_20_diff1 value: 20.8233 - type: nauc_mrr_at_100_max value: 22.8993 - type: nauc_mrr_at_100_std value: 14.8474 - type: nauc_mrr_at_100_diff1 value: 20.8759 - type: nauc_mrr_at_1000_max value: 22.8744 - type: nauc_mrr_at_1000_std value: 14.8178 - type: nauc_mrr_at_1000_diff1 value: 20.8635 - type: main_score value: 22.542 - task: type: STS dataset: name: MTEB SICK-R (default) type: mteb/sickr-sts config: default split: test revision: 20a6d6f312dd54037fe07a32d58e5e168867909d metrics: - type: pearson value: 77.4874 - type: spearman value: 68.79809999999999 - type: cosine_pearson value: 77.4874 - type: cosine_spearman value: 68.79809999999999 - type: manhattan_pearson value: 73.3583 - type: manhattan_spearman value: 68.6911 - type: euclidean_pearson value: 73.82039999999999 - type: euclidean_spearman value: 68.79809999999999 - type: main_score value: 68.79809999999999 - task: type: STS dataset: name: MTEB STS12 (default) type: mteb/sts12-sts config: default split: test revision: a0d554a64d88156834ff5ae9920b964011b16384 metrics: - type: pearson value: 67.8391 - type: spearman value: 64.77380000000001 - type: cosine_pearson value: 67.8391 - type: cosine_spearman value: 64.77380000000001 - type: manhattan_pearson value: 64.7258 - type: manhattan_spearman value: 64.1558 - type: euclidean_pearson value: 65.68469999999999 - type: euclidean_spearman value: 64.7722 - type: main_score value: 64.77380000000001 - 
task: type: STS dataset: name: MTEB STS13 (default) type: mteb/sts13-sts config: default split: test revision: 7e90230a92c190f1bf69ae9002b8cea547a64cca metrics: - type: pearson value: 78.8177 - type: spearman value: 79.3253 - type: cosine_pearson value: 78.8177 - type: cosine_spearman value: 79.3253 - type: manhattan_pearson value: 78.6048 - type: manhattan_spearman value: 79.1874 - type: euclidean_pearson value: 78.71010000000001 - type: euclidean_spearman value: 79.3253 - type: main_score value: 79.3253 - task: type: STS dataset: name: MTEB STS14 (default) type: mteb/sts14-sts config: default split: test revision: 6031580fec1f6af667f0bd2da0a551cf4f0b2375 metrics: - type: pearson value: 75.6791 - type: spearman value: 70.1701 - type: cosine_pearson value: 75.6791 - type: cosine_spearman value: 70.1701 - type: manhattan_pearson value: 73.85239999999999 - type: manhattan_spearman value: 69.9223 - type: euclidean_pearson value: 74.143 - type: euclidean_spearman value: 70.1701 - type: main_score value: 70.1701 - task: type: STS dataset: name: MTEB STS15 (default) type: mteb/sts15-sts config: default split: test revision: ae752c7c21bf194d8b67fd573edf7ae58183cbe3 metrics: - type: pearson value: 80.4413 - type: spearman value: 82.0343 - type: cosine_pearson value: 80.4413 - type: cosine_spearman value: 82.0343 - type: manhattan_pearson value: 81.3627 - type: manhattan_spearman value: 81.8838 - type: euclidean_pearson value: 81.47569999999999 - type: euclidean_spearman value: 82.0343 - type: main_score value: 82.0343 - task: type: STS dataset: name: MTEB STS16 (default) type: mteb/sts16-sts config: default split: test revision: 4d8694f8f0e0100860b497b999b3dbed754a0513 metrics: - type: pearson value: 77.172 - type: spearman value: 78.9633 - type: cosine_pearson value: 77.172 - type: cosine_spearman value: 78.9633 - type: manhattan_pearson value: 78.35849999999999 - type: manhattan_spearman value: 78.7975 - type: euclidean_pearson value: 78.5236 - type: euclidean_spearman value: 78.9633 - type: main_score value: 78.9633 - task: type: STS dataset: name: MTEB STS17 (en-en) type: mteb/sts17-crosslingual-sts config: en-en split: test revision: faeb762787bd10488a50c8b5be4a3b82e411949c metrics: - type: pearson value: 83.5117 - type: spearman value: 84.64970000000001 - type: cosine_pearson value: 83.5117 - type: cosine_spearman value: 84.64970000000001 - type: manhattan_pearson value: 84.5137 - type: manhattan_spearman value: 84.7848 - type: euclidean_pearson value: 84.531 - type: euclidean_spearman value: 84.64970000000001 - type: main_score value: 84.64970000000001 - task: type: STS dataset: name: MTEB STS17 (es-en) type: mteb/sts17-crosslingual-sts config: es-en split: test revision: faeb762787bd10488a50c8b5be4a3b82e411949c metrics: - type: pearson value: 29.0052 - type: spearman value: 30.640299999999996 - type: cosine_pearson value: 29.0052 - type: cosine_spearman value: 30.640299999999996 - type: manhattan_pearson value: 25.988099999999996 - type: manhattan_spearman value: 26.935399999999998 - type: euclidean_pearson value: 28.5366 - type: euclidean_spearman value: 30.640299999999996 - type: main_score value: 30.640299999999996 - task: type: STS dataset: name: MTEB STS17 (nl-en) type: mteb/sts17-crosslingual-sts config: nl-en split: test revision: faeb762787bd10488a50c8b5be4a3b82e411949c metrics: - type: pearson value: 42.0755 - type: spearman value: 39.763999999999996 - type: cosine_pearson value: 42.0755 - type: cosine_spearman value: 39.763999999999996 - type: manhattan_pearson value: 40.872 - 
type: manhattan_spearman value: 38.4749 - type: euclidean_pearson value: 42.051500000000004 - type: euclidean_spearman value: 39.7565 - type: main_score value: 39.763999999999996 - task: type: STS dataset: name: MTEB STS17 (en-de) type: mteb/sts17-crosslingual-sts config: en-de split: test revision: faeb762787bd10488a50c8b5be4a3b82e411949c metrics: - type: pearson value: 44.2318 - type: spearman value: 46.5518 - type: cosine_pearson value: 44.2318 - type: cosine_spearman value: 46.5518 - type: manhattan_pearson value: 43.396699999999996 - type: manhattan_spearman value: 46.1132 - type: euclidean_pearson value: 43.993500000000004 - type: euclidean_spearman value: 46.5518 - type: main_score value: 46.5518 - task: type: STS dataset: name: MTEB STS17 (fr-en) type: mteb/sts17-crosslingual-sts config: fr-en split: test revision: faeb762787bd10488a50c8b5be4a3b82e411949c metrics: - type: pearson value: 36.716100000000004 - type: spearman value: 34.6968 - type: cosine_pearson value: 36.716100000000004 - type: cosine_spearman value: 34.6968 - type: manhattan_pearson value: 35.1918 - type: manhattan_spearman value: 33.3692 - type: euclidean_pearson value: 36.3921 - type: euclidean_spearman value: 34.6968 - type: main_score value: 34.6968 - task: type: STS dataset: name: MTEB STS17 (en-ar) type: mteb/sts17-crosslingual-sts config: en-ar split: test revision: faeb762787bd10488a50c8b5be4a3b82e411949c metrics: - type: pearson value: 21.2825 - type: spearman value: 17.6922 - type: cosine_pearson value: 21.2825 - type: cosine_spearman value: 17.6922 - type: manhattan_pearson value: 19.491 - type: manhattan_spearman value: 15.989700000000001 - type: euclidean_pearson value: 21.583 - type: euclidean_spearman value: 17.6922 - type: main_score value: 17.6922 - task: type: STS dataset: name: MTEB STS17 (it-en) type: mteb/sts17-crosslingual-sts config: it-en split: test revision: faeb762787bd10488a50c8b5be4a3b82e411949c metrics: - type: pearson value: 32.1584 - type: spearman value: 27.9254 - type: cosine_pearson value: 32.1584 - type: cosine_spearman value: 27.9254 - type: manhattan_pearson value: 34.2047 - type: manhattan_spearman value: 31.1955 - type: euclidean_pearson value: 32.4369 - type: euclidean_spearman value: 27.9254 - type: main_score value: 27.9254 - task: type: STS dataset: name: MTEB STS17 (en-tr) type: mteb/sts17-crosslingual-sts config: en-tr split: test revision: faeb762787bd10488a50c8b5be4a3b82e411949c metrics: - type: pearson value: 21.0842 - type: spearman value: 18.5115 - type: cosine_pearson value: 21.0842 - type: cosine_spearman value: 18.5115 - type: manhattan_pearson value: 23.5904 - type: manhattan_spearman value: 21.032400000000003 - type: euclidean_pearson value: 21.2805 - type: euclidean_spearman value: 18.5115 - type: main_score value: 18.5115 - task: type: STS dataset: name: MTEB STS22 (en) type: mteb/sts22-crosslingual-sts config: en split: test revision: de9d86b3b84231dc21f76c7b7af1f28e2f57f6e3 metrics: - type: pearson value: 66.9563 - type: spearman value: 67.4747 - type: cosine_pearson value: 66.9563 - type: cosine_spearman value: 67.4747 - type: manhattan_pearson value: 68.32629999999999 - type: manhattan_spearman value: 66.8163 - type: euclidean_pearson value: 68.731 - type: euclidean_spearman value: 67.4747 - type: main_score value: 67.4747 - task: type: STS dataset: name: MTEB STS22 (de-en) type: mteb/sts22-crosslingual-sts config: de-en split: test revision: de9d86b3b84231dc21f76c7b7af1f28e2f57f6e3 metrics: - type: pearson value: 56.3095 - type: spearman value: 54.1005 - 
type: cosine_pearson value: 56.3095 - type: cosine_spearman value: 54.1005 - type: manhattan_pearson value: 59.4023 - type: manhattan_spearman value: 52.6259 - type: euclidean_pearson value: 58.6527 - type: euclidean_spearman value: 54.1005 - type: main_score value: 54.1005 - task: type: STS dataset: name: MTEB STS22 (es-en) type: mteb/sts22-crosslingual-sts config: es-en split: test revision: de9d86b3b84231dc21f76c7b7af1f28e2f57f6e3 metrics: - type: pearson value: 62.0575 - type: spearman value: 66.9527 - type: cosine_pearson value: 62.0575 - type: cosine_spearman value: 66.9527 - type: manhattan_pearson value: 62.648700000000005 - type: manhattan_spearman value: 65.6446 - type: euclidean_pearson value: 63.546800000000005 - type: euclidean_spearman value: 66.9527 - type: main_score value: 66.9527 - task: type: STS dataset: name: MTEB STS22 (pl-en) type: mteb/sts22-crosslingual-sts config: pl-en split: test revision: de9d86b3b84231dc21f76c7b7af1f28e2f57f6e3 metrics: - type: pearson value: 68.42439999999999 - type: spearman value: 69.0444 - type: cosine_pearson value: 68.42439999999999 - type: cosine_spearman value: 69.0444 - type: manhattan_pearson value: 65.1492 - type: manhattan_spearman value: 65.2364 - type: euclidean_pearson value: 68.4923 - type: euclidean_spearman value: 69.0444 - type: main_score value: 69.0444 - task: type: STS dataset: name: MTEB STS22 (zh-en) type: mteb/sts22-crosslingual-sts config: zh-en split: test revision: de9d86b3b84231dc21f76c7b7af1f28e2f57f6e3 metrics: - type: pearson value: 34.164699999999996 - type: spearman value: 36.1776 - type: cosine_pearson value: 34.164699999999996 - type: cosine_spearman value: 36.1776 - type: manhattan_pearson value: 33.0685 - type: manhattan_spearman value: 34.4054 - type: euclidean_pearson value: 34.1002 - type: euclidean_spearman value: 36.1776 - type: main_score value: 36.1776 - task: type: STS dataset: name: MTEB STSBenchmark (default) type: mteb/stsbenchmark-sts config: default split: test revision: b0fddb56ed78048fa8b90373c8a3cfc37b684831 metrics: - type: pearson value: 78.0802 - type: spearman value: 78.0444 - type: cosine_pearson value: 78.0802 - type: cosine_spearman value: 78.0444 - type: manhattan_pearson value: 78.0703 - type: manhattan_spearman value: 77.681 - type: euclidean_pearson value: 78.4998 - type: euclidean_spearman value: 78.0444 - type: main_score value: 78.0444 - task: type: Reranking dataset: name: MTEB SciDocsRR (default) type: mteb/scidocs-reranking config: default split: test revision: d3c5e1fc0b855ab6097bf1cda04dd73947d7caab metrics: - type: map value: 86.4489 - type: mrr value: 96.0178 - type: nAUC_map_max value: 49.2333 - type: nAUC_map_std value: 63.6541 - type: nAUC_map_diff1 value: 0.40959999999999996 - type: nAUC_mrr_max value: 83.6216 - type: nAUC_mrr_std value: 76.7559 - type: nAUC_mrr_diff1 value: 42.9429 - type: main_score value: 86.4489 - task: type: Retrieval dataset: name: MTEB SciFact (default) type: mteb/scifact config: default split: test revision: 0228b52cf27578f30900b9e5271d331663a030d7 metrics: - type: ndcg_at_1 value: 59.333000000000006 - type: ndcg_at_3 value: 65.793 - type: ndcg_at_5 value: 69.429 - type: ndcg_at_10 value: 71.27 - type: ndcg_at_20 value: 72.929 - type: ndcg_at_100 value: 73.88900000000001 - type: ndcg_at_1000 value: 74.41 - type: map_at_1 value: 56.577999999999996 - type: map_at_3 value: 63.416 - type: map_at_5 value: 65.77 - type: map_at_10 value: 66.725 - type: map_at_20 value: 67.24799999999999 - type: map_at_100 value: 67.379 - type: map_at_1000 value: 
67.4 - type: recall_at_1 value: 56.577999999999996 - type: recall_at_3 value: 70.072 - type: recall_at_5 value: 79.011 - type: recall_at_10 value: 84.2 - type: recall_at_20 value: 90.5 - type: recall_at_100 value: 95.667 - type: recall_at_1000 value: 99.667 - type: precision_at_1 value: 59.333000000000006 - type: precision_at_3 value: 25.556 - type: precision_at_5 value: 17.666999999999998 - type: precision_at_10 value: 9.6 - type: precision_at_20 value: 5.167 - type: precision_at_100 value: 1.087 - type: precision_at_1000 value: 0.11299999999999999 - type: mrr_at_1 value: 59.3333 - type: mrr_at_3 value: 64.9444 - type: mrr_at_5 value: 66.9278 - type: mrr_at_10 value: 67.5327 - type: mrr_at_20 value: 67.9354 - type: mrr_at_100 value: 68.0616 - type: mrr_at_1000 value: 68.08239999999999 - type: nauc_ndcg_at_1_max value: 62.536199999999994 - type: nauc_ndcg_at_1_std value: 4.3275 - type: nauc_ndcg_at_1_diff1 value: 78.2294 - type: nauc_ndcg_at_3_max value: 63.0626 - type: nauc_ndcg_at_3_std value: 6.0584 - type: nauc_ndcg_at_3_diff1 value: 74.4931 - type: nauc_ndcg_at_5_max value: 64.73989999999999 - type: nauc_ndcg_at_5_std value: 5.6514 - type: nauc_ndcg_at_5_diff1 value: 73.5498 - type: nauc_ndcg_at_10_max value: 65.43090000000001 - type: nauc_ndcg_at_10_std value: 9.1274 - type: nauc_ndcg_at_10_diff1 value: 72.4814 - type: nauc_ndcg_at_20_max value: 65.7156 - type: nauc_ndcg_at_20_std value: 9.9385 - type: nauc_ndcg_at_20_diff1 value: 73.0996 - type: nauc_ndcg_at_100_max value: 65.5687 - type: nauc_ndcg_at_100_std value: 8.818299999999999 - type: nauc_ndcg_at_100_diff1 value: 73.6361 - type: nauc_ndcg_at_1000_max value: 65.1956 - type: nauc_ndcg_at_1000_std value: 8.4772 - type: nauc_ndcg_at_1000_diff1 value: 74.0393 - type: nauc_map_at_1_max value: 58.2314 - type: nauc_map_at_1_std value: -2.7946 - type: nauc_map_at_1_diff1 value: 78.24940000000001 - type: nauc_map_at_3_max value: 61.364200000000004 - type: nauc_map_at_3_std value: 2.7072 - type: nauc_map_at_3_diff1 value: 75.4798 - type: nauc_map_at_5_max value: 63.1297 - type: nauc_map_at_5_std value: 3.9505 - type: nauc_map_at_5_diff1 value: 74.9693 - type: nauc_map_at_10_max value: 63.6643 - type: nauc_map_at_10_std value: 5.8328999999999995 - type: nauc_map_at_10_diff1 value: 74.5464 - type: nauc_map_at_20_max value: 63.8666 - type: nauc_map_at_20_std value: 6.1967 - type: nauc_map_at_20_diff1 value: 74.7224 - type: nauc_map_at_100_max value: 63.8254 - type: nauc_map_at_100_std value: 6.0627 - type: nauc_map_at_100_diff1 value: 74.791 - type: nauc_map_at_1000_max value: 63.811499999999995 - type: nauc_map_at_1000_std value: 6.0484 - type: nauc_map_at_1000_diff1 value: 74.807 - type: nauc_recall_at_1_max value: 58.2314 - type: nauc_recall_at_1_std value: -2.7946 - type: nauc_recall_at_1_diff1 value: 78.24940000000001 - type: nauc_recall_at_3_max value: 61.132299999999994 - type: nauc_recall_at_3_std value: 6.1988 - type: nauc_recall_at_3_diff1 value: 70.7273 - type: nauc_recall_at_5_max value: 66.542 - type: nauc_recall_at_5_std value: 5.7653 - type: nauc_recall_at_5_diff1 value: 66.4586 - type: nauc_recall_at_10_max value: 69.3605 - type: nauc_recall_at_10_std value: 19.6237 - type: nauc_recall_at_10_diff1 value: 60.2814 - type: nauc_recall_at_20_max value: 72.6154 - type: nauc_recall_at_20_std value: 31.3504 - type: nauc_recall_at_20_diff1 value: 58.8899 - type: nauc_recall_at_100_max value: 78.6002 - type: nauc_recall_at_100_std value: 26.484999999999996 - type: nauc_recall_at_100_diff1 value: 56.4605 - type: 
nauc_recall_at_1000_max value: 55.415499999999994 - type: nauc_recall_at_1000_std value: 72.2222 - type: nauc_recall_at_1000_diff1 value: 35.8077 - type: nauc_precision_at_1_max value: 62.536199999999994 - type: nauc_precision_at_1_std value: 4.3275 - type: nauc_precision_at_1_diff1 value: 78.2294 - type: nauc_precision_at_3_max value: 53.5524 - type: nauc_precision_at_3_std value: 23.5724 - type: nauc_precision_at_3_diff1 value: 47.5389 - type: nauc_precision_at_5_max value: 49.1594 - type: nauc_precision_at_5_std value: 32.3563 - type: nauc_precision_at_5_diff1 value: 28.2105 - type: nauc_precision_at_10_max value: 41.955799999999996 - type: nauc_precision_at_10_std value: 44.039699999999996 - type: nauc_precision_at_10_diff1 value: 12.0187 - type: nauc_precision_at_20_max value: 34.2442 - type: nauc_precision_at_20_std value: 50.204899999999995 - type: nauc_precision_at_20_diff1 value: -0.1954 - type: nauc_precision_at_100_max value: 26.8264 - type: nauc_precision_at_100_std value: 51.4247 - type: nauc_precision_at_100_diff1 value: -11.9827 - type: nauc_precision_at_1000_max value: 17.467 - type: nauc_precision_at_1000_std value: 56.435100000000006 - type: nauc_precision_at_1000_diff1 value: -24.2103 - type: nauc_mrr_at_1_max value: 62.536199999999994 - type: nauc_mrr_at_1_std value: 4.3275 - type: nauc_mrr_at_1_diff1 value: 78.2294 - type: nauc_mrr_at_3_max value: 64.5911 - type: nauc_mrr_at_3_std value: 7.8005 - type: nauc_mrr_at_3_diff1 value: 75.82140000000001 - type: nauc_mrr_at_5_max value: 65.1643 - type: nauc_mrr_at_5_std value: 7.258100000000001 - type: nauc_mrr_at_5_diff1 value: 75.2062 - type: nauc_mrr_at_10_max value: 65.3198 - type: nauc_mrr_at_10_std value: 8.2173 - type: nauc_mrr_at_10_diff1 value: 74.9449 - type: nauc_mrr_at_20_max value: 65.2169 - type: nauc_mrr_at_20_std value: 8.115400000000001 - type: nauc_mrr_at_20_diff1 value: 75.1765 - type: nauc_mrr_at_100_max value: 65.1744 - type: nauc_mrr_at_100_std value: 7.994700000000001 - type: nauc_mrr_at_100_diff1 value: 75.2388 - type: nauc_mrr_at_1000_max value: 65.1615 - type: nauc_mrr_at_1000_std value: 7.9817 - type: nauc_mrr_at_1000_diff1 value: 75.2553 - type: main_score value: 71.27 - task: type: PairClassification dataset: name: MTEB SprintDuplicateQuestions (default) type: mteb/sprintduplicatequestions-pairclassification config: default split: test revision: d66bd1f72af766a5cc4b0ca5e00c162f89e8cc46 metrics: - type: similarity_accuracy value: 99.7604 - type: similarity_accuracy_threshold value: 84.88210000000001 - type: similarity_f1 value: 87.86359999999999 - type: similarity_f1_threshold value: 84.88210000000001 - type: similarity_precision value: 88.1288 - type: similarity_recall value: 87.6 - type: similarity_ap value: 94.07140000000001 - type: cosine_accuracy value: 99.7604 - type: cosine_accuracy_threshold value: 84.88210000000001 - type: cosine_f1 value: 87.86359999999999 - type: cosine_f1_threshold value: 84.88210000000001 - type: cosine_precision value: 88.1288 - type: cosine_recall value: 87.6 - type: cosine_ap value: 94.07140000000001 - type: manhattan_accuracy value: 99.7644 - type: manhattan_accuracy_threshold value: 829.5789 - type: manhattan_f1 value: 87.92320000000001 - type: manhattan_f1_threshold value: 840.6424 - type: manhattan_precision value: 88.86619999999999 - type: manhattan_recall value: 87.0 - type: manhattan_ap value: 94.17 - type: euclidean_accuracy value: 99.7604 - type: euclidean_accuracy_threshold value: 54.986999999999995 - type: euclidean_f1 value: 87.86359999999999 - type: 
euclidean_f1_threshold value: 54.986999999999995 - type: euclidean_precision value: 88.1288 - type: euclidean_recall value: 87.6 - type: euclidean_ap value: 94.07140000000001 - type: dot_accuracy value: 99.7604 - type: dot_accuracy_threshold value: 84.88210000000001 - type: dot_f1 value: 87.86359999999999 - type: dot_f1_threshold value: 84.88210000000001 - type: dot_precision value: 88.1288 - type: dot_recall value: 87.6 - type: dot_ap value: 94.07140000000001 - type: max_accuracy value: 99.7644 - type: max_f1 value: 87.92320000000001 - type: max_precision value: 88.86619999999999 - type: max_recall value: 87.6 - type: max_ap value: 94.17 - type: main_score value: 94.17 - task: type: Clustering dataset: name: MTEB StackExchangeClustering (default) type: mteb/stackexchange-clustering config: default split: test revision: 6cbc1f7b2bc0622f2e39d2c77fa502909748c259 metrics: - type: v_measure value: 64.6589 - type: v_measure_std value: 4.734 - type: main_score value: 64.6589 - task: type: Clustering dataset: name: MTEB StackExchangeClusteringP2P (default) type: mteb/stackexchange-clustering-p2p config: default split: test revision: 815ca46b2622cec33ccafc3735d572c266efdb44 metrics: - type: v_measure value: 32.9388 - type: v_measure_std value: 1.6312 - type: main_score value: 32.9388 - task: type: Reranking dataset: name: MTEB StackOverflowDupQuestions (default) type: mteb/stackoverflowdupquestions-reranking config: default split: test revision: e185fbe320c72810689fc5848eb6114e1ef5ec69 metrics: - type: map value: 52.645399999999995 - type: mrr value: 53.5346 - type: nAUC_map_max value: 12.8874 - type: nAUC_map_std value: 9.2781 - type: nAUC_map_diff1 value: 39.864 - type: nAUC_mrr_max value: 13.278 - type: nAUC_mrr_std value: 9.501999999999999 - type: nAUC_mrr_diff1 value: 39.409499999999994 - type: main_score value: 52.645399999999995 - task: type: Retrieval dataset: name: MTEB StackOverflowQA (default) type: CoIR-Retrieval/stackoverflow-qa config: default split: test revision: db8f169f3894c14a00251061f957b2063eef2bd5 metrics: - type: ndcg_at_1 value: 74.97500000000001 - type: ndcg_at_3 value: 81.247 - type: ndcg_at_5 value: 82.921 - type: ndcg_at_10 value: 83.92699999999999 - type: ndcg_at_20 value: 84.57000000000001 - type: ndcg_at_100 value: 85.095 - type: ndcg_at_1000 value: 85.33800000000001 - type: map_at_1 value: 74.97500000000001 - type: map_at_3 value: 79.781 - type: map_at_5 value: 80.711 - type: map_at_10 value: 81.126 - type: map_at_20 value: 81.308 - type: map_at_100 value: 81.389 - type: map_at_1000 value: 81.39699999999999 - type: recall_at_1 value: 74.97500000000001 - type: recall_at_3 value: 85.456 - type: recall_at_5 value: 89.519 - type: recall_at_10 value: 92.628 - type: recall_at_20 value: 95.135 - type: recall_at_100 value: 97.844 - type: recall_at_1000 value: 99.799 - type: precision_at_1 value: 74.97500000000001 - type: precision_at_3 value: 28.485 - type: precision_at_5 value: 17.904 - type: precision_at_10 value: 9.263 - type: precision_at_20 value: 4.757 - type: precision_at_100 value: 0.9780000000000001 - type: precision_at_1000 value: 0.1 - type: mrr_at_1 value: 74.9749 - type: mrr_at_3 value: 79.781 - type: mrr_at_5 value: 80.7113 - type: mrr_at_10 value: 81.12610000000001 - type: mrr_at_20 value: 81.30760000000001 - type: mrr_at_100 value: 81.38889999999999 - type: mrr_at_1000 value: 81.3974 - type: nauc_ndcg_at_1_max value: 76.1721 - type: nauc_ndcg_at_1_std value: -5.5159 - type: nauc_ndcg_at_1_diff1 value: 84.6697 - type: nauc_ndcg_at_3_max value: 
78.27629999999999 - type: nauc_ndcg_at_3_std value: -1.2 - type: nauc_ndcg_at_3_diff1 value: 81.1214 - type: nauc_ndcg_at_5_max value: 77.7687 - type: nauc_ndcg_at_5_std value: -1.8698 - type: nauc_ndcg_at_5_diff1 value: 80.9252 - type: nauc_ndcg_at_10_max value: 77.8029 - type: nauc_ndcg_at_10_std value: -1.5579 - type: nauc_ndcg_at_10_diff1 value: 81.1043 - type: nauc_ndcg_at_20_max value: 77.79310000000001 - type: nauc_ndcg_at_20_std value: -1.7669000000000001 - type: nauc_ndcg_at_20_diff1 value: 81.4121 - type: nauc_ndcg_at_100_max value: 77.7522 - type: nauc_ndcg_at_100_std value: -1.4502 - type: nauc_ndcg_at_100_diff1 value: 81.684 - type: nauc_ndcg_at_1000_max value: 77.6032 - type: nauc_ndcg_at_1000_std value: -2.0256 - type: nauc_ndcg_at_1000_diff1 value: 81.7641 - type: nauc_map_at_1_max value: 76.1721 - type: nauc_map_at_1_std value: -5.5159 - type: nauc_map_at_1_diff1 value: 84.6697 - type: nauc_map_at_3_max value: 77.6991 - type: nauc_map_at_3_std value: -2.3189 - type: nauc_map_at_3_diff1 value: 82.0708 - type: nauc_map_at_5_max value: 77.4286 - type: nauc_map_at_5_std value: -2.721 - type: nauc_map_at_5_diff1 value: 82.0265 - type: nauc_map_at_10_max value: 77.4212 - type: nauc_map_at_10_std value: -2.633 - type: nauc_map_at_10_diff1 value: 82.109 - type: nauc_map_at_20_max value: 77.4188 - type: nauc_map_at_20_std value: -2.6752000000000002 - type: nauc_map_at_20_diff1 value: 82.19340000000001 - type: nauc_map_at_100_max value: 77.4169 - type: nauc_map_at_100_std value: -2.6487 - type: nauc_map_at_100_diff1 value: 82.2353 - type: nauc_map_at_1000_max value: 77.413 - type: nauc_map_at_1000_std value: -2.6639 - type: nauc_map_at_1000_diff1 value: 82.238 - type: nauc_recall_at_1_max value: 76.1721 - type: nauc_recall_at_1_std value: -5.5159 - type: nauc_recall_at_1_diff1 value: 84.6697 - type: nauc_recall_at_3_max value: 80.4678 - type: nauc_recall_at_3_std value: 3.0113000000000003 - type: nauc_recall_at_3_diff1 value: 77.5303 - type: nauc_recall_at_5_max value: 79.2732 - type: nauc_recall_at_5_std value: 2.0842 - type: nauc_recall_at_5_diff1 value: 75.5155 - type: nauc_recall_at_10_max value: 80.2527 - type: nauc_recall_at_10_std value: 5.7078 - type: nauc_recall_at_10_diff1 value: 74.4861 - type: nauc_recall_at_20_max value: 81.29950000000001 - type: nauc_recall_at_20_std value: 6.5553 - type: nauc_recall_at_20_diff1 value: 74.5628 - type: nauc_recall_at_100_max value: 83.8742 - type: nauc_recall_at_100_std value: 28.4213 - type: nauc_recall_at_100_diff1 value: 74.4027 - type: nauc_recall_at_1000_max value: 60.9178 - type: nauc_recall_at_1000_std value: -2.6599 - type: nauc_recall_at_1000_diff1 value: 47.6074 - type: nauc_precision_at_1_max value: 76.1721 - type: nauc_precision_at_1_std value: -5.5159 - type: nauc_precision_at_1_diff1 value: 84.6697 - type: nauc_precision_at_3_max value: 80.4678 - type: nauc_precision_at_3_std value: 3.0113000000000003 - type: nauc_precision_at_3_diff1 value: 77.5303 - type: nauc_precision_at_5_max value: 79.2732 - type: nauc_precision_at_5_std value: 2.0842 - type: nauc_precision_at_5_diff1 value: 75.5155 - type: nauc_precision_at_10_max value: 80.2527 - type: nauc_precision_at_10_std value: 5.7078 - type: nauc_precision_at_10_diff1 value: 74.4861 - type: nauc_precision_at_20_max value: 81.29950000000001 - type: nauc_precision_at_20_std value: 6.5553 - type: nauc_precision_at_20_diff1 value: 74.5628 - type: nauc_precision_at_100_max value: 83.8742 - type: nauc_precision_at_100_std value: 28.4213 - type: nauc_precision_at_100_diff1 value: 
74.4027 - type: nauc_precision_at_1000_max value: 60.9178 - type: nauc_precision_at_1000_std value: -2.6599 - type: nauc_precision_at_1000_diff1 value: 47.6074 - type: nauc_mrr_at_1_max value: 76.1721 - type: nauc_mrr_at_1_std value: -5.5159 - type: nauc_mrr_at_1_diff1 value: 84.6697 - type: nauc_mrr_at_3_max value: 77.6991 - type: nauc_mrr_at_3_std value: -2.3189 - type: nauc_mrr_at_3_diff1 value: 82.0708 - type: nauc_mrr_at_5_max value: 77.4286 - type: nauc_mrr_at_5_std value: -2.721 - type: nauc_mrr_at_5_diff1 value: 82.0265 - type: nauc_mrr_at_10_max value: 77.4212 - type: nauc_mrr_at_10_std value: -2.633 - type: nauc_mrr_at_10_diff1 value: 82.109 - type: nauc_mrr_at_20_max value: 77.4188 - type: nauc_mrr_at_20_std value: -2.6752000000000002 - type: nauc_mrr_at_20_diff1 value: 82.19340000000001 - type: nauc_mrr_at_100_max value: 77.4169 - type: nauc_mrr_at_100_std value: -2.6487 - type: nauc_mrr_at_100_diff1 value: 82.2353 - type: nauc_mrr_at_1000_max value: 77.413 - type: nauc_mrr_at_1000_std value: -2.6639 - type: nauc_mrr_at_1000_diff1 value: 82.238 - type: main_score value: 83.92699999999999 - task: type: Summarization dataset: name: MTEB SummEval (default) type: mteb/summeval config: default split: test revision: cda12ad7615edc362dbf25a00fdd61d3b1eaf93c metrics: - type: pearson value: 29.8395 - type: spearman value: 29.383 - type: cosine_spearman value: 29.383 - type: cosine_pearson value: 29.8395 - type: dot_spearman value: 29.383 - type: dot_pearson value: 29.8395 - type: main_score value: 29.383 - task: type: Retrieval dataset: name: MTEB SyntheticText2SQL (default) type: CoIR-Retrieval/synthetic-text2sql config: default split: test revision: 686b87296c3a0191b5d9415a00526c62db9fce09 metrics: - type: ndcg_at_1 value: 4.222 - type: ndcg_at_3 value: 38.329 - type: ndcg_at_5 value: 42.076 - type: ndcg_at_10 value: 44.775 - type: ndcg_at_20 value: 46.528999999999996 - type: ndcg_at_100 value: 48.554 - type: ndcg_at_1000 value: 49.143 - type: map_at_1 value: 4.222 - type: map_at_3 value: 30.676 - type: map_at_5 value: 32.76 - type: map_at_10 value: 33.898 - type: map_at_20 value: 34.386 - type: map_at_100 value: 34.677 - type: map_at_1000 value: 34.701 - type: recall_at_1 value: 4.222 - type: recall_at_3 value: 60.178 - type: recall_at_5 value: 69.253 - type: recall_at_10 value: 77.474 - type: recall_at_20 value: 84.36200000000001 - type: recall_at_100 value: 95.12899999999999 - type: recall_at_1000 value: 99.675 - type: precision_at_1 value: 4.222 - type: precision_at_3 value: 20.058999999999997 - type: precision_at_5 value: 13.850999999999999 - type: precision_at_10 value: 7.747 - type: precision_at_20 value: 4.218 - type: precision_at_100 value: 0.951 - type: precision_at_1000 value: 0.1 - type: mrr_at_1 value: 27.3287 - type: mrr_at_3 value: 43.8956 - type: mrr_at_5 value: 45.656 - type: mrr_at_10 value: 46.6697 - type: mrr_at_20 value: 47.1331 - type: mrr_at_100 value: 47.4153 - type: mrr_at_1000 value: 47.4391 - type: nauc_ndcg_at_1_max value: 16.045 - type: nauc_ndcg_at_1_std value: -8.7715 - type: nauc_ndcg_at_1_diff1 value: 48.4886 - type: nauc_ndcg_at_3_max value: 30.771500000000003 - type: nauc_ndcg_at_3_std value: -16.2537 - type: nauc_ndcg_at_3_diff1 value: -59.0158 - type: nauc_ndcg_at_5_max value: 30.354 - type: nauc_ndcg_at_5_std value: -16.576 - type: nauc_ndcg_at_5_diff1 value: -55.0555 - type: nauc_ndcg_at_10_max value: 30.0579 - type: nauc_ndcg_at_10_std value: -16.3765 - type: nauc_ndcg_at_10_diff1 value: -52.5829 - type: nauc_ndcg_at_20_max value: 29.8131 - 
type: nauc_ndcg_at_20_std value: -15.7493 - type: nauc_ndcg_at_20_diff1 value: -51.1605 - type: nauc_ndcg_at_100_max value: 29.9313 - type: nauc_ndcg_at_100_std value: -14.9786 - type: nauc_ndcg_at_100_diff1 value: -49.6997 - type: nauc_ndcg_at_1000_max value: 29.7154 - type: nauc_ndcg_at_1000_std value: -15.2567 - type: nauc_ndcg_at_1000_diff1 value: -49.660399999999996 - type: nauc_map_at_1_max value: 16.045 - type: nauc_map_at_1_std value: -8.7715 - type: nauc_map_at_1_diff1 value: 48.4886 - type: nauc_map_at_3_max value: 29.6122 - type: nauc_map_at_3_std value: -15.509500000000001 - type: nauc_map_at_3_diff1 value: -52.033300000000004 - type: nauc_map_at_5_max value: 29.3076 - type: nauc_map_at_5_std value: -15.7 - type: nauc_map_at_5_diff1 value: -49.1839 - type: nauc_map_at_10_max value: 29.1468 - type: nauc_map_at_10_std value: -15.564400000000001 - type: nauc_map_at_10_diff1 value: -47.7791 - type: nauc_map_at_20_max value: 29.0578 - type: nauc_map_at_20_std value: -15.3635 - type: nauc_map_at_20_diff1 value: -47.2635 - type: nauc_map_at_100_max value: 29.0523 - type: nauc_map_at_100_std value: -15.2602 - type: nauc_map_at_100_diff1 value: -46.9875 - type: nauc_map_at_1000_max value: 29.048299999999998 - type: nauc_map_at_1000_std value: -15.2626 - type: nauc_map_at_1000_diff1 value: -46.98 - type: nauc_recall_at_1_max value: 16.045 - type: nauc_recall_at_1_std value: -8.7715 - type: nauc_recall_at_1_diff1 value: 48.4886 - type: nauc_recall_at_3_max value: 32.8552 - type: nauc_recall_at_3_std value: -17.6374 - type: nauc_recall_at_3_diff1 value: -71.1273 - type: nauc_recall_at_5_max value: 32.378299999999996 - type: nauc_recall_at_5_std value: -18.411 - type: nauc_recall_at_5_diff1 value: -65.7517 - type: nauc_recall_at_10_max value: 32.041799999999995 - type: nauc_recall_at_10_std value: -18.4057 - type: nauc_recall_at_10_diff1 value: -62.019999999999996 - type: nauc_recall_at_20_max value: 31.663999999999998 - type: nauc_recall_at_20_std value: -16.352800000000002 - type: nauc_recall_at_20_diff1 value: -59.1186 - type: nauc_recall_at_100_max value: 37.872499999999995 - type: nauc_recall_at_100_std value: -4.3914 - type: nauc_recall_at_100_diff1 value: -51.8363 - type: nauc_recall_at_1000_max value: 59.5105 - type: nauc_recall_at_1000_std value: 23.3375 - type: nauc_recall_at_1000_diff1 value: -73.9075 - type: nauc_precision_at_1_max value: 16.045 - type: nauc_precision_at_1_std value: -8.7715 - type: nauc_precision_at_1_diff1 value: 48.4886 - type: nauc_precision_at_3_max value: 32.8552 - type: nauc_precision_at_3_std value: -17.6374 - type: nauc_precision_at_3_diff1 value: -71.1273 - type: nauc_precision_at_5_max value: 32.378299999999996 - type: nauc_precision_at_5_std value: -18.411 - type: nauc_precision_at_5_diff1 value: -65.7517 - type: nauc_precision_at_10_max value: 32.041799999999995 - type: nauc_precision_at_10_std value: -18.4057 - type: nauc_precision_at_10_diff1 value: -62.019999999999996 - type: nauc_precision_at_20_max value: 31.663999999999998 - type: nauc_precision_at_20_std value: -16.352800000000002 - type: nauc_precision_at_20_diff1 value: -59.1186 - type: nauc_precision_at_100_max value: 37.872499999999995 - type: nauc_precision_at_100_std value: -4.3914 - type: nauc_precision_at_100_diff1 value: -51.8363 - type: nauc_precision_at_1000_max value: 59.5105 - type: nauc_precision_at_1000_std value: 23.3375 - type: nauc_precision_at_1000_diff1 value: -73.9075 - type: nauc_mrr_at_1_max value: 15.1452 - type: nauc_mrr_at_1_std value: -9.760399999999999 - type: 
nauc_mrr_at_1_diff1 value: -39.2235 - type: nauc_mrr_at_3_max value: 23.6826 - type: nauc_mrr_at_3_std value: -13.300899999999999 - type: nauc_mrr_at_3_diff1 value: -55.17809999999999 - type: nauc_mrr_at_5_max value: 23.3754 - type: nauc_mrr_at_5_std value: -13.306299999999998 - type: nauc_mrr_at_5_diff1 value: -53.744499999999995 - type: nauc_mrr_at_10_max value: 23.0703 - type: nauc_mrr_at_10_std value: -13.1632 - type: nauc_mrr_at_10_diff1 value: -53.2374 - type: nauc_mrr_at_20_max value: 22.9496 - type: nauc_mrr_at_20_std value: -13.031 - type: nauc_mrr_at_20_diff1 value: -53.016 - type: nauc_mrr_at_100_max value: 22.9044 - type: nauc_mrr_at_100_std value: -12.9409 - type: nauc_mrr_at_100_diff1 value: -52.9092 - type: nauc_mrr_at_1000_max value: 22.897100000000002 - type: nauc_mrr_at_1000_std value: -12.940399999999999 - type: nauc_mrr_at_1000_diff1 value: -52.9095 - type: main_score value: 44.775 - task: type: Retrieval dataset: name: MTEB TRECCOVID (default) type: mteb/trec-covid config: default split: test revision: bb9466bac8153a0349341eb1b22e06409e78ef4e metrics: - type: ndcg_at_1 value: 70.0 - type: ndcg_at_3 value: 68.704 - type: ndcg_at_5 value: 67.533 - type: ndcg_at_10 value: 63.098 - type: ndcg_at_20 value: 60.507999999999996 - type: ndcg_at_100 value: 49.847 - type: ndcg_at_1000 value: 48.394999999999996 - type: map_at_1 value: 0.211 - type: map_at_3 value: 0.555 - type: map_at_5 value: 0.873 - type: map_at_10 value: 1.526 - type: map_at_20 value: 2.731 - type: map_at_100 value: 8.863 - type: map_at_1000 value: 23.162 - type: recall_at_1 value: 0.211 - type: recall_at_3 value: 0.5930000000000001 - type: recall_at_5 value: 0.962 - type: recall_at_10 value: 1.748 - type: recall_at_20 value: 3.318 - type: recall_at_100 value: 12.447999999999999 - type: recall_at_1000 value: 46.794999999999995 - type: precision_at_1 value: 76.0 - type: precision_at_3 value: 72.667 - type: precision_at_5 value: 71.6 - type: precision_at_10 value: 66.0 - type: precision_at_20 value: 63.6 - type: precision_at_100 value: 51.339999999999996 - type: precision_at_1000 value: 21.68 - type: mrr_at_1 value: 76.0 - type: mrr_at_3 value: 84.0 - type: mrr_at_5 value: 84.39999999999999 - type: mrr_at_10 value: 84.85000000000001 - type: mrr_at_20 value: 84.85000000000001 - type: mrr_at_100 value: 84.85000000000001 - type: mrr_at_1000 value: 84.85000000000001 - type: nauc_ndcg_at_1_max value: 48.710300000000004 - type: nauc_ndcg_at_1_std value: 72.6125 - type: nauc_ndcg_at_1_diff1 value: -19.9816 - type: nauc_ndcg_at_3_max value: 44.8032 - type: nauc_ndcg_at_3_std value: 64.7227 - type: nauc_ndcg_at_3_diff1 value: -25.933899999999998 - type: nauc_ndcg_at_5_max value: 44.7004 - type: nauc_ndcg_at_5_std value: 65.05330000000001 - type: nauc_ndcg_at_5_diff1 value: -26.0531 - type: nauc_ndcg_at_10_max value: 49.5716 - type: nauc_ndcg_at_10_std value: 66.18730000000001 - type: nauc_ndcg_at_10_diff1 value: -22.3525 - type: nauc_ndcg_at_20_max value: 49.0212 - type: nauc_ndcg_at_20_std value: 71.2387 - type: nauc_ndcg_at_20_diff1 value: -21.6522 - type: nauc_ndcg_at_100_max value: 47.3029 - type: nauc_ndcg_at_100_std value: 82.31819999999999 - type: nauc_ndcg_at_100_diff1 value: -27.5265 - type: nauc_ndcg_at_1000_max value: 38.8474 - type: nauc_ndcg_at_1000_std value: 77.1578 - type: nauc_ndcg_at_1000_diff1 value: -29.350700000000003 - type: nauc_map_at_1_max value: 16.4698 - type: nauc_map_at_1_std value: 9.657300000000001 - type: nauc_map_at_1_diff1 value: -4.3484 - type: nauc_map_at_3_max value: 25.183299999999996 
- type: nauc_map_at_3_std value: 16.8245 - type: nauc_map_at_3_diff1 value: -7.1254 - type: nauc_map_at_5_max value: 24.5899 - type: nauc_map_at_5_std value: 19.8027 - type: nauc_map_at_5_diff1 value: -9.8547 - type: nauc_map_at_10_max value: 34.9032 - type: nauc_map_at_10_std value: 26.435599999999997 - type: nauc_map_at_10_diff1 value: -8.833499999999999 - type: nauc_map_at_20_max value: 40.551700000000004 - type: nauc_map_at_20_std value: 34.6141 - type: nauc_map_at_20_diff1 value: -8.578199999999999 - type: nauc_map_at_100_max value: 51.403299999999994 - type: nauc_map_at_100_std value: 68.4083 - type: nauc_map_at_100_diff1 value: -17.7135 - type: nauc_map_at_1000_max value: 48.9955 - type: nauc_map_at_1000_std value: 82.9784 - type: nauc_map_at_1000_diff1 value: -26.473000000000003 - type: nauc_recall_at_1_max value: 16.4698 - type: nauc_recall_at_1_std value: 9.657300000000001 - type: nauc_recall_at_1_diff1 value: -4.3484 - type: nauc_recall_at_3_max value: 21.4136 - type: nauc_recall_at_3_std value: 11.4801 - type: nauc_recall_at_3_diff1 value: -7.1396 - type: nauc_recall_at_5_max value: 18.0314 - type: nauc_recall_at_5_std value: 12.7486 - type: nauc_recall_at_5_diff1 value: -9.7349 - type: nauc_recall_at_10_max value: 27.8032 - type: nauc_recall_at_10_std value: 18.7061 - type: nauc_recall_at_10_diff1 value: -9.2739 - type: nauc_recall_at_20_max value: 30.878299999999996 - type: nauc_recall_at_20_std value: 26.0295 - type: nauc_recall_at_20_diff1 value: -7.8001000000000005 - type: nauc_recall_at_100_max value: 39.4065 - type: nauc_recall_at_100_std value: 56.112399999999994 - type: nauc_recall_at_100_diff1 value: -17.8753 - type: nauc_recall_at_1000_max value: 31.571199999999997 - type: nauc_recall_at_1000_std value: 65.3181 - type: nauc_recall_at_1000_diff1 value: -26.398899999999998 - type: nauc_precision_at_1_max value: 59.8382 - type: nauc_precision_at_1_std value: 66.9075 - type: nauc_precision_at_1_diff1 value: -5.1873000000000005 - type: nauc_precision_at_3_max value: 55.787600000000005 - type: nauc_precision_at_3_std value: 64.1127 - type: nauc_precision_at_3_diff1 value: -24.3791 - type: nauc_precision_at_5_max value: 50.0544 - type: nauc_precision_at_5_std value: 61.812599999999996 - type: nauc_precision_at_5_diff1 value: -24.5456 - type: nauc_precision_at_10_max value: 57.4695 - type: nauc_precision_at_10_std value: 63.7448 - type: nauc_precision_at_10_diff1 value: -22.6982 - type: nauc_precision_at_20_max value: 57.3052 - type: nauc_precision_at_20_std value: 72.00619999999999 - type: nauc_precision_at_20_diff1 value: -18.2329 - type: nauc_precision_at_100_max value: 50.0873 - type: nauc_precision_at_100_std value: 84.9689 - type: nauc_precision_at_100_diff1 value: -27.625300000000003 - type: nauc_precision_at_1000_max value: 29.3103 - type: nauc_precision_at_1000_std value: 57.898700000000005 - type: nauc_precision_at_1000_diff1 value: -28.8765 - type: nauc_mrr_at_1_max value: 59.8382 - type: nauc_mrr_at_1_std value: 66.9075 - type: nauc_mrr_at_1_diff1 value: -5.1873000000000005 - type: nauc_mrr_at_3_max value: 58.4682 - type: nauc_mrr_at_3_std value: 64.6751 - type: nauc_mrr_at_3_diff1 value: -5.9737 - type: nauc_mrr_at_5_max value: 59.099999999999994 - type: nauc_mrr_at_5_std value: 63.6902 - type: nauc_mrr_at_5_diff1 value: -6.482499999999999 - type: nauc_mrr_at_10_max value: 57.9638 - type: nauc_mrr_at_10_std value: 63.716300000000004 - type: nauc_mrr_at_10_diff1 value: -5.6598999999999995 - type: nauc_mrr_at_20_max value: 57.9638 - type: nauc_mrr_at_20_std value: 
63.716300000000004 - type: nauc_mrr_at_20_diff1 value: -5.6598999999999995 - type: nauc_mrr_at_100_max value: 57.9638 - type: nauc_mrr_at_100_std value: 63.716300000000004 - type: nauc_mrr_at_100_diff1 value: -5.6598999999999995 - type: nauc_mrr_at_1000_max value: 57.9638 - type: nauc_mrr_at_1000_std value: 63.716300000000004 - type: nauc_mrr_at_1000_diff1 value: -5.6598999999999995 - type: main_score value: 63.098 - task: type: Retrieval dataset: name: MTEB Touche2020 (default) type: mteb/touche2020 config: default split: test revision: a34f9a33db75fa0cbb21bb5cfc3dae8dc8bec93f metrics: - type: ndcg_at_1 value: 23.469 - type: ndcg_at_3 value: 25.522 - type: ndcg_at_5 value: 24.333 - type: ndcg_at_10 value: 24.029 - type: ndcg_at_20 value: 24.573 - type: ndcg_at_100 value: 34.425 - type: ndcg_at_1000 value: 46.907 - type: map_at_1 value: 1.976 - type: map_at_3 value: 4.589 - type: map_at_5 value: 6.555999999999999 - type: map_at_10 value: 9.687999999999999 - type: map_at_20 value: 11.926 - type: map_at_100 value: 15.116999999999999 - type: map_at_1000 value: 16.769000000000002 - type: recall_at_1 value: 1.976 - type: recall_at_3 value: 6.101 - type: recall_at_5 value: 9.68 - type: recall_at_10 value: 16.633 - type: recall_at_20 value: 23.589 - type: recall_at_100 value: 45.61 - type: recall_at_1000 value: 82.48100000000001 - type: precision_at_1 value: 26.531 - type: precision_at_3 value: 27.891 - type: precision_at_5 value: 25.714 - type: precision_at_10 value: 22.448999999999998 - type: precision_at_20 value: 16.837 - type: precision_at_100 value: 7.122000000000001 - type: precision_at_1000 value: 1.5270000000000001 - type: mrr_at_1 value: 26.5306 - type: mrr_at_3 value: 39.1156 - type: mrr_at_5 value: 41.1565 - type: mrr_at_10 value: 43.863 - type: mrr_at_20 value: 44.5963 - type: mrr_at_100 value: 44.766600000000004 - type: mrr_at_1000 value: 44.766600000000004 - type: nauc_ndcg_at_1_max value: -31.661099999999998 - type: nauc_ndcg_at_1_std value: 2.8871 - type: nauc_ndcg_at_1_diff1 value: 3.4787 - type: nauc_ndcg_at_3_max value: -34.6673 - type: nauc_ndcg_at_3_std value: -3.8882 - type: nauc_ndcg_at_3_diff1 value: 0.6512 - type: nauc_ndcg_at_5_max value: -33.815 - type: nauc_ndcg_at_5_std value: 0.20209999999999997 - type: nauc_ndcg_at_5_diff1 value: -6.4072000000000005 - type: nauc_ndcg_at_10_max value: -26.9953 - type: nauc_ndcg_at_10_std value: -3.6511 - type: nauc_ndcg_at_10_diff1 value: -3.8763 - type: nauc_ndcg_at_20_max value: -30.218600000000002 - type: nauc_ndcg_at_20_std value: -1.4384 - type: nauc_ndcg_at_20_diff1 value: -8.5927 - type: nauc_ndcg_at_100_max value: -32.1409 - type: nauc_ndcg_at_100_std value: 20.1662 - type: nauc_ndcg_at_100_diff1 value: -12.0591 - type: nauc_ndcg_at_1000_max value: -31.6892 - type: nauc_ndcg_at_1000_std value: 32.1464 - type: nauc_ndcg_at_1000_diff1 value: -8.3651 - type: nauc_map_at_1_max value: -41.9612 - type: nauc_map_at_1_std value: -11.0332 - type: nauc_map_at_1_diff1 value: -5.2508 - type: nauc_map_at_3_max value: -30.4968 - type: nauc_map_at_3_std value: -11.138 - type: nauc_map_at_3_diff1 value: -0.8447 - type: nauc_map_at_5_max value: -24.7543 - type: nauc_map_at_5_std value: -10.302 - type: nauc_map_at_5_diff1 value: -10.0762 - type: nauc_map_at_10_max value: -20.420099999999998 - type: nauc_map_at_10_std value: -10.485 - type: nauc_map_at_10_diff1 value: -10.3134 - type: nauc_map_at_20_max value: -20.8606 - type: nauc_map_at_20_std value: -6.3984 - type: nauc_map_at_20_diff1 value: -10.8605 - type: nauc_map_at_100_max value: 
-22.6385 - type: nauc_map_at_100_std value: 3.8738 - type: nauc_map_at_100_diff1 value: -12.9055 - type: nauc_map_at_1000_max value: -23.0823 - type: nauc_map_at_1000_std value: 8.6942 - type: nauc_map_at_1000_diff1 value: -13.1715 - type: nauc_recall_at_1_max value: -41.9612 - type: nauc_recall_at_1_std value: -11.0332 - type: nauc_recall_at_1_diff1 value: -5.2508 - type: nauc_recall_at_3_max value: -25.9715 - type: nauc_recall_at_3_std value: -14.9623 - type: nauc_recall_at_3_diff1 value: -4.2583 - type: nauc_recall_at_5_max value: -24.5848 - type: nauc_recall_at_5_std value: -14.258299999999998 - type: nauc_recall_at_5_diff1 value: -13.1162 - type: nauc_recall_at_10_max value: -22.3834 - type: nauc_recall_at_10_std value: -15.274199999999999 - type: nauc_recall_at_10_diff1 value: -10.8836 - type: nauc_recall_at_20_max value: -22.8634 - type: nauc_recall_at_20_std value: -4.8215 - type: nauc_recall_at_20_diff1 value: -11.1747 - type: nauc_recall_at_100_max value: -25.9537 - type: nauc_recall_at_100_std value: 29.75 - type: nauc_recall_at_100_diff1 value: -15.512799999999999 - type: nauc_recall_at_1000_max value: -18.9449 - type: nauc_recall_at_1000_std value: 69.619 - type: nauc_recall_at_1000_diff1 value: -5.629300000000001 - type: nauc_precision_at_1_max value: -33.7627 - type: nauc_precision_at_1_std value: 1.8065000000000002 - type: nauc_precision_at_1_diff1 value: 5.3592 - type: nauc_precision_at_3_max value: -30.7992 - type: nauc_precision_at_3_std value: -6.285399999999999 - type: nauc_precision_at_3_diff1 value: 1.1098000000000001 - type: nauc_precision_at_5_max value: -27.8949 - type: nauc_precision_at_5_std value: -1.8754 - type: nauc_precision_at_5_diff1 value: -8.0528 - type: nauc_precision_at_10_max value: -19.659299999999998 - type: nauc_precision_at_10_std value: -0.9809999999999999 - type: nauc_precision_at_10_diff1 value: -2.0972999999999997 - type: nauc_precision_at_20_max value: -25.810899999999997 - type: nauc_precision_at_20_std value: 19.5577 - type: nauc_precision_at_20_diff1 value: -8.879199999999999 - type: nauc_precision_at_100_max value: -21.1488 - type: nauc_precision_at_100_std value: 65.00200000000001 - type: nauc_precision_at_100_diff1 value: -11.740499999999999 - type: nauc_precision_at_1000_max value: 20.7392 - type: nauc_precision_at_1000_std value: 38.2851 - type: nauc_precision_at_1000_diff1 value: 17.4954 - type: nauc_mrr_at_1_max value: -33.7627 - type: nauc_mrr_at_1_std value: 1.8065000000000002 - type: nauc_mrr_at_1_diff1 value: 5.3592 - type: nauc_mrr_at_3_max value: -39.837 - type: nauc_mrr_at_3_std value: -5.3861 - type: nauc_mrr_at_3_diff1 value: -4.1776 - type: nauc_mrr_at_5_max value: -39.756099999999996 - type: nauc_mrr_at_5_std value: -5.3674 - type: nauc_mrr_at_5_diff1 value: -2.4693 - type: nauc_mrr_at_10_max value: -37.7379 - type: nauc_mrr_at_10_std value: -6.2844 - type: nauc_mrr_at_10_diff1 value: -0.6525000000000001 - type: nauc_mrr_at_20_max value: -38.4522 - type: nauc_mrr_at_20_std value: -5.0927 - type: nauc_mrr_at_20_diff1 value: -0.2814 - type: nauc_mrr_at_100_max value: -38.1599 - type: nauc_mrr_at_100_std value: -5.2147 - type: nauc_mrr_at_100_diff1 value: -0.7001000000000001 - type: nauc_mrr_at_1000_max value: -38.1599 - type: nauc_mrr_at_1000_std value: -5.2147 - type: nauc_mrr_at_1000_diff1 value: -0.7001000000000001 - type: main_score value: 24.029 - task: type: Classification dataset: name: MTEB ToxicConversationsClassification (default) type: mteb/toxic_conversations_50k config: default split: test revision: 
edfaf9da55d3dd50d43143d90c1ac476895ae6de metrics: - type: accuracy value: 62.9395 - type: f1 value: 47.7133 - type: f1_weighted value: 71.0525 - type: ap value: 10.306600000000001 - type: ap_weighted value: 10.306600000000001 - type: main_score value: 62.9395 - task: type: Classification dataset: name: MTEB TweetSentimentExtractionClassification (default) type: mteb/tweet_sentiment_extraction config: default split: test revision: d604517c81ca91fe16a244d1248fc021f9ecee7a metrics: - type: accuracy value: 52.8721 - type: f1 value: 53.034800000000004 - type: f1_weighted value: 52.4319 - type: main_score value: 52.8721 - task: type: Clustering dataset: name: MTEB TwentyNewsgroupsClustering (default) type: mteb/twentynewsgroups-clustering config: default split: test revision: 6125ec4e24fa026cec8a478383ee943acfbd5449 metrics: - type: v_measure value: 44.9227 - type: v_measure_std value: 1.1638000000000002 - type: main_score value: 44.9227 - task: type: PairClassification dataset: name: MTEB TwitterSemEval2015 (default) type: mteb/twittersemeval2015-pairclassification config: default split: test revision: 70970daeab8776df92f5ea462b6173c0b46fd2d1 metrics: - type: similarity_accuracy value: 82.04090000000001 - type: similarity_accuracy_threshold value: 86.6147 - type: similarity_f1 value: 57.258399999999995 - type: similarity_f1_threshold value: 82.9233 - type: similarity_precision value: 52.1456 - type: similarity_recall value: 63.4828 - type: similarity_ap value: 60.0317 - type: cosine_accuracy value: 82.04090000000001 - type: cosine_accuracy_threshold value: 86.6147 - type: cosine_f1 value: 57.258399999999995 - type: cosine_f1_threshold value: 82.9233 - type: cosine_precision value: 52.1456 - type: cosine_recall value: 63.4828 - type: cosine_ap value: 60.0317 - type: manhattan_accuracy value: 81.9574 - type: manhattan_accuracy_threshold value: 794.4433 - type: manhattan_f1 value: 57.1936 - type: manhattan_f1_threshold value: 898.9445 - type: manhattan_precision value: 51.91480000000001 - type: manhattan_recall value: 63.6675 - type: manhattan_ap value: 59.9255 - type: euclidean_accuracy value: 82.04090000000001 - type: euclidean_accuracy_threshold value: 51.7403 - type: euclidean_f1 value: 57.258399999999995 - type: euclidean_f1_threshold value: 58.440999999999995 - type: euclidean_precision value: 52.1456 - type: euclidean_recall value: 63.4828 - type: euclidean_ap value: 60.0317 - type: dot_accuracy value: 82.04090000000001 - type: dot_accuracy_threshold value: 86.6147 - type: dot_f1 value: 57.258399999999995 - type: dot_f1_threshold value: 82.9233 - type: dot_precision value: 52.1456 - type: dot_recall value: 63.4828 - type: dot_ap value: 60.0317 - type: max_accuracy value: 82.04090000000001 - type: max_f1 value: 57.258399999999995 - type: max_precision value: 52.1456 - type: max_recall value: 63.6675 - type: max_ap value: 60.0317 - type: main_score value: 60.0317 - task: type: PairClassification dataset: name: MTEB TwitterURLCorpus (default) type: mteb/twitterurlcorpus-pairclassification config: default split: test revision: 8b6510b0b1fa4e4c4f879467980e9be563ec1cdf metrics: - type: similarity_accuracy value: 87.3035 - type: similarity_accuracy_threshold value: 85.4123 - type: similarity_f1 value: 74.5555 - type: similarity_f1_threshold value: 83.7581 - type: similarity_precision value: 72.55369999999999 - type: similarity_recall value: 76.6708 - type: similarity_ap value: 82.42930000000001 - type: cosine_accuracy value: 87.3035 - type: cosine_accuracy_threshold value: 85.4123 - type: 
cosine_f1 value: 74.5555 - type: cosine_f1_threshold value: 83.7581 - type: cosine_precision value: 72.55369999999999 - type: cosine_recall value: 76.6708 - type: cosine_ap value: 82.42930000000001 - type: manhattan_accuracy value: 87.3249 - type: manhattan_accuracy_threshold value: 831.9304999999999 - type: manhattan_f1 value: 74.8665 - type: manhattan_f1_threshold value: 893.9980999999999 - type: manhattan_precision value: 70.8502 - type: manhattan_recall value: 79.3656 - type: manhattan_ap value: 82.5792 - type: euclidean_accuracy value: 87.3035 - type: euclidean_accuracy_threshold value: 54.014300000000006 - type: euclidean_f1 value: 74.5555 - type: euclidean_f1_threshold value: 56.9946 - type: euclidean_precision value: 72.55369999999999 - type: euclidean_recall value: 76.6708 - type: euclidean_ap value: 82.42920000000001 - type: dot_accuracy value: 87.3035 - type: dot_accuracy_threshold value: 85.4123 - type: dot_f1 value: 74.5555 - type: dot_f1_threshold value: 83.7581 - type: dot_precision value: 72.55369999999999 - type: dot_recall value: 76.6708 - type: dot_ap value: 82.42920000000001 - type: max_accuracy value: 87.3249 - type: max_f1 value: 74.8665 - type: max_precision value: 72.55369999999999 - type: max_recall value: 79.3656 - type: max_ap value: 82.5792 - type: main_score value: 82.5792 --- # Granite-Embedding-30m-English **Model Summary:** Granite-Embedding-30m-English is a 30M parameter dense biencoder embedding model from the Granite Embeddings suite that can be used to generate high quality text embeddings. This model produces embedding vectors of size 384 and is trained using a combination of open source relevance-pair datasets with permissive, enterprise-friendly license, and IBM collected and generated datasets. While maintaining competitive scores on academic benchmarks such as BEIR, this model also performs well on many enterprise use cases. This model is developed using retrieval oriented pretraining, contrastive finetuning, knowledge distillation and model merging for improved performance. - **Developers:** Granite Embedding Team, IBM - **GitHub Repository:** [ibm-granite/granite-embedding-models](https://github.com/ibm-granite/granite-embedding-models) - **Website**: [Granite Docs](https://www.ibm.com/granite/docs/) - **Paper:** Coming Soon - **Release Date**: December 18th, 2024 - **License:** [Apache 2.0](https://www.apache.org/licenses/LICENSE-2.0) **Supported Languages:** English. **Intended use:** The model is designed to produce fixed length vector representations for a given text, which can be used for text similarity, retrieval, and search applications. **Usage with Sentence Transformers:** The model is compatible with SentenceTransformer library and is very easy to use: First, install the sentence transformers library ```shell pip install sentence_transformers ``` The model can then be used to encode pairs of text and find the similarity between their representations ```python from sentence_transformers import SentenceTransformer, util model_path = "ibm-granite/granite-embedding-30m-english" # Load the Sentence Transformer model model = SentenceTransformer(model_path) input_queries = [ ' Who made the song My achy breaky heart? ', 'summit define' ] input_passages = [ "Achy Breaky Heart is a country song written by Don Von Tress. Originally titled Don't Tell My Heart and performed by The Marcy Brothers in 1991. ", "Definition of summit for English Language Learners. : 1 the highest point of a mountain : the top of a mountain. : 2 the highest level. 
: 3 a meeting or series of meetings between the leaders of two or more governments." ] # encode queries and passages query_embeddings = model.encode(input_queries) passage_embeddings = model.encode(input_passages) # calculate cosine similarity print(util.cos_sim(query_embeddings, passage_embeddings)) ``` **Usage with Huggingface Transformers:** This is a simple example of how to use the Granite-Embedding-30m-English model with the Transformers library and PyTorch. First, install the required libraries ```shell pip install transformers torch ``` The model can then be used to encode pairs of text ```python import torch from transformers import AutoModel, AutoTokenizer model_path = "ibm-granite/granite-embedding-30m-english" # Load the model and tokenizer model = AutoModel.from_pretrained(model_path) tokenizer = AutoTokenizer.from_pretrained(model_path) model.eval() input_queries = [ ' Who made the song My achy breaky heart? ', 'summit define' ] # tokenize inputs tokenized_queries = tokenizer(input_queries, padding=True, truncation=True, return_tensors='pt') # encode queries with torch.no_grad(): # Queries model_output = model(**tokenized_queries) # Perform pooling. granite-embedding-30m-english uses CLS Pooling query_embeddings = model_output[0][:, 0] # normalize the embeddings query_embeddings = torch.nn.functional.normalize(query_embeddings, dim=1) ``` **Evaluation:** Granite-Embedding-30M-English is twice as fast as other models with similar embedding dimensions, while maintaining competitive performance. The performance of the Granite-Embedding-30M-English model on MTEB Retrieval (i.e., BEIR) and code retrieval (CoIR) benchmarks is reported below. | Model | Parameters (M)| Embedding Dimension | MTEB Retrieval (15) | CoIR (10) | |---------------------------------|:------------:|:-------------------:|:-------------------: |:----------:| |granite-embedding-30m-english |30 |384 |49.1 |47.0 | **Model Architecture:** Granite-Embedding-30m-English is based on an encoder-only RoBERTa-like transformer architecture, trained internally at IBM Research. | Model | granite-embedding-30m-english | granite-embedding-125m-english | granite-embedding-107m-multilingual | granite-embedding-278m-multilingual | | :--------- | :-------:| :--------: | :-----:| :-----:| | Embedding size | **384** | 768 | 384 | 768 | | Number of layers | **6** | 12 | 6 | 12 | | Number of attention heads | **12** | 12 | 12 | 12 | | Intermediate size | **1536** | 3072 | 1536 | 3072 | | Activation Function | **GeLU** | GeLU | GeLU | GeLU | | Vocabulary Size | **50265**| 50265 | 250002 | 250002 | | Max. Sequence Length | **512** | 512 | 512 | 512 | | # Parameters | **30M** | 125M | 107M | 278M | **Training Data:** Overall, the training data consists of four key sources: (1) unsupervised title-body paired data scraped from the web, (2) publicly available paired data with permissive, enterprise-friendly licenses, (3) IBM-internal paired data targeting specific technical domains, and (4) IBM-generated synthetic data. The data is listed below: | **Dataset** | **Num.
Pairs** | |----------------------------------------------------|:---------------:| | SPECTER citation triplets | 684,100 | | Stack Exchange Duplicate questions (titles) | 304,525 | | Stack Exchange Duplicate questions (bodies) | 250,519 | | Stack Exchange Duplicate questions (titles+bodies) | 250,460 | | Natural Questions (NQ) | 100,231 | | SQuAD2.0 | 87,599 | | PAQ (Question, Answer) pairs | 64,371,441 | | Stack Exchange (Title, Answer) pairs | 4,067,139 | | Stack Exchange (Title, Body) pairs | 23,978,013 | | Stack Exchange (Title+Body, Answer) pairs | 187,195 | | S2ORC Citation pairs (Titles) | 52,603,982 | | S2ORC (Title, Abstract) | 41,769,185 | | S2ORC (Citations, abstracts) | 52,603,982 | | WikiAnswers Duplicate question pairs | 77,427,422 | | SearchQA | 582,261 | | HotpotQA | 85,000 | | Fever | 109,810 | | Arxiv | 2,358,545 | | Wikipedia | 20,745,403 | | PubMed | 20,000,000 | | Miracl En Pairs | 9,016 | | DBPedia Title-Body Pairs | 4,635,922 | | Synthetic: Query-Wikipedia Passage | 1,879,093 | | Synthetic: Fact Verification | 9,888 | | IBM Internal Triples | 40,290 | | IBM Internal Title-Body Pairs | 1,524,586 | Notably, we do not use the popular MS-MARCO retrieval dataset in our training corpus due to its non-commercial license, while other open-source models train on this dataset due to its high quality. **Infrastructure:** We train Granite Embedding Models using IBM's computing cluster, Cognitive Compute Cluster, which is outfitted with NVIDIA A100 80gb GPUs. This cluster provides a scalable and efficient infrastructure for training our models over multiple GPUs. **Ethical Considerations and Limitations:** The data used to train the base language model was filtered to remove text containing hate, abuse, and profanity. Granite-Embedding-30m-English is trained only for English texts, and has a context length of 512 tokens (longer texts will be truncated to this size). **Resources** - ⭐️ Learn about the latest updates with Granite: https://www.ibm.com/granite - 📄 Get started with tutorials, best practices, and prompt engineering advice: https://www.ibm.com/granite/docs/ - 💡 Learn about the latest Granite learning resources: https://ibm.biz/granite-learning-resources <!-- ## Citation ``` @misc{granite-embedding-models, author = {author 1, author2, ...}, title = {}, journal = {}, volume = {}, year = {2024}, url = {https://arxiv.org/abs/0000.00000}, } ``` -->
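The Transformers example above stops after producing normalized query embeddings; the retrieval step itself (embedding passages the same way and ranking them) is left implicit. Below is a minimal, self-contained sketch of that remaining step, using the same CLS pooling and normalization shown in the card's example; the query and passage strings are illustrative only.

```python
# Minimal retrieval sketch completing the Transformers example above.
# Passages are embedded exactly like queries (CLS pooling + L2 normalization),
# then ranked by cosine similarity; example texts are illustrative only.
import torch
from transformers import AutoModel, AutoTokenizer

model_path = "ibm-granite/granite-embedding-30m-english"
model = AutoModel.from_pretrained(model_path)
tokenizer = AutoTokenizer.from_pretrained(model_path)
model.eval()

def embed(texts):
    batch = tokenizer(texts, padding=True, truncation=True, return_tensors="pt")
    with torch.no_grad():
        output = model(**batch)
    # CLS pooling followed by L2 normalization, mirroring the query-side example
    return torch.nn.functional.normalize(output[0][:, 0], dim=1)

queries = ["Who made the song My achy breaky heart?"]
passages = [
    "Achy Breaky Heart is a country song written by Don Von Tress.",
    "A summit is the highest point of a mountain.",
]

# On normalized vectors, cosine similarity reduces to a dot product
scores = embed(queries) @ embed(passages).T
print(scores)
```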
[ "BIOSSES", "SCIFACT" ]
tasksource/deberta-small-long-nli
tasksource
zero-shot-classification
[ "transformers", "pytorch", "safetensors", "deberta-v2", "text-classification", "deberta-v3-small", "deberta-v3", "deberta", "nli", "natural-language-inference", "multitask", "multi-task", "pipeline", "extreme-multi-task", "extreme-mtl", "tasksource", "zero-shot", "rlhf", "zero-shot-classification", "en", "dataset:nyu-mll/glue", "dataset:aps/super_glue", "dataset:facebook/anli", "dataset:tasksource/babi_nli", "dataset:sick", "dataset:snli", "dataset:scitail", "dataset:hans", "dataset:alisawuffles/WANLI", "dataset:tasksource/recast", "dataset:sileod/probability_words_nli", "dataset:joey234/nan-nli", "dataset:pietrolesci/nli_fever", "dataset:pietrolesci/breaking_nli", "dataset:pietrolesci/conj_nli", "dataset:pietrolesci/fracas", "dataset:pietrolesci/dialogue_nli", "dataset:pietrolesci/mpe", "dataset:pietrolesci/dnc", "dataset:pietrolesci/recast_white", "dataset:pietrolesci/joci", "dataset:pietrolesci/robust_nli", "dataset:pietrolesci/robust_nli_is_sd", "dataset:pietrolesci/robust_nli_li_ts", "dataset:pietrolesci/gen_debiased_nli", "dataset:pietrolesci/add_one_rte", "dataset:tasksource/imppres", "dataset:hlgd", "dataset:paws", "dataset:medical_questions_pairs", "dataset:Anthropic/model-written-evals", "dataset:truthful_qa", "dataset:nightingal3/fig-qa", "dataset:tasksource/bigbench", "dataset:blimp", "dataset:cos_e", "dataset:cosmos_qa", "dataset:dream", "dataset:openbookqa", "dataset:qasc", "dataset:quartz", "dataset:quail", "dataset:head_qa", "dataset:sciq", "dataset:social_i_qa", "dataset:wiki_hop", "dataset:wiqa", "dataset:piqa", "dataset:hellaswag", "dataset:pkavumba/balanced-copa", "dataset:12ml/e-CARE", "dataset:art", "dataset:winogrande", "dataset:codah", "dataset:ai2_arc", "dataset:definite_pronoun_resolution", "dataset:swag", "dataset:math_qa", "dataset:metaeval/utilitarianism", "dataset:mteb/amazon_counterfactual", "dataset:SetFit/insincere-questions", "dataset:SetFit/toxic_conversations", "dataset:turingbench/TuringBench", "dataset:trec", "dataset:tals/vitaminc", "dataset:hope_edi", "dataset:strombergnlp/rumoureval_2019", "dataset:ethos", "dataset:tweet_eval", "dataset:discovery", "dataset:pragmeval", "dataset:silicone", "dataset:lex_glue", "dataset:papluca/language-identification", "dataset:imdb", "dataset:rotten_tomatoes", "dataset:ag_news", "dataset:yelp_review_full", "dataset:financial_phrasebank", "dataset:poem_sentiment", "dataset:dbpedia_14", "dataset:amazon_polarity", "dataset:app_reviews", "dataset:hate_speech18", "dataset:sms_spam", "dataset:humicroedit", "dataset:snips_built_in_intents", "dataset:hate_speech_offensive", "dataset:yahoo_answers_topics", "dataset:pacovaldez/stackoverflow-questions", "dataset:zapsdcn/hyperpartisan_news", "dataset:zapsdcn/sciie", "dataset:zapsdcn/citation_intent", "dataset:go_emotions", "dataset:allenai/scicite", "dataset:liar", "dataset:relbert/lexical_relation_classification", "dataset:tasksource/linguisticprobing", "dataset:tasksource/crowdflower", "dataset:metaeval/ethics", "dataset:emo", "dataset:google_wellformed_query", "dataset:tweets_hate_speech_detection", "dataset:has_part", "dataset:blog_authorship_corpus", "dataset:launch/open_question_type", "dataset:health_fact", "dataset:commonsense_qa", "dataset:mc_taco", "dataset:ade_corpus_v2", "dataset:prajjwal1/discosense", "dataset:circa", "dataset:PiC/phrase_similarity", "dataset:copenlu/scientific-exaggeration-detection", "dataset:quarel", "dataset:mwong/fever-evidence-related", "dataset:numer_sense", "dataset:dynabench/dynasent", "dataset:raquiba/Sarcasm_News_Headline", 
"dataset:sem_eval_2010_task_8", "dataset:demo-org/auditor_review", "dataset:medmcqa", "dataset:RuyuanWan/Dynasent_Disagreement", "dataset:RuyuanWan/Politeness_Disagreement", "dataset:RuyuanWan/SBIC_Disagreement", "dataset:RuyuanWan/SChem_Disagreement", "dataset:RuyuanWan/Dilemmas_Disagreement", "dataset:lucasmccabe/logiqa", "dataset:wiki_qa", "dataset:tasksource/cycic_classification", "dataset:tasksource/cycic_multiplechoice", "dataset:tasksource/sts-companion", "dataset:tasksource/commonsense_qa_2.0", "dataset:tasksource/lingnli", "dataset:tasksource/monotonicity-entailment", "dataset:tasksource/arct", "dataset:tasksource/scinli", "dataset:tasksource/naturallogic", "dataset:onestop_qa", "dataset:demelin/moral_stories", "dataset:corypaik/prost", "dataset:aps/dynahate", "dataset:metaeval/syntactic-augmentation-nli", "dataset:tasksource/autotnli", "dataset:lasha-nlp/CONDAQA", "dataset:openai/webgpt_comparisons", "dataset:Dahoas/synthetic-instruct-gptj-pairwise", "dataset:metaeval/scruples", "dataset:metaeval/wouldyourather", "dataset:metaeval/defeasible-nli", "dataset:tasksource/help-nli", "dataset:metaeval/nli-veridicality-transitivity", "dataset:tasksource/lonli", "dataset:tasksource/dadc-limit-nli", "dataset:ColumbiaNLP/FLUTE", "dataset:tasksource/strategy-qa", "dataset:openai/summarize_from_feedback", "dataset:tasksource/folio", "dataset:yale-nlp/FOLIO", "dataset:tasksource/tomi-nli", "dataset:tasksource/avicenna", "dataset:stanfordnlp/SHP", "dataset:GBaker/MedQA-USMLE-4-options-hf", "dataset:sileod/wikimedqa", "dataset:declare-lab/cicero", "dataset:amydeng2000/CREAK", "dataset:tasksource/mutual", "dataset:inverse-scaling/NeQA", "dataset:inverse-scaling/quote-repetition", "dataset:inverse-scaling/redefine-math", "dataset:tasksource/puzzte", "dataset:tasksource/implicatures", "dataset:race", "dataset:tasksource/race-c", "dataset:tasksource/spartqa-yn", "dataset:tasksource/spartqa-mchoice", "dataset:tasksource/temporal-nli", "dataset:riddle_sense", "dataset:tasksource/clcd-english", "dataset:maximedb/twentyquestions", "dataset:metaeval/reclor", "dataset:tasksource/counterfactually-augmented-imdb", "dataset:tasksource/counterfactually-augmented-snli", "dataset:metaeval/cnli", "dataset:tasksource/boolq-natural-perturbations", "dataset:metaeval/acceptability-prediction", "dataset:metaeval/equate", "dataset:tasksource/ScienceQA_text_only", "dataset:Jiangjie/ekar_english", "dataset:tasksource/implicit-hate-stg1", "dataset:metaeval/chaos-mnli-ambiguity", "dataset:IlyaGusev/headline_cause", "dataset:tasksource/logiqa-2.0-nli", "dataset:tasksource/oasst2_dense_flat", "dataset:sileod/mindgames", "dataset:metaeval/ambient", "dataset:metaeval/path-naturalness-prediction", "dataset:civil_comments", "dataset:AndyChiang/cloth", "dataset:AndyChiang/dgen", "dataset:tasksource/I2D2", "dataset:webis/args_me", "dataset:webis/Touche23-ValueEval", "dataset:tasksource/starcon", "dataset:PolyAI/banking77", "dataset:tasksource/ConTRoL-nli", "dataset:tasksource/tracie", "dataset:tasksource/sherliic", "dataset:tasksource/sen-making", "dataset:tasksource/winowhy", "dataset:tasksource/robustLR", "dataset:CLUTRR/v1", "dataset:tasksource/logical-fallacy", "dataset:tasksource/parade", "dataset:tasksource/cladder", "dataset:tasksource/subjectivity", "dataset:tasksource/MOH", "dataset:tasksource/VUAC", "dataset:tasksource/TroFi", "dataset:sharc_modified", "dataset:tasksource/conceptrules_v2", "dataset:metaeval/disrpt", "dataset:tasksource/zero-shot-label-nli", "dataset:tasksource/com2sense", "dataset:tasksource/scone", 
"dataset:tasksource/winodict", "dataset:tasksource/fool-me-twice", "dataset:tasksource/monli", "dataset:tasksource/corr2cause", "dataset:lighteval/lsat_qa", "dataset:tasksource/apt", "dataset:zeroshot/twitter-financial-news-sentiment", "dataset:tasksource/icl-symbol-tuning-instruct", "dataset:tasksource/SpaceNLI", "dataset:sihaochen/propsegment", "dataset:HannahRoseKirk/HatemojiBuild", "dataset:tasksource/regset", "dataset:tasksource/esci", "dataset:lmsys/chatbot_arena_conversations", "dataset:neurae/dnd_style_intents", "dataset:hitachi-nlp/FLD.v2", "dataset:tasksource/SDOH-NLI", "dataset:allenai/scifact_entailment", "dataset:tasksource/feasibilityQA", "dataset:tasksource/simple_pair", "dataset:tasksource/AdjectiveScaleProbe-nli", "dataset:tasksource/resnli", "dataset:tasksource/SpaRTUN", "dataset:tasksource/ReSQ", "dataset:tasksource/semantic_fragments_nli", "dataset:MoritzLaurer/dataset_train_nli", "dataset:tasksource/stepgame", "dataset:tasksource/nlgraph", "dataset:tasksource/oasst2_pairwise_rlhf_reward", "dataset:tasksource/hh-rlhf", "dataset:tasksource/ruletaker", "dataset:qbao775/PARARULE-Plus", "dataset:tasksource/proofwriter", "dataset:tasksource/logical-entailment", "dataset:tasksource/nope", "dataset:tasksource/LogicNLI", "dataset:kiddothe2b/contract-nli", "dataset:AshtonIsNotHere/nli4ct_semeval2024", "dataset:tasksource/lsat-ar", "dataset:tasksource/lsat-rc", "dataset:AshtonIsNotHere/biosift-nli", "dataset:tasksource/brainteasers", "dataset:Anthropic/persuasion", "dataset:erbacher/AmbigNQ-clarifying-question", "dataset:tasksource/SIGA-nli", "dataset:unigram/FOL-nli", "dataset:tasksource/goal-step-wikihow", "dataset:GGLab/PARADISE", "dataset:tasksource/doc-nli", "dataset:tasksource/mctest-nli", "dataset:tasksource/patent-phrase-similarity", "dataset:tasksource/natural-language-satisfiability", "dataset:tasksource/idioms-nli", "dataset:tasksource/lifecycle-entailment", "dataset:nvidia/HelpSteer", "dataset:nvidia/HelpSteer2", "dataset:sadat2307/MSciNLI", "dataset:pushpdeep/UltraFeedback-paired", "dataset:tasksource/AES2-essay-scoring", "dataset:tasksource/english-grading", "dataset:tasksource/wice", "dataset:Dzeniks/hover", "dataset:sileod/missing-item-prediction", "dataset:tasksource/tasksource_dpo_pairs", "arxiv:2301.05948", "base_model:microsoft/deberta-v3-small", "base_model:finetune:microsoft/deberta-v3-small", "license:apache-2.0", "autotrain_compatible", "endpoints_compatible", "region:us" ]
"2024-01-31T12:02:12Z"
2024-08-28T15:06:37+00:00
89,304
42
--- base_model: microsoft/deberta-v3-small datasets: - nyu-mll/glue - aps/super_glue - facebook/anli - tasksource/babi_nli - sick - snli - scitail - hans - alisawuffles/WANLI - tasksource/recast - sileod/probability_words_nli - joey234/nan-nli - pietrolesci/nli_fever - pietrolesci/breaking_nli - pietrolesci/conj_nli - pietrolesci/fracas - pietrolesci/dialogue_nli - pietrolesci/mpe - pietrolesci/dnc - pietrolesci/recast_white - pietrolesci/joci - pietrolesci/robust_nli - pietrolesci/robust_nli_is_sd - pietrolesci/robust_nli_li_ts - pietrolesci/gen_debiased_nli - pietrolesci/add_one_rte - tasksource/imppres - hlgd - paws - medical_questions_pairs - Anthropic/model-written-evals - truthful_qa - nightingal3/fig-qa - tasksource/bigbench - blimp - cos_e - cosmos_qa - dream - openbookqa - qasc - quartz - quail - head_qa - sciq - social_i_qa - wiki_hop - wiqa - piqa - hellaswag - pkavumba/balanced-copa - 12ml/e-CARE - art - winogrande - codah - ai2_arc - definite_pronoun_resolution - swag - math_qa - metaeval/utilitarianism - mteb/amazon_counterfactual - SetFit/insincere-questions - SetFit/toxic_conversations - turingbench/TuringBench - trec - tals/vitaminc - hope_edi - strombergnlp/rumoureval_2019 - ethos - tweet_eval - discovery - pragmeval - silicone - lex_glue - papluca/language-identification - imdb - rotten_tomatoes - ag_news - yelp_review_full - financial_phrasebank - poem_sentiment - dbpedia_14 - amazon_polarity - app_reviews - hate_speech18 - sms_spam - humicroedit - snips_built_in_intents - hate_speech_offensive - yahoo_answers_topics - pacovaldez/stackoverflow-questions - zapsdcn/hyperpartisan_news - zapsdcn/sciie - zapsdcn/citation_intent - go_emotions - allenai/scicite - liar - relbert/lexical_relation_classification - tasksource/linguisticprobing - tasksource/crowdflower - metaeval/ethics - emo - google_wellformed_query - tweets_hate_speech_detection - has_part - blog_authorship_corpus - launch/open_question_type - health_fact - commonsense_qa - mc_taco - ade_corpus_v2 - prajjwal1/discosense - circa - PiC/phrase_similarity - copenlu/scientific-exaggeration-detection - quarel - mwong/fever-evidence-related - numer_sense - dynabench/dynasent - raquiba/Sarcasm_News_Headline - sem_eval_2010_task_8 - demo-org/auditor_review - medmcqa - RuyuanWan/Dynasent_Disagreement - RuyuanWan/Politeness_Disagreement - RuyuanWan/SBIC_Disagreement - RuyuanWan/SChem_Disagreement - RuyuanWan/Dilemmas_Disagreement - lucasmccabe/logiqa - wiki_qa - tasksource/cycic_classification - tasksource/cycic_multiplechoice - tasksource/sts-companion - tasksource/commonsense_qa_2.0 - tasksource/lingnli - tasksource/monotonicity-entailment - tasksource/arct - tasksource/scinli - tasksource/naturallogic - onestop_qa - demelin/moral_stories - corypaik/prost - aps/dynahate - metaeval/syntactic-augmentation-nli - tasksource/autotnli - lasha-nlp/CONDAQA - openai/webgpt_comparisons - Dahoas/synthetic-instruct-gptj-pairwise - metaeval/scruples - metaeval/wouldyourather - metaeval/defeasible-nli - tasksource/help-nli - metaeval/nli-veridicality-transitivity - tasksource/lonli - tasksource/dadc-limit-nli - ColumbiaNLP/FLUTE - tasksource/strategy-qa - openai/summarize_from_feedback - tasksource/folio - yale-nlp/FOLIO - tasksource/tomi-nli - tasksource/avicenna - stanfordnlp/SHP - GBaker/MedQA-USMLE-4-options-hf - sileod/wikimedqa - declare-lab/cicero - amydeng2000/CREAK - tasksource/mutual - inverse-scaling/NeQA - inverse-scaling/quote-repetition - inverse-scaling/redefine-math - tasksource/puzzte - tasksource/implicatures - race 
- tasksource/race-c - tasksource/spartqa-yn - tasksource/spartqa-mchoice - tasksource/temporal-nli - riddle_sense - tasksource/clcd-english - maximedb/twentyquestions - metaeval/reclor - tasksource/counterfactually-augmented-imdb - tasksource/counterfactually-augmented-snli - metaeval/cnli - tasksource/boolq-natural-perturbations - metaeval/acceptability-prediction - metaeval/equate - tasksource/ScienceQA_text_only - Jiangjie/ekar_english - tasksource/implicit-hate-stg1 - metaeval/chaos-mnli-ambiguity - IlyaGusev/headline_cause - tasksource/logiqa-2.0-nli - tasksource/oasst2_dense_flat - sileod/mindgames - metaeval/ambient - metaeval/path-naturalness-prediction - civil_comments - AndyChiang/cloth - AndyChiang/dgen - tasksource/I2D2 - webis/args_me - webis/Touche23-ValueEval - tasksource/starcon - PolyAI/banking77 - tasksource/ConTRoL-nli - tasksource/tracie - tasksource/sherliic - tasksource/sen-making - tasksource/winowhy - tasksource/robustLR - CLUTRR/v1 - tasksource/logical-fallacy - tasksource/parade - tasksource/cladder - tasksource/subjectivity - tasksource/MOH - tasksource/VUAC - tasksource/TroFi - sharc_modified - tasksource/conceptrules_v2 - metaeval/disrpt - tasksource/zero-shot-label-nli - tasksource/com2sense - tasksource/scone - tasksource/winodict - tasksource/fool-me-twice - tasksource/monli - tasksource/corr2cause - lighteval/lsat_qa - tasksource/apt - zeroshot/twitter-financial-news-sentiment - tasksource/icl-symbol-tuning-instruct - tasksource/SpaceNLI - sihaochen/propsegment - HannahRoseKirk/HatemojiBuild - tasksource/regset - tasksource/esci - lmsys/chatbot_arena_conversations - neurae/dnd_style_intents - hitachi-nlp/FLD.v2 - tasksource/SDOH-NLI - allenai/scifact_entailment - tasksource/feasibilityQA - tasksource/simple_pair - tasksource/AdjectiveScaleProbe-nli - tasksource/resnli - tasksource/SpaRTUN - tasksource/ReSQ - tasksource/semantic_fragments_nli - MoritzLaurer/dataset_train_nli - tasksource/stepgame - tasksource/nlgraph - tasksource/oasst2_pairwise_rlhf_reward - tasksource/hh-rlhf - tasksource/ruletaker - qbao775/PARARULE-Plus - tasksource/proofwriter - tasksource/logical-entailment - tasksource/nope - tasksource/LogicNLI - kiddothe2b/contract-nli - AshtonIsNotHere/nli4ct_semeval2024 - tasksource/lsat-ar - tasksource/lsat-rc - AshtonIsNotHere/biosift-nli - tasksource/brainteasers - Anthropic/persuasion - erbacher/AmbigNQ-clarifying-question - tasksource/SIGA-nli - unigram/FOL-nli - tasksource/goal-step-wikihow - GGLab/PARADISE - tasksource/doc-nli - tasksource/mctest-nli - tasksource/patent-phrase-similarity - tasksource/natural-language-satisfiability - tasksource/idioms-nli - tasksource/lifecycle-entailment - nvidia/HelpSteer - nvidia/HelpSteer2 - sadat2307/MSciNLI - pushpdeep/UltraFeedback-paired - tasksource/AES2-essay-scoring - tasksource/english-grading - tasksource/wice - Dzeniks/hover - sileod/missing-item-prediction - tasksource/tasksource_dpo_pairs language: en library_name: transformers license: apache-2.0 metrics: - accuracy pipeline_tag: zero-shot-classification tags: - deberta-v3-small - deberta-v3 - deberta - text-classification - nli - natural-language-inference - multitask - multi-task - pipeline - extreme-multi-task - extreme-mtl - tasksource - zero-shot - rlhf --- # Model Card for DeBERTa-v3-small-tasksource-nli [DeBERTa-v3-small](https://hf.co/microsoft/deberta-v3-small) with context length of 1680 tokens fine-tuned on tasksource for 250k steps. I oversampled long NLI tasks (ConTRoL, doc-nli). 
Training data includes HelpSteer v1/v2, logical reasoning tasks (FOLIO, FOL-nli, LogicNLI...), OASST, hh/rlhf, linguistics-oriented NLI tasks, tasksource-dpo, and fact verification tasks. This model is suitable for long-context NLI or as a backbone for fine-tuning reward models or classifiers (a long-context scoring sketch is included at the end of this card). This checkpoint has strong zero-shot validation performance on many tasks (e.g. 70% on WNLI), and can be used for: - Zero-shot entailment-based classification for arbitrary labels [ZS]. - Natural language inference [NLI]. - Further fine-tuning on a new task or tasksource task (classification, token classification or multiple-choice) [FT]. | test_name | accuracy | |:----------------------------|----------------:| | anli/a1 | 57.2 | | anli/a2 | 46.1 | | anli/a3 | 47.2 | | nli_fever | 71.7 | | FOLIO | 47.1 | | ConTRoL-nli | 52.2 | | cladder | 52.8 | | zero-shot-label-nli | 70.0 | | chatbot_arena_conversations | 67.8 | | oasst2_pairwise_rlhf_reward | 75.6 | | doc-nli | 75.0 | Zero-shot GPT-4 scores 61% on FOLIO (logical reasoning), 62% on cladder (probabilistic reasoning) and 56.4% on ConTRoL (long-context NLI). # [ZS] Zero-shot classification pipeline ```python from transformers import pipeline classifier = pipeline("zero-shot-classification",model="tasksource/deberta-small-long-nli") text = "one day I will see the world" candidate_labels = ['travel', 'cooking', 'dancing'] classifier(text, candidate_labels) ``` NLI training data of this model includes [label-nli](https://huggingface.co/datasets/tasksource/zero-shot-label-nli), an NLI dataset specially constructed to improve this kind of zero-shot classification. # [NLI] Natural language inference pipeline ```python from transformers import pipeline pipe = pipeline("text-classification",model="tasksource/deberta-small-long-nli") pipe([dict(text='there is a cat', text_pair='there is a black cat')]) #list of (premise,hypothesis) # [{'label': 'neutral', 'score': 0.9952911138534546}] ``` # [FT] Tasknet: 3 lines fine-tuning ```python # !pip install tasknet import tasknet as tn hparams=dict(model_name='tasksource/deberta-small-long-nli', learning_rate=2e-5) model, trainer = tn.Model_Trainer([tn.AutoTask("glue/rte")], hparams) trainer.train() ``` ### Software and training details The model was trained on 600 tasks for 250k steps with a batch size of 384 and a peak learning rate of 2e-5. Training took 14 days on an Nvidia A30 24GB GPU. This is the shared model with the MNLI classifier on top. Each task had a specific CLS embedding, which is dropped 10% of the time to facilitate model use without it. All multiple-choice tasks used the same classification layers. For classification tasks, models shared weights if their labels matched. https://github.com/sileod/tasksource/ \ https://github.com/sileod/tasknet/ \ Training code: https://colab.research.google.com/drive/1iB4Oxl9_B5W3ZDzXoWJN-olUbqLBxgQS?usp=sharing # Citation More details in this [article](https://arxiv.org/abs/2301.05948): ``` @inproceedings{sileo-2024-tasksource, title = "tasksource: A Large Collection of {NLP} tasks with a Structured Dataset Preprocessing Framework", author = "Sileo, Damien", booktitle = "Proceedings of the 2024 Joint International Conference on Computational Linguistics, Language Resources and Evaluation (LREC-COLING 2024)", month = may, year = "2024", address = "Torino, Italia", publisher = "ELRA and ICCL", url = "https://aclanthology.org/2024.lrec-main.1361", pages = "15655--15684", } ``` # Model Card Contact [email protected]
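The pipeline examples above use short inputs; as referenced earlier on this card, the following is a minimal sketch of long-context NLI scoring that passes the extended 1680-token window explicitly. The premise and hypothesis strings are placeholders, and the label order is read from the model config rather than assumed.

```python
# Sketch of long-context NLI scoring with an explicit 1680-token window.
# Placeholder premise/hypothesis; labels are taken from the model config.
import torch
from transformers import AutoTokenizer, AutoModelForSequenceClassification

name = "tasksource/deberta-small-long-nli"
tokenizer = AutoTokenizer.from_pretrained(name)
model = AutoModelForSequenceClassification.from_pretrained(name)
model.eval()

premise = "..."  # a long document (report, contract, transcript), up to ~1680 tokens
hypothesis = "The agreement can be terminated with 30 days notice."

# Encode the (premise, hypothesis) pair against the extended context length
inputs = tokenizer(premise, hypothesis, truncation=True, max_length=1680, return_tensors="pt")

with torch.no_grad():
    probs = model(**inputs).logits.softmax(-1)[0]

print({model.config.id2label[i]: round(p.item(), 3) for i, p in enumerate(probs)})
```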
[ "HEAD-QA", "MEDQA", "SCICITE", "SCIFACT", "SCIQ", "SCITAIL" ]
EleutherAI/gpt-neo-2.7B
EleutherAI
text-generation
[ "transformers", "pytorch", "jax", "rust", "safetensors", "gpt_neo", "text-generation", "text generation", "causal-lm", "en", "dataset:EleutherAI/pile", "arxiv:2101.00027", "license:mit", "autotrain_compatible", "endpoints_compatible", "region:us" ]
"2022-03-02T23:29:04Z"
2023-07-09T15:52:52+00:00
87,905
478
--- datasets: - EleutherAI/pile language: - en license: mit tags: - text generation - pytorch - causal-lm --- # GPT-Neo 2.7B ## Model Description GPT-Neo 2.7B is a transformer model designed using EleutherAI's replication of the GPT-3 architecture. GPT-Neo refers to the class of models, while 2.7B represents the number of parameters of this particular pre-trained model. ## Training data GPT-Neo 2.7B was trained on the Pile, a large-scale curated dataset created by EleutherAI for the purpose of training this model. ## Training procedure This model was trained for 420 billion tokens over 400,000 steps. It was trained as an autoregressive language model, using cross-entropy loss. ## Intended Use and Limitations Trained in this way, the model learns an inner representation of the English language that can then be used to extract features useful for downstream tasks. However, the model is best at what it was pretrained for, which is generating text from a prompt. ### How to use You can use this model directly with a pipeline for text generation. This example generates a different sequence each time it's run: ```py >>> from transformers import pipeline >>> generator = pipeline('text-generation', model='EleutherAI/gpt-neo-2.7B') >>> generator("EleutherAI has", do_sample=True, min_length=50) [{'generated_text': 'EleutherAI has made a commitment to create new software packages for each of its major clients and has'}] ``` ### Limitations and Biases GPT-Neo was trained as an autoregressive language model. This means that its core functionality is taking a string of text and predicting the next token. While language models are widely used for tasks other than this, there are a lot of unknowns with this work. GPT-Neo was trained on the Pile, a dataset known to contain profanity, lewd, and otherwise abrasive language. Depending on your use case, GPT-Neo may produce socially unacceptable text. See Sections 5 and 6 of the Pile paper for a more detailed analysis of the biases in the Pile. As with all language models, it is hard to predict in advance how GPT-Neo will respond to particular prompts and offensive content may occur without warning. We recommend having a human curate or filter the outputs before releasing them, both to censor undesirable content and to improve the quality of the results. ## Eval results All evaluations were done using our [evaluation harness](https://github.com/EleutherAI/lm-evaluation-harness). Some results for GPT-2 and GPT-3 are inconsistent with the values reported in the respective papers. We are currently looking into why, and would greatly appreciate feedback and further testing of our eval harness. If you would like to contribute evaluations you have done, please reach out on our [Discord](https://discord.gg/vtRgjbM).
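As a hedged sketch of how comparable numbers can be reproduced today, the snippet below uses the harness's current Python entry point; it assumes a recent (v0.4-style) release installed via `pip install lm-eval`, so task names and the exact interface may differ from the harness version originally used to produce the tables that follow.

```python
# Sketch only: assumes lm-evaluation-harness >= 0.4 (pip install lm-eval);
# task names and the API may differ from the version used for the tables below.
import lm_eval

results = lm_eval.simple_evaluate(
    model="hf",
    model_args="pretrained=EleutherAI/gpt-neo-2.7B",
    tasks=["lambada_openai", "piqa", "winogrande", "hellaswag"],
    batch_size=8,
)
print(results["results"])
```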
### Linguistic Reasoning | Model and Size | Pile BPB | Pile PPL | Wikitext PPL | Lambada PPL | Lambada Acc | Winogrande | Hellaswag | | ---------------- | ---------- | ---------- | ------------- | ----------- | ----------- | ---------- | ----------- | | GPT-Neo 1.3B | 0.7527 | 6.159 | 13.10 | 7.498 | 57.23% | 55.01% | 38.66% | | GPT-2 1.5B | 1.0468 | ----- | 17.48 | 10.634 | 51.21% | 59.40% | 40.03% | | **GPT-Neo 2.7B** | **0.7165** | **5.646** | **11.39** | **5.626** | **62.22%** | **56.50%** | **42.73%** | | GPT-3 Ada | 0.9631 | ----- | ----- | 9.954 | 51.60% | 52.90% | 35.93% | ### Physical and Scientific Reasoning | Model and Size | MathQA | PubMedQA | Piqa | | ---------------- | ---------- | ---------- | ----------- | | GPT-Neo 1.3B | 24.05% | 54.40% | 71.11% | | GPT-2 1.5B | 23.64% | 58.33% | 70.78% | | **GPT-Neo 2.7B** | **24.72%** | **57.54%** | **72.14%** | | GPT-3 Ada | 24.29% | 52.80% | 68.88% | ### Down-Stream Applications TBD ### BibTeX entry and citation info To cite this model, use ```bibtex @software{gpt-neo, author = {Black, Sid and Leo, Gao and Wang, Phil and Leahy, Connor and Biderman, Stella}, title = {{GPT-Neo: Large Scale Autoregressive Language Modeling with Mesh-Tensorflow}}, month = mar, year = 2021, note = {{If you use this software, please cite it using these metadata.}}, publisher = {Zenodo}, version = {1.0}, doi = {10.5281/zenodo.5297715}, url = {https://doi.org/10.5281/zenodo.5297715} } @article{gao2020pile, title={The Pile: An 800GB Dataset of Diverse Text for Language Modeling}, author={Gao, Leo and Biderman, Stella and Black, Sid and Golding, Laurence and Hoppe, Travis and Foster, Charles and Phang, Jason and He, Horace and Thite, Anish and Nabeshima, Noa and others}, journal={arXiv preprint arXiv:2101.00027}, year={2020} } ```
[ "PUBMEDQA" ]
abhinand/MedEmbed-large-v0.1
abhinand
null
[ "sentence-transformers", "safetensors", "bert", "medembed", "medical-embedding", "clinical-embedding", "information-retrieval", "en", "dataset:MedicalQARetrieval", "dataset:NFCorpus", "dataset:PublicHealthQA", "dataset:TRECCOVID", "dataset:ArguAna", "base_model:BAAI/bge-large-en-v1.5", "base_model:finetune:BAAI/bge-large-en-v1.5", "license:apache-2.0", "region:us" ]
"2024-10-20T11:43:03Z"
2024-10-21T06:49:18+00:00
85,805
18
--- base_model: - BAAI/bge-large-en-v1.5 datasets: - MedicalQARetrieval - NFCorpus - PublicHealthQA - TRECCOVID - ArguAna language: en license: apache-2.0 metrics: - nDCG - MAP - Recall - Precision - MRR tags: - medembed - medical-embedding - clinical-embedding - information-retrieval - sentence-transformers --- # MedEmbed: Specialized Embedding Model for Medical and Clinical Information Retrieval ![benchmark-scores](https://cdn-uploads.huggingface.co/production/uploads/60c8619d95d852a24572b025/gTx5-m68LQ3eyNd6fLki2.png) ## Model Description MedEmbed is a family of embedding models fine-tuned specifically for medical and clinical data, designed to enhance performance in healthcare-related natural language processing (NLP) tasks, particularly information retrieval. **GitHub Repo:** [https://github.com/abhinand5/MedEmbed](https://github.com/abhinand5/MedEmbed) **Technical Blog Post:** [https://huggingface.co/blog/abhinand/medembed-finetuned-embedding-models-for-medical-ir](https://huggingface.co/blog/abhinand/medembed-finetuned-embedding-models-for-medical-ir) ## Intended Use This model is intended for use in medical and clinical contexts to improve information retrieval, question answering, and semantic search tasks. It can be integrated into healthcare systems, research tools, and medical literature databases to enhance search capabilities and information access. ## Training Data ![synthetic-datagen-flow](https://cdn-uploads.huggingface.co/production/uploads/60c8619d95d852a24572b025/asaA5QDO_j0PWFQV9NXCu.png) The model was trained using a simple yet effective synthetic data generation pipeline: 1. Source: Clinical notes from PubMed Central (PMC) 2. Processing: [LLaMA 3.1 70B](https://huggingface.co/meta-llama/Llama-3.1-70B-Instruct) model used to generate query-response pairs 3. Augmentation: Negative sampling for challenging examples 4. Format: Triplets (query, positive response, negative response) for contrastive learning ## Performance MedEmbed consistently outperforms general-purpose embedding models across various medical NLP benchmarks: - ArguAna - MedicalQARetrieval - NFCorpus - PublicHealthQA - TRECCOVID Specific performance metrics (nDCG, MAP, Recall, Precision, MRR) are available in the full documentation. ## Limitations While highly effective for medical and clinical data, this model may not generalize well to non-medical domains. It should be used with caution in general-purpose NLP tasks. ## Ethical Considerations Users should be aware of potential biases in medical data and the ethical implications of AI in healthcare. This model should be used as a tool to assist, not replace, human expertise in medical decision-making. ## Citation If you use this model in your research, please cite: ```bibtex @software{balachandran2024medembed, author = {Balachandran, Abhinand}, title = {MedEmbed: Medical-Focused Embedding Models}, year = {2024}, url = {https://github.com/abhinand5/MedEmbed} } ``` For more detailed information, visit our GitHub repository.
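The card does not include a usage snippet. Below is a minimal retrieval sketch assuming the checkpoint loads through the standard `SentenceTransformer` interface, as its base model `BAAI/bge-large-en-v1.5` does; the query and passages are illustrative, and any query-instruction prefix used by the base model is omitted here.

```python
# Minimal retrieval sketch; assumes the checkpoint works with the standard
# SentenceTransformer interface like its base model (BAAI/bge-large-en-v1.5).
# Example query/passages are illustrative only.
from sentence_transformers import SentenceTransformer, util

model = SentenceTransformer("abhinand/MedEmbed-large-v0.1")

queries = ["What is the first-line pharmacologic treatment for type 2 diabetes?"]
passages = [
    "Metformin is generally recommended as the initial pharmacologic agent for type 2 diabetes.",
    "Annual influenza vaccination is recommended for most adults.",
]

query_emb = model.encode(queries, normalize_embeddings=True)
passage_emb = model.encode(passages, normalize_embeddings=True)

# Cosine similarity between each query and each candidate passage
print(util.cos_sim(query_emb, passage_emb))
```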
[ "MEDICAL DATA" ]
EleutherAI/pythia-70m
EleutherAI
null
[ "gpt-neox", "pytorch", "safetensors", "gpt_neox", "causal-lm", "pythia", "en", "dataset:EleutherAI/pile", "arxiv:2304.01373", "arxiv:2101.00027", "arxiv:2201.07311", "license:apache-2.0", "region:us" ]
"2023-02-13T14:54:51Z"
2023-11-21T19:04:09+00:00
85,506
64
--- datasets: - EleutherAI/pile language: - en library_name: gpt-neox license: apache-2.0 tags: - pytorch - causal-lm - pythia --- The *Pythia Scaling Suite* is a collection of models developed to facilitate interpretability research [(see paper)](https://arxiv.org/pdf/2304.01373.pdf). It contains two sets of eight models of sizes 70M, 160M, 410M, 1B, 1.4B, 2.8B, 6.9B, and 12B. For each size, there are two models: one trained on the Pile, and one trained on the Pile after the dataset has been globally deduplicated. All 8 model sizes are trained on the exact same data, in the exact same order. We also provide 154 intermediate checkpoints per model, hosted on Hugging Face as branches. The Pythia model suite was deliberately designed to promote scientific research on large language models, especially interpretability research. Despite not centering downstream performance as a design goal, we find the models <a href="#evaluations">match or exceed</a> the performance of similar and same-sized models, such as those in the OPT and GPT-Neo suites. <details> <summary style="font-weight:600">Details on previous early release and naming convention.</summary> Previously, we released an early version of the Pythia suite to the public. However, we decided to retrain the model suite to address a few hyperparameter discrepancies. This model card <a href="#changelog">lists the changes</a>; see appendix B in the Pythia paper for further discussion. We found no difference in benchmark performance between the two Pythia versions. The old models are [still available](https://huggingface.co/models?other=pythia_v0), but we suggest the retrained suite if you are just starting to use Pythia.<br> **This is the current release.** Please note that all models in the *Pythia* suite were renamed in January 2023. For clarity, a <a href="#naming-convention-and-parameter-count">table comparing the old and new names</a> is provided in this model card, together with exact parameter counts. </details> <br> # Pythia-70M ## Model Details - Developed by: [EleutherAI](http://eleuther.ai) - Model type: Transformer-based Language Model - Language: English - Learn more: [Pythia's GitHub repository](https://github.com/EleutherAI/pythia) for training procedure, config files, and details on how to use. [See paper](https://arxiv.org/pdf/2304.01373.pdf) for more evals and implementation details. - Library: [GPT-NeoX](https://github.com/EleutherAI/gpt-neox) - License: Apache 2.0 - Contact: to ask questions about this model, join the [EleutherAI Discord](https://discord.gg/zBGx3azzUn), and post them in `#release-discussion`. Please read the existing *Pythia* documentation before asking about it in the EleutherAI Discord. For general correspondence: [contact@eleuther. ai](mailto:[email protected]). 
<figure> | Pythia model | Non-Embedding Params | Layers | Model Dim | Heads | Batch Size | Learning Rate | Equivalent Models | | -----------: | -------------------: | :----: | :-------: | :---: | :--------: | :-------------------: | :--------------------: | | 70M | 18,915,328 | 6 | 512 | 8 | 2M | 1.0 x 10<sup>-3</sup> | — | | 160M | 85,056,000 | 12 | 768 | 12 | 2M | 6.0 x 10<sup>-4</sup> | GPT-Neo 125M, OPT-125M | | 410M | 302,311,424 | 24 | 1024 | 16 | 2M | 3.0 x 10<sup>-4</sup> | OPT-350M | | 1.0B | 805,736,448 | 16 | 2048 | 8 | 2M | 3.0 x 10<sup>-4</sup> | — | | 1.4B | 1,208,602,624 | 24 | 2048 | 16 | 2M | 2.0 x 10<sup>-4</sup> | GPT-Neo 1.3B, OPT-1.3B | | 2.8B | 2,517,652,480 | 32 | 2560 | 32 | 2M | 1.6 x 10<sup>-4</sup> | GPT-Neo 2.7B, OPT-2.7B | | 6.9B | 6,444,163,072 | 32 | 4096 | 32 | 2M | 1.2 x 10<sup>-4</sup> | OPT-6.7B | | 12B | 11,327,027,200 | 36 | 5120 | 40 | 2M | 1.2 x 10<sup>-4</sup> | — | <figcaption>Engineering details for the <i>Pythia Suite</i>. Deduped and non-deduped models of a given size have the same hyperparameters. “Equivalent” models have <b>exactly</b> the same architecture, and the same number of non-embedding parameters.</figcaption> </figure> ## Uses and Limitations ### Intended Use The primary intended use of Pythia is research on the behavior, functionality, and limitations of large language models. This suite is intended to provide a controlled setting for performing scientific experiments. We also provide 154 checkpoints per model: initial `step0`, 10 log-spaced checkpoints `step{1,2,4...512}`, and 143 evenly-spaced checkpoints from `step1000` to `step143000`. These checkpoints are hosted on Hugging Face as branches. Note that branch `143000` corresponds exactly to the model checkpoint on the `main` branch of each model. You may also further fine-tune and adapt Pythia-70M for deployment, as long as your use is in accordance with the Apache 2.0 license. Pythia models work with the Hugging Face [Transformers Library](https://huggingface.co/docs/transformers/index). If you decide to use pre-trained Pythia-70M as a basis for your fine-tuned model, please conduct your own risk and bias assessment. ### Out-of-scope use The Pythia Suite is **not** intended for deployment. It is not a in itself a product and cannot be used for human-facing interactions. For example, the model may generate harmful or offensive text. Please evaluate the risks associated with your particular use case. Pythia models are English-language only, and are not suitable for translation or generating text in other languages. Pythia-70M has not been fine-tuned for downstream contexts in which language models are commonly deployed, such as writing genre prose, or commercial chatbots. This means Pythia-70M will **not** respond to a given prompt the way a product like ChatGPT does. This is because, unlike this model, ChatGPT was fine-tuned using methods such as Reinforcement Learning from Human Feedback (RLHF) to better “follow” human instructions. ### Limitations and biases The core functionality of a large language model is to take a string of text and predict the next token. The token used by the model need not produce the most “accurate” text. Never rely on Pythia-70M to produce factually accurate output. This model was trained on [the Pile](https://pile.eleuther.ai/), a dataset known to contain profanity and texts that are lewd or otherwise offensive. 
See [Section 6 of the Pile paper](https://arxiv.org/abs/2101.00027) for a discussion of documented biases with regards to gender, religion, and race. Pythia-70M may produce socially unacceptable or undesirable text, *even if* the prompt itself does not include anything explicitly offensive. If you plan on using text generated through, for example, the Hosted Inference API, we recommend having a human curate the outputs of this language model before presenting it to other people. Please inform your audience that the text was generated by Pythia-70M. ### Quickstart Pythia models can be loaded and used via the following code, demonstrated here for the third `pythia-70m-deduped` checkpoint: ```python from transformers import GPTNeoXForCausalLM, AutoTokenizer model = GPTNeoXForCausalLM.from_pretrained( "EleutherAI/pythia-70m-deduped", revision="step3000", cache_dir="./pythia-70m-deduped/step3000", ) tokenizer = AutoTokenizer.from_pretrained( "EleutherAI/pythia-70m-deduped", revision="step3000", cache_dir="./pythia-70m-deduped/step3000", ) inputs = tokenizer("Hello, I am", return_tensors="pt") tokens = model.generate(**inputs) tokenizer.decode(tokens[0]) ``` Revision/branch `step143000` corresponds exactly to the model checkpoint on the `main` branch of each model.<br> For more information on how to use all Pythia models, see [documentation on GitHub](https://github.com/EleutherAI/pythia). ## Training ### Training data [The Pile](https://pile.eleuther.ai/) is a 825GiB general-purpose dataset in English. It was created by EleutherAI specifically for training large language models. It contains texts from 22 diverse sources, roughly broken down into five categories: academic writing (e.g. arXiv), internet (e.g. CommonCrawl), prose (e.g. Project Gutenberg), dialogue (e.g. YouTube subtitles), and miscellaneous (e.g. GitHub, Enron Emails). See [the Pile paper](https://arxiv.org/abs/2101.00027) for a breakdown of all data sources, methodology, and a discussion of ethical implications. Consult [the datasheet](https://arxiv.org/abs/2201.07311) for more detailed documentation about the Pile and its component datasets. The Pile can be downloaded from the [official website](https://pile.eleuther.ai/), or from a [community mirror](https://the-eye.eu/public/AI/pile/).<br> The Pile was **not** deduplicated before being used to train Pythia-70M. ### Training procedure All models were trained on the exact same data, in the exact same order. Each model saw 299,892,736,000 tokens during training, and 143 checkpoints for each model are saved every 2,097,152,000 tokens, spaced evenly throughout training, from `step1000` to `step143000` (which is the same as `main`). In addition, we also provide frequent early checkpoints: `step0` and `step{1,2,4...512}`. This corresponds to training for just under 1 epoch on the Pile for non-deduplicated models, and about 1.5 epochs on the deduplicated Pile. All *Pythia* models trained for 143000 steps at a batch size of 2M (2,097,152 tokens).<br> See [GitHub](https://github.com/EleutherAI/pythia) for more details on training procedure, including [how to reproduce it](https://github.com/EleutherAI/pythia/blob/main/README.md#reproducing-training).<br> Pythia uses the same tokenizer as [GPT-NeoX- 20B](https://huggingface.co/EleutherAI/gpt-neox-20b). ## Evaluations All 16 *Pythia* models were evaluated using the [LM Evaluation Harness](https://github.com/EleutherAI/lm-evaluation-harness). 
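To make the evaluation setup above concrete, here is a minimal sketch of scoring Pythia-70M on a few of the tasks plotted below. It assumes the Python API of a recent (v0.4+) release of the LM Evaluation Harness; function and argument names may differ from the exact harness version used for the paper, so treat it as illustrative rather than the canonical evaluation command.

```python
# Hedged sketch: score Pythia-70M on a handful of the tasks plotted below.
# Assumes `pip install lm-eval` (v0.4+); the API may differ in other versions.
import lm_eval

results = lm_eval.simple_evaluate(
    model="hf",                                    # Hugging Face transformers backend
    model_args="pretrained=EleutherAI/pythia-70m",
    tasks=["lambada_openai", "piqa", "winogrande", "arc_easy", "sciq"],
    batch_size=8,
)

for task, metrics in results["results"].items():
    print(task, metrics)
```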
You can access the results by model and step at `results/json/*` in the [GitHub repository](https://github.com/EleutherAI/pythia/tree/main/results/json/).<br> Expand the sections below to see plots of evaluation results for all Pythia and Pythia-deduped models compared with OPT and BLOOM. <details> <summary>LAMBADA – OpenAI</summary> <img src="/EleutherAI/pythia-12b/resolve/main/eval_plots/lambada_openai_v1.png" style="width:auto"/> </details> <details> <summary>Physical Interaction: Question Answering (PIQA)</summary> <img src="/EleutherAI/pythia-12b/resolve/main/eval_plots/piqa_v1.png" style="width:auto"/> </details> <details> <summary>WinoGrande</summary> <img src="/EleutherAI/pythia-12b/resolve/main/eval_plots/winogrande_v1.png" style="width:auto"/> </details> <details> <summary>AI2 Reasoning Challenge—Easy Set</summary> <img src="/EleutherAI/pythia-12b/resolve/main/eval_plots/arc_easy_v1.png" style="width:auto"/> </details> <details> <summary>SciQ</summary> <img src="/EleutherAI/pythia-12b/resolve/main/eval_plots/sciq_v1.png" style="width:auto"/> </details> ## Changelog This section describes the differences between the previously released [Pythia v0](https://huggingface.co/models?other=pythia_v0) and the current models. See Appendix B of the Pythia paper for further discussion of these changes and the motivation behind them. We found that retraining Pythia had no impact on benchmark performance. - All model sizes are now trained with a uniform batch size of 2M tokens. Previously, the models of size 160M, 410M, and 1.4B parameters were trained with batch sizes of 4M tokens. - We added checkpoints at initialization (step 0) and steps {1,2,4,8,16,32,64,128,256,512} in addition to every 1000 training steps. - Flash Attention was used in the new retrained suite. - We remedied a minor inconsistency that existed in the original suite: all models of size 2.8B parameters or smaller had a learning rate (LR) schedule which decayed to a minimum LR of 10% of the starting LR, but the 6.9B and 12B models used an LR schedule which decayed to a minimum LR of 0. In the redone training runs, we rectified this inconsistency: all models were trained with the LR decaying to a minimum of 0.1× their maximum LR. ### Naming convention and parameter count *Pythia* models were renamed in January 2023. It is possible that the old naming convention still persists in some documentation by accident. The current naming convention (70M, 160M, etc.) is based on total parameter count. <figure style="width:32em"> | current Pythia suffix | old suffix | total params | non-embedding params | | --------------------: | ---------: | -------------: | -------------------: | | 70M | 19M | 70,426,624 | 18,915,328 | | 160M | 125M | 162,322,944 | 85,056,000 | | 410M | 350M | 405,334,016 | 302,311,424 | | 1B | 800M | 1,011,781,632 | 805,736,448 | | 1.4B | 1.3B | 1,414,647,808 | 1,208,602,624 | | 2.8B | 2.7B | 2,775,208,960 | 2,517,652,480 | | 6.9B | 6.7B | 6,857,302,016 | 6,444,163,072 | | 12B | 13B | 11,846,072,320 | 11,327,027,200 | </figure> # [Open LLM Leaderboard Evaluation Results](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard) Detailed results can be found [here](https://huggingface.co/datasets/open-llm-leaderboard/details_EleutherAI__pythia-70m) | Metric | Value | |-----------------------|---------------------------| | Avg.
| 25.28 | | ARC (25-shot) | 21.59 | | HellaSwag (10-shot) | 27.29 | | MMLU (5-shot) | 25.9 | | TruthfulQA (0-shot) | 47.06 | | Winogrande (5-shot) | 51.46 | | GSM8K (5-shot) | 0.3 | | DROP (3-shot) | 3.33 |
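For reference, the leaderboard average above is simply the arithmetic mean of the seven benchmark scores; a few lines of plain Python reproduce it from the numbers in the table (nothing is re-evaluated here):

```python
# Reproduce the Open LLM Leaderboard "Avg." for Pythia-70M from the table above.
scores = {
    "ARC (25-shot)": 21.59,
    "HellaSwag (10-shot)": 27.29,
    "MMLU (5-shot)": 25.9,
    "TruthfulQA (0-shot)": 47.06,
    "Winogrande (5-shot)": 51.46,
    "GSM8K (5-shot)": 0.3,
    "DROP (3-shot)": 3.33,
}
average = sum(scores.values()) / len(scores)
print(round(average, 2))  # 25.28, matching the "Avg." row
```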
[ "SCIQ" ]
EleutherAI/pythia-1.4b
EleutherAI
text-generation
[ "transformers", "pytorch", "safetensors", "gpt_neox", "text-generation", "causal-lm", "pythia", "en", "dataset:EleutherAI/the_pile", "arxiv:2304.01373", "arxiv:2101.00027", "arxiv:2201.07311", "license:apache-2.0", "autotrain_compatible", "text-generation-inference", "endpoints_compatible", "region:us" ]
"2023-02-09T14:08:20Z"
2023-07-09T16:01:57+00:00
83,953
23
--- datasets: - EleutherAI/the_pile language: - en license: apache-2.0 tags: - pytorch - causal-lm - pythia --- The *Pythia Scaling Suite* is a collection of models developed to facilitate interpretability research [(see paper)](https://arxiv.org/pdf/2304.01373.pdf). It contains two sets of eight models of sizes 70M, 160M, 410M, 1B, 1.4B, 2.8B, 6.9B, and 12B. For each size, there are two models: one trained on the Pile, and one trained on the Pile after the dataset has been globally deduplicated. All 8 model sizes are trained on the exact same data, in the exact same order. We also provide 154 intermediate checkpoints per model, hosted on Hugging Face as branches. The Pythia model suite was deliberately designed to promote scientific research on large language models, especially interpretability research. Despite not centering downstream performance as a design goal, we find the models <a href="#evaluations">match or exceed</a> the performance of similar and same-sized models, such as those in the OPT and GPT-Neo suites. <details> <summary style="font-weight:600">Details on previous early release and naming convention.</summary> Previously, we released an early version of the Pythia suite to the public. However, we decided to retrain the model suite to address a few hyperparameter discrepancies. This model card <a href="#changelog">lists the changes</a>; see appendix B in the Pythia paper for further discussion. We found no difference in benchmark performance between the two Pythia versions. The old models are [still available](https://huggingface.co/models?other=pythia_v0), but we suggest the retrained suite if you are just starting to use Pythia.<br> **This is the current release.** Please note that all models in the *Pythia* suite were renamed in January 2023. For clarity, a <a href="#naming-convention-and-parameter-count">table comparing the old and new names</a> is provided in this model card, together with exact parameter counts. </details> <br> # Pythia-1.4B ## Model Details - Developed by: [EleutherAI](http://eleuther.ai) - Model type: Transformer-based Language Model - Language: English - Learn more: [Pythia's GitHub repository](https://github.com/EleutherAI/pythia) for training procedure, config files, and details on how to use. [See paper](https://arxiv.org/pdf/2304.01373.pdf) for more evals and implementation details. - Library: [GPT-NeoX](https://github.com/EleutherAI/gpt-neox) - License: Apache 2.0 - Contact: to ask questions about this model, join the [EleutherAI Discord](https://discord.gg/zBGx3azzUn), and post them in `#release-discussion`. Please read the existing *Pythia* documentation before asking about it in the EleutherAI Discord. For general correspondence: [contact@eleuther. ai](mailto:[email protected]). 
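Since the Quickstart later in this card demonstrates the 70M deduped variant, the following is a minimal sketch for loading Pythia-1.4B itself from the `main` branch with the standard Transformers API; the generation settings are illustrative only.

```python
from transformers import GPTNeoXForCausalLM, AutoTokenizer

# Load the final (main-branch) Pythia-1.4B checkpoint and its tokenizer.
model = GPTNeoXForCausalLM.from_pretrained("EleutherAI/pythia-1.4b")
tokenizer = AutoTokenizer.from_pretrained("EleutherAI/pythia-1.4b")

inputs = tokenizer("Hello, I am", return_tensors="pt")
tokens = model.generate(**inputs, max_new_tokens=20)
print(tokenizer.decode(tokens[0]))
```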
<figure> | Pythia model | Non-Embedding Params | Layers | Model Dim | Heads | Batch Size | Learning Rate | Equivalent Models | | -----------: | -------------------: | :----: | :-------: | :---: | :--------: | :-------------------: | :--------------------: | | 70M | 18,915,328 | 6 | 512 | 8 | 2M | 1.0 x 10<sup>-3</sup> | — | | 160M | 85,056,000 | 12 | 768 | 12 | 2M | 6.0 x 10<sup>-4</sup> | GPT-Neo 125M, OPT-125M | | 410M | 302,311,424 | 24 | 1024 | 16 | 2M | 3.0 x 10<sup>-4</sup> | OPT-350M | | 1.0B | 805,736,448 | 16 | 2048 | 8 | 2M | 3.0 x 10<sup>-4</sup> | — | | 1.4B | 1,208,602,624 | 24 | 2048 | 16 | 2M | 2.0 x 10<sup>-4</sup> | GPT-Neo 1.3B, OPT-1.3B | | 2.8B | 2,517,652,480 | 32 | 2560 | 32 | 2M | 1.6 x 10<sup>-4</sup> | GPT-Neo 2.7B, OPT-2.7B | | 6.9B | 6,444,163,072 | 32 | 4096 | 32 | 2M | 1.2 x 10<sup>-4</sup> | OPT-6.7B | | 12B | 11,327,027,200 | 36 | 5120 | 40 | 2M | 1.2 x 10<sup>-4</sup> | — | <figcaption>Engineering details for the <i>Pythia Suite</i>. Deduped and non-deduped models of a given size have the same hyperparameters. “Equivalent” models have <b>exactly</b> the same architecture, and the same number of non-embedding parameters.</figcaption> </figure> ## Uses and Limitations ### Intended Use The primary intended use of Pythia is research on the behavior, functionality, and limitations of large language models. This suite is intended to provide a controlled setting for performing scientific experiments. We also provide 154 checkpoints per model: initial `step0`, 10 log-spaced checkpoints `step{1,2,4...512}`, and 143 evenly-spaced checkpoints from `step1000` to `step143000`. These checkpoints are hosted on Hugging Face as branches. Note that branch `143000` corresponds exactly to the model checkpoint on the `main` branch of each model. You may also further fine-tune and adapt Pythia-1.4B for deployment, as long as your use is in accordance with the Apache 2.0 license. Pythia models work with the Hugging Face [Transformers Library](https://huggingface.co/docs/transformers/index). If you decide to use pre-trained Pythia-1.4B as a basis for your fine-tuned model, please conduct your own risk and bias assessment. ### Out-of-scope use The Pythia Suite is **not** intended for deployment. It is not in itself a product and cannot be used for human-facing interactions. For example, the model may generate harmful or offensive text. Please evaluate the risks associated with your particular use case. Pythia models are English-language only, and are not suitable for translation or generating text in other languages. Pythia-1.4B has not been fine-tuned for downstream contexts in which language models are commonly deployed, such as writing genre prose or commercial chatbots. This means Pythia-1.4B will **not** respond to a given prompt the way a product like ChatGPT does. This is because, unlike this model, ChatGPT was fine-tuned using methods such as Reinforcement Learning from Human Feedback (RLHF) to better “follow” human instructions. ### Limitations and biases The core functionality of a large language model is to take a string of text and predict the next token. The token deemed statistically most likely by the model need not produce the most “accurate” text. Never rely on Pythia-1.4B to produce factually accurate output. This model was trained on [the Pile](https://pile.eleuther.ai/), a dataset known to contain profanity and texts that are lewd or otherwise offensive.
See [Section 6 of the Pile paper](https://arxiv.org/abs/2101.00027) for a discussion of documented biases with regards to gender, religion, and race. Pythia-1.4B may produce socially unacceptable or undesirable text, *even if* the prompt itself does not include anything explicitly offensive. If you plan on using text generated through, for example, the Hosted Inference API, we recommend having a human curate the outputs of this language model before presenting it to other people. Please inform your audience that the text was generated by Pythia-1.4B. ### Quickstart Pythia models can be loaded and used via the following code, demonstrated here for the third `pythia-70m-deduped` checkpoint: ```python from transformers import GPTNeoXForCausalLM, AutoTokenizer model = GPTNeoXForCausalLM.from_pretrained( "EleutherAI/pythia-70m-deduped", revision="step3000", cache_dir="./pythia-70m-deduped/step3000", ) tokenizer = AutoTokenizer.from_pretrained( "EleutherAI/pythia-70m-deduped", revision="step3000", cache_dir="./pythia-70m-deduped/step3000", ) inputs = tokenizer("Hello, I am", return_tensors="pt") tokens = model.generate(**inputs) tokenizer.decode(tokens[0]) ``` Revision/branch `step143000` corresponds exactly to the model checkpoint on the `main` branch of each model.<br> For more information on how to use all Pythia models, see [documentation on GitHub](https://github.com/EleutherAI/pythia). ## Training ### Training data [The Pile](https://pile.eleuther.ai/) is a 825GiB general-purpose dataset in English. It was created by EleutherAI specifically for training large language models. It contains texts from 22 diverse sources, roughly broken down into five categories: academic writing (e.g. arXiv), internet (e.g. CommonCrawl), prose (e.g. Project Gutenberg), dialogue (e.g. YouTube subtitles), and miscellaneous (e.g. GitHub, Enron Emails). See [the Pile paper](https://arxiv.org/abs/2101.00027) for a breakdown of all data sources, methodology, and a discussion of ethical implications. Consult [the datasheet](https://arxiv.org/abs/2201.07311) for more detailed documentation about the Pile and its component datasets. The Pile can be downloaded from the [official website](https://pile.eleuther.ai/), or from a [community mirror](https://the-eye.eu/public/AI/pile/).<br> The Pile was **not** deduplicated before being used to train Pythia-1.4B. ### Training procedure All models were trained on the exact same data, in the exact same order. Each model saw 299,892,736,000 tokens during training, and 143 checkpoints for each model are saved every 2,097,152,000 tokens, spaced evenly throughout training, from `step1000` to `step143000` (which is the same as `main`). In addition, we also provide frequent early checkpoints: `step0` and `step{1,2,4...512}`. This corresponds to training for just under 1 epoch on the Pile for non-deduplicated models, and about 1.5 epochs on the deduplicated Pile. All *Pythia* models trained for 143000 steps at a batch size of 2M (2,097,152 tokens).<br> See [GitHub](https://github.com/EleutherAI/pythia) for more details on training procedure, including [how to reproduce it](https://github.com/EleutherAI/pythia/blob/main/README.md#reproducing-training).<br> Pythia uses the same tokenizer as [GPT-NeoX- 20B](https://huggingface.co/EleutherAI/gpt-neox-20b). ## Evaluations All 16 *Pythia* models were evaluated using the [LM Evaluation Harness](https://github.com/EleutherAI/lm-evaluation-harness). 
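Before the results, a quick sanity check of the training-schedule arithmetic quoted in the Training procedure section above; this is plain Python over the numbers stated in this card, with no model downloads involved.

```python
# Check the schedule arithmetic stated above: 143,000 steps at a 2M-token batch.
tokens_per_step = 2_097_152          # batch size in tokens
total_steps = 143_000
checkpoint_interval_steps = 1_000

print(tokens_per_step * total_steps)                 # 299,892,736,000 tokens seen in training
print(tokens_per_step * checkpoint_interval_steps)   # 2,097,152,000 tokens between saved checkpoints
```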
You can access the results by model and step at `results/json/*` in the [GitHub repository](https://github.com/EleutherAI/pythia/tree/main/results/json/).<br> Expand the sections below to see plots of evaluation results for all Pythia and Pythia-deduped models compared with OPT and BLOOM. <details> <summary>LAMBADA – OpenAI</summary> <img src="/EleutherAI/pythia-12b/resolve/main/eval_plots/lambada_openai_v1.png" style="width:auto"/> </details> <details> <summary>Physical Interaction: Question Answering (PIQA)</summary> <img src="/EleutherAI/pythia-12b/resolve/main/eval_plots/piqa_v1.png" style="width:auto"/> </details> <details> <summary>WinoGrande</summary> <img src="/EleutherAI/pythia-12b/resolve/main/eval_plots/winogrande_v1.png" style="width:auto"/> </details> <details> <summary>AI2 Reasoning Challenge—Easy Set</summary> <img src="/EleutherAI/pythia-12b/resolve/main/eval_plots/arc_easy_v1.png" style="width:auto"/> </details> <details> <summary>SciQ</summary> <img src="/EleutherAI/pythia-12b/resolve/main/eval_plots/sciq_v1.png" style="width:auto"/> </details> ## Changelog This section describes the differences between the previously released [Pythia v0](https://huggingface.co/models?other=pythia_v0) and the current models. See Appendix B of the Pythia paper for further discussion of these changes and the motivation behind them. We found that retraining Pythia had no impact on benchmark performance. - All model sizes are now trained with a uniform batch size of 2M tokens. Previously, the models of size 160M, 410M, and 1.4B parameters were trained with batch sizes of 4M tokens. - We added checkpoints at initialization (step 0) and steps {1,2,4,8,16,32,64,128,256,512} in addition to every 1000 training steps. - Flash Attention was used in the new retrained suite. - We remedied a minor inconsistency that existed in the original suite: all models of size 2.8B parameters or smaller had a learning rate (LR) schedule which decayed to a minimum LR of 10% of the starting LR, but the 6.9B and 12B models used an LR schedule which decayed to a minimum LR of 0. In the redone training runs, we rectified this inconsistency: all models were trained with the LR decaying to a minimum of 0.1× their maximum LR. ### Naming convention and parameter count *Pythia* models were renamed in January 2023. It is possible that the old naming convention still persists in some documentation by accident. The current naming convention (70M, 160M, etc.) is based on total parameter count. <figure style="width:32em"> | current Pythia suffix | old suffix | total params | non-embedding params | | --------------------: | ---------: | -------------: | -------------------: | | 70M | 19M | 70,426,624 | 18,915,328 | | 160M | 125M | 162,322,944 | 85,056,000 | | 410M | 350M | 405,334,016 | 302,311,424 | | 1B | 800M | 1,011,781,632 | 805,736,448 | | 1.4B | 1.3B | 1,414,647,808 | 1,208,602,624 | | 2.8B | 2.7B | 2,775,208,960 | 2,517,652,480 | | 6.9B | 6.7B | 6,857,302,016 | 6,444,163,072 | | 12B | 13B | 11,846,072,320 | 11,327,027,200 | </figure>
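The gap between total and non-embedding parameters in the table above is consistent with two untied embedding matrices (input embedding and output unembedding) over a padded vocabulary of roughly 50K entries. That reading is an inference from the numbers, not a statement made in this card; the sketch below merely checks the arithmetic for a few rows, taking the model dimensions from the engineering table earlier in the card.

```python
# Relate total vs. non-embedding parameter counts from the tables above.
# Assumption (not stated in the card): untied input/output embeddings, so
# total_params - non_embedding_params = 2 * padded_vocab * d_model.
rows = {
    # name: (total params, non-embedding params, model dim)
    "70M":  (70_426_624,     18_915_328,     512),
    "1B":   (1_011_781_632,  805_736_448,    2048),
    "1.4B": (1_414_647_808,  1_208_602_624,  2048),
    "12B":  (11_846_072_320, 11_327_027_200, 5120),
}
for name, (total, non_emb, d_model) in rows.items():
    implied_vocab = (total - non_emb) / (2 * d_model)
    print(f"{name}: implied padded vocab ~ {implied_vocab:,.0f}")
# ~50,304 for most sizes (50,688 for 12B): the GPT-NeoX tokenizer vocabulary
# padded to a hardware-friendly size.
```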
[ "SCIQ" ]
Snowflake/snowflake-arctic-embed-l-v2.0
Snowflake
sentence-similarity
[ "sentence-transformers", "onnx", "safetensors", "xlm-roberta", "feature-extraction", "sentence-similarity", "mteb", "arctic", "snowflake-arctic-embed", "transformers.js", "af", "ar", "az", "be", "bg", "bn", "ca", "ceb", "cs", "cy", "da", "de", "el", "en", "es", "et", "eu", "fa", "fi", "fr", "gl", "gu", "he", "hi", "hr", "ht", "hu", "hy", "id", "is", "it", "ja", "jv", "ka", "kk", "km", "kn", "ko", "ky", "lo", "lt", "lv", "mk", "ml", "mn", "mr", "ms", "my", "ne", "nl", "pa", "pl", "pt", "qu", "ro", "ru", "si", "sk", "sl", "so", "sq", "sr", "sv", "sw", "ta", "te", "th", "tl", "tr", "uk", "ur", "vi", "yo", "zh", "arxiv:2412.04506", "license:apache-2.0", "model-index", "autotrain_compatible", "text-embeddings-inference", "endpoints_compatible", "region:us" ]
"2024-11-08T16:55:31Z"
2024-12-14T00:18:38+00:00
78,528
132
--- language: - af - ar - az - be - bg - bn - ca - ceb - cs - cy - da - de - el - en - es - et - eu - fa - fi - fr - gl - gu - he - hi - hr - ht - hu - hy - id - is - it - ja - jv - ka - kk - km - kn - ko - ky - lo - lt - lv - mk - ml - mn - mr - ms - my - ne - nl - pa - pl - pt - qu - ro - ru - si - sk - sl - so - sq - sr - sv - sw - ta - te - th - tl - tr - uk - ur - vi - yo - zh license: apache-2.0 pipeline_tag: sentence-similarity tags: - sentence-transformers - feature-extraction - sentence-similarity - mteb - arctic - snowflake-arctic-embed - transformers.js model-index: - name: snowflake-arctic-embed-l-v2.0 results: - task: type: Classification dataset: name: MTEB AmazonCounterfactualClassification (en-ext) type: mteb/amazon_counterfactual config: en-ext split: test revision: e8379541af4e31359cca9fbcf4b00f2671dba205 metrics: - type: accuracy value: 67.039 - type: f1 value: 55.1806 - type: f1_weighted value: 73.41149999999999 - type: ap value: 17.9914 - type: ap_weighted value: 17.9914 - type: main_score value: 67.039 - task: type: Classification dataset: name: MTEB AmazonCounterfactualClassification (en) type: mteb/amazon_counterfactual config: en split: test revision: e8379541af4e31359cca9fbcf4b00f2671dba205 metrics: - type: accuracy value: 65.59700000000001 - type: f1 value: 60.244299999999996 - type: f1_weighted value: 68.9975 - type: ap value: 29.762100000000004 - type: ap_weighted value: 29.762100000000004 - type: main_score value: 65.59700000000001 - task: type: Classification dataset: name: MTEB AmazonPolarityClassification (default) type: mteb/amazon_polarity config: default split: test revision: e2d317d38cd51312af73b3d32a06d1a08b442046 metrics: - type: accuracy value: 74.2565 - type: f1 value: 74.0291 - type: f1_weighted value: 74.0291 - type: ap value: 68.7595 - type: ap_weighted value: 68.7595 - type: main_score value: 74.2565 - task: type: Classification dataset: name: MTEB AmazonReviewsClassification (en) type: mteb/amazon_reviews_multi config: en split: test revision: 1399c76144fd37290681b995c656ef9b2e06e26d metrics: - type: accuracy value: 34.946 - type: f1 value: 34.2853 - type: f1_weighted value: 34.2853 - type: main_score value: 34.946 - task: type: Retrieval dataset: name: MTEB ArguAna (default) type: mteb/arguana config: default split: test revision: c22ab2a51041ffd869aaddef7af8d8215647e41a metrics: - type: ndcg_at_1 value: 33.286 - type: ndcg_at_3 value: 49.051 - type: ndcg_at_5 value: 54.107000000000006 - type: ndcg_at_10 value: 59.146 - type: ndcg_at_20 value: 60.897999999999996 - type: ndcg_at_100 value: 61.78399999999999 - type: ndcg_at_1000 value: 61.845000000000006 - type: map_at_1 value: 33.286 - type: map_at_3 value: 45.14 - type: map_at_5 value: 47.939 - type: map_at_10 value: 50.046 - type: map_at_20 value: 50.56 - type: map_at_100 value: 50.708 - type: map_at_1000 value: 50.712 - type: recall_at_1 value: 33.286 - type: recall_at_3 value: 60.38400000000001 - type: recall_at_5 value: 72.688 - type: recall_at_10 value: 88.122 - type: recall_at_20 value: 94.808 - type: recall_at_100 value: 99.21799999999999 - type: recall_at_1000 value: 99.644 - type: precision_at_1 value: 33.286 - type: precision_at_3 value: 20.128 - type: precision_at_5 value: 14.538 - type: precision_at_10 value: 8.812000000000001 - type: precision_at_20 value: 4.74 - type: precision_at_100 value: 0.992 - type: precision_at_1000 value: 0.1 - type: mrr_at_1 value: 33.926 - type: mrr_at_3 value: 45.3414 - type: mrr_at_5 value: 48.1828 - type: mrr_at_10 value: 50.270700000000005 - type: 
mrr_at_20 value: 50.7844 - type: mrr_at_100 value: 50.9259 - type: mrr_at_1000 value: 50.9294 - type: nauc_ndcg_at_1_max value: -10.305 - type: nauc_ndcg_at_1_std value: -15.674199999999999 - type: nauc_ndcg_at_1_diff1 value: 18.6355 - type: nauc_ndcg_at_3_max value: -7.744 - type: nauc_ndcg_at_3_std value: -16.894000000000002 - type: nauc_ndcg_at_3_diff1 value: 15.4469 - type: nauc_ndcg_at_5_max value: -6.4887 - type: nauc_ndcg_at_5_std value: -16.1382 - type: nauc_ndcg_at_5_diff1 value: 13.8214 - type: nauc_ndcg_at_10_max value: -7.616499999999999 - type: nauc_ndcg_at_10_std value: -15.8073 - type: nauc_ndcg_at_10_diff1 value: 13.7678 - type: nauc_ndcg_at_20_max value: -6.9801 - type: nauc_ndcg_at_20_std value: -15.068699999999998 - type: nauc_ndcg_at_20_diff1 value: 14.2013 - type: nauc_ndcg_at_100_max value: -7.5221 - type: nauc_ndcg_at_100_std value: -15.417200000000001 - type: nauc_ndcg_at_100_diff1 value: 15.1072 - type: nauc_ndcg_at_1000_max value: -7.6931 - type: nauc_ndcg_at_1000_std value: -15.5367 - type: nauc_ndcg_at_1000_diff1 value: 15.001700000000001 - type: nauc_map_at_1_max value: -10.305 - type: nauc_map_at_1_std value: -15.674199999999999 - type: nauc_map_at_1_diff1 value: 18.6355 - type: nauc_map_at_3_max value: -8.4505 - type: nauc_map_at_3_std value: -16.5487 - type: nauc_map_at_3_diff1 value: 15.965599999999998 - type: nauc_map_at_5_max value: -7.8429 - type: nauc_map_at_5_std value: -16.1332 - type: nauc_map_at_5_diff1 value: 15.0893 - type: nauc_map_at_10_max value: -8.3186 - type: nauc_map_at_10_std value: -15.979399999999998 - type: nauc_map_at_10_diff1 value: 15.136199999999999 - type: nauc_map_at_20_max value: -8.1697 - type: nauc_map_at_20_std value: -15.8241 - type: nauc_map_at_20_diff1 value: 15.260599999999998 - type: nauc_map_at_100_max value: -8.2285 - type: nauc_map_at_100_std value: -15.8624 - type: nauc_map_at_100_diff1 value: 15.412600000000001 - type: nauc_map_at_1000_max value: -8.2359 - type: nauc_map_at_1000_std value: -15.867 - type: nauc_map_at_1000_diff1 value: 15.408 - type: nauc_recall_at_1_max value: -10.305 - type: nauc_recall_at_1_std value: -15.674199999999999 - type: nauc_recall_at_1_diff1 value: 18.6355 - type: nauc_recall_at_3_max value: -5.5097 - type: nauc_recall_at_3_std value: -17.9896 - type: nauc_recall_at_3_diff1 value: 13.9525 - type: nauc_recall_at_5_max value: -0.9383 - type: nauc_recall_at_5_std value: -16.035 - type: nauc_recall_at_5_diff1 value: 8.8431 - type: nauc_recall_at_10_max value: -2.8548 - type: nauc_recall_at_10_std value: -14.1203 - type: nauc_recall_at_10_diff1 value: 3.2265 - type: nauc_recall_at_20_max value: 14.2043 - type: nauc_recall_at_20_std value: 2.1298999999999997 - type: nauc_recall_at_20_diff1 value: -1.9900000000000002 - type: nauc_recall_at_100_max value: 44.0173 - type: nauc_recall_at_100_std value: 42.131800000000005 - type: nauc_recall_at_100_diff1 value: 29.9983 - type: nauc_recall_at_1000_max value: 25.9434 - type: nauc_recall_at_1000_std value: 53.9252 - type: nauc_recall_at_1000_diff1 value: -0.9778 - type: nauc_precision_at_1_max value: -10.305 - type: nauc_precision_at_1_std value: -15.674199999999999 - type: nauc_precision_at_1_diff1 value: 18.6355 - type: nauc_precision_at_3_max value: -5.5097 - type: nauc_precision_at_3_std value: -17.9896 - type: nauc_precision_at_3_diff1 value: 13.9525 - type: nauc_precision_at_5_max value: -0.9383 - type: nauc_precision_at_5_std value: -16.035 - type: nauc_precision_at_5_diff1 value: 8.8431 - type: nauc_precision_at_10_max value: -2.8548 - type: 
nauc_precision_at_10_std value: -14.1203 - type: nauc_precision_at_10_diff1 value: 3.2265 - type: nauc_precision_at_20_max value: 14.2043 - type: nauc_precision_at_20_std value: 2.1298999999999997 - type: nauc_precision_at_20_diff1 value: -1.9900000000000002 - type: nauc_precision_at_100_max value: 44.0173 - type: nauc_precision_at_100_std value: 42.131800000000005 - type: nauc_precision_at_100_diff1 value: 29.9983 - type: nauc_precision_at_1000_max value: 25.9434 - type: nauc_precision_at_1000_std value: 53.9252 - type: nauc_precision_at_1000_diff1 value: -0.9778 - type: nauc_mrr_at_1_max value: -9.833 - type: nauc_mrr_at_1_std value: -14.8351 - type: nauc_mrr_at_1_diff1 value: 16.7604 - type: nauc_mrr_at_3_max value: -9.0116 - type: nauc_mrr_at_3_std value: -16.296 - type: nauc_mrr_at_3_diff1 value: 14.178199999999999 - type: nauc_mrr_at_5_max value: -8.308300000000001 - type: nauc_mrr_at_5_std value: -15.751999999999999 - type: nauc_mrr_at_5_diff1 value: 13.306299999999998 - type: nauc_mrr_at_10_max value: -8.7962 - type: nauc_mrr_at_10_std value: -15.688099999999999 - type: nauc_mrr_at_10_diff1 value: 13.2589 - type: nauc_mrr_at_20_max value: -8.6773 - type: nauc_mrr_at_20_std value: -15.479499999999998 - type: nauc_mrr_at_20_diff1 value: 13.354 - type: nauc_mrr_at_100_max value: -8.7533 - type: nauc_mrr_at_100_std value: -15.553600000000001 - type: nauc_mrr_at_100_diff1 value: 13.4796 - type: nauc_mrr_at_1000_max value: -8.7608 - type: nauc_mrr_at_1000_std value: -15.5582 - type: nauc_mrr_at_1000_diff1 value: 13.4748 - type: main_score value: 59.146 - task: type: Clustering dataset: name: MTEB ArxivClusteringP2P (default) type: mteb/arxiv-clustering-p2p config: default split: test revision: a122ad7f3f0291bf49cc6f4d32aa80929df69d5d metrics: - type: v_measure value: 43.9715 - type: v_measure_std value: 13.4325 - type: main_score value: 43.9715 - task: type: Clustering dataset: name: MTEB ArxivClusteringS2S (default) type: mteb/arxiv-clustering-s2s config: default split: test revision: f910caf1a6075f7329cdf8c1a6135696f37dbd53 metrics: - type: v_measure value: 34.775800000000004 - type: v_measure_std value: 13.922799999999999 - type: main_score value: 34.775800000000004 - task: type: Reranking dataset: name: MTEB AskUbuntuDupQuestions (default) type: mteb/askubuntudupquestions-reranking config: default split: test revision: 2000358ca161889fa9c082cb41daa8dcfb161a54 metrics: - type: map value: 63.3521 - type: mrr value: 77.5965 - type: nAUC_map_max value: 21.2353 - type: nAUC_map_std value: 17.002100000000002 - type: nAUC_map_diff1 value: 3.8135000000000003 - type: nAUC_mrr_max value: 35.058299999999996 - type: nAUC_mrr_std value: 20.432 - type: nAUC_mrr_diff1 value: 9.2584 - type: main_score value: 63.3521 - task: type: STS dataset: name: MTEB BIOSSES (default) type: mteb/biosses-sts config: default split: test revision: d3fb88f8f02e40887cd149695127462bbcf29b4a metrics: - type: pearson value: 89.8072 - type: spearman value: 87.2875 - type: cosine_pearson value: 89.8072 - type: cosine_spearman value: 87.2875 - type: manhattan_pearson value: 87.9173 - type: manhattan_spearman value: 86.7327 - type: euclidean_pearson value: 88.21600000000001 - type: euclidean_spearman value: 87.2875 - type: main_score value: 87.2875 - task: type: Classification dataset: name: MTEB Banking77Classification (default) type: mteb/banking77 config: default split: test revision: 0fd18e25b25c072e09e0d92ab615fda904d66300 metrics: - type: accuracy value: 81.8149 - type: f1 value: 81.2226 - type: f1_weighted value: 
81.2226 - type: main_score value: 81.8149 - task: type: Clustering dataset: name: MTEB BiorxivClusteringP2P (default) type: mteb/biorxiv-clustering-p2p config: default split: test revision: 65b79d1d13f80053f67aca9498d9402c2d9f1f40 metrics: - type: v_measure value: 35.0927 - type: v_measure_std value: 0.7048 - type: main_score value: 35.0927 - task: type: Clustering dataset: name: MTEB BiorxivClusteringS2S (default) type: mteb/biorxiv-clustering-s2s config: default split: test revision: 258694dd0231531bc1fd9de6ceb52a0853c6d908 metrics: - type: v_measure value: 30.220999999999997 - type: v_measure_std value: 1.107 - type: main_score value: 30.220999999999997 - task: type: Retrieval dataset: name: MTEB CQADupstackAndroidRetrieval (default) type: mteb/cqadupstack-android config: default split: test revision: f46a197baaae43b4f621051089b82a364682dfeb metrics: - type: ndcg_at_1 value: 44.349 - type: ndcg_at_3 value: 50.109 - type: ndcg_at_5 value: 52.88699999999999 - type: ndcg_at_10 value: 55.799 - type: ndcg_at_20 value: 57.589999999999996 - type: ndcg_at_100 value: 60.539 - type: ndcg_at_1000 value: 61.897000000000006 - type: map_at_1 value: 36.230000000000004 - type: map_at_3 value: 44.929 - type: map_at_5 value: 47.191 - type: map_at_10 value: 48.88 - type: map_at_20 value: 49.685 - type: map_at_100 value: 50.327 - type: map_at_1000 value: 50.431000000000004 - type: recall_at_1 value: 36.230000000000004 - type: recall_at_3 value: 53.173 - type: recall_at_5 value: 60.35 - type: recall_at_10 value: 69.07 - type: recall_at_20 value: 75.371 - type: recall_at_100 value: 88.736 - type: recall_at_1000 value: 96.75399999999999 - type: precision_at_1 value: 44.349 - type: precision_at_3 value: 23.748 - type: precision_at_5 value: 17.368 - type: precision_at_10 value: 10.629 - type: precision_at_20 value: 6.152 - type: precision_at_100 value: 1.6150000000000002 - type: precision_at_1000 value: 0.201 - type: mrr_at_1 value: 44.3491 - type: mrr_at_3 value: 52.0744 - type: mrr_at_5 value: 53.9628 - type: mrr_at_10 value: 54.9072 - type: mrr_at_20 value: 55.19539999999999 - type: mrr_at_100 value: 55.4537 - type: mrr_at_1000 value: 55.4787 - type: nauc_ndcg_at_1_max value: 36.404599999999995 - type: nauc_ndcg_at_1_std value: -4.5556 - type: nauc_ndcg_at_1_diff1 value: 57.4025 - type: nauc_ndcg_at_3_max value: 38.0347 - type: nauc_ndcg_at_3_std value: -2.2339 - type: nauc_ndcg_at_3_diff1 value: 50.9146 - type: nauc_ndcg_at_5_max value: 38.2927 - type: nauc_ndcg_at_5_std value: -2.3645 - type: nauc_ndcg_at_5_diff1 value: 51.638 - type: nauc_ndcg_at_10_max value: 38.4619 - type: nauc_ndcg_at_10_std value: -2.8955 - type: nauc_ndcg_at_10_diff1 value: 51.35849999999999 - type: nauc_ndcg_at_20_max value: 38.2122 - type: nauc_ndcg_at_20_std value: -1.9339 - type: nauc_ndcg_at_20_diff1 value: 50.4981 - type: nauc_ndcg_at_100_max value: 39.380900000000004 - type: nauc_ndcg_at_100_std value: -0.21889999999999998 - type: nauc_ndcg_at_100_diff1 value: 51.5696 - type: nauc_ndcg_at_1000_max value: 38.9069 - type: nauc_ndcg_at_1000_std value: -0.8251 - type: nauc_ndcg_at_1000_diff1 value: 51.605500000000006 - type: nauc_map_at_1_max value: 31.694 - type: nauc_map_at_1_std value: -4.2857 - type: nauc_map_at_1_diff1 value: 57.991400000000006 - type: nauc_map_at_3_max value: 36.115399999999994 - type: nauc_map_at_3_std value: -3.9859999999999998 - type: nauc_map_at_3_diff1 value: 52.394 - type: nauc_map_at_5_max value: 36.896499999999996 - type: nauc_map_at_5_std value: -3.6282 - type: nauc_map_at_5_diff1 value: 52.7023 - 
type: nauc_map_at_10_max value: 37.2695 - type: nauc_map_at_10_std value: -3.7142 - type: nauc_map_at_10_diff1 value: 52.6081 - type: nauc_map_at_20_max value: 37.4097 - type: nauc_map_at_20_std value: -3.0479 - type: nauc_map_at_20_diff1 value: 52.2999 - type: nauc_map_at_100_max value: 37.6608 - type: nauc_map_at_100_std value: -2.7363999999999997 - type: nauc_map_at_100_diff1 value: 52.5068 - type: nauc_map_at_1000_max value: 37.6406 - type: nauc_map_at_1000_std value: -2.7695000000000003 - type: nauc_map_at_1000_diff1 value: 52.5091 - type: nauc_recall_at_1_max value: 31.694 - type: nauc_recall_at_1_std value: -4.2857 - type: nauc_recall_at_1_diff1 value: 57.991400000000006 - type: nauc_recall_at_3_max value: 35.9705 - type: nauc_recall_at_3_std value: -2.78 - type: nauc_recall_at_3_diff1 value: 44.2342 - type: nauc_recall_at_5_max value: 36.3608 - type: nauc_recall_at_5_std value: -1.8541999999999998 - type: nauc_recall_at_5_diff1 value: 45.0955 - type: nauc_recall_at_10_max value: 35.7364 - type: nauc_recall_at_10_std value: -3.2479 - type: nauc_recall_at_10_diff1 value: 42.3031 - type: nauc_recall_at_20_max value: 34.7814 - type: nauc_recall_at_20_std value: 0.7642 - type: nauc_recall_at_20_diff1 value: 37.3357 - type: nauc_recall_at_100_max value: 49.1721 - type: nauc_recall_at_100_std value: 27.8334 - type: nauc_recall_at_100_diff1 value: 39.549 - type: nauc_recall_at_1000_max value: 59.516400000000004 - type: nauc_recall_at_1000_std value: 66.1089 - type: nauc_recall_at_1000_diff1 value: 31.4818 - type: nauc_precision_at_1_max value: 36.404599999999995 - type: nauc_precision_at_1_std value: -4.5556 - type: nauc_precision_at_1_diff1 value: 57.4025 - type: nauc_precision_at_3_max value: 35.7954 - type: nauc_precision_at_3_std value: 0.6122 - type: nauc_precision_at_3_diff1 value: 29.4346 - type: nauc_precision_at_5_max value: 31.322699999999998 - type: nauc_precision_at_5_std value: 2.2124 - type: nauc_precision_at_5_diff1 value: 21.1992 - type: nauc_precision_at_10_max value: 22.6897 - type: nauc_precision_at_10_std value: 3.6117999999999997 - type: nauc_precision_at_10_diff1 value: 9.0833 - type: nauc_precision_at_20_max value: 14.954799999999999 - type: nauc_precision_at_20_std value: 7.2373 - type: nauc_precision_at_20_diff1 value: -0.544 - type: nauc_precision_at_100_max value: 4.2428 - type: nauc_precision_at_100_std value: 7.3461 - type: nauc_precision_at_100_diff1 value: -11.3684 - type: nauc_precision_at_1000_max value: -9.148399999999999 - type: nauc_precision_at_1000_std value: -3.5724 - type: nauc_precision_at_1000_diff1 value: -19.142400000000002 - type: nauc_mrr_at_1_max value: 36.404599999999995 - type: nauc_mrr_at_1_std value: -4.5556 - type: nauc_mrr_at_1_diff1 value: 57.4025 - type: nauc_mrr_at_3_max value: 38.7222 - type: nauc_mrr_at_3_std value: -2.3924000000000003 - type: nauc_mrr_at_3_diff1 value: 52.7995 - type: nauc_mrr_at_5_max value: 38.7579 - type: nauc_mrr_at_5_std value: -2.6441 - type: nauc_mrr_at_5_diff1 value: 53.547599999999996 - type: nauc_mrr_at_10_max value: 38.7832 - type: nauc_mrr_at_10_std value: -2.5202999999999998 - type: nauc_mrr_at_10_diff1 value: 53.4856 - type: nauc_mrr_at_20_max value: 38.6588 - type: nauc_mrr_at_20_std value: -2.501 - type: nauc_mrr_at_20_diff1 value: 53.3571 - type: nauc_mrr_at_100_max value: 38.6456 - type: nauc_mrr_at_100_std value: -2.4756 - type: nauc_mrr_at_100_diff1 value: 53.455600000000004 - type: nauc_mrr_at_1000_max value: 38.6449 - type: nauc_mrr_at_1000_std value: -2.4623 - type: nauc_mrr_at_1000_diff1 
value: 53.45419999999999 - type: main_score value: 55.799 - task: type: Retrieval dataset: name: MTEB CQADupstackEnglishRetrieval (default) type: mteb/cqadupstack-english config: default split: test revision: ad9991cb51e31e31e430383c75ffb2885547b5f0 metrics: - type: ndcg_at_1 value: 44.204 - type: ndcg_at_3 value: 49.549 - type: ndcg_at_5 value: 51.658 - type: ndcg_at_10 value: 53.681 - type: ndcg_at_20 value: 55.129 - type: ndcg_at_100 value: 57.691 - type: ndcg_at_1000 value: 59.325 - type: map_at_1 value: 35.193000000000005 - type: map_at_3 value: 44.005 - type: map_at_5 value: 46.043 - type: map_at_10 value: 47.491 - type: map_at_20 value: 48.169000000000004 - type: map_at_100 value: 48.789 - type: map_at_1000 value: 48.898 - type: recall_at_1 value: 35.193000000000005 - type: recall_at_3 value: 51.333 - type: recall_at_5 value: 57.436 - type: recall_at_10 value: 63.991 - type: recall_at_20 value: 69.37100000000001 - type: recall_at_100 value: 81.099 - type: recall_at_1000 value: 91.363 - type: precision_at_1 value: 44.204 - type: precision_at_3 value: 24.374000000000002 - type: precision_at_5 value: 17.287 - type: precision_at_10 value: 10.293 - type: precision_at_20 value: 5.943 - type: precision_at_100 value: 1.5730000000000002 - type: precision_at_1000 value: 0.197 - type: mrr_at_1 value: 44.2038 - type: mrr_at_3 value: 51.624199999999995 - type: mrr_at_5 value: 52.9459 - type: mrr_at_10 value: 53.697399999999995 - type: mrr_at_20 value: 54.028200000000005 - type: mrr_at_100 value: 54.267900000000004 - type: mrr_at_1000 value: 54.3028 - type: nauc_ndcg_at_1_max value: 45.3525 - type: nauc_ndcg_at_1_std value: -2.2124 - type: nauc_ndcg_at_1_diff1 value: 59.392100000000006 - type: nauc_ndcg_at_3_max value: 46.6258 - type: nauc_ndcg_at_3_std value: -2.8042000000000002 - type: nauc_ndcg_at_3_diff1 value: 55.0995 - type: nauc_ndcg_at_5_max value: 47.3391 - type: nauc_ndcg_at_5_std value: -1.8336999999999999 - type: nauc_ndcg_at_5_diff1 value: 54.848 - type: nauc_ndcg_at_10_max value: 47.713899999999995 - type: nauc_ndcg_at_10_std value: -0.6185 - type: nauc_ndcg_at_10_diff1 value: 54.6241 - type: nauc_ndcg_at_20_max value: 48.072900000000004 - type: nauc_ndcg_at_20_std value: -0.21589999999999998 - type: nauc_ndcg_at_20_diff1 value: 54.655100000000004 - type: nauc_ndcg_at_100_max value: 48.4791 - type: nauc_ndcg_at_100_std value: 1.9865000000000002 - type: nauc_ndcg_at_100_diff1 value: 54.033 - type: nauc_ndcg_at_1000_max value: 48.3686 - type: nauc_ndcg_at_1000_std value: 1.8716 - type: nauc_ndcg_at_1000_diff1 value: 54.125 - type: nauc_map_at_1_max value: 34.797200000000004 - type: nauc_map_at_1_std value: -13.140199999999998 - type: nauc_map_at_1_diff1 value: 61.197100000000006 - type: nauc_map_at_3_max value: 41.4347 - type: nauc_map_at_3_std value: -10.0816 - type: nauc_map_at_3_diff1 value: 57.8979 - type: nauc_map_at_5_max value: 43.1536 - type: nauc_map_at_5_std value: -7.8041 - type: nauc_map_at_5_diff1 value: 57.1125 - type: nauc_map_at_10_max value: 44.243700000000004 - type: nauc_map_at_10_std value: -6.047000000000001 - type: nauc_map_at_10_diff1 value: 56.688700000000004 - type: nauc_map_at_20_max value: 44.7799 - type: nauc_map_at_20_std value: -5.2916 - type: nauc_map_at_20_diff1 value: 56.565799999999996 - type: nauc_map_at_100_max value: 45.3233 - type: nauc_map_at_100_std value: -4.287 - type: nauc_map_at_100_diff1 value: 56.41460000000001 - type: nauc_map_at_1000_max value: 45.3992 - type: nauc_map_at_1000_std value: -4.1593 - type: nauc_map_at_1000_diff1 value: 
56.413599999999995 - type: nauc_recall_at_1_max value: 34.797200000000004 - type: nauc_recall_at_1_std value: -13.140199999999998 - type: nauc_recall_at_1_diff1 value: 61.197100000000006 - type: nauc_recall_at_3_max value: 42.7264 - type: nauc_recall_at_3_std value: -8.201799999999999 - type: nauc_recall_at_3_diff1 value: 52.3494 - type: nauc_recall_at_5_max value: 44.6494 - type: nauc_recall_at_5_std value: -3.3112999999999997 - type: nauc_recall_at_5_diff1 value: 50.1019 - type: nauc_recall_at_10_max value: 46.6669 - type: nauc_recall_at_10_std value: 2.3359 - type: nauc_recall_at_10_diff1 value: 48.1454 - type: nauc_recall_at_20_max value: 48.7828 - type: nauc_recall_at_20_std value: 6.0266 - type: nauc_recall_at_20_diff1 value: 46.786699999999996 - type: nauc_recall_at_100_max value: 53.081999999999994 - type: nauc_recall_at_100_std value: 24.1569 - type: nauc_recall_at_100_diff1 value: 40.4049 - type: nauc_recall_at_1000_max value: 55.803000000000004 - type: nauc_recall_at_1000_std value: 36.3769 - type: nauc_recall_at_1000_diff1 value: 34.336 - type: nauc_precision_at_1_max value: 45.3525 - type: nauc_precision_at_1_std value: -2.2124 - type: nauc_precision_at_1_diff1 value: 59.392100000000006 - type: nauc_precision_at_3_max value: 44.2838 - type: nauc_precision_at_3_std value: 14.3908 - type: nauc_precision_at_3_diff1 value: 27.219700000000003 - type: nauc_precision_at_5_max value: 42.9914 - type: nauc_precision_at_5_std value: 23.0682 - type: nauc_precision_at_5_diff1 value: 16.2263 - type: nauc_precision_at_10_max value: 38.5042 - type: nauc_precision_at_10_std value: 30.792199999999998 - type: nauc_precision_at_10_diff1 value: 5.7691 - type: nauc_precision_at_20_max value: 34.417500000000004 - type: nauc_precision_at_20_std value: 34.1749 - type: nauc_precision_at_20_diff1 value: -0.9022 - type: nauc_precision_at_100_max value: 27.4072 - type: nauc_precision_at_100_std value: 42.4351 - type: nauc_precision_at_100_diff1 value: -11.407 - type: nauc_precision_at_1000_max value: 16.142400000000002 - type: nauc_precision_at_1000_std value: 36.4482 - type: nauc_precision_at_1000_diff1 value: -16.8073 - type: nauc_mrr_at_1_max value: 45.3525 - type: nauc_mrr_at_1_std value: -2.2124 - type: nauc_mrr_at_1_diff1 value: 59.392100000000006 - type: nauc_mrr_at_3_max value: 48.7407 - type: nauc_mrr_at_3_std value: 0.2074 - type: nauc_mrr_at_3_diff1 value: 55.8153 - type: nauc_mrr_at_5_max value: 48.9081 - type: nauc_mrr_at_5_std value: 0.9781 - type: nauc_mrr_at_5_diff1 value: 55.6807 - type: nauc_mrr_at_10_max value: 48.7888 - type: nauc_mrr_at_10_std value: 1.384 - type: nauc_mrr_at_10_diff1 value: 55.5207 - type: nauc_mrr_at_20_max value: 48.7371 - type: nauc_mrr_at_20_std value: 1.3671 - type: nauc_mrr_at_20_diff1 value: 55.508199999999995 - type: nauc_mrr_at_100_max value: 48.7472 - type: nauc_mrr_at_100_std value: 1.5221 - type: nauc_mrr_at_100_diff1 value: 55.5036 - type: nauc_mrr_at_1000_max value: 48.7402 - type: nauc_mrr_at_1000_std value: 1.5072 - type: nauc_mrr_at_1000_diff1 value: 55.507 - type: main_score value: 53.681 - task: type: Retrieval dataset: name: MTEB CQADupstackGamingRetrieval (default) type: mteb/cqadupstack-gaming config: default split: test revision: 4885aa143210c98657558c04aaf3dc47cfb54340 metrics: - type: ndcg_at_1 value: 50.345 - type: ndcg_at_3 value: 57.776 - type: ndcg_at_5 value: 60.477000000000004 - type: ndcg_at_10 value: 63.172 - type: ndcg_at_20 value: 64.62 - type: ndcg_at_100 value: 66.538 - type: ndcg_at_1000 value: 67.43 - type: map_at_1 value: 
44.153 - type: map_at_3 value: 53.979 - type: map_at_5 value: 55.925000000000004 - type: map_at_10 value: 57.32899999999999 - type: map_at_20 value: 57.879000000000005 - type: map_at_100 value: 58.239 - type: map_at_1000 value: 58.285 - type: recall_at_1 value: 44.153 - type: recall_at_3 value: 62.766999999999996 - type: recall_at_5 value: 69.405 - type: recall_at_10 value: 77.107 - type: recall_at_20 value: 82.337 - type: recall_at_100 value: 91.307 - type: recall_at_1000 value: 97.586 - type: precision_at_1 value: 50.345 - type: precision_at_3 value: 25.601000000000003 - type: precision_at_5 value: 17.416999999999998 - type: precision_at_10 value: 9.994 - type: precision_at_20 value: 5.492 - type: precision_at_100 value: 1.261 - type: precision_at_1000 value: 0.13799999999999998 - type: mrr_at_1 value: 50.3448 - type: mrr_at_3 value: 58.160900000000005 - type: mrr_at_5 value: 59.549600000000005 - type: mrr_at_10 value: 60.545899999999996 - type: mrr_at_20 value: 60.8453 - type: mrr_at_100 value: 61.06120000000001 - type: mrr_at_1000 value: 61.083299999999994 - type: nauc_ndcg_at_1_max value: 39.467400000000005 - type: nauc_ndcg_at_1_std value: -6.512 - type: nauc_ndcg_at_1_diff1 value: 57.337700000000005 - type: nauc_ndcg_at_3_max value: 42.8884 - type: nauc_ndcg_at_3_std value: -6.0156 - type: nauc_ndcg_at_3_diff1 value: 54.432 - type: nauc_ndcg_at_5_max value: 44.831500000000005 - type: nauc_ndcg_at_5_std value: -4.3286999999999995 - type: nauc_ndcg_at_5_diff1 value: 54.6971 - type: nauc_ndcg_at_10_max value: 44.391799999999996 - type: nauc_ndcg_at_10_std value: -3.6792 - type: nauc_ndcg_at_10_diff1 value: 53.749199999999995 - type: nauc_ndcg_at_20_max value: 44.9459 - type: nauc_ndcg_at_20_std value: -2.1965 - type: nauc_ndcg_at_20_diff1 value: 53.7261 - type: nauc_ndcg_at_100_max value: 45.0603 - type: nauc_ndcg_at_100_std value: -1.1026 - type: nauc_ndcg_at_100_diff1 value: 54.059900000000006 - type: nauc_ndcg_at_1000_max value: 44.9294 - type: nauc_ndcg_at_1000_std value: -1.7629 - type: nauc_ndcg_at_1000_diff1 value: 54.57189999999999 - type: nauc_map_at_1_max value: 34.3031 - type: nauc_map_at_1_std value: -8.9637 - type: nauc_map_at_1_diff1 value: 57.99100000000001 - type: nauc_map_at_3_max value: 40.732 - type: nauc_map_at_3_std value: -8.312999999999999 - type: nauc_map_at_3_diff1 value: 55.9106 - type: nauc_map_at_5_max value: 42.1709 - type: nauc_map_at_5_std value: -6.9354 - type: nauc_map_at_5_diff1 value: 56.042899999999996 - type: nauc_map_at_10_max value: 42.1589 - type: nauc_map_at_10_std value: -6.3601 - type: nauc_map_at_10_diff1 value: 55.490700000000004 - type: nauc_map_at_20_max value: 42.595 - type: nauc_map_at_20_std value: -5.5588 - type: nauc_map_at_20_diff1 value: 55.4651 - type: nauc_map_at_100_max value: 42.6911 - type: nauc_map_at_100_std value: -5.2459999999999996 - type: nauc_map_at_100_diff1 value: 55.45060000000001 - type: nauc_map_at_1000_max value: 42.7134 - type: nauc_map_at_1000_std value: -5.2317 - type: nauc_map_at_1000_diff1 value: 55.4871 - type: nauc_recall_at_1_max value: 34.3031 - type: nauc_recall_at_1_std value: -8.9637 - type: nauc_recall_at_1_diff1 value: 57.99100000000001 - type: nauc_recall_at_3_max value: 43.623400000000004 - type: nauc_recall_at_3_std value: -6.2843 - type: nauc_recall_at_3_diff1 value: 50.775800000000004 - type: nauc_recall_at_5_max value: 48.7222 - type: nauc_recall_at_5_std value: -0.9506000000000001 - type: nauc_recall_at_5_diff1 value: 50.41480000000001 - type: nauc_recall_at_10_max value: 47.6178 - type: 
nauc_recall_at_10_std value: 2.2783 - type: nauc_recall_at_10_diff1 value: 45.1663 - type: nauc_recall_at_20_max value: 51.454 - type: nauc_recall_at_20_std value: 11.8339 - type: nauc_recall_at_20_diff1 value: 42.8694 - type: nauc_recall_at_100_max value: 58.145500000000006 - type: nauc_recall_at_100_std value: 35.4717 - type: nauc_recall_at_100_diff1 value: 40.8401 - type: nauc_recall_at_1000_max value: 79.9122 - type: nauc_recall_at_1000_std value: 64.5076 - type: nauc_recall_at_1000_diff1 value: 48.7357 - type: nauc_precision_at_1_max value: 39.467400000000005 - type: nauc_precision_at_1_std value: -6.512 - type: nauc_precision_at_1_diff1 value: 57.337700000000005 - type: nauc_precision_at_3_max value: 39.763799999999996 - type: nauc_precision_at_3_std value: 2.8881 - type: nauc_precision_at_3_diff1 value: 30.5735 - type: nauc_precision_at_5_max value: 38.062200000000004 - type: nauc_precision_at_5_std value: 10.2952 - type: nauc_precision_at_5_diff1 value: 21.2531 - type: nauc_precision_at_10_max value: 31.330099999999998 - type: nauc_precision_at_10_std value: 16.6561 - type: nauc_precision_at_10_diff1 value: 8.4745 - type: nauc_precision_at_20_max value: 28.5499 - type: nauc_precision_at_20_std value: 25.593300000000003 - type: nauc_precision_at_20_diff1 value: 0.8708 - type: nauc_precision_at_100_max value: 20.275299999999998 - type: nauc_precision_at_100_std value: 31.6878 - type: nauc_precision_at_100_diff1 value: -8.8113 - type: nauc_precision_at_1000_max value: 15.4133 - type: nauc_precision_at_1000_std value: 29.5211 - type: nauc_precision_at_1000_diff1 value: -11.061300000000001 - type: nauc_mrr_at_1_max value: 39.467400000000005 - type: nauc_mrr_at_1_std value: -6.512 - type: nauc_mrr_at_1_diff1 value: 57.337700000000005 - type: nauc_mrr_at_3_max value: 42.9279 - type: nauc_mrr_at_3_std value: -5.251200000000001 - type: nauc_mrr_at_3_diff1 value: 54.8802 - type: nauc_mrr_at_5_max value: 43.5261 - type: nauc_mrr_at_5_std value: -4.4842 - type: nauc_mrr_at_5_diff1 value: 54.874500000000005 - type: nauc_mrr_at_10_max value: 43.2392 - type: nauc_mrr_at_10_std value: -4.2739 - type: nauc_mrr_at_10_diff1 value: 54.5466 - type: nauc_mrr_at_20_max value: 43.2263 - type: nauc_mrr_at_20_std value: -4.122 - type: nauc_mrr_at_20_diff1 value: 54.5397 - type: nauc_mrr_at_100_max value: 43.2131 - type: nauc_mrr_at_100_std value: -4.041 - type: nauc_mrr_at_100_diff1 value: 54.586800000000004 - type: nauc_mrr_at_1000_max value: 43.2078 - type: nauc_mrr_at_1000_std value: -4.0622 - type: nauc_mrr_at_1000_diff1 value: 54.606100000000005 - type: main_score value: 63.172 - task: type: Retrieval dataset: name: MTEB CQADupstackGisRetrieval (default) type: mteb/cqadupstack-gis config: default split: test revision: 5003b3064772da1887988e05400cf3806fe491f2 metrics: - type: ndcg_at_1 value: 32.429 - type: ndcg_at_3 value: 39.639 - type: ndcg_at_5 value: 42.051 - type: ndcg_at_10 value: 44.759 - type: ndcg_at_20 value: 46.588 - type: ndcg_at_100 value: 49.457 - type: ndcg_at_1000 value: 51.248000000000005 - type: map_at_1 value: 30.259999999999998 - type: map_at_3 value: 36.998 - type: map_at_5 value: 38.452 - type: map_at_10 value: 39.653 - type: map_at_20 value: 40.199 - type: map_at_100 value: 40.63 - type: map_at_1000 value: 40.701 - type: recall_at_1 value: 30.259999999999998 - type: recall_at_3 value: 44.531 - type: recall_at_5 value: 50.349999999999994 - type: recall_at_10 value: 58.294999999999995 - type: recall_at_20 value: 65.19200000000001 - type: recall_at_100 value: 79.699 - type: 
recall_at_1000 value: 93.181 - type: precision_at_1 value: 32.429 - type: precision_at_3 value: 16.61 - type: precision_at_5 value: 11.39 - type: precision_at_10 value: 6.746 - type: precision_at_20 value: 3.8019999999999996 - type: precision_at_100 value: 0.963 - type: precision_at_1000 value: 0.11399999999999999 - type: mrr_at_1 value: 32.4294 - type: mrr_at_3 value: 39.265499999999996 - type: mrr_at_5 value: 40.6158 - type: mrr_at_10 value: 41.7454 - type: mrr_at_20 value: 42.187999999999995 - type: mrr_at_100 value: 42.530699999999996 - type: mrr_at_1000 value: 42.584300000000006 - type: nauc_ndcg_at_1_max value: 30.2344 - type: nauc_ndcg_at_1_std value: -8.76 - type: nauc_ndcg_at_1_diff1 value: 43.3339 - type: nauc_ndcg_at_3_max value: 31.300299999999996 - type: nauc_ndcg_at_3_std value: -5.2691 - type: nauc_ndcg_at_3_diff1 value: 39.6872 - type: nauc_ndcg_at_5_max value: 31.844099999999997 - type: nauc_ndcg_at_5_std value: -4.228400000000001 - type: nauc_ndcg_at_5_diff1 value: 38.2047 - type: nauc_ndcg_at_10_max value: 31.664900000000003 - type: nauc_ndcg_at_10_std value: -3.2960000000000003 - type: nauc_ndcg_at_10_diff1 value: 36.6259 - type: nauc_ndcg_at_20_max value: 31.630999999999997 - type: nauc_ndcg_at_20_std value: -2.6685 - type: nauc_ndcg_at_20_diff1 value: 36.577 - type: nauc_ndcg_at_100_max value: 32.283899999999996 - type: nauc_ndcg_at_100_std value: -2.1553 - type: nauc_ndcg_at_100_diff1 value: 36.3958 - type: nauc_ndcg_at_1000_max value: 32.4852 - type: nauc_ndcg_at_1000_std value: -2.3408 - type: nauc_ndcg_at_1000_diff1 value: 37.0227 - type: nauc_map_at_1_max value: 27.620800000000003 - type: nauc_map_at_1_std value: -10.7657 - type: nauc_map_at_1_diff1 value: 43.7864 - type: nauc_map_at_3_max value: 30.0483 - type: nauc_map_at_3_std value: -6.9221 - type: nauc_map_at_3_diff1 value: 40.826 - type: nauc_map_at_5_max value: 30.560399999999998 - type: nauc_map_at_5_std value: -6.1894 - type: nauc_map_at_5_diff1 value: 40.0042 - type: nauc_map_at_10_max value: 30.665100000000002 - type: nauc_map_at_10_std value: -5.8472 - type: nauc_map_at_10_diff1 value: 39.3857 - type: nauc_map_at_20_max value: 30.761699999999998 - type: nauc_map_at_20_std value: -5.591 - type: nauc_map_at_20_diff1 value: 39.4111 - type: nauc_map_at_100_max value: 30.859399999999997 - type: nauc_map_at_100_std value: -5.532 - type: nauc_map_at_100_diff1 value: 39.3888 - type: nauc_map_at_1000_max value: 30.871199999999998 - type: nauc_map_at_1000_std value: -5.5322000000000005 - type: nauc_map_at_1000_diff1 value: 39.4166 - type: nauc_recall_at_1_max value: 27.620800000000003 - type: nauc_recall_at_1_std value: -10.7657 - type: nauc_recall_at_1_diff1 value: 43.7864 - type: nauc_recall_at_3_max value: 31.187199999999997 - type: nauc_recall_at_3_std value: -2.5515 - type: nauc_recall_at_3_diff1 value: 36.9576 - type: nauc_recall_at_5_max value: 32.6827 - type: nauc_recall_at_5_std value: -0.4259 - type: nauc_recall_at_5_diff1 value: 33.1674 - type: nauc_recall_at_10_max value: 31.729400000000002 - type: nauc_recall_at_10_std value: 2.8294 - type: nauc_recall_at_10_diff1 value: 27.7289 - type: nauc_recall_at_20_max value: 30.9251 - type: nauc_recall_at_20_std value: 5.9573 - type: nauc_recall_at_20_diff1 value: 26.271499999999996 - type: nauc_recall_at_100_max value: 35.8557 - type: nauc_recall_at_100_std value: 14.478399999999999 - type: nauc_recall_at_100_diff1 value: 20.6213 - type: nauc_recall_at_1000_max value: 49.7086 - type: nauc_recall_at_1000_std value: 36.9282 - type: nauc_recall_at_1000_diff1 
value: 14.288300000000001 - type: nauc_precision_at_1_max value: 30.2344 - type: nauc_precision_at_1_std value: -8.76 - type: nauc_precision_at_1_diff1 value: 43.3339 - type: nauc_precision_at_3_max value: 34.808699999999995 - type: nauc_precision_at_3_std value: 0.7861999999999999 - type: nauc_precision_at_3_diff1 value: 33.232299999999995 - type: nauc_precision_at_5_max value: 35.9325 - type: nauc_precision_at_5_std value: 4.1644 - type: nauc_precision_at_5_diff1 value: 28.872799999999998 - type: nauc_precision_at_10_max value: 34.2471 - type: nauc_precision_at_10_std value: 7.2728 - type: nauc_precision_at_10_diff1 value: 21.044999999999998 - type: nauc_precision_at_20_max value: 31.828200000000002 - type: nauc_precision_at_20_std value: 10.2775 - type: nauc_precision_at_20_diff1 value: 16.7988 - type: nauc_precision_at_100_max value: 26.320100000000004 - type: nauc_precision_at_100_std value: 14.0416 - type: nauc_precision_at_100_diff1 value: 3.4286999999999996 - type: nauc_precision_at_1000_max value: 17.6282 - type: nauc_precision_at_1000_std value: 13.1888 - type: nauc_precision_at_1000_diff1 value: -6.7075 - type: nauc_mrr_at_1_max value: 30.2344 - type: nauc_mrr_at_1_std value: -8.76 - type: nauc_mrr_at_1_diff1 value: 43.3339 - type: nauc_mrr_at_3_max value: 32.2423 - type: nauc_mrr_at_3_std value: -4.6264 - type: nauc_mrr_at_3_diff1 value: 39.6214 - type: nauc_mrr_at_5_max value: 32.496199999999995 - type: nauc_mrr_at_5_std value: -4.3406 - type: nauc_mrr_at_5_diff1 value: 38.921 - type: nauc_mrr_at_10_max value: 32.330799999999996 - type: nauc_mrr_at_10_std value: -3.943 - type: nauc_mrr_at_10_diff1 value: 38.2251 - type: nauc_mrr_at_20_max value: 32.1807 - type: nauc_mrr_at_20_std value: -3.9316999999999998 - type: nauc_mrr_at_20_diff1 value: 38.2161 - type: nauc_mrr_at_100_max value: 32.2413 - type: nauc_mrr_at_100_std value: -3.8869000000000002 - type: nauc_mrr_at_100_diff1 value: 38.217800000000004 - type: nauc_mrr_at_1000_max value: 32.2481 - type: nauc_mrr_at_1000_std value: -3.8933000000000004 - type: nauc_mrr_at_1000_diff1 value: 38.2515 - type: main_score value: 44.759 - task: type: Retrieval dataset: name: MTEB CQADupstackMathematicaRetrieval (default) type: mteb/cqadupstack-mathematica config: default split: test revision: 90fceea13679c63fe563ded68f3b6f06e50061de metrics: - type: ndcg_at_1 value: 22.761 - type: ndcg_at_3 value: 27.578999999999997 - type: ndcg_at_5 value: 30.067 - type: ndcg_at_10 value: 32.823 - type: ndcg_at_20 value: 35.129 - type: ndcg_at_100 value: 38.903999999999996 - type: ndcg_at_1000 value: 41.181 - type: map_at_1 value: 18.360000000000003 - type: map_at_3 value: 24.264 - type: map_at_5 value: 25.844 - type: map_at_10 value: 27.093 - type: map_at_20 value: 27.839999999999996 - type: map_at_100 value: 28.416999999999998 - type: map_at_1000 value: 28.517 - type: recall_at_1 value: 18.360000000000003 - type: recall_at_3 value: 31.044 - type: recall_at_5 value: 37.432 - type: recall_at_10 value: 45.525999999999996 - type: recall_at_20 value: 53.557 - type: recall_at_100 value: 72.14500000000001 - type: recall_at_1000 value: 88.041 - type: precision_at_1 value: 22.761 - type: precision_at_3 value: 13.350000000000001 - type: precision_at_5 value: 9.801 - type: precision_at_10 value: 6.157 - type: precision_at_20 value: 3.744 - type: precision_at_100 value: 1.055 - type: precision_at_1000 value: 0.13799999999999998 - type: mrr_at_1 value: 22.761200000000002 - type: mrr_at_3 value: 29.187400000000004 - type: mrr_at_5 value: 30.866500000000002 - type: 
mrr_at_10 value: 32.0236 - type: mrr_at_20 value: 32.5924 - type: mrr_at_100 value: 32.995000000000005 - type: mrr_at_1000 value: 33.042100000000005 - type: nauc_ndcg_at_1_max value: 22.3876 - type: nauc_ndcg_at_1_std value: -0.26649999999999996 - type: nauc_ndcg_at_1_diff1 value: 42.7688 - type: nauc_ndcg_at_3_max value: 24.329 - type: nauc_ndcg_at_3_std value: 1.3894 - type: nauc_ndcg_at_3_diff1 value: 38.5792 - type: nauc_ndcg_at_5_max value: 24.331 - type: nauc_ndcg_at_5_std value: 3.1460000000000004 - type: nauc_ndcg_at_5_diff1 value: 36.1599 - type: nauc_ndcg_at_10_max value: 23.9962 - type: nauc_ndcg_at_10_std value: 3.6198 - type: nauc_ndcg_at_10_diff1 value: 34.615899999999996 - type: nauc_ndcg_at_20_max value: 23.189899999999998 - type: nauc_ndcg_at_20_std value: 3.3743000000000003 - type: nauc_ndcg_at_20_diff1 value: 34.5344 - type: nauc_ndcg_at_100_max value: 24.1644 - type: nauc_ndcg_at_100_std value: 5.3245000000000005 - type: nauc_ndcg_at_100_diff1 value: 34.1404 - type: nauc_ndcg_at_1000_max value: 24.4504 - type: nauc_ndcg_at_1000_std value: 5.0385 - type: nauc_ndcg_at_1000_diff1 value: 34.3277 - type: nauc_map_at_1_max value: 20.5435 - type: nauc_map_at_1_std value: -0.1746 - type: nauc_map_at_1_diff1 value: 43.252 - type: nauc_map_at_3_max value: 23.108999999999998 - type: nauc_map_at_3_std value: 0.8848 - type: nauc_map_at_3_diff1 value: 39.9259 - type: nauc_map_at_5_max value: 23.329900000000002 - type: nauc_map_at_5_std value: 1.7795999999999998 - type: nauc_map_at_5_diff1 value: 38.448 - type: nauc_map_at_10_max value: 23.1789 - type: nauc_map_at_10_std value: 2.1036 - type: nauc_map_at_10_diff1 value: 37.653 - type: nauc_map_at_20_max value: 22.9132 - type: nauc_map_at_20_std value: 2.1094 - type: nauc_map_at_20_diff1 value: 37.5569 - type: nauc_map_at_100_max value: 23.0857 - type: nauc_map_at_100_std value: 2.4645 - type: nauc_map_at_100_diff1 value: 37.4881 - type: nauc_map_at_1000_max value: 23.0988 - type: nauc_map_at_1000_std value: 2.4427999999999996 - type: nauc_map_at_1000_diff1 value: 37.4707 - type: nauc_recall_at_1_max value: 20.5435 - type: nauc_recall_at_1_std value: -0.1746 - type: nauc_recall_at_1_diff1 value: 43.252 - type: nauc_recall_at_3_max value: 24.393500000000003 - type: nauc_recall_at_3_std value: 3.3230999999999997 - type: nauc_recall_at_3_diff1 value: 34.7983 - type: nauc_recall_at_5_max value: 23.4229 - type: nauc_recall_at_5_std value: 6.2542 - type: nauc_recall_at_5_diff1 value: 28.8147 - type: nauc_recall_at_10_max value: 22.6162 - type: nauc_recall_at_10_std value: 6.9113 - type: nauc_recall_at_10_diff1 value: 24.617900000000002 - type: nauc_recall_at_20_max value: 19.8826 - type: nauc_recall_at_20_std value: 6.0004 - type: nauc_recall_at_20_diff1 value: 24.0887 - type: nauc_recall_at_100_max value: 24.428900000000002 - type: nauc_recall_at_100_std value: 18.8358 - type: nauc_recall_at_100_diff1 value: 18.6841 - type: nauc_recall_at_1000_max value: 34.9059 - type: nauc_recall_at_1000_std value: 30.6124 - type: nauc_recall_at_1000_diff1 value: 11.7067 - type: nauc_precision_at_1_max value: 22.3876 - type: nauc_precision_at_1_std value: -0.26649999999999996 - type: nauc_precision_at_1_diff1 value: 42.7688 - type: nauc_precision_at_3_max value: 24.7919 - type: nauc_precision_at_3_std value: 1.3971 - type: nauc_precision_at_3_diff1 value: 32.175599999999996 - type: nauc_precision_at_5_max value: 25.4503 - type: nauc_precision_at_5_std value: 4.4636000000000005 - type: nauc_precision_at_5_diff1 value: 25.453599999999998 - type: 
nauc_precision_at_10_max value: 21.1404 - type: nauc_precision_at_10_std value: 4.7988 - type: nauc_precision_at_10_diff1 value: 17.3144 - type: nauc_precision_at_20_max value: 16.4733 - type: nauc_precision_at_20_std value: 3.7228999999999997 - type: nauc_precision_at_20_diff1 value: 12.853 - type: nauc_precision_at_100_max value: 12.5551 - type: nauc_precision_at_100_std value: 6.2132 - type: nauc_precision_at_100_diff1 value: 1.2163 - type: nauc_precision_at_1000_max value: 2.706 - type: nauc_precision_at_1000_std value: -0.7363999999999999 - type: nauc_precision_at_1000_diff1 value: -6.0556 - type: nauc_mrr_at_1_max value: 22.3876 - type: nauc_mrr_at_1_std value: -0.26649999999999996 - type: nauc_mrr_at_1_diff1 value: 42.7688 - type: nauc_mrr_at_3_max value: 24.9398 - type: nauc_mrr_at_3_std value: 1.5026 - type: nauc_mrr_at_3_diff1 value: 39.2078 - type: nauc_mrr_at_5_max value: 24.9525 - type: nauc_mrr_at_5_std value: 2.2446 - type: nauc_mrr_at_5_diff1 value: 37.9502 - type: nauc_mrr_at_10_max value: 24.8361 - type: nauc_mrr_at_10_std value: 2.1445 - type: nauc_mrr_at_10_diff1 value: 37.4108 - type: nauc_mrr_at_20_max value: 24.529300000000003 - type: nauc_mrr_at_20_std value: 2.0292 - type: nauc_mrr_at_20_diff1 value: 37.3959 - type: nauc_mrr_at_100_max value: 24.627299999999998 - type: nauc_mrr_at_100_std value: 2.2496 - type: nauc_mrr_at_100_diff1 value: 37.4236 - type: nauc_mrr_at_1000_max value: 24.6481 - type: nauc_mrr_at_1000_std value: 2.2540999999999998 - type: nauc_mrr_at_1000_diff1 value: 37.4501 - type: main_score value: 32.823 - task: type: Retrieval dataset: name: MTEB CQADupstackPhysicsRetrieval (default) type: mteb/cqadupstack-physics config: default split: test revision: 79531abbd1fb92d06c6d6315a0cbbbf5bb247ea4 metrics: - type: ndcg_at_1 value: 40.135 - type: ndcg_at_3 value: 45.062999999999995 - type: ndcg_at_5 value: 47.674 - type: ndcg_at_10 value: 50.312 - type: ndcg_at_20 value: 52.349000000000004 - type: ndcg_at_100 value: 55.428 - type: ndcg_at_1000 value: 57.202 - type: map_at_1 value: 32.757 - type: map_at_3 value: 40.722 - type: map_at_5 value: 42.656 - type: map_at_10 value: 44.162 - type: map_at_20 value: 44.889 - type: map_at_100 value: 45.454 - type: map_at_1000 value: 45.562999999999995 - type: recall_at_1 value: 32.757 - type: recall_at_3 value: 48.120000000000005 - type: recall_at_5 value: 54.666000000000004 - type: recall_at_10 value: 62.632 - type: recall_at_20 value: 69.592 - type: recall_at_100 value: 83.863 - type: recall_at_1000 value: 95.065 - type: precision_at_1 value: 40.135 - type: precision_at_3 value: 21.367 - type: precision_at_5 value: 15.265 - type: precision_at_10 value: 9.057 - type: precision_at_20 value: 5.25 - type: precision_at_100 value: 1.347 - type: precision_at_1000 value: 0.169 - type: mrr_at_1 value: 40.1347 - type: mrr_at_3 value: 47.3532 - type: mrr_at_5 value: 48.8547 - type: mrr_at_10 value: 49.9016 - type: mrr_at_20 value: 50.31250000000001 - type: mrr_at_100 value: 50.6278 - type: mrr_at_1000 value: 50.6652 - type: nauc_ndcg_at_1_max value: 38.7881 - type: nauc_ndcg_at_1_std value: -8.296000000000001 - type: nauc_ndcg_at_1_diff1 value: 52.21130000000001 - type: nauc_ndcg_at_3_max value: 38.7708 - type: nauc_ndcg_at_3_std value: -6.576700000000001 - type: nauc_ndcg_at_3_diff1 value: 48.9321 - type: nauc_ndcg_at_5_max value: 38.438 - type: nauc_ndcg_at_5_std value: -6.2548 - type: nauc_ndcg_at_5_diff1 value: 48.0762 - type: nauc_ndcg_at_10_max value: 38.365899999999996 - type: nauc_ndcg_at_10_std value: -5.7385 - type: 
nauc_ndcg_at_10_diff1 value: 48.158899999999996 - type: nauc_ndcg_at_20_max value: 39.0394 - type: nauc_ndcg_at_20_std value: -5.0741000000000005 - type: nauc_ndcg_at_20_diff1 value: 48.540499999999994 - type: nauc_ndcg_at_100_max value: 39.7277 - type: nauc_ndcg_at_100_std value: -2.7447 - type: nauc_ndcg_at_100_diff1 value: 47.9735 - type: nauc_ndcg_at_1000_max value: 40.0211 - type: nauc_ndcg_at_1000_std value: -2.7227 - type: nauc_ndcg_at_1000_diff1 value: 48.1857 - type: nauc_map_at_1_max value: 33.7229 - type: nauc_map_at_1_std value: -12.5585 - type: nauc_map_at_1_diff1 value: 54.0852 - type: nauc_map_at_3_max value: 36.403 - type: nauc_map_at_3_std value: -9.1775 - type: nauc_map_at_3_diff1 value: 49.7749 - type: nauc_map_at_5_max value: 36.804500000000004 - type: nauc_map_at_5_std value: -8.4613 - type: nauc_map_at_5_diff1 value: 49.1705 - type: nauc_map_at_10_max value: 37.3301 - type: nauc_map_at_10_std value: -7.706200000000001 - type: nauc_map_at_10_diff1 value: 49.3899 - type: nauc_map_at_20_max value: 37.541999999999994 - type: nauc_map_at_20_std value: -7.4139 - type: nauc_map_at_20_diff1 value: 49.4555 - type: nauc_map_at_100_max value: 37.7874 - type: nauc_map_at_100_std value: -6.8967 - type: nauc_map_at_100_diff1 value: 49.336999999999996 - type: nauc_map_at_1000_max value: 37.8174 - type: nauc_map_at_1000_std value: -6.8435 - type: nauc_map_at_1000_diff1 value: 49.3269 - type: nauc_recall_at_1_max value: 33.7229 - type: nauc_recall_at_1_std value: -12.5585 - type: nauc_recall_at_1_diff1 value: 54.0852 - type: nauc_recall_at_3_max value: 34.7265 - type: nauc_recall_at_3_std value: -8.2544 - type: nauc_recall_at_3_diff1 value: 45.2066 - type: nauc_recall_at_5_max value: 34.319 - type: nauc_recall_at_5_std value: -6.7825 - type: nauc_recall_at_5_diff1 value: 41.783 - type: nauc_recall_at_10_max value: 34.5308 - type: nauc_recall_at_10_std value: -3.8527 - type: nauc_recall_at_10_diff1 value: 40.9153 - type: nauc_recall_at_20_max value: 36.6563 - type: nauc_recall_at_20_std value: -0.6942 - type: nauc_recall_at_20_diff1 value: 41.7078 - type: nauc_recall_at_100_max value: 38.7406 - type: nauc_recall_at_100_std value: 18.8691 - type: nauc_recall_at_100_diff1 value: 34.8788 - type: nauc_recall_at_1000_max value: 53.96490000000001 - type: nauc_recall_at_1000_std value: 46.1526 - type: nauc_recall_at_1000_diff1 value: 34.4075 - type: nauc_precision_at_1_max value: 38.7881 - type: nauc_precision_at_1_std value: -8.296000000000001 - type: nauc_precision_at_1_diff1 value: 52.21130000000001 - type: nauc_precision_at_3_max value: 38.4296 - type: nauc_precision_at_3_std value: 5.1817 - type: nauc_precision_at_3_diff1 value: 32.3129 - type: nauc_precision_at_5_max value: 33.9238 - type: nauc_precision_at_5_std value: 10.5533 - type: nauc_precision_at_5_diff1 value: 22.5911 - type: nauc_precision_at_10_max value: 30.967 - type: nauc_precision_at_10_std value: 16.371 - type: nauc_precision_at_10_diff1 value: 15.714 - type: nauc_precision_at_20_max value: 27.0551 - type: nauc_precision_at_20_std value: 18.2058 - type: nauc_precision_at_20_diff1 value: 10.084 - type: nauc_precision_at_100_max value: 18.493000000000002 - type: nauc_precision_at_100_std value: 25.315199999999997 - type: nauc_precision_at_100_diff1 value: -5.4256 - type: nauc_precision_at_1000_max value: 6.7 - type: nauc_precision_at_1000_std value: 22.2852 - type: nauc_precision_at_1000_diff1 value: -14.102 - type: nauc_mrr_at_1_max value: 38.7881 - type: nauc_mrr_at_1_std value: -8.296000000000001 - type: 
nauc_mrr_at_1_diff1 value: 52.21130000000001 - type: nauc_mrr_at_3_max value: 40.9462 - type: nauc_mrr_at_3_std value: -5.224 - type: nauc_mrr_at_3_diff1 value: 49.9567 - type: nauc_mrr_at_5_max value: 40.6606 - type: nauc_mrr_at_5_std value: -5.1892000000000005 - type: nauc_mrr_at_5_diff1 value: 49.274499999999996 - type: nauc_mrr_at_10_max value: 40.7644 - type: nauc_mrr_at_10_std value: -4.7934 - type: nauc_mrr_at_10_diff1 value: 49.2337 - type: nauc_mrr_at_20_max value: 40.8569 - type: nauc_mrr_at_20_std value: -4.7076 - type: nauc_mrr_at_20_diff1 value: 49.358999999999995 - type: nauc_mrr_at_100_max value: 40.8362 - type: nauc_mrr_at_100_std value: -4.5678 - type: nauc_mrr_at_100_diff1 value: 49.32 - type: nauc_mrr_at_1000_max value: 40.827400000000004 - type: nauc_mrr_at_1000_std value: -4.5844000000000005 - type: nauc_mrr_at_1000_diff1 value: 49.3213 - type: main_score value: 50.312 - task: type: Retrieval dataset: name: MTEB CQADupstackProgrammersRetrieval (default) type: mteb/cqadupstack-programmers config: default split: test revision: 6184bc1440d2dbc7612be22b50686b8826d22b32 metrics: - type: ndcg_at_1 value: 38.013999999999996 - type: ndcg_at_3 value: 42.824 - type: ndcg_at_5 value: 45.074999999999996 - type: ndcg_at_10 value: 47.769 - type: ndcg_at_20 value: 49.964 - type: ndcg_at_100 value: 53.271 - type: ndcg_at_1000 value: 55.217000000000006 - type: map_at_1 value: 31.751 - type: map_at_3 value: 38.95 - type: map_at_5 value: 40.681 - type: map_at_10 value: 42.097 - type: map_at_20 value: 42.892 - type: map_at_100 value: 43.472 - type: map_at_1000 value: 43.578 - type: recall_at_1 value: 31.751 - type: recall_at_3 value: 45.409 - type: recall_at_5 value: 51.373000000000005 - type: recall_at_10 value: 59.168 - type: recall_at_20 value: 66.669 - type: recall_at_100 value: 82.26400000000001 - type: recall_at_1000 value: 95.017 - type: precision_at_1 value: 38.013999999999996 - type: precision_at_3 value: 19.977 - type: precision_at_5 value: 14.11 - type: precision_at_10 value: 8.493 - type: precision_at_20 value: 5.0 - type: precision_at_100 value: 1.312 - type: precision_at_1000 value: 0.165 - type: mrr_at_1 value: 38.0137 - type: mrr_at_3 value: 44.9772 - type: mrr_at_5 value: 46.387 - type: mrr_at_10 value: 47.384100000000004 - type: mrr_at_20 value: 47.8746 - type: mrr_at_100 value: 48.2235 - type: mrr_at_1000 value: 48.2699 - type: nauc_ndcg_at_1_max value: 35.9967 - type: nauc_ndcg_at_1_std value: 4.926500000000001 - type: nauc_ndcg_at_1_diff1 value: 43.5414 - type: nauc_ndcg_at_3_max value: 35.4574 - type: nauc_ndcg_at_3_std value: 2.6951 - type: nauc_ndcg_at_3_diff1 value: 38.5888 - type: nauc_ndcg_at_5_max value: 35.7783 - type: nauc_ndcg_at_5_std value: 3.5970000000000004 - type: nauc_ndcg_at_5_diff1 value: 38.107 - type: nauc_ndcg_at_10_max value: 35.9047 - type: nauc_ndcg_at_10_std value: 5.3849 - type: nauc_ndcg_at_10_diff1 value: 37.6917 - type: nauc_ndcg_at_20_max value: 37.4203 - type: nauc_ndcg_at_20_std value: 7.5072 - type: nauc_ndcg_at_20_diff1 value: 37.9429 - type: nauc_ndcg_at_100_max value: 37.913000000000004 - type: nauc_ndcg_at_100_std value: 8.8726 - type: nauc_ndcg_at_100_diff1 value: 37.8018 - type: nauc_ndcg_at_1000_max value: 37.7521 - type: nauc_ndcg_at_1000_std value: 8.0898 - type: nauc_ndcg_at_1000_diff1 value: 38.188 - type: nauc_map_at_1_max value: 30.6039 - type: nauc_map_at_1_std value: -1.1973 - type: nauc_map_at_1_diff1 value: 44.4956 - type: nauc_map_at_3_max value: 33.79 - type: nauc_map_at_3_std value: 0.7224999999999999 - type: 
nauc_map_at_3_diff1 value: 40.5918 - type: nauc_map_at_5_max value: 34.799 - type: nauc_map_at_5_std value: 1.9663 - type: nauc_map_at_5_diff1 value: 40.119 - type: nauc_map_at_10_max value: 35.0036 - type: nauc_map_at_10_std value: 2.9479 - type: nauc_map_at_10_diff1 value: 39.725899999999996 - type: nauc_map_at_20_max value: 35.6907 - type: nauc_map_at_20_std value: 3.7684 - type: nauc_map_at_20_diff1 value: 39.6845 - type: nauc_map_at_100_max value: 35.8249 - type: nauc_map_at_100_std value: 4.123 - type: nauc_map_at_100_diff1 value: 39.6397 - type: nauc_map_at_1000_max value: 35.8146 - type: nauc_map_at_1000_std value: 4.100899999999999 - type: nauc_map_at_1000_diff1 value: 39.6511 - type: nauc_recall_at_1_max value: 30.6039 - type: nauc_recall_at_1_std value: -1.1973 - type: nauc_recall_at_1_diff1 value: 44.4956 - type: nauc_recall_at_3_max value: 33.9619 - type: nauc_recall_at_3_std value: 1.3599 - type: nauc_recall_at_3_diff1 value: 36.673899999999996 - type: nauc_recall_at_5_max value: 34.798899999999996 - type: nauc_recall_at_5_std value: 3.9083 - type: nauc_recall_at_5_diff1 value: 34.2275 - type: nauc_recall_at_10_max value: 34.3508 - type: nauc_recall_at_10_std value: 8.6454 - type: nauc_recall_at_10_diff1 value: 31.9422 - type: nauc_recall_at_20_max value: 39.1475 - type: nauc_recall_at_20_std value: 17.0303 - type: nauc_recall_at_20_diff1 value: 32.138099999999994 - type: nauc_recall_at_100_max value: 43.452 - type: nauc_recall_at_100_std value: 31.8449 - type: nauc_recall_at_100_diff1 value: 27.38 - type: nauc_recall_at_1000_max value: 56.720000000000006 - type: nauc_recall_at_1000_std value: 51.5088 - type: nauc_recall_at_1000_diff1 value: 28.131099999999996 - type: nauc_precision_at_1_max value: 35.9967 - type: nauc_precision_at_1_std value: 4.926500000000001 - type: nauc_precision_at_1_diff1 value: 43.5414 - type: nauc_precision_at_3_max value: 36.204 - type: nauc_precision_at_3_std value: 9.6793 - type: nauc_precision_at_3_diff1 value: 22.8807 - type: nauc_precision_at_5_max value: 34.226 - type: nauc_precision_at_5_std value: 14.0818 - type: nauc_precision_at_5_diff1 value: 16.223000000000003 - type: nauc_precision_at_10_max value: 28.3789 - type: nauc_precision_at_10_std value: 18.8125 - type: nauc_precision_at_10_diff1 value: 7.382700000000001 - type: nauc_precision_at_20_max value: 26.151600000000002 - type: nauc_precision_at_20_std value: 22.352 - type: nauc_precision_at_20_diff1 value: 1.0934 - type: nauc_precision_at_100_max value: 13.886399999999998 - type: nauc_precision_at_100_std value: 21.5356 - type: nauc_precision_at_100_diff1 value: -10.3265 - type: nauc_precision_at_1000_max value: -1.5730000000000002 - type: nauc_precision_at_1000_std value: 9.9943 - type: nauc_precision_at_1000_diff1 value: -18.5193 - type: nauc_mrr_at_1_max value: 35.9967 - type: nauc_mrr_at_1_std value: 4.926500000000001 - type: nauc_mrr_at_1_diff1 value: 43.5414 - type: nauc_mrr_at_3_max value: 37.1377 - type: nauc_mrr_at_3_std value: 5.6196 - type: nauc_mrr_at_3_diff1 value: 38.9643 - type: nauc_mrr_at_5_max value: 36.945499999999996 - type: nauc_mrr_at_5_std value: 5.9594000000000005 - type: nauc_mrr_at_5_diff1 value: 38.431 - type: nauc_mrr_at_10_max value: 37.094300000000004 - type: nauc_mrr_at_10_std value: 6.6665 - type: nauc_mrr_at_10_diff1 value: 38.4148 - type: nauc_mrr_at_20_max value: 37.283100000000005 - type: nauc_mrr_at_20_std value: 7.0301 - type: nauc_mrr_at_20_diff1 value: 38.6425 - type: nauc_mrr_at_100_max value: 37.312200000000004 - type: nauc_mrr_at_100_std 
value: 7.0826 - type: nauc_mrr_at_100_diff1 value: 38.689800000000005 - type: nauc_mrr_at_1000_max value: 37.319 - type: nauc_mrr_at_1000_std value: 7.0653999999999995 - type: nauc_mrr_at_1000_diff1 value: 38.7106 - type: main_score value: 47.769 - task: type: Retrieval dataset: name: MTEB CQADupstackRetrieval (default) type: CQADupstackRetrieval_is_a_combined_dataset config: default split: test revision: CQADupstackRetrieval_is_a_combined_dataset metrics: - type: main_score value: 46.10300000000001 - type: ndcg_at_10 value: 46.10300000000001 - task: type: Retrieval dataset: name: MTEB CQADupstackStatsRetrieval (default) type: mteb/cqadupstack-stats config: default split: test revision: 65ac3a16b8e91f9cee4c9828cc7c335575432a2a metrics: - type: ndcg_at_1 value: 32.362 - type: ndcg_at_3 value: 36.026 - type: ndcg_at_5 value: 38.122 - type: ndcg_at_10 value: 40.174 - type: ndcg_at_20 value: 41.836 - type: ndcg_at_100 value: 44.444 - type: ndcg_at_1000 value: 46.929 - type: map_at_1 value: 28.871999999999996 - type: map_at_3 value: 33.613 - type: map_at_5 value: 35.007 - type: map_at_10 value: 35.976 - type: map_at_20 value: 36.496 - type: map_at_100 value: 36.895 - type: map_at_1000 value: 36.994 - type: recall_at_1 value: 28.871999999999996 - type: recall_at_3 value: 38.705 - type: recall_at_5 value: 43.821 - type: recall_at_10 value: 49.921 - type: recall_at_20 value: 56.163 - type: recall_at_100 value: 69.084 - type: recall_at_1000 value: 87.35000000000001 - type: precision_at_1 value: 32.362 - type: precision_at_3 value: 15.184000000000001 - type: precision_at_5 value: 10.583 - type: precision_at_10 value: 6.166 - type: precision_at_20 value: 3.512 - type: precision_at_100 value: 0.897 - type: precision_at_1000 value: 0.11900000000000001 - type: mrr_at_1 value: 32.362 - type: mrr_at_3 value: 36.937599999999996 - type: mrr_at_5 value: 38.1416 - type: mrr_at_10 value: 39.012299999999996 - type: mrr_at_20 value: 39.4119 - type: mrr_at_100 value: 39.745200000000004 - type: mrr_at_1000 value: 39.8191 - type: nauc_ndcg_at_1_max value: 39.396300000000004 - type: nauc_ndcg_at_1_std value: 0.8482 - type: nauc_ndcg_at_1_diff1 value: 52.376999999999995 - type: nauc_ndcg_at_3_max value: 39.0785 - type: nauc_ndcg_at_3_std value: 3.2739 - type: nauc_ndcg_at_3_diff1 value: 48.3207 - type: nauc_ndcg_at_5_max value: 38.4648 - type: nauc_ndcg_at_5_std value: 3.3379 - type: nauc_ndcg_at_5_diff1 value: 47.468500000000006 - type: nauc_ndcg_at_10_max value: 39.0329 - type: nauc_ndcg_at_10_std value: 4.0895 - type: nauc_ndcg_at_10_diff1 value: 46.1268 - type: nauc_ndcg_at_20_max value: 38.359 - type: nauc_ndcg_at_20_std value: 4.2744 - type: nauc_ndcg_at_20_diff1 value: 45.1661 - type: nauc_ndcg_at_100_max value: 39.461 - type: nauc_ndcg_at_100_std value: 7.2038 - type: nauc_ndcg_at_100_diff1 value: 44.809 - type: nauc_ndcg_at_1000_max value: 39.875699999999995 - type: nauc_ndcg_at_1000_std value: 6.9621 - type: nauc_ndcg_at_1000_diff1 value: 45.473200000000006 - type: nauc_map_at_1_max value: 35.936800000000005 - type: nauc_map_at_1_std value: -3.2637 - type: nauc_map_at_1_diff1 value: 52.3431 - type: nauc_map_at_3_max value: 37.8006 - type: nauc_map_at_3_std value: 0.7727999999999999 - type: nauc_map_at_3_diff1 value: 49.1872 - type: nauc_map_at_5_max value: 37.932300000000005 - type: nauc_map_at_5_std value: 1.4745 - type: nauc_map_at_5_diff1 value: 48.8466 - type: nauc_map_at_10_max value: 38.4041 - type: nauc_map_at_10_std value: 2.0481 - type: nauc_map_at_10_diff1 value: 48.2292 - type: nauc_map_at_20_max 
value: 38.1992 - type: nauc_map_at_20_std value: 2.1198 - type: nauc_map_at_20_diff1 value: 47.9169 - type: nauc_map_at_100_max value: 38.3504 - type: nauc_map_at_100_std value: 2.5100000000000002 - type: nauc_map_at_100_diff1 value: 47.8259 - type: nauc_map_at_1000_max value: 38.3865 - type: nauc_map_at_1000_std value: 2.5181999999999998 - type: nauc_map_at_1000_diff1 value: 47.853699999999996 - type: nauc_recall_at_1_max value: 35.936800000000005 - type: nauc_recall_at_1_std value: -3.2637 - type: nauc_recall_at_1_diff1 value: 52.3431 - type: nauc_recall_at_3_max value: 37.227700000000006 - type: nauc_recall_at_3_std value: 3.8813 - type: nauc_recall_at_3_diff1 value: 44.8185 - type: nauc_recall_at_5_max value: 35.963 - type: nauc_recall_at_5_std value: 4.9497 - type: nauc_recall_at_5_diff1 value: 42.6322 - type: nauc_recall_at_10_max value: 37.358000000000004 - type: nauc_recall_at_10_std value: 6.6888000000000005 - type: nauc_recall_at_10_diff1 value: 38.7639 - type: nauc_recall_at_20_max value: 34.2341 - type: nauc_recall_at_20_std value: 7.0213 - type: nauc_recall_at_20_diff1 value: 34.8021 - type: nauc_recall_at_100_max value: 39.406600000000005 - type: nauc_recall_at_100_std value: 25.7393 - type: nauc_recall_at_100_diff1 value: 29.9173 - type: nauc_recall_at_1000_max value: 45.287 - type: nauc_recall_at_1000_std value: 38.572 - type: nauc_recall_at_1000_diff1 value: 26.744 - type: nauc_precision_at_1_max value: 39.396300000000004 - type: nauc_precision_at_1_std value: 0.8482 - type: nauc_precision_at_1_diff1 value: 52.376999999999995 - type: nauc_precision_at_3_max value: 42.1919 - type: nauc_precision_at_3_std value: 13.9189 - type: nauc_precision_at_3_diff1 value: 40.2337 - type: nauc_precision_at_5_max value: 39.8644 - type: nauc_precision_at_5_std value: 15.656900000000002 - type: nauc_precision_at_5_diff1 value: 35.1421 - type: nauc_precision_at_10_max value: 40.7678 - type: nauc_precision_at_10_std value: 19.5881 - type: nauc_precision_at_10_diff1 value: 28.822300000000002 - type: nauc_precision_at_20_max value: 35.4842 - type: nauc_precision_at_20_std value: 20.6978 - type: nauc_precision_at_20_diff1 value: 21.4608 - type: nauc_precision_at_100_max value: 33.211400000000005 - type: nauc_precision_at_100_std value: 31.5029 - type: nauc_precision_at_100_diff1 value: 13.0526 - type: nauc_precision_at_1000_max value: 21.6976 - type: nauc_precision_at_1000_std value: 26.4203 - type: nauc_precision_at_1000_diff1 value: 2.6056 - type: nauc_mrr_at_1_max value: 39.396300000000004 - type: nauc_mrr_at_1_std value: 0.8482 - type: nauc_mrr_at_1_diff1 value: 52.376999999999995 - type: nauc_mrr_at_3_max value: 40.191 - type: nauc_mrr_at_3_std value: 3.9919999999999995 - type: nauc_mrr_at_3_diff1 value: 49.2714 - type: nauc_mrr_at_5_max value: 39.9654 - type: nauc_mrr_at_5_std value: 4.0258 - type: nauc_mrr_at_5_diff1 value: 48.6599 - type: nauc_mrr_at_10_max value: 40.1413 - type: nauc_mrr_at_10_std value: 4.389 - type: nauc_mrr_at_10_diff1 value: 48.0272 - type: nauc_mrr_at_20_max value: 39.9265 - type: nauc_mrr_at_20_std value: 4.3462 - type: nauc_mrr_at_20_diff1 value: 47.8592 - type: nauc_mrr_at_100_max value: 40.0623 - type: nauc_mrr_at_100_std value: 4.698 - type: nauc_mrr_at_100_diff1 value: 47.8456 - type: nauc_mrr_at_1000_max value: 40.0698 - type: nauc_mrr_at_1000_std value: 4.6803 - type: nauc_mrr_at_1000_diff1 value: 47.8659 - type: main_score value: 40.174 - task: type: Retrieval dataset: name: MTEB CQADupstackTexRetrieval (default) type: mteb/cqadupstack-tex config: default 
split: test revision: 46989137a86843e03a6195de44b09deda022eec7 metrics: - type: ndcg_at_1 value: 25.155 - type: ndcg_at_3 value: 29.339 - type: ndcg_at_5 value: 31.452999999999996 - type: ndcg_at_10 value: 33.937 - type: ndcg_at_20 value: 36.018 - type: ndcg_at_100 value: 39.531 - type: ndcg_at_1000 value: 42.22 - type: map_at_1 value: 20.874000000000002 - type: map_at_3 value: 26.345000000000002 - type: map_at_5 value: 27.773999999999997 - type: map_at_10 value: 28.965999999999998 - type: map_at_20 value: 29.625 - type: map_at_100 value: 30.188 - type: map_at_1000 value: 30.314000000000004 - type: recall_at_1 value: 20.874000000000002 - type: recall_at_3 value: 31.984 - type: recall_at_5 value: 37.467 - type: recall_at_10 value: 44.774 - type: recall_at_20 value: 52.323 - type: recall_at_100 value: 69.549 - type: recall_at_1000 value: 88.419 - type: precision_at_1 value: 25.155 - type: precision_at_3 value: 13.719000000000001 - type: precision_at_5 value: 9.841999999999999 - type: precision_at_10 value: 6.069999999999999 - type: precision_at_20 value: 3.6799999999999997 - type: precision_at_100 value: 1.045 - type: precision_at_1000 value: 0.146 - type: mrr_at_1 value: 25.1549 - type: mrr_at_3 value: 30.7123 - type: mrr_at_5 value: 32.0148 - type: mrr_at_10 value: 33.035199999999996 - type: mrr_at_20 value: 33.5778 - type: mrr_at_100 value: 34.0001 - type: mrr_at_1000 value: 34.070499999999996 - type: nauc_ndcg_at_1_max value: 34.6903 - type: nauc_ndcg_at_1_std value: -0.48469999999999996 - type: nauc_ndcg_at_1_diff1 value: 41.827799999999996 - type: nauc_ndcg_at_3_max value: 34.7107 - type: nauc_ndcg_at_3_std value: 1.2525 - type: nauc_ndcg_at_3_diff1 value: 36.09 - type: nauc_ndcg_at_5_max value: 34.363899999999994 - type: nauc_ndcg_at_5_std value: 1.187 - type: nauc_ndcg_at_5_diff1 value: 35.5019 - type: nauc_ndcg_at_10_max value: 34.1261 - type: nauc_ndcg_at_10_std value: 2.0704000000000002 - type: nauc_ndcg_at_10_diff1 value: 35.0098 - type: nauc_ndcg_at_20_max value: 34.5028 - type: nauc_ndcg_at_20_std value: 2.9973 - type: nauc_ndcg_at_20_diff1 value: 34.6486 - type: nauc_ndcg_at_100_max value: 34.8192 - type: nauc_ndcg_at_100_std value: 4.4281 - type: nauc_ndcg_at_100_diff1 value: 34.252500000000005 - type: nauc_ndcg_at_1000_max value: 34.8293 - type: nauc_ndcg_at_1000_std value: 4.2747 - type: nauc_ndcg_at_1000_diff1 value: 34.5083 - type: nauc_map_at_1_max value: 31.448700000000002 - type: nauc_map_at_1_std value: -1.5652 - type: nauc_map_at_1_diff1 value: 42.3532 - type: nauc_map_at_3_max value: 33.458 - type: nauc_map_at_3_std value: 0.372 - type: nauc_map_at_3_diff1 value: 37.6257 - type: nauc_map_at_5_max value: 33.3902 - type: nauc_map_at_5_std value: 0.2957 - type: nauc_map_at_5_diff1 value: 37.0708 - type: nauc_map_at_10_max value: 33.4473 - type: nauc_map_at_10_std value: 0.7451 - type: nauc_map_at_10_diff1 value: 36.7872 - type: nauc_map_at_20_max value: 33.6705 - type: nauc_map_at_20_std value: 1.0755000000000001 - type: nauc_map_at_20_diff1 value: 36.6791 - type: nauc_map_at_100_max value: 33.772200000000005 - type: nauc_map_at_100_std value: 1.308 - type: nauc_map_at_100_diff1 value: 36.5896 - type: nauc_map_at_1000_max value: 33.7881 - type: nauc_map_at_1000_std value: 1.3087 - type: nauc_map_at_1000_diff1 value: 36.5978 - type: nauc_recall_at_1_max value: 31.448700000000002 - type: nauc_recall_at_1_std value: -1.5652 - type: nauc_recall_at_1_diff1 value: 42.3532 - type: nauc_recall_at_3_max value: 33.7171 - type: nauc_recall_at_3_std value: 2.4527 - type: 
nauc_recall_at_3_diff1 value: 32.6832 - type: nauc_recall_at_5_max value: 32.7828 - type: nauc_recall_at_5_std value: 2.0332 - type: nauc_recall_at_5_diff1 value: 30.8446 - type: nauc_recall_at_10_max value: 31.6463 - type: nauc_recall_at_10_std value: 4.3727 - type: nauc_recall_at_10_diff1 value: 29.1731 - type: nauc_recall_at_20_max value: 31.968999999999998 - type: nauc_recall_at_20_std value: 7.5392 - type: nauc_recall_at_20_diff1 value: 26.961299999999998 - type: nauc_recall_at_100_max value: 32.9142 - type: nauc_recall_at_100_std value: 17.2332 - type: nauc_recall_at_100_diff1 value: 22.0707 - type: nauc_recall_at_1000_max value: 32.1463 - type: nauc_recall_at_1000_std value: 29.664600000000004 - type: nauc_recall_at_1000_diff1 value: 13.9131 - type: nauc_precision_at_1_max value: 34.6903 - type: nauc_precision_at_1_std value: -0.48469999999999996 - type: nauc_precision_at_1_diff1 value: 41.827799999999996 - type: nauc_precision_at_3_max value: 36.8823 - type: nauc_precision_at_3_std value: 3.7052 - type: nauc_precision_at_3_diff1 value: 29.505599999999998 - type: nauc_precision_at_5_max value: 35.106 - type: nauc_precision_at_5_std value: 3.9923 - type: nauc_precision_at_5_diff1 value: 25.684099999999997 - type: nauc_precision_at_10_max value: 32.1139 - type: nauc_precision_at_10_std value: 7.097100000000001 - type: nauc_precision_at_10_diff1 value: 20.521 - type: nauc_precision_at_20_max value: 30.3506 - type: nauc_precision_at_20_std value: 9.7899 - type: nauc_precision_at_20_diff1 value: 16.106 - type: nauc_precision_at_100_max value: 23.7062 - type: nauc_precision_at_100_std value: 12.7852 - type: nauc_precision_at_100_diff1 value: 5.9668 - type: nauc_precision_at_1000_max value: 13.6273 - type: nauc_precision_at_1000_std value: 7.0956 - type: nauc_precision_at_1000_diff1 value: -3.6863 - type: nauc_mrr_at_1_max value: 34.6903 - type: nauc_mrr_at_1_std value: -0.48469999999999996 - type: nauc_mrr_at_1_diff1 value: 41.827799999999996 - type: nauc_mrr_at_3_max value: 35.826 - type: nauc_mrr_at_3_std value: 1.3141999999999998 - type: nauc_mrr_at_3_diff1 value: 37.1995 - type: nauc_mrr_at_5_max value: 35.6178 - type: nauc_mrr_at_5_std value: 1.3211 - type: nauc_mrr_at_5_diff1 value: 36.8396 - type: nauc_mrr_at_10_max value: 35.4784 - type: nauc_mrr_at_10_std value: 1.6153 - type: nauc_mrr_at_10_diff1 value: 36.6262 - type: nauc_mrr_at_20_max value: 35.5478 - type: nauc_mrr_at_20_std value: 1.8614 - type: nauc_mrr_at_20_diff1 value: 36.5754 - type: nauc_mrr_at_100_max value: 35.5825 - type: nauc_mrr_at_100_std value: 1.9792 - type: nauc_mrr_at_100_diff1 value: 36.5758 - type: nauc_mrr_at_1000_max value: 35.5811 - type: nauc_mrr_at_1000_std value: 1.9691 - type: nauc_mrr_at_1000_diff1 value: 36.587399999999995 - type: main_score value: 33.937 - task: type: Retrieval dataset: name: MTEB CQADupstackUnixRetrieval (default) type: mteb/cqadupstack-unix config: default split: test revision: 6c6430d3a6d36f8d2a829195bc5dc94d7e063e53 metrics: - type: ndcg_at_1 value: 36.381 - type: ndcg_at_3 value: 41.605 - type: ndcg_at_5 value: 43.854 - type: ndcg_at_10 value: 46.831 - type: ndcg_at_20 value: 49.114999999999995 - type: ndcg_at_100 value: 52.071 - type: ndcg_at_1000 value: 53.864999999999995 - type: map_at_1 value: 30.957 - type: map_at_3 value: 38.074999999999996 - type: map_at_5 value: 39.732 - type: map_at_10 value: 41.187000000000005 - type: map_at_20 value: 41.94 - type: map_at_100 value: 42.447 - type: map_at_1000 value: 42.536 - type: recall_at_1 value: 30.957 - type: recall_at_3 
value: 45.213 - type: recall_at_5 value: 51.196 - type: recall_at_10 value: 59.724 - type: recall_at_20 value: 67.837 - type: recall_at_100 value: 81.843 - type: recall_at_1000 value: 93.91000000000001 - type: precision_at_1 value: 36.381 - type: precision_at_3 value: 18.999 - type: precision_at_5 value: 13.172 - type: precision_at_10 value: 7.938000000000001 - type: precision_at_20 value: 4.6129999999999995 - type: precision_at_100 value: 1.172 - type: precision_at_1000 value: 0.14300000000000002 - type: mrr_at_1 value: 36.3806 - type: mrr_at_3 value: 42.7239 - type: mrr_at_5 value: 44.0905 - type: mrr_at_10 value: 45.2951 - type: mrr_at_20 value: 45.8788 - type: mrr_at_100 value: 46.1807 - type: mrr_at_1000 value: 46.226800000000004 - type: nauc_ndcg_at_1_max value: 47.0214 - type: nauc_ndcg_at_1_std value: -0.8086 - type: nauc_ndcg_at_1_diff1 value: 55.931200000000004 - type: nauc_ndcg_at_3_max value: 44.829299999999996 - type: nauc_ndcg_at_3_std value: 0.6224000000000001 - type: nauc_ndcg_at_3_diff1 value: 49.7765 - type: nauc_ndcg_at_5_max value: 44.3325 - type: nauc_ndcg_at_5_std value: 0.1854 - type: nauc_ndcg_at_5_diff1 value: 49.0426 - type: nauc_ndcg_at_10_max value: 44.358599999999996 - type: nauc_ndcg_at_10_std value: 0.6905 - type: nauc_ndcg_at_10_diff1 value: 48.1902 - type: nauc_ndcg_at_20_max value: 45.018 - type: nauc_ndcg_at_20_std value: 1.555 - type: nauc_ndcg_at_20_diff1 value: 48.2645 - type: nauc_ndcg_at_100_max value: 45.3244 - type: nauc_ndcg_at_100_std value: 3.0655 - type: nauc_ndcg_at_100_diff1 value: 48.1011 - type: nauc_ndcg_at_1000_max value: 45.2297 - type: nauc_ndcg_at_1000_std value: 2.5452 - type: nauc_ndcg_at_1000_diff1 value: 48.4179 - type: nauc_map_at_1_max value: 44.1846 - type: nauc_map_at_1_std value: -2.661 - type: nauc_map_at_1_diff1 value: 58.4395 - type: nauc_map_at_3_max value: 44.7697 - type: nauc_map_at_3_std value: -0.3776 - type: nauc_map_at_3_diff1 value: 52.7119 - type: nauc_map_at_5_max value: 44.6708 - type: nauc_map_at_5_std value: -0.4622 - type: nauc_map_at_5_diff1 value: 51.8622 - type: nauc_map_at_10_max value: 44.7631 - type: nauc_map_at_10_std value: -0.2403 - type: nauc_map_at_10_diff1 value: 51.439299999999996 - type: nauc_map_at_20_max value: 45.0612 - type: nauc_map_at_20_std value: 0.0038000000000000004 - type: nauc_map_at_20_diff1 value: 51.3768 - type: nauc_map_at_100_max value: 45.137 - type: nauc_map_at_100_std value: 0.2717 - type: nauc_map_at_100_diff1 value: 51.316700000000004 - type: nauc_map_at_1000_max value: 45.1229 - type: nauc_map_at_1000_std value: 0.2513 - type: nauc_map_at_1000_diff1 value: 51.3133 - type: nauc_recall_at_1_max value: 44.1846 - type: nauc_recall_at_1_std value: -2.661 - type: nauc_recall_at_1_diff1 value: 58.4395 - type: nauc_recall_at_3_max value: 41.656 - type: nauc_recall_at_3_std value: 1.6587999999999998 - type: nauc_recall_at_3_diff1 value: 44.9322 - type: nauc_recall_at_5_max value: 40.501 - type: nauc_recall_at_5_std value: 1.1215 - type: nauc_recall_at_5_diff1 value: 41.7702 - type: nauc_recall_at_10_max value: 39.577400000000004 - type: nauc_recall_at_10_std value: 2.172 - type: nauc_recall_at_10_diff1 value: 38.0253 - type: nauc_recall_at_20_max value: 41.1537 - type: nauc_recall_at_20_std value: 6.1195 - type: nauc_recall_at_20_diff1 value: 37.391400000000004 - type: nauc_recall_at_100_max value: 42.2577 - type: nauc_recall_at_100_std value: 20.7745 - type: nauc_recall_at_100_diff1 value: 32.8151 - type: nauc_recall_at_1000_max value: 43.5594 - type: nauc_recall_at_1000_std value: 
37.6573 - type: nauc_recall_at_1000_diff1 value: 29.7545 - type: nauc_precision_at_1_max value: 47.0214 - type: nauc_precision_at_1_std value: -0.8086 - type: nauc_precision_at_1_diff1 value: 55.931200000000004 - type: nauc_precision_at_3_max value: 39.4995 - type: nauc_precision_at_3_std value: 5.0051 - type: nauc_precision_at_3_diff1 value: 32.0456 - type: nauc_precision_at_5_max value: 34.972500000000004 - type: nauc_precision_at_5_std value: 5.1238 - type: nauc_precision_at_5_diff1 value: 24.2515 - type: nauc_precision_at_10_max value: 28.364099999999997 - type: nauc_precision_at_10_std value: 6.0539000000000005 - type: nauc_precision_at_10_diff1 value: 14.192599999999999 - type: nauc_precision_at_20_max value: 25.7353 - type: nauc_precision_at_20_std value: 8.860999999999999 - type: nauc_precision_at_20_diff1 value: 7.0925 - type: nauc_precision_at_100_max value: 11.8965 - type: nauc_precision_at_100_std value: 13.143099999999999 - type: nauc_precision_at_100_diff1 value: -8.5811 - type: nauc_precision_at_1000_max value: -3.7232000000000003 - type: nauc_precision_at_1000_std value: 6.392 - type: nauc_precision_at_1000_diff1 value: -20.5151 - type: nauc_mrr_at_1_max value: 47.0214 - type: nauc_mrr_at_1_std value: -0.8086 - type: nauc_mrr_at_1_diff1 value: 55.931200000000004 - type: nauc_mrr_at_3_max value: 45.6591 - type: nauc_mrr_at_3_std value: 0.6383 - type: nauc_mrr_at_3_diff1 value: 50.0407 - type: nauc_mrr_at_5_max value: 45.7236 - type: nauc_mrr_at_5_std value: 0.5502 - type: nauc_mrr_at_5_diff1 value: 49.6432 - type: nauc_mrr_at_10_max value: 45.6287 - type: nauc_mrr_at_10_std value: 0.6239 - type: nauc_mrr_at_10_diff1 value: 49.391200000000005 - type: nauc_mrr_at_20_max value: 45.704899999999995 - type: nauc_mrr_at_20_std value: 0.7987 - type: nauc_mrr_at_20_diff1 value: 49.4844 - type: nauc_mrr_at_100_max value: 45.708 - type: nauc_mrr_at_100_std value: 0.8823 - type: nauc_mrr_at_100_diff1 value: 49.5323 - type: nauc_mrr_at_1000_max value: 45.7135 - type: nauc_mrr_at_1000_std value: 0.8635999999999999 - type: nauc_mrr_at_1000_diff1 value: 49.5497 - type: main_score value: 46.831 - task: type: Retrieval dataset: name: MTEB CQADupstackWebmastersRetrieval (default) type: mteb/cqadupstack-webmasters config: default split: test revision: 160c094312a0e1facb97e55eeddb698c0abe3571 metrics: - type: ndcg_at_1 value: 34.98 - type: ndcg_at_3 value: 39.911 - type: ndcg_at_5 value: 42.21 - type: ndcg_at_10 value: 45.539 - type: ndcg_at_20 value: 47.964 - type: ndcg_at_100 value: 51.642999999999994 - type: ndcg_at_1000 value: 53.647 - type: map_at_1 value: 30.034 - type: map_at_3 value: 35.97 - type: map_at_5 value: 37.635999999999996 - type: map_at_10 value: 39.367999999999995 - type: map_at_20 value: 40.328 - type: map_at_100 value: 41.158 - type: map_at_1000 value: 41.366 - type: recall_at_1 value: 30.034 - type: recall_at_3 value: 42.006 - type: recall_at_5 value: 47.843 - type: recall_at_10 value: 57.568 - type: recall_at_20 value: 66.493 - type: recall_at_100 value: 84.136 - type: recall_at_1000 value: 95.631 - type: precision_at_1 value: 34.98 - type: precision_at_3 value: 18.116 - type: precision_at_5 value: 13.202 - type: precision_at_10 value: 8.616999999999999 - type: precision_at_20 value: 5.425 - type: precision_at_100 value: 1.6260000000000001 - type: precision_at_1000 value: 0.249 - type: mrr_at_1 value: 34.9802 - type: mrr_at_3 value: 41.172599999999996 - type: mrr_at_5 value: 42.4671 - type: mrr_at_10 value: 43.8709 - type: mrr_at_20 value: 44.4684 - type: mrr_at_100 value: 
44.8617 - type: mrr_at_1000 value: 44.9033 - type: nauc_ndcg_at_1_max value: 36.1514 - type: nauc_ndcg_at_1_std value: 6.7383 - type: nauc_ndcg_at_1_diff1 value: 49.9936 - type: nauc_ndcg_at_3_max value: 38.3225 - type: nauc_ndcg_at_3_std value: 8.0985 - type: nauc_ndcg_at_3_diff1 value: 42.9416 - type: nauc_ndcg_at_5_max value: 39.4299 - type: nauc_ndcg_at_5_std value: 9.2335 - type: nauc_ndcg_at_5_diff1 value: 43.4214 - type: nauc_ndcg_at_10_max value: 39.1123 - type: nauc_ndcg_at_10_std value: 9.4134 - type: nauc_ndcg_at_10_diff1 value: 42.6415 - type: nauc_ndcg_at_20_max value: 38.9531 - type: nauc_ndcg_at_20_std value: 9.707 - type: nauc_ndcg_at_20_diff1 value: 43.0215 - type: nauc_ndcg_at_100_max value: 40.3045 - type: nauc_ndcg_at_100_std value: 11.304400000000001 - type: nauc_ndcg_at_100_diff1 value: 43.0846 - type: nauc_ndcg_at_1000_max value: 39.9421 - type: nauc_ndcg_at_1000_std value: 11.1666 - type: nauc_ndcg_at_1000_diff1 value: 43.3505 - type: nauc_map_at_1_max value: 34.735 - type: nauc_map_at_1_std value: 2.9007 - type: nauc_map_at_1_diff1 value: 52.495599999999996 - type: nauc_map_at_3_max value: 37.5749 - type: nauc_map_at_3_std value: 5.1779 - type: nauc_map_at_3_diff1 value: 46.536300000000004 - type: nauc_map_at_5_max value: 38.4721 - type: nauc_map_at_5_std value: 6.0973 - type: nauc_map_at_5_diff1 value: 46.434799999999996 - type: nauc_map_at_10_max value: 38.744299999999996 - type: nauc_map_at_10_std value: 6.7116 - type: nauc_map_at_10_diff1 value: 46.0759 - type: nauc_map_at_20_max value: 38.756 - type: nauc_map_at_20_std value: 7.263699999999999 - type: nauc_map_at_20_diff1 value: 46.0274 - type: nauc_map_at_100_max value: 38.9362 - type: nauc_map_at_100_std value: 8.0227 - type: nauc_map_at_100_diff1 value: 45.8767 - type: nauc_map_at_1000_max value: 38.7473 - type: nauc_map_at_1000_std value: 8.089 - type: nauc_map_at_1000_diff1 value: 45.8848 - type: nauc_recall_at_1_max value: 34.735 - type: nauc_recall_at_1_std value: 2.9007 - type: nauc_recall_at_1_diff1 value: 52.495599999999996 - type: nauc_recall_at_3_max value: 37.1901 - type: nauc_recall_at_3_std value: 6.4211 - type: nauc_recall_at_3_diff1 value: 38.846000000000004 - type: nauc_recall_at_5_max value: 39.8879 - type: nauc_recall_at_5_std value: 9.5204 - type: nauc_recall_at_5_diff1 value: 37.9339 - type: nauc_recall_at_10_max value: 37.181999999999995 - type: nauc_recall_at_10_std value: 9.764100000000001 - type: nauc_recall_at_10_diff1 value: 33.4855 - type: nauc_recall_at_20_max value: 35.6859 - type: nauc_recall_at_20_std value: 13.173599999999999 - type: nauc_recall_at_20_diff1 value: 33.254 - type: nauc_recall_at_100_max value: 42.728100000000005 - type: nauc_recall_at_100_std value: 25.913999999999998 - type: nauc_recall_at_100_diff1 value: 28.9205 - type: nauc_recall_at_1000_max value: 56.496900000000004 - type: nauc_recall_at_1000_std value: 56.183499999999995 - type: nauc_recall_at_1000_diff1 value: 24.8659 - type: nauc_precision_at_1_max value: 36.1514 - type: nauc_precision_at_1_std value: 6.7383 - type: nauc_precision_at_1_diff1 value: 49.9936 - type: nauc_precision_at_3_max value: 36.5767 - type: nauc_precision_at_3_std value: 14.884500000000001 - type: nauc_precision_at_3_diff1 value: 26.1181 - type: nauc_precision_at_5_max value: 33.7094 - type: nauc_precision_at_5_std value: 17.566699999999997 - type: nauc_precision_at_5_diff1 value: 20.061799999999998 - type: nauc_precision_at_10_max value: 28.034 - type: nauc_precision_at_10_std value: 23.1877 - type: nauc_precision_at_10_diff1 
value: 9.646799999999999 - type: nauc_precision_at_20_max value: 17.930699999999998 - type: nauc_precision_at_20_std value: 23.0956 - type: nauc_precision_at_20_diff1 value: -0.0383 - type: nauc_precision_at_100_max value: 0.6149 - type: nauc_precision_at_100_std value: 22.7163 - type: nauc_precision_at_100_diff1 value: -8.730400000000001 - type: nauc_precision_at_1000_max value: -19.8022 - type: nauc_precision_at_1000_std value: 8.6017 - type: nauc_precision_at_1000_diff1 value: -14.161499999999998 - type: nauc_mrr_at_1_max value: 36.1514 - type: nauc_mrr_at_1_std value: 6.7383 - type: nauc_mrr_at_1_diff1 value: 49.9936 - type: nauc_mrr_at_3_max value: 37.894299999999994 - type: nauc_mrr_at_3_std value: 8.948599999999999 - type: nauc_mrr_at_3_diff1 value: 43.985400000000006 - type: nauc_mrr_at_5_max value: 38.8686 - type: nauc_mrr_at_5_std value: 9.4464 - type: nauc_mrr_at_5_diff1 value: 43.9985 - type: nauc_mrr_at_10_max value: 38.419 - type: nauc_mrr_at_10_std value: 9.4221 - type: nauc_mrr_at_10_diff1 value: 43.621700000000004 - type: nauc_mrr_at_20_max value: 38.3933 - type: nauc_mrr_at_20_std value: 9.6024 - type: nauc_mrr_at_20_diff1 value: 43.8952 - type: nauc_mrr_at_100_max value: 38.4371 - type: nauc_mrr_at_100_std value: 9.657200000000001 - type: nauc_mrr_at_100_diff1 value: 43.9457 - type: nauc_mrr_at_1000_max value: 38.4386 - type: nauc_mrr_at_1000_std value: 9.6614 - type: nauc_mrr_at_1000_diff1 value: 43.9579 - type: main_score value: 45.539 - task: type: Retrieval dataset: name: MTEB CQADupstackWordpressRetrieval (default) type: mteb/cqadupstack-wordpress config: default split: test revision: 4ffe81d471b1924886b33c7567bfb200e9eec5c4 metrics: - type: ndcg_at_1 value: 26.987 - type: ndcg_at_3 value: 33.056999999999995 - type: ndcg_at_5 value: 35.356 - type: ndcg_at_10 value: 38.440000000000005 - type: ndcg_at_20 value: 40.136 - type: ndcg_at_100 value: 43.473 - type: ndcg_at_1000 value: 45.687 - type: map_at_1 value: 24.651999999999997 - type: map_at_3 value: 30.416999999999998 - type: map_at_5 value: 31.863999999999997 - type: map_at_10 value: 33.253 - type: map_at_20 value: 33.756 - type: map_at_100 value: 34.257 - type: map_at_1000 value: 34.347 - type: recall_at_1 value: 24.651999999999997 - type: recall_at_3 value: 37.88 - type: recall_at_5 value: 43.136 - type: recall_at_10 value: 52.06699999999999 - type: recall_at_20 value: 58.540000000000006 - type: recall_at_100 value: 75.22 - type: recall_at_1000 value: 91.774 - type: precision_at_1 value: 26.987 - type: precision_at_3 value: 14.048 - type: precision_at_5 value: 9.871 - type: precision_at_10 value: 6.063000000000001 - type: precision_at_20 value: 3.4099999999999997 - type: precision_at_100 value: 0.922 - type: precision_at_1000 value: 0.123 - type: mrr_at_1 value: 26.9871 - type: mrr_at_3 value: 33.1485 - type: mrr_at_5 value: 34.3407 - type: mrr_at_10 value: 35.6087 - type: mrr_at_20 value: 36.0483 - type: mrr_at_100 value: 36.463699999999996 - type: mrr_at_1000 value: 36.5278 - type: nauc_ndcg_at_1_max value: 26.6537 - type: nauc_ndcg_at_1_std value: -3.9813 - type: nauc_ndcg_at_1_diff1 value: 47.8302 - type: nauc_ndcg_at_3_max value: 27.3661 - type: nauc_ndcg_at_3_std value: -2.2132 - type: nauc_ndcg_at_3_diff1 value: 39.9424 - type: nauc_ndcg_at_5_max value: 27.417799999999996 - type: nauc_ndcg_at_5_std value: -1.0684 - type: nauc_ndcg_at_5_diff1 value: 39.163599999999995 - type: nauc_ndcg_at_10_max value: 26.555400000000002 - type: nauc_ndcg_at_10_std value: 0.0103 - type: nauc_ndcg_at_10_diff1 value: 38.9487 
- type: nauc_ndcg_at_20_max value: 25.963900000000002 - type: nauc_ndcg_at_20_std value: 0.7779 - type: nauc_ndcg_at_20_diff1 value: 38.7279 - type: nauc_ndcg_at_100_max value: 26.6365 - type: nauc_ndcg_at_100_std value: 3.0018 - type: nauc_ndcg_at_100_diff1 value: 38.1326 - type: nauc_ndcg_at_1000_max value: 26.52 - type: nauc_ndcg_at_1000_std value: 2.6968 - type: nauc_ndcg_at_1000_diff1 value: 38.1665 - type: nauc_map_at_1_max value: 24.950400000000002 - type: nauc_map_at_1_std value: -4.2715000000000005 - type: nauc_map_at_1_diff1 value: 48.2994 - type: nauc_map_at_3_max value: 26.4208 - type: nauc_map_at_3_std value: -3.0675 - type: nauc_map_at_3_diff1 value: 41.987 - type: nauc_map_at_5_max value: 26.641900000000003 - type: nauc_map_at_5_std value: -2.3005 - type: nauc_map_at_5_diff1 value: 41.4695 - type: nauc_map_at_10_max value: 26.2781 - type: nauc_map_at_10_std value: -1.8994 - type: nauc_map_at_10_diff1 value: 41.193000000000005 - type: nauc_map_at_20_max value: 26.0838 - type: nauc_map_at_20_std value: -1.7046999999999999 - type: nauc_map_at_20_diff1 value: 41.1128 - type: nauc_map_at_100_max value: 26.230199999999996 - type: nauc_map_at_100_std value: -1.2565 - type: nauc_map_at_100_diff1 value: 41.0271 - type: nauc_map_at_1000_max value: 26.2069 - type: nauc_map_at_1000_std value: -1.2469 - type: nauc_map_at_1000_diff1 value: 41.019 - type: nauc_recall_at_1_max value: 24.950400000000002 - type: nauc_recall_at_1_std value: -4.2715000000000005 - type: nauc_recall_at_1_diff1 value: 48.2994 - type: nauc_recall_at_3_max value: 27.2098 - type: nauc_recall_at_3_std value: -1.309 - type: nauc_recall_at_3_diff1 value: 34.4663 - type: nauc_recall_at_5_max value: 27.323700000000002 - type: nauc_recall_at_5_std value: 1.7010999999999998 - type: nauc_recall_at_5_diff1 value: 32.4911 - type: nauc_recall_at_10_max value: 24.6483 - type: nauc_recall_at_10_std value: 4.9019 - type: nauc_recall_at_10_diff1 value: 32.0585 - type: nauc_recall_at_20_max value: 22.556 - type: nauc_recall_at_20_std value: 8.1527 - type: nauc_recall_at_20_diff1 value: 30.8345 - type: nauc_recall_at_100_max value: 25.354300000000002 - type: nauc_recall_at_100_std value: 22.8578 - type: nauc_recall_at_100_diff1 value: 23.291999999999998 - type: nauc_recall_at_1000_max value: 26.523999999999997 - type: nauc_recall_at_1000_std value: 44.7733 - type: nauc_recall_at_1000_diff1 value: 3.1338 - type: nauc_precision_at_1_max value: 26.6537 - type: nauc_precision_at_1_std value: -3.9813 - type: nauc_precision_at_1_diff1 value: 47.8302 - type: nauc_precision_at_3_max value: 30.8201 - type: nauc_precision_at_3_std value: 1.7691 - type: nauc_precision_at_3_diff1 value: 33.3835 - type: nauc_precision_at_5_max value: 29.5433 - type: nauc_precision_at_5_std value: 4.4224 - type: nauc_precision_at_5_diff1 value: 28.426000000000002 - type: nauc_precision_at_10_max value: 26.0888 - type: nauc_precision_at_10_std value: 7.8104000000000005 - type: nauc_precision_at_10_diff1 value: 24.509800000000002 - type: nauc_precision_at_20_max value: 22.218799999999998 - type: nauc_precision_at_20_std value: 11.248099999999999 - type: nauc_precision_at_20_diff1 value: 20.6056 - type: nauc_precision_at_100_max value: 16.4622 - type: nauc_precision_at_100_std value: 25.735200000000003 - type: nauc_precision_at_100_diff1 value: 6.2566 - type: nauc_precision_at_1000_max value: -9.109399999999999 - type: nauc_precision_at_1000_std value: 13.820099999999998 - type: nauc_precision_at_1000_diff1 value: -7.9046 - type: nauc_mrr_at_1_max value: 26.6537 - 
type: nauc_mrr_at_1_std value: -3.9813 - type: nauc_mrr_at_1_diff1 value: 47.8302 - type: nauc_mrr_at_3_max value: 27.9843 - type: nauc_mrr_at_3_std value: -2.3418 - type: nauc_mrr_at_3_diff1 value: 41.4877 - type: nauc_mrr_at_5_max value: 27.9298 - type: nauc_mrr_at_5_std value: -1.7860999999999998 - type: nauc_mrr_at_5_diff1 value: 40.9261 - type: nauc_mrr_at_10_max value: 27.6814 - type: nauc_mrr_at_10_std value: -1.1542000000000001 - type: nauc_mrr_at_10_diff1 value: 40.9534 - type: nauc_mrr_at_20_max value: 27.507900000000003 - type: nauc_mrr_at_20_std value: -0.9558000000000001 - type: nauc_mrr_at_20_diff1 value: 41.0046 - type: nauc_mrr_at_100_max value: 27.5032 - type: nauc_mrr_at_100_std value: -0.7483 - type: nauc_mrr_at_100_diff1 value: 40.9239 - type: nauc_mrr_at_1000_max value: 27.4957 - type: nauc_mrr_at_1000_std value: -0.7642 - type: nauc_mrr_at_1000_diff1 value: 40.9219 - type: main_score value: 38.440000000000005 - task: type: Retrieval dataset: name: MTEB ClimateFEVER (default) type: mteb/climate-fever config: default split: test revision: 47f2ac6acb640fc46020b02a5b59fdda04d39380 metrics: - type: ndcg_at_1 value: 47.231 - type: ndcg_at_3 value: 38.605000000000004 - type: ndcg_at_5 value: 40.058 - type: ndcg_at_10 value: 43.482 - type: ndcg_at_20 value: 45.732 - type: ndcg_at_100 value: 49.062 - type: ndcg_at_1000 value: 51.605000000000004 - type: map_at_1 value: 20.674 - type: map_at_3 value: 29.375 - type: map_at_5 value: 31.872 - type: map_at_10 value: 33.846 - type: map_at_20 value: 34.733000000000004 - type: map_at_100 value: 35.411 - type: map_at_1000 value: 35.553000000000004 - type: recall_at_1 value: 20.674 - type: recall_at_3 value: 33.859 - type: recall_at_5 value: 39.76 - type: recall_at_10 value: 47.150999999999996 - type: recall_at_20 value: 53.522999999999996 - type: recall_at_100 value: 66.125 - type: recall_at_1000 value: 80.368 - type: precision_at_1 value: 47.231 - type: precision_at_3 value: 28.534 - type: precision_at_5 value: 20.782 - type: precision_at_10 value: 12.742999999999999 - type: precision_at_20 value: 7.342 - type: precision_at_100 value: 1.883 - type: precision_at_1000 value: 0.23700000000000002 - type: mrr_at_1 value: 47.2313 - type: mrr_at_3 value: 55.6352 - type: mrr_at_5 value: 56.92509999999999 - type: mrr_at_10 value: 57.833400000000005 - type: mrr_at_20 value: 58.178700000000006 - type: mrr_at_100 value: 58.385 - type: mrr_at_1000 value: 58.40919999999999 - type: nauc_ndcg_at_1_max value: 41.5456 - type: nauc_ndcg_at_1_std value: 19.2734 - type: nauc_ndcg_at_1_diff1 value: 38.0868 - type: nauc_ndcg_at_3_max value: 41.6105 - type: nauc_ndcg_at_3_std value: 19.5917 - type: nauc_ndcg_at_3_diff1 value: 29.192800000000002 - type: nauc_ndcg_at_5_max value: 42.1893 - type: nauc_ndcg_at_5_std value: 21.9984 - type: nauc_ndcg_at_5_diff1 value: 27.7412 - type: nauc_ndcg_at_10_max value: 42.5633 - type: nauc_ndcg_at_10_std value: 24.265700000000002 - type: nauc_ndcg_at_10_diff1 value: 27.0287 - type: nauc_ndcg_at_20_max value: 43.364200000000004 - type: nauc_ndcg_at_20_std value: 26.2174 - type: nauc_ndcg_at_20_diff1 value: 26.980500000000003 - type: nauc_ndcg_at_100_max value: 43.9582 - type: nauc_ndcg_at_100_std value: 28.454 - type: nauc_ndcg_at_100_diff1 value: 27.087099999999996 - type: nauc_ndcg_at_1000_max value: 44.0356 - type: nauc_ndcg_at_1000_std value: 28.64 - type: nauc_ndcg_at_1000_diff1 value: 27.1343 - type: nauc_map_at_1_max value: 39.2181 - type: nauc_map_at_1_std value: 12.4972 - type: nauc_map_at_1_diff1 value: 39.5664 - 
type: nauc_map_at_3_max value: 41.5441 - type: nauc_map_at_3_std value: 17.333000000000002 - type: nauc_map_at_3_diff1 value: 29.9555 - type: nauc_map_at_5_max value: 41.0041 - type: nauc_map_at_5_std value: 19.3667 - type: nauc_map_at_5_diff1 value: 28.0157 - type: nauc_map_at_10_max value: 41.2914 - type: nauc_map_at_10_std value: 21.051000000000002 - type: nauc_map_at_10_diff1 value: 27.387 - type: nauc_map_at_20_max value: 41.6964 - type: nauc_map_at_20_std value: 21.9338 - type: nauc_map_at_20_diff1 value: 27.4326 - type: nauc_map_at_100_max value: 41.8592 - type: nauc_map_at_100_std value: 22.46 - type: nauc_map_at_100_diff1 value: 27.4024 - type: nauc_map_at_1000_max value: 41.8737 - type: nauc_map_at_1000_std value: 22.4882 - type: nauc_map_at_1000_diff1 value: 27.405099999999997 - type: nauc_recall_at_1_max value: 39.2181 - type: nauc_recall_at_1_std value: 12.4972 - type: nauc_recall_at_1_diff1 value: 39.5664 - type: nauc_recall_at_3_max value: 41.3571 - type: nauc_recall_at_3_std value: 18.607699999999998 - type: nauc_recall_at_3_diff1 value: 25.8418 - type: nauc_recall_at_5_max value: 39.1225 - type: nauc_recall_at_5_std value: 22.2091 - type: nauc_recall_at_5_diff1 value: 20.9495 - type: nauc_recall_at_10_max value: 38.0045 - type: nauc_recall_at_10_std value: 25.584 - type: nauc_recall_at_10_diff1 value: 18.489 - type: nauc_recall_at_20_max value: 38.0096 - type: nauc_recall_at_20_std value: 29.3335 - type: nauc_recall_at_20_diff1 value: 17.0106 - type: nauc_recall_at_100_max value: 37.7378 - type: nauc_recall_at_100_std value: 37.0189 - type: nauc_recall_at_100_diff1 value: 14.815900000000001 - type: nauc_recall_at_1000_max value: 36.2825 - type: nauc_recall_at_1000_std value: 42.1995 - type: nauc_recall_at_1000_diff1 value: 10.5182 - type: nauc_precision_at_1_max value: 41.5456 - type: nauc_precision_at_1_std value: 19.2734 - type: nauc_precision_at_1_diff1 value: 38.0868 - type: nauc_precision_at_3_max value: 35.72 - type: nauc_precision_at_3_std value: 22.8785 - type: nauc_precision_at_3_diff1 value: 15.240200000000002 - type: nauc_precision_at_5_max value: 30.4643 - type: nauc_precision_at_5_std value: 26.2774 - type: nauc_precision_at_5_diff1 value: 8.8749 - type: nauc_precision_at_10_max value: 25.960299999999997 - type: nauc_precision_at_10_std value: 28.3825 - type: nauc_precision_at_10_diff1 value: 4.626799999999999 - type: nauc_precision_at_20_max value: 24.8278 - type: nauc_precision_at_20_std value: 32.1644 - type: nauc_precision_at_20_diff1 value: 2.5019 - type: nauc_precision_at_100_max value: 17.180999999999997 - type: nauc_precision_at_100_std value: 33.955400000000004 - type: nauc_precision_at_100_diff1 value: -1.9183 - type: nauc_precision_at_1000_max value: 4.8986 - type: nauc_precision_at_1000_std value: 26.5376 - type: nauc_precision_at_1000_diff1 value: -9.3468 - type: nauc_mrr_at_1_max value: 41.5456 - type: nauc_mrr_at_1_std value: 19.2734 - type: nauc_mrr_at_1_diff1 value: 38.0868 - type: nauc_mrr_at_3_max value: 43.7301 - type: nauc_mrr_at_3_std value: 22.409100000000002 - type: nauc_mrr_at_3_diff1 value: 34.846500000000006 - type: nauc_mrr_at_5_max value: 44.0608 - type: nauc_mrr_at_5_std value: 23.3812 - type: nauc_mrr_at_5_diff1 value: 34.5847 - type: nauc_mrr_at_10_max value: 44.026700000000005 - type: nauc_mrr_at_10_std value: 23.339399999999998 - type: nauc_mrr_at_10_diff1 value: 34.7306 - type: nauc_mrr_at_20_max value: 44.1444 - type: nauc_mrr_at_20_std value: 23.5132 - type: nauc_mrr_at_20_diff1 value: 34.6927 - type: nauc_mrr_at_100_max 
value: 44.1228 - type: nauc_mrr_at_100_std value: 23.5783 - type: nauc_mrr_at_100_diff1 value: 34.7193 - type: nauc_mrr_at_1000_max value: 44.1082 - type: nauc_mrr_at_1000_std value: 23.5574 - type: nauc_mrr_at_1000_diff1 value: 34.719699999999996 - type: main_score value: 43.482 - task: type: Retrieval dataset: name: MTEB DBPedia (default) type: mteb/dbpedia config: default split: test revision: c0f706b76e590d620bd6618b3ca8efdd34e2d659 metrics: - type: ndcg_at_1 value: 59.25 - type: ndcg_at_3 value: 48.256 - type: ndcg_at_5 value: 45.580999999999996 - type: ndcg_at_10 value: 43.37 - type: ndcg_at_20 value: 43.106 - type: ndcg_at_100 value: 47.845 - type: ndcg_at_1000 value: 54.974999999999994 - type: map_at_1 value: 10.032 - type: map_at_3 value: 14.954 - type: map_at_5 value: 17.408 - type: map_at_10 value: 20.461 - type: map_at_20 value: 23.759 - type: map_at_100 value: 28.718 - type: map_at_1000 value: 30.406 - type: recall_at_1 value: 10.032 - type: recall_at_3 value: 15.905 - type: recall_at_5 value: 19.622999999999998 - type: recall_at_10 value: 25.125999999999998 - type: recall_at_20 value: 33.262 - type: recall_at_100 value: 52.515 - type: recall_at_1000 value: 75.224 - type: precision_at_1 value: 72.0 - type: precision_at_3 value: 50.917 - type: precision_at_5 value: 43.4 - type: precision_at_10 value: 34.175 - type: precision_at_20 value: 26.325 - type: precision_at_100 value: 10.893 - type: precision_at_1000 value: 2.0549999999999997 - type: mrr_at_1 value: 72.0 - type: mrr_at_3 value: 77.5417 - type: mrr_at_5 value: 78.2042 - type: mrr_at_10 value: 78.7173 - type: mrr_at_20 value: 78.9521 - type: mrr_at_100 value: 79.0382 - type: mrr_at_1000 value: 79.0408 - type: nauc_ndcg_at_1_max value: 49.778 - type: nauc_ndcg_at_1_std value: 20.462 - type: nauc_ndcg_at_1_diff1 value: 49.3621 - type: nauc_ndcg_at_3_max value: 44.4388 - type: nauc_ndcg_at_3_std value: 24.646 - type: nauc_ndcg_at_3_diff1 value: 33.3173 - type: nauc_ndcg_at_5_max value: 44.2179 - type: nauc_ndcg_at_5_std value: 25.597399999999997 - type: nauc_ndcg_at_5_diff1 value: 31.0886 - type: nauc_ndcg_at_10_max value: 43.7812 - type: nauc_ndcg_at_10_std value: 25.61 - type: nauc_ndcg_at_10_diff1 value: 30.667699999999996 - type: nauc_ndcg_at_20_max value: 39.4779 - type: nauc_ndcg_at_20_std value: 20.891000000000002 - type: nauc_ndcg_at_20_diff1 value: 29.492600000000003 - type: nauc_ndcg_at_100_max value: 41.511900000000004 - type: nauc_ndcg_at_100_std value: 27.340999999999998 - type: nauc_ndcg_at_100_diff1 value: 30.5701 - type: nauc_ndcg_at_1000_max value: 47.0571 - type: nauc_ndcg_at_1000_std value: 37.0976 - type: nauc_ndcg_at_1000_diff1 value: 31.5615 - type: nauc_map_at_1_max value: 0.4743 - type: nauc_map_at_1_std value: -23.7532 - type: nauc_map_at_1_diff1 value: 26.0851 - type: nauc_map_at_3_max value: 8.5131 - type: nauc_map_at_3_std value: -18.6015 - type: nauc_map_at_3_diff1 value: 21.9172 - type: nauc_map_at_5_max value: 12.295499999999999 - type: nauc_map_at_5_std value: -13.872100000000001 - type: nauc_map_at_5_diff1 value: 21.3319 - type: nauc_map_at_10_max value: 17.1428 - type: nauc_map_at_10_std value: -6.638199999999999 - type: nauc_map_at_10_diff1 value: 20.8671 - type: nauc_map_at_20_max value: 21.7306 - type: nauc_map_at_20_std value: 2.1404 - type: nauc_map_at_20_diff1 value: 20.7929 - type: nauc_map_at_100_max value: 29.677799999999998 - type: nauc_map_at_100_std value: 16.9458 - type: nauc_map_at_100_diff1 value: 22.4101 - type: nauc_map_at_1000_max value: 31.5735 - type: nauc_map_at_1000_std 
value: 20.5816 - type: nauc_map_at_1000_diff1 value: 22.561400000000003 - type: nauc_recall_at_1_max value: 0.4743 - type: nauc_recall_at_1_std value: -23.7532 - type: nauc_recall_at_1_diff1 value: 26.0851 - type: nauc_recall_at_3_max value: 6.851500000000001 - type: nauc_recall_at_3_std value: -18.7341 - type: nauc_recall_at_3_diff1 value: 19.703699999999998 - type: nauc_recall_at_5_max value: 10.0265 - type: nauc_recall_at_5_std value: -14.2537 - type: nauc_recall_at_5_diff1 value: 18.8765 - type: nauc_recall_at_10_max value: 14.1582 - type: nauc_recall_at_10_std value: -7.703 - type: nauc_recall_at_10_diff1 value: 17.9056 - type: nauc_recall_at_20_max value: 15.0343 - type: nauc_recall_at_20_std value: -0.9846 - type: nauc_recall_at_20_diff1 value: 14.377899999999999 - type: nauc_recall_at_100_max value: 27.904600000000002 - type: nauc_recall_at_100_std value: 24.6322 - type: nauc_recall_at_100_diff1 value: 16.869500000000002 - type: nauc_recall_at_1000_max value: 33.7755 - type: nauc_recall_at_1000_std value: 42.241800000000005 - type: nauc_recall_at_1000_diff1 value: 17.3324 - type: nauc_precision_at_1_max value: 62.3459 - type: nauc_precision_at_1_std value: 28.3277 - type: nauc_precision_at_1_diff1 value: 57.8053 - type: nauc_precision_at_3_max value: 45.8296 - type: nauc_precision_at_3_std value: 39.8642 - type: nauc_precision_at_3_diff1 value: 15.7381 - type: nauc_precision_at_5_max value: 45.331900000000005 - type: nauc_precision_at_5_std value: 45.1279 - type: nauc_precision_at_5_diff1 value: 11.473700000000001 - type: nauc_precision_at_10_max value: 42.276399999999995 - type: nauc_precision_at_10_std value: 50.9538 - type: nauc_precision_at_10_diff1 value: 6.708699999999999 - type: nauc_precision_at_20_max value: 37.961600000000004 - type: nauc_precision_at_20_std value: 52.0611 - type: nauc_precision_at_20_diff1 value: 5.9309 - type: nauc_precision_at_100_max value: 29.567 - type: nauc_precision_at_100_std value: 50.07 - type: nauc_precision_at_100_diff1 value: 3.2583 - type: nauc_precision_at_1000_max value: 5.5285 - type: nauc_precision_at_1000_std value: 20.5813 - type: nauc_precision_at_1000_diff1 value: -6.6333 - type: nauc_mrr_at_1_max value: 62.3459 - type: nauc_mrr_at_1_std value: 28.3277 - type: nauc_mrr_at_1_diff1 value: 57.8053 - type: nauc_mrr_at_3_max value: 66.5168 - type: nauc_mrr_at_3_std value: 37.4446 - type: nauc_mrr_at_3_diff1 value: 57.6125 - type: nauc_mrr_at_5_max value: 65.8343 - type: nauc_mrr_at_5_std value: 36.6396 - type: nauc_mrr_at_5_diff1 value: 56.91589999999999 - type: nauc_mrr_at_10_max value: 65.73750000000001 - type: nauc_mrr_at_10_std value: 36.4067 - type: nauc_mrr_at_10_diff1 value: 56.9594 - type: nauc_mrr_at_20_max value: 65.6623 - type: nauc_mrr_at_20_std value: 36.0989 - type: nauc_mrr_at_20_diff1 value: 56.9662 - type: nauc_mrr_at_100_max value: 65.6934 - type: nauc_mrr_at_100_std value: 36.0911 - type: nauc_mrr_at_100_diff1 value: 57.0541 - type: nauc_mrr_at_1000_max value: 65.68929999999999 - type: nauc_mrr_at_1000_std value: 36.0838 - type: nauc_mrr_at_1000_diff1 value: 57.054300000000005 - type: main_score value: 43.37 - task: type: Classification dataset: name: MTEB EmotionClassification (default) type: mteb/emotion config: default split: test revision: 4f58c6b202a23cf9a4da393831edf4f9183cad37 metrics: - type: accuracy value: 42.53 - type: f1 value: 38.4608 - type: f1_weighted value: 44.6927 - type: main_score value: 42.53 - task: type: Retrieval dataset: name: MTEB FEVER (default) type: mteb/fever config: default split: test 
revision: bea83ef9e8fb933d90a2f1d5515737465d613e12 metrics: - type: ndcg_at_1 value: 90.519 - type: ndcg_at_3 value: 91.387 - type: ndcg_at_5 value: 91.644 - type: ndcg_at_10 value: 91.91 - type: ndcg_at_20 value: 92.136 - type: ndcg_at_100 value: 92.406 - type: ndcg_at_1000 value: 92.62599999999999 - type: map_at_1 value: 83.994 - type: map_at_3 value: 88.885 - type: map_at_5 value: 89.185 - type: map_at_10 value: 89.36500000000001 - type: map_at_20 value: 89.458 - type: map_at_100 value: 89.515 - type: map_at_1000 value: 89.52799999999999 - type: recall_at_1 value: 83.994 - type: recall_at_3 value: 93.145 - type: recall_at_5 value: 94.016 - type: recall_at_10 value: 94.836 - type: recall_at_20 value: 95.56700000000001 - type: recall_at_100 value: 96.711 - type: recall_at_1000 value: 98.027 - type: precision_at_1 value: 90.519 - type: precision_at_3 value: 33.922999999999995 - type: precision_at_5 value: 20.636 - type: precision_at_10 value: 10.474 - type: precision_at_20 value: 5.316 - type: precision_at_100 value: 1.0919999999999999 - type: precision_at_1000 value: 0.11299999999999999 - type: mrr_at_1 value: 90.5191 - type: mrr_at_3 value: 94.37440000000001 - type: mrr_at_5 value: 94.4832 - type: mrr_at_10 value: 94.5215 - type: mrr_at_20 value: 94.5365 - type: mrr_at_100 value: 94.5422 - type: mrr_at_1000 value: 94.54249999999999 - type: nauc_ndcg_at_1_max value: 22.1341 - type: nauc_ndcg_at_1_std value: -11.1273 - type: nauc_ndcg_at_1_diff1 value: 81.8507 - type: nauc_ndcg_at_3_max value: 16.8937 - type: nauc_ndcg_at_3_std value: -7.1829 - type: nauc_ndcg_at_3_diff1 value: 43.892199999999995 - type: nauc_ndcg_at_5_max value: 17.9177 - type: nauc_ndcg_at_5_std value: -5.2 - type: nauc_ndcg_at_5_diff1 value: 41.9608 - type: nauc_ndcg_at_10_max value: 17.8222 - type: nauc_ndcg_at_10_std value: -3.8736 - type: nauc_ndcg_at_10_diff1 value: 41.955 - type: nauc_ndcg_at_20_max value: 18.467200000000002 - type: nauc_ndcg_at_20_std value: -2.7304 - type: nauc_ndcg_at_20_diff1 value: 42.950300000000006 - type: nauc_ndcg_at_100_max value: 18.5918 - type: nauc_ndcg_at_100_std value: -2.874 - type: nauc_ndcg_at_100_diff1 value: 44.182 - type: nauc_ndcg_at_1000_max value: 18.9498 - type: nauc_ndcg_at_1000_std value: -2.8561 - type: nauc_ndcg_at_1000_diff1 value: 45.5587 - type: nauc_map_at_1_max value: 14.943600000000002 - type: nauc_map_at_1_std value: -6.3744 - type: nauc_map_at_1_diff1 value: 51.697700000000005 - type: nauc_map_at_3_max value: 15.7558 - type: nauc_map_at_3_std value: -5.8517 - type: nauc_map_at_3_diff1 value: 41.814 - type: nauc_map_at_5_max value: 16.6287 - type: nauc_map_at_5_std value: -4.9942 - type: nauc_map_at_5_diff1 value: 41.605199999999996 - type: nauc_map_at_10_max value: 16.8146 - type: nauc_map_at_10_std value: -4.4551 - type: nauc_map_at_10_diff1 value: 41.9641 - type: nauc_map_at_20_max value: 17.0709 - type: nauc_map_at_20_std value: -4.1187000000000005 - type: nauc_map_at_20_diff1 value: 42.3292 - type: nauc_map_at_100_max value: 17.1076 - type: nauc_map_at_100_std value: -4.1089 - type: nauc_map_at_100_diff1 value: 42.5101 - type: nauc_map_at_1000_max value: 17.1309 - type: nauc_map_at_1000_std value: -4.0958000000000006 - type: nauc_map_at_1000_diff1 value: 42.5694 - type: nauc_recall_at_1_max value: 14.943600000000002 - type: nauc_recall_at_1_std value: -6.3744 - type: nauc_recall_at_1_diff1 value: 51.697700000000005 - type: nauc_recall_at_3_max value: 11.8984 - type: nauc_recall_at_3_std value: -4.224 - type: nauc_recall_at_3_diff1 value: 13.962 - type: 
nauc_recall_at_5_max value: 16.2434 - type: nauc_recall_at_5_std value: 1.6707 - type: nauc_recall_at_5_diff1 value: 7.788 - type: nauc_recall_at_10_max value: 16.4427 - type: nauc_recall_at_10_std value: 8.259 - type: nauc_recall_at_10_diff1 value: 4.5507 - type: nauc_recall_at_20_max value: 19.0546 - type: nauc_recall_at_20_std value: 16.7132 - type: nauc_recall_at_20_diff1 value: 3.5242000000000004 - type: nauc_recall_at_100_max value: 19.6815 - type: nauc_recall_at_100_std value: 21.4767 - type: nauc_recall_at_100_diff1 value: 1.4785 - type: nauc_recall_at_1000_max value: 26.5748 - type: nauc_recall_at_1000_std value: 37.026399999999995 - type: nauc_recall_at_1000_diff1 value: 1.512 - type: nauc_precision_at_1_max value: 22.1341 - type: nauc_precision_at_1_std value: -11.1273 - type: nauc_precision_at_1_diff1 value: 81.8507 - type: nauc_precision_at_3_max value: 13.6152 - type: nauc_precision_at_3_std value: -2.4367 - type: nauc_precision_at_3_diff1 value: 1.6237000000000001 - type: nauc_precision_at_5_max value: 13.977400000000001 - type: nauc_precision_at_5_std value: 4.3391 - type: nauc_precision_at_5_diff1 value: -6.660000000000001 - type: nauc_precision_at_10_max value: 10.4986 - type: nauc_precision_at_10_std value: 8.9132 - type: nauc_precision_at_10_diff1 value: -7.5682 - type: nauc_precision_at_20_max value: 11.0525 - type: nauc_precision_at_20_std value: 12.0579 - type: nauc_precision_at_20_diff1 value: -5.0471 - type: nauc_precision_at_100_max value: 7.1659 - type: nauc_precision_at_100_std value: 8.1754 - type: nauc_precision_at_100_diff1 value: -2.7885 - type: nauc_precision_at_1000_max value: 4.9776 - type: nauc_precision_at_1000_std value: 5.8301 - type: nauc_precision_at_1000_diff1 value: 0.18860000000000002 - type: nauc_mrr_at_1_max value: 22.1341 - type: nauc_mrr_at_1_std value: -11.1273 - type: nauc_mrr_at_1_diff1 value: 81.8507 - type: nauc_mrr_at_3_max value: 21.6738 - type: nauc_mrr_at_3_std value: -15.7016 - type: nauc_mrr_at_3_diff1 value: 81.0757 - type: nauc_mrr_at_5_max value: 22.6603 - type: nauc_mrr_at_5_std value: -14.7345 - type: nauc_mrr_at_5_diff1 value: 81.1092 - type: nauc_mrr_at_10_max value: 22.4279 - type: nauc_mrr_at_10_std value: -14.5002 - type: nauc_mrr_at_10_diff1 value: 81.11080000000001 - type: nauc_mrr_at_20_max value: 22.3604 - type: nauc_mrr_at_20_std value: -14.3058 - type: nauc_mrr_at_20_diff1 value: 81.1563 - type: nauc_mrr_at_100_max value: 22.311 - type: nauc_mrr_at_100_std value: -14.318100000000001 - type: nauc_mrr_at_100_diff1 value: 81.1586 - type: nauc_mrr_at_1000_max value: 22.307199999999998 - type: nauc_mrr_at_1000_std value: -14.3234 - type: nauc_mrr_at_1000_diff1 value: 81.1576 - type: main_score value: 91.91 - task: type: Retrieval dataset: name: MTEB FiQA2018 (default) type: mteb/fiqa config: default split: test revision: 27a168819829fe9bcd655c2df245fb19452e8e06 metrics: - type: ndcg_at_1 value: 44.753 - type: ndcg_at_3 value: 41.555 - type: ndcg_at_5 value: 42.809999999999995 - type: ndcg_at_10 value: 45.49 - type: ndcg_at_20 value: 48.287 - type: ndcg_at_100 value: 52.115 - type: ndcg_at_1000 value: 54.797 - type: map_at_1 value: 22.894000000000002 - type: map_at_3 value: 32.786 - type: map_at_5 value: 35.495 - type: map_at_10 value: 37.635000000000005 - type: map_at_20 value: 38.771 - type: map_at_100 value: 39.56 - type: map_at_1000 value: 39.734 - type: recall_at_1 value: 22.894000000000002 - type: recall_at_3 value: 37.579 - type: recall_at_5 value: 44.03 - type: recall_at_10 value: 52.61900000000001 - type: 
recall_at_20 value: 61.227 - type: recall_at_100 value: 76.88199999999999 - type: recall_at_1000 value: 92.534 - type: precision_at_1 value: 44.753 - type: precision_at_3 value: 27.675 - type: precision_at_5 value: 20.556 - type: precision_at_10 value: 12.592999999999998 - type: precision_at_20 value: 7.507999999999999 - type: precision_at_100 value: 1.9369999999999998 - type: precision_at_1000 value: 0.242 - type: mrr_at_1 value: 44.7531 - type: mrr_at_3 value: 50.694399999999995 - type: mrr_at_5 value: 51.990700000000004 - type: mrr_at_10 value: 52.9925 - type: mrr_at_20 value: 53.4612 - type: mrr_at_100 value: 53.7889 - type: mrr_at_1000 value: 53.8244 - type: nauc_ndcg_at_1_max value: 46.679700000000004 - type: nauc_ndcg_at_1_std value: -7.8208 - type: nauc_ndcg_at_1_diff1 value: 55.9238 - type: nauc_ndcg_at_3_max value: 39.761 - type: nauc_ndcg_at_3_std value: -7.6645 - type: nauc_ndcg_at_3_diff1 value: 43.6641 - type: nauc_ndcg_at_5_max value: 37.2506 - type: nauc_ndcg_at_5_std value: -7.574300000000001 - type: nauc_ndcg_at_5_diff1 value: 41.6025 - type: nauc_ndcg_at_10_max value: 38.1464 - type: nauc_ndcg_at_10_std value: -6.1288 - type: nauc_ndcg_at_10_diff1 value: 42.625 - type: nauc_ndcg_at_20_max value: 39.687 - type: nauc_ndcg_at_20_std value: -4.6046 - type: nauc_ndcg_at_20_diff1 value: 43.2796 - type: nauc_ndcg_at_100_max value: 41.4101 - type: nauc_ndcg_at_100_std value: -2.1537 - type: nauc_ndcg_at_100_diff1 value: 43.980599999999995 - type: nauc_ndcg_at_1000_max value: 42.0853 - type: nauc_ndcg_at_1000_std value: -2.5 - type: nauc_ndcg_at_1000_diff1 value: 44.5636 - type: nauc_map_at_1_max value: 21.019299999999998 - type: nauc_map_at_1_std value: -10.8832 - type: nauc_map_at_1_diff1 value: 45.1685 - type: nauc_map_at_3_max value: 29.0524 - type: nauc_map_at_3_std value: -9.6495 - type: nauc_map_at_3_diff1 value: 41.3844 - type: nauc_map_at_5_max value: 31.3813 - type: nauc_map_at_5_std value: -8.7888 - type: nauc_map_at_5_diff1 value: 40.1699 - type: nauc_map_at_10_max value: 33.8361 - type: nauc_map_at_10_std value: -7.9594 - type: nauc_map_at_10_diff1 value: 40.788999999999994 - type: nauc_map_at_20_max value: 34.9439 - type: nauc_map_at_20_std value: -7.382700000000001 - type: nauc_map_at_20_diff1 value: 41.134100000000004 - type: nauc_map_at_100_max value: 35.530899999999995 - type: nauc_map_at_100_std value: -6.8411 - type: nauc_map_at_100_diff1 value: 41.316 - type: nauc_map_at_1000_max value: 35.6246 - type: nauc_map_at_1000_std value: -6.828399999999999 - type: nauc_map_at_1000_diff1 value: 41.3739 - type: nauc_recall_at_1_max value: 21.019299999999998 - type: nauc_recall_at_1_std value: -10.8832 - type: nauc_recall_at_1_diff1 value: 45.1685 - type: nauc_recall_at_3_max value: 25.667499999999997 - type: nauc_recall_at_3_std value: -9.3695 - type: nauc_recall_at_3_diff1 value: 35.0424 - type: nauc_recall_at_5_max value: 26.2285 - type: nauc_recall_at_5_std value: -7.6552 - type: nauc_recall_at_5_diff1 value: 31.7068 - type: nauc_recall_at_10_max value: 29.12 - type: nauc_recall_at_10_std value: -3.5869 - type: nauc_recall_at_10_diff1 value: 31.952599999999997 - type: nauc_recall_at_20_max value: 31.5269 - type: nauc_recall_at_20_std value: 2.2824 - type: nauc_recall_at_20_diff1 value: 31.4747 - type: nauc_recall_at_100_max value: 34.533500000000004 - type: nauc_recall_at_100_std value: 18.8398 - type: nauc_recall_at_100_diff1 value: 29.525000000000002 - type: nauc_recall_at_1000_max value: 38.973600000000005 - type: nauc_recall_at_1000_std value: 37.9643 - type: 
nauc_recall_at_1000_diff1 value: 29.247899999999998 - type: nauc_precision_at_1_max value: 46.679700000000004 - type: nauc_precision_at_1_std value: -7.8208 - type: nauc_precision_at_1_diff1 value: 55.9238 - type: nauc_precision_at_3_max value: 46.348800000000004 - type: nauc_precision_at_3_std value: -2.4303000000000003 - type: nauc_precision_at_3_diff1 value: 31.4803 - type: nauc_precision_at_5_max value: 45.657 - type: nauc_precision_at_5_std value: 0.9887999999999999 - type: nauc_precision_at_5_diff1 value: 22.6439 - type: nauc_precision_at_10_max value: 48.147099999999995 - type: nauc_precision_at_10_std value: 5.313 - type: nauc_precision_at_10_diff1 value: 20.7803 - type: nauc_precision_at_20_max value: 47.407199999999996 - type: nauc_precision_at_20_std value: 8.8254 - type: nauc_precision_at_20_diff1 value: 17.7327 - type: nauc_precision_at_100_max value: 43.4944 - type: nauc_precision_at_100_std value: 14.8423 - type: nauc_precision_at_100_diff1 value: 11.7231 - type: nauc_precision_at_1000_max value: 36.3175 - type: nauc_precision_at_1000_std value: 14.9478 - type: nauc_precision_at_1000_diff1 value: 4.9391 - type: nauc_mrr_at_1_max value: 46.679700000000004 - type: nauc_mrr_at_1_std value: -7.8208 - type: nauc_mrr_at_1_diff1 value: 55.9238 - type: nauc_mrr_at_3_max value: 48.0241 - type: nauc_mrr_at_3_std value: -6.761100000000001 - type: nauc_mrr_at_3_diff1 value: 53.5091 - type: nauc_mrr_at_5_max value: 48.0965 - type: nauc_mrr_at_5_std value: -6.3173 - type: nauc_mrr_at_5_diff1 value: 52.9184 - type: nauc_mrr_at_10_max value: 48.3523 - type: nauc_mrr_at_10_std value: -5.6531 - type: nauc_mrr_at_10_diff1 value: 53.209399999999995 - type: nauc_mrr_at_20_max value: 48.365700000000004 - type: nauc_mrr_at_20_std value: -5.4359 - type: nauc_mrr_at_20_diff1 value: 53.16760000000001 - type: nauc_mrr_at_100_max value: 48.351699999999994 - type: nauc_mrr_at_100_std value: -5.3941 - type: nauc_mrr_at_100_diff1 value: 53.2419 - type: nauc_mrr_at_1000_max value: 48.343399999999995 - type: nauc_mrr_at_1000_std value: -5.4193 - type: nauc_mrr_at_1000_diff1 value: 53.264500000000005 - type: main_score value: 45.49 - task: type: Retrieval dataset: name: MTEB HotpotQA (default) type: mteb/hotpotqa config: default split: test revision: ab518f4d6fcca38d87c25209f94beba119d02014 metrics: - type: ndcg_at_1 value: 86.536 - type: ndcg_at_3 value: 64.485 - type: ndcg_at_5 value: 66.513 - type: ndcg_at_10 value: 68.151 - type: ndcg_at_20 value: 69.145 - type: ndcg_at_100 value: 70.552 - type: ndcg_at_1000 value: 71.772 - type: map_at_1 value: 43.268 - type: map_at_3 value: 56.013999999999996 - type: map_at_5 value: 57.69 - type: map_at_10 value: 58.709 - type: map_at_20 value: 59.122 - type: map_at_100 value: 59.418000000000006 - type: map_at_1000 value: 59.480999999999995 - type: recall_at_1 value: 43.268 - type: recall_at_3 value: 58.831999999999994 - type: recall_at_5 value: 62.829 - type: recall_at_10 value: 66.94099999999999 - type: recall_at_20 value: 70.135 - type: recall_at_100 value: 76.34 - type: recall_at_1000 value: 84.443 - type: precision_at_1 value: 86.536 - type: precision_at_3 value: 39.221000000000004 - type: precision_at_5 value: 25.131999999999998 - type: precision_at_10 value: 13.388 - type: precision_at_20 value: 7.013999999999999 - type: precision_at_100 value: 1.5270000000000001 - type: precision_at_1000 value: 0.169 - type: mrr_at_1 value: 86.5361 - type: mrr_at_3 value: 89.6151 - type: mrr_at_5 value: 89.9521 - type: mrr_at_10 value: 90.1301 - type: mrr_at_20 value: 90.201 - 
type: mrr_at_100 value: 90.2397 - type: mrr_at_1000 value: 90.245 - type: nauc_ndcg_at_1_max value: 57.6156 - type: nauc_ndcg_at_1_std value: -3.39 - type: nauc_ndcg_at_1_diff1 value: 83.0288 - type: nauc_ndcg_at_3_max value: 17.758599999999998 - type: nauc_ndcg_at_3_std value: 3.3521 - type: nauc_ndcg_at_3_diff1 value: 15.4846 - type: nauc_ndcg_at_5_max value: 14.6571 - type: nauc_ndcg_at_5_std value: 4.2071 - type: nauc_ndcg_at_5_diff1 value: 12.3942 - type: nauc_ndcg_at_10_max value: 12.5579 - type: nauc_ndcg_at_10_std value: 4.7895 - type: nauc_ndcg_at_10_diff1 value: 10.2189 - type: nauc_ndcg_at_20_max value: 11.5413 - type: nauc_ndcg_at_20_std value: 5.0043 - type: nauc_ndcg_at_20_diff1 value: 9.3896 - type: nauc_ndcg_at_100_max value: 10.6797 - type: nauc_ndcg_at_100_std value: 5.7805 - type: nauc_ndcg_at_100_diff1 value: 8.5649 - type: nauc_ndcg_at_1000_max value: 10.8847 - type: nauc_ndcg_at_1000_std value: 6.1945 - type: nauc_ndcg_at_1000_diff1 value: 8.539 - type: nauc_map_at_1_max value: 57.6156 - type: nauc_map_at_1_std value: -3.39 - type: nauc_map_at_1_diff1 value: 83.0288 - type: nauc_map_at_3_max value: 12.4083 - type: nauc_map_at_3_std value: 3.2297 - type: nauc_map_at_3_diff1 value: 8.2482 - type: nauc_map_at_5_max value: 10.4054 - type: nauc_map_at_5_std value: 3.7108000000000003 - type: nauc_map_at_5_diff1 value: 6.4539 - type: nauc_map_at_10_max value: 9.439300000000001 - type: nauc_map_at_10_std value: 4.0356000000000005 - type: nauc_map_at_10_diff1 value: 5.502400000000001 - type: nauc_map_at_20_max value: 9.141 - type: nauc_map_at_20_std value: 4.1145000000000005 - type: nauc_map_at_20_diff1 value: 5.2942 - type: nauc_map_at_100_max value: 9.0071 - type: nauc_map_at_100_std value: 4.2345 - type: nauc_map_at_100_diff1 value: 5.1606 - type: nauc_map_at_1000_max value: 9.017999999999999 - type: nauc_map_at_1000_std value: 4.2501 - type: nauc_map_at_1000_diff1 value: 5.162 - type: nauc_recall_at_1_max value: 57.6156 - type: nauc_recall_at_1_std value: -3.39 - type: nauc_recall_at_1_diff1 value: 83.0288 - type: nauc_recall_at_3_max value: 8.4358 - type: nauc_recall_at_3_std value: 4.925199999999999 - type: nauc_recall_at_3_diff1 value: 0.29009999999999997 - type: nauc_recall_at_5_max value: 3.2076000000000002 - type: nauc_recall_at_5_std value: 6.2316 - type: nauc_recall_at_5_diff1 value: -4.6014 - type: nauc_recall_at_10_max value: -1.7786 - type: nauc_recall_at_10_std value: 7.467300000000001 - type: nauc_recall_at_10_diff1 value: -9.6991 - type: nauc_recall_at_20_max value: -5.0717 - type: nauc_recall_at_20_std value: 8.1128 - type: nauc_recall_at_20_diff1 value: -12.5945 - type: nauc_recall_at_100_max value: -10.5434 - type: nauc_recall_at_100_std value: 11.7719 - type: nauc_recall_at_100_diff1 value: -18.394 - type: nauc_recall_at_1000_max value: -15.5908 - type: nauc_recall_at_1000_std value: 16.842399999999998 - type: nauc_recall_at_1000_diff1 value: -27.099400000000003 - type: nauc_precision_at_1_max value: 57.6156 - type: nauc_precision_at_1_std value: -3.39 - type: nauc_precision_at_1_diff1 value: 83.0288 - type: nauc_precision_at_3_max value: 8.4358 - type: nauc_precision_at_3_std value: 4.925199999999999 - type: nauc_precision_at_3_diff1 value: 0.29009999999999997 - type: nauc_precision_at_5_max value: 3.2076000000000002 - type: nauc_precision_at_5_std value: 6.2316 - type: nauc_precision_at_5_diff1 value: -4.6014 - type: nauc_precision_at_10_max value: -1.7786 - type: nauc_precision_at_10_std value: 7.467300000000001 - type: nauc_precision_at_10_diff1 
value: -9.6991 - type: nauc_precision_at_20_max value: -5.0717 - type: nauc_precision_at_20_std value: 8.1128 - type: nauc_precision_at_20_diff1 value: -12.5945 - type: nauc_precision_at_100_max value: -10.5434 - type: nauc_precision_at_100_std value: 11.7719 - type: nauc_precision_at_100_diff1 value: -18.394 - type: nauc_precision_at_1000_max value: -15.5908 - type: nauc_precision_at_1000_std value: 16.842399999999998 - type: nauc_precision_at_1000_diff1 value: -27.099400000000003 - type: nauc_mrr_at_1_max value: 57.6156 - type: nauc_mrr_at_1_std value: -3.39 - type: nauc_mrr_at_1_diff1 value: 83.0288 - type: nauc_mrr_at_3_max value: 62.074 - type: nauc_mrr_at_3_std value: -0.45199999999999996 - type: nauc_mrr_at_3_diff1 value: 82.8025 - type: nauc_mrr_at_5_max value: 62.157300000000006 - type: nauc_mrr_at_5_std value: 0.2829 - type: nauc_mrr_at_5_diff1 value: 82.9913 - type: nauc_mrr_at_10_max value: 61.9838 - type: nauc_mrr_at_10_std value: 0.16670000000000001 - type: nauc_mrr_at_10_diff1 value: 82.9452 - type: nauc_mrr_at_20_max value: 61.9516 - type: nauc_mrr_at_20_std value: 0.18159999999999998 - type: nauc_mrr_at_20_diff1 value: 82.9723 - type: nauc_mrr_at_100_max value: 61.891600000000004 - type: nauc_mrr_at_100_std value: 0.1432 - type: nauc_mrr_at_100_diff1 value: 82.97489999999999 - type: nauc_mrr_at_1000_max value: 61.88249999999999 - type: nauc_mrr_at_1000_std value: 0.1357 - type: nauc_mrr_at_1000_diff1 value: 82.9723 - type: main_score value: 68.151 - task: type: Classification dataset: name: MTEB ImdbClassification (default) type: mteb/imdb config: default split: test revision: 3d86128a09e091d6018b6d26cad27f2739fc2db7 metrics: - type: accuracy value: 72.5444 - type: f1 value: 72.4069 - type: f1_weighted value: 72.4069 - type: ap value: 66.8419 - type: ap_weighted value: 66.8419 - type: main_score value: 72.5444 - task: type: Retrieval dataset: name: MTEB MSMARCO (default) type: mteb/msmarco config: default split: dev revision: c5a29a104738b98a9e76336939199e264163d4a0 metrics: - type: ndcg_at_1 value: 25.516 - type: ndcg_at_3 value: 36.687999999999995 - type: ndcg_at_5 value: 40.864 - type: ndcg_at_10 value: 44.856 - type: ndcg_at_20 value: 47.3 - type: ndcg_at_100 value: 50.062 - type: ndcg_at_1000 value: 51.085 - type: map_at_1 value: 24.782 - type: map_at_3 value: 33.668 - type: map_at_5 value: 36.010999999999996 - type: map_at_10 value: 37.702000000000005 - type: map_at_20 value: 38.391 - type: map_at_100 value: 38.798 - type: map_at_1000 value: 38.841 - type: recall_at_1 value: 24.782 - type: recall_at_3 value: 44.722 - type: recall_at_5 value: 54.769999999999996 - type: recall_at_10 value: 66.842 - type: recall_at_20 value: 76.319 - type: recall_at_100 value: 90.761 - type: recall_at_1000 value: 98.48 - type: precision_at_1 value: 25.516 - type: precision_at_3 value: 15.506 - type: precision_at_5 value: 11.413 - type: precision_at_10 value: 6.99 - type: precision_at_20 value: 4.009 - type: precision_at_100 value: 0.959 - type: precision_at_1000 value: 0.105 - type: mrr_at_1 value: 25.5014 - type: mrr_at_3 value: 34.3553 - type: mrr_at_5 value: 36.666199999999996 - type: mrr_at_10 value: 38.3084 - type: mrr_at_20 value: 38.9663 - type: mrr_at_100 value: 39.341300000000004 - type: mrr_at_1000 value: 39.3785 - type: nauc_ndcg_at_1_max value: 4.2138 - type: nauc_ndcg_at_1_std value: -24.7801 - type: nauc_ndcg_at_1_diff1 value: 37.758399999999995 - type: nauc_ndcg_at_3_max value: 5.2536 - type: nauc_ndcg_at_3_std value: -29.642200000000003 - type: nauc_ndcg_at_3_diff1 
value: 32.1639 - type: nauc_ndcg_at_5_max value: 5.0839 - type: nauc_ndcg_at_5_std value: -31.3077 - type: nauc_ndcg_at_5_diff1 value: 31.5135 - type: nauc_ndcg_at_10_max value: 6.2542 - type: nauc_ndcg_at_10_std value: -30.8439 - type: nauc_ndcg_at_10_diff1 value: 31.461299999999998 - type: nauc_ndcg_at_20_max value: 6.5669 - type: nauc_ndcg_at_20_std value: -29.6288 - type: nauc_ndcg_at_20_diff1 value: 31.590200000000003 - type: nauc_ndcg_at_100_max value: 6.691800000000001 - type: nauc_ndcg_at_100_std value: -28.1768 - type: nauc_ndcg_at_100_diff1 value: 32.1699 - type: nauc_ndcg_at_1000_max value: 6.451700000000001 - type: nauc_ndcg_at_1000_std value: -28.2093 - type: nauc_ndcg_at_1000_diff1 value: 32.3573 - type: nauc_map_at_1_max value: 4.1941 - type: nauc_map_at_1_std value: -24.9531 - type: nauc_map_at_1_diff1 value: 38.099 - type: nauc_map_at_3_max value: 4.9883999999999995 - type: nauc_map_at_3_std value: -28.7062 - type: nauc_map_at_3_diff1 value: 33.5696 - type: nauc_map_at_5_max value: 4.8525 - type: nauc_map_at_5_std value: -29.6601 - type: nauc_map_at_5_diff1 value: 33.2144 - type: nauc_map_at_10_max value: 5.3533 - type: nauc_map_at_10_std value: -29.4529 - type: nauc_map_at_10_diff1 value: 33.219300000000004 - type: nauc_map_at_20_max value: 5.416300000000001 - type: nauc_map_at_20_std value: -29.1294 - type: nauc_map_at_20_diff1 value: 33.2747 - type: nauc_map_at_100_max value: 5.4547 - type: nauc_map_at_100_std value: -28.8978 - type: nauc_map_at_100_diff1 value: 33.3505 - type: nauc_map_at_1000_max value: 5.4512 - type: nauc_map_at_1000_std value: -28.8844 - type: nauc_map_at_1000_diff1 value: 33.356700000000004 - type: nauc_recall_at_1_max value: 4.1941 - type: nauc_recall_at_1_std value: -24.9531 - type: nauc_recall_at_1_diff1 value: 38.099 - type: nauc_recall_at_3_max value: 5.884799999999999 - type: nauc_recall_at_3_std value: -32.317 - type: nauc_recall_at_3_diff1 value: 28.284399999999998 - type: nauc_recall_at_5_max value: 5.4525 - type: nauc_recall_at_5_std value: -36.4055 - type: nauc_recall_at_5_diff1 value: 26.384200000000003 - type: nauc_recall_at_10_max value: 9.403400000000001 - type: nauc_recall_at_10_std value: -35.9112 - type: nauc_recall_at_10_diff1 value: 25.2415 - type: nauc_recall_at_20_max value: 12.0952 - type: nauc_recall_at_20_std value: -30.778299999999998 - type: nauc_recall_at_20_diff1 value: 24.1866 - type: nauc_recall_at_100_max value: 19.6413 - type: nauc_recall_at_100_std value: -11.9243 - type: nauc_recall_at_100_diff1 value: 24.6153 - type: nauc_recall_at_1000_max value: 48.1206 - type: nauc_recall_at_1000_std value: 48.0062 - type: nauc_recall_at_1000_diff1 value: 16.2543 - type: nauc_precision_at_1_max value: 4.2138 - type: nauc_precision_at_1_std value: -24.7801 - type: nauc_precision_at_1_diff1 value: 37.758399999999995 - type: nauc_precision_at_3_max value: 5.7985 - type: nauc_precision_at_3_std value: -31.749899999999997 - type: nauc_precision_at_3_diff1 value: 27.373399999999997 - type: nauc_precision_at_5_max value: 5.390000000000001 - type: nauc_precision_at_5_std value: -35.0586 - type: nauc_precision_at_5_diff1 value: 25.100099999999998 - type: nauc_precision_at_10_max value: 9.248199999999999 - type: nauc_precision_at_10_std value: -32.244299999999996 - type: nauc_precision_at_10_diff1 value: 22.5684 - type: nauc_precision_at_20_max value: 11.495099999999999 - type: nauc_precision_at_20_std value: -24.226300000000002 - type: nauc_precision_at_20_diff1 value: 19.6528 - type: nauc_precision_at_100_max value: 14.3649 - type: 
nauc_precision_at_100_std value: 0.0593 - type: nauc_precision_at_100_diff1 value: 10.9596 - type: nauc_precision_at_1000_max value: 10.9512 - type: nauc_precision_at_1000_std value: 18.288 - type: nauc_precision_at_1000_diff1 value: -3.5423000000000004 - type: nauc_mrr_at_1_max value: 4.2204 - type: nauc_mrr_at_1_std value: -24.7703 - type: nauc_mrr_at_1_diff1 value: 37.8126 - type: nauc_mrr_at_3_max value: 5.0668 - type: nauc_mrr_at_3_std value: -28.2677 - type: nauc_mrr_at_3_diff1 value: 33.3724 - type: nauc_mrr_at_5_max value: 5.0481 - type: nauc_mrr_at_5_std value: -29.133 - type: nauc_mrr_at_5_diff1 value: 33.0415 - type: nauc_mrr_at_10_max value: 5.5038 - type: nauc_mrr_at_10_std value: -28.886200000000002 - type: nauc_mrr_at_10_diff1 value: 33.0593 - type: nauc_mrr_at_20_max value: 5.5467 - type: nauc_mrr_at_20_std value: -28.5678 - type: nauc_mrr_at_20_diff1 value: 33.0916 - type: nauc_mrr_at_100_max value: 5.5636 - type: nauc_mrr_at_100_std value: -28.3877 - type: nauc_mrr_at_100_diff1 value: 33.1799 - type: nauc_mrr_at_1000_max value: 5.557 - type: nauc_mrr_at_1000_std value: -28.3796 - type: nauc_mrr_at_1000_diff1 value: 33.184999999999995 - type: main_score value: 44.856 - task: type: Classification dataset: name: MTEB MTOPDomainClassification (en) type: mteb/mtop_domain config: en split: test revision: d80d48c1eb48d3562165c59d59d0034df9fff0bf metrics: - type: accuracy value: 93.5317 - type: f1 value: 93.1956 - type: f1_weighted value: 93.5431 - type: main_score value: 93.5317 - task: type: Classification dataset: name: MTEB MTOPIntentClassification (en) type: mteb/mtop_intent config: en split: test revision: ae001d0e6b1228650b7bd1c2c65fb50ad11a8aba metrics: - type: accuracy value: 67.7907 - type: f1 value: 48.2877 - type: f1_weighted value: 70.3225 - type: main_score value: 67.7907 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (en) type: mteb/amazon_massive_intent config: en split: test revision: 4672e20407010da34463acc759c162ca9734bca6 metrics: - type: accuracy value: 71.456 - type: f1 value: 68.2268 - type: f1_weighted value: 70.4722 - type: main_score value: 71.456 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (en) type: mteb/amazon_massive_scenario config: en split: test revision: fad2c6e8459f9e1c45d9315f4953d921437d70f8 metrics: - type: accuracy value: 76.21719999999999 - type: f1 value: 75.14189999999999 - type: f1_weighted value: 76.0733 - type: main_score value: 76.21719999999999 - task: type: Clustering dataset: name: MTEB MedrxivClusteringP2P (default) type: mteb/medrxiv-clustering-p2p config: default split: test revision: e7a26af6f3ae46b30dde8737f02c07b1505bcc73 metrics: - type: v_measure value: 31.3917 - type: v_measure_std value: 1.4778 - type: main_score value: 31.3917 - task: type: Clustering dataset: name: MTEB MedrxivClusteringS2S (default) type: mteb/medrxiv-clustering-s2s config: default split: test revision: 35191c8c0dca72d8ff3efcd72aa802307d469663 metrics: - type: v_measure value: 28.2408 - type: v_measure_std value: 1.1622999999999999 - type: main_score value: 28.2408 - task: type: Reranking dataset: name: MTEB MindSmallReranking (default) type: mteb/mind_small config: default split: test revision: 59042f120c80e8afa9cdbb224f67076cec0fc9a7 metrics: - type: map value: 29.5796 - type: mrr value: 30.3081 - type: nAUC_map_max value: -24.9194 - type: nAUC_map_std value: -9.042 - type: nAUC_map_diff1 value: 12.1611 - type: nAUC_mrr_max value: -19.3867 - type: nAUC_mrr_std value: -6.3873 - type: 
nAUC_mrr_diff1 value: 11.8078 - type: main_score value: 29.5796 - task: type: Retrieval dataset: name: MTEB NFCorpus (default) type: mteb/nfcorpus config: default split: test revision: ec0fa4fe99da2ff19ca1214b7966684033a58814 metrics: - type: ndcg_at_1 value: 45.046 - type: ndcg_at_3 value: 41.704 - type: ndcg_at_5 value: 39.296 - type: ndcg_at_10 value: 35.343999999999994 - type: ndcg_at_20 value: 32.525999999999996 - type: ndcg_at_100 value: 31.352999999999998 - type: ndcg_at_1000 value: 39.772 - type: map_at_1 value: 5.833 - type: map_at_3 value: 9.953 - type: map_at_5 value: 11.549 - type: map_at_10 value: 13.38 - type: map_at_20 value: 14.706 - type: map_at_100 value: 16.422 - type: map_at_1000 value: 17.777 - type: recall_at_1 value: 5.833 - type: recall_at_3 value: 11.112 - type: recall_at_5 value: 13.834 - type: recall_at_10 value: 16.961000000000002 - type: recall_at_20 value: 20.294999999999998 - type: recall_at_100 value: 30.253000000000004 - type: recall_at_1000 value: 60.902 - type: precision_at_1 value: 46.44 - type: precision_at_3 value: 39.009 - type: precision_at_5 value: 33.745999999999995 - type: precision_at_10 value: 25.635 - type: precision_at_20 value: 18.576 - type: precision_at_100 value: 7.731000000000001 - type: precision_at_1000 value: 2.037 - type: mrr_at_1 value: 46.7492 - type: mrr_at_3 value: 54.6956 - type: mrr_at_5 value: 55.8875 - type: mrr_at_10 value: 56.3913 - type: mrr_at_20 value: 56.6265 - type: mrr_at_100 value: 56.815599999999996 - type: mrr_at_1000 value: 56.8573 - type: nauc_ndcg_at_1_max value: 43.3685 - type: nauc_ndcg_at_1_std value: 21.6124 - type: nauc_ndcg_at_1_diff1 value: 29.0317 - type: nauc_ndcg_at_3_max value: 39.8155 - type: nauc_ndcg_at_3_std value: 23.2206 - type: nauc_ndcg_at_3_diff1 value: 20.7425 - type: nauc_ndcg_at_5_max value: 40.951 - type: nauc_ndcg_at_5_std value: 24.7184 - type: nauc_ndcg_at_5_diff1 value: 19.098599999999998 - type: nauc_ndcg_at_10_max value: 41.4733 - type: nauc_ndcg_at_10_std value: 27.4588 - type: nauc_ndcg_at_10_diff1 value: 17.224800000000002 - type: nauc_ndcg_at_20_max value: 40.3519 - type: nauc_ndcg_at_20_std value: 27.2947 - type: nauc_ndcg_at_20_diff1 value: 16.502 - type: nauc_ndcg_at_100_max value: 44.0676 - type: nauc_ndcg_at_100_std value: 29.1921 - type: nauc_ndcg_at_100_diff1 value: 20.9199 - type: nauc_ndcg_at_1000_max value: 48.9082 - type: nauc_ndcg_at_1000_std value: 33.799600000000005 - type: nauc_ndcg_at_1000_diff1 value: 19.741600000000002 - type: nauc_map_at_1_max value: 19.2048 - type: nauc_map_at_1_std value: -13.564599999999999 - type: nauc_map_at_1_diff1 value: 37.601099999999995 - type: nauc_map_at_3_max value: 23.1853 - type: nauc_map_at_3_std value: -8.3204 - type: nauc_map_at_3_diff1 value: 32.5527 - type: nauc_map_at_5_max value: 26.747500000000002 - type: nauc_map_at_5_std value: -4.136 - type: nauc_map_at_5_diff1 value: 29.041800000000002 - type: nauc_map_at_10_max value: 30.492200000000004 - type: nauc_map_at_10_std value: 2.2847 - type: nauc_map_at_10_diff1 value: 25.949699999999996 - type: nauc_map_at_20_max value: 32.628800000000005 - type: nauc_map_at_20_std value: 6.2305 - type: nauc_map_at_20_diff1 value: 24.0997 - type: nauc_map_at_100_max value: 35.0282 - type: nauc_map_at_100_std value: 12.181899999999999 - type: nauc_map_at_100_diff1 value: 22.6844 - type: nauc_map_at_1000_max value: 35.274899999999995 - type: nauc_map_at_1000_std value: 14.9827 - type: nauc_map_at_1000_diff1 value: 21.4096 - type: nauc_recall_at_1_max value: 19.2048 - type: 
nauc_recall_at_1_std value: -13.564599999999999 - type: nauc_recall_at_1_diff1 value: 37.601099999999995 - type: nauc_recall_at_3_max value: 20.5895 - type: nauc_recall_at_3_std value: -7.8295 - type: nauc_recall_at_3_diff1 value: 28.4675 - type: nauc_recall_at_5_max value: 24.8771 - type: nauc_recall_at_5_std value: -2.869 - type: nauc_recall_at_5_diff1 value: 23.301 - type: nauc_recall_at_10_max value: 28.647299999999998 - type: nauc_recall_at_10_std value: 4.4991 - type: nauc_recall_at_10_diff1 value: 20.5606 - type: nauc_recall_at_20_max value: 30.3525 - type: nauc_recall_at_20_std value: 8.712 - type: nauc_recall_at_20_diff1 value: 17.4748 - type: nauc_recall_at_100_max value: 34.0702 - type: nauc_recall_at_100_std value: 23.3319 - type: nauc_recall_at_100_diff1 value: 17.2015 - type: nauc_recall_at_1000_max value: 27.8011 - type: nauc_recall_at_1000_std value: 21.6507 - type: nauc_recall_at_1000_diff1 value: 4.4638 - type: nauc_precision_at_1_max value: 44.6989 - type: nauc_precision_at_1_std value: 22.622 - type: nauc_precision_at_1_diff1 value: 28.881400000000003 - type: nauc_precision_at_3_max value: 39.4166 - type: nauc_precision_at_3_std value: 29.2591 - type: nauc_precision_at_3_diff1 value: 12.1577 - type: nauc_precision_at_5_max value: 39.6371 - type: nauc_precision_at_5_std value: 33.201 - type: nauc_precision_at_5_diff1 value: 7.958 - type: nauc_precision_at_10_max value: 38.2593 - type: nauc_precision_at_10_std value: 40.6097 - type: nauc_precision_at_10_diff1 value: 1.376 - type: nauc_precision_at_20_max value: 31.375999999999998 - type: nauc_precision_at_20_std value: 42.3468 - type: nauc_precision_at_20_diff1 value: -4.1699 - type: nauc_precision_at_100_max value: 16.628 - type: nauc_precision_at_100_std value: 41.800599999999996 - type: nauc_precision_at_100_diff1 value: -9.4674 - type: nauc_precision_at_1000_max value: 1.6051 - type: nauc_precision_at_1000_std value: 29.1306 - type: nauc_precision_at_1000_diff1 value: -11.1912 - type: nauc_mrr_at_1_max value: 44.4339 - type: nauc_mrr_at_1_std value: 23.6489 - type: nauc_mrr_at_1_diff1 value: 28.0393 - type: nauc_mrr_at_3_max value: 47.780899999999995 - type: nauc_mrr_at_3_std value: 31.412499999999998 - type: nauc_mrr_at_3_diff1 value: 24.1569 - type: nauc_mrr_at_5_max value: 48.732 - type: nauc_mrr_at_5_std value: 31.899100000000004 - type: nauc_mrr_at_5_diff1 value: 24.4177 - type: nauc_mrr_at_10_max value: 48.9748 - type: nauc_mrr_at_10_std value: 32.2053 - type: nauc_mrr_at_10_diff1 value: 24.0317 - type: nauc_mrr_at_20_max value: 49.0832 - type: nauc_mrr_at_20_std value: 32.0994 - type: nauc_mrr_at_20_diff1 value: 23.9777 - type: nauc_mrr_at_100_max value: 49.1731 - type: nauc_mrr_at_100_std value: 32.3179 - type: nauc_mrr_at_100_diff1 value: 24.081 - type: nauc_mrr_at_1000_max value: 49.1387 - type: nauc_mrr_at_1000_std value: 32.2738 - type: nauc_mrr_at_1000_diff1 value: 24.063200000000002 - type: main_score value: 35.343999999999994 - task: type: Retrieval dataset: name: MTEB NQ (default) type: mteb/nq config: default split: test revision: b774495ed302d8c44a3a7ea25c90dbce03968f31 metrics: - type: ndcg_at_1 value: 44.93 - type: ndcg_at_3 value: 56.003 - type: ndcg_at_5 value: 60.150000000000006 - type: ndcg_at_10 value: 63.673 - type: ndcg_at_20 value: 65.211 - type: ndcg_at_100 value: 66.686 - type: ndcg_at_1000 value: 67.009 - type: map_at_1 value: 40.035 - type: map_at_3 value: 51.976 - type: map_at_5 value: 54.510999999999996 - type: map_at_10 value: 56.17100000000001 - type: map_at_20 value: 56.684 - type: 
map_at_100 value: 56.932 - type: map_at_1000 value: 56.946 - type: recall_at_1 value: 40.035 - type: recall_at_3 value: 64.224 - type: recall_at_5 value: 73.682 - type: recall_at_10 value: 83.809 - type: recall_at_20 value: 89.385 - type: recall_at_100 value: 96.705 - type: recall_at_1000 value: 99.054 - type: precision_at_1 value: 44.93 - type: precision_at_3 value: 25.019000000000002 - type: precision_at_5 value: 17.445 - type: precision_at_10 value: 10.043000000000001 - type: precision_at_20 value: 5.4 - type: precision_at_100 value: 1.174 - type: precision_at_1000 value: 0.121 - type: mrr_at_1 value: 44.9305 - type: mrr_at_3 value: 55.37370000000001 - type: mrr_at_5 value: 57.4464 - type: mrr_at_10 value: 58.680200000000006 - type: mrr_at_20 value: 59.0042 - type: mrr_at_100 value: 59.178799999999995 - type: mrr_at_1000 value: 59.188700000000004 - type: nauc_ndcg_at_1_max value: 23.8396 - type: nauc_ndcg_at_1_std value: -3.8885000000000005 - type: nauc_ndcg_at_1_diff1 value: 37.971500000000006 - type: nauc_ndcg_at_3_max value: 30.025800000000004 - type: nauc_ndcg_at_3_std value: -4.9848 - type: nauc_ndcg_at_3_diff1 value: 34.324799999999996 - type: nauc_ndcg_at_5_max value: 32.2984 - type: nauc_ndcg_at_5_std value: -3.263 - type: nauc_ndcg_at_5_diff1 value: 35.2865 - type: nauc_ndcg_at_10_max value: 32.4173 - type: nauc_ndcg_at_10_std value: -2.398 - type: nauc_ndcg_at_10_diff1 value: 34.767399999999995 - type: nauc_ndcg_at_20_max value: 32.332 - type: nauc_ndcg_at_20_std value: -1.7824 - type: nauc_ndcg_at_20_diff1 value: 35.0354 - type: nauc_ndcg_at_100_max value: 31.3774 - type: nauc_ndcg_at_100_std value: -1.4645 - type: nauc_ndcg_at_100_diff1 value: 35.255900000000004 - type: nauc_ndcg_at_1000_max value: 31.008799999999997 - type: nauc_ndcg_at_1000_std value: -1.9499 - type: nauc_ndcg_at_1000_diff1 value: 35.3522 - type: nauc_map_at_1_max value: 21.296300000000002 - type: nauc_map_at_1_std value: -6.0126 - type: nauc_map_at_1_diff1 value: 37.9216 - type: nauc_map_at_3_max value: 28.1195 - type: nauc_map_at_3_std value: -5.3494 - type: nauc_map_at_3_diff1 value: 35.0839 - type: nauc_map_at_5_max value: 29.365999999999996 - type: nauc_map_at_5_std value: -4.410200000000001 - type: nauc_map_at_5_diff1 value: 35.6342 - type: nauc_map_at_10_max value: 29.378300000000003 - type: nauc_map_at_10_std value: -4.0228 - type: nauc_map_at_10_diff1 value: 35.451 - type: nauc_map_at_20_max value: 29.3604 - type: nauc_map_at_20_std value: -3.7953 - type: nauc_map_at_20_diff1 value: 35.5496 - type: nauc_map_at_100_max value: 29.233199999999997 - type: nauc_map_at_100_std value: -3.7321 - type: nauc_map_at_100_diff1 value: 35.574099999999994 - type: nauc_map_at_1000_max value: 29.2215 - type: nauc_map_at_1000_std value: -3.7482 - type: nauc_map_at_1000_diff1 value: 35.5805 - type: nauc_recall_at_1_max value: 21.296300000000002 - type: nauc_recall_at_1_std value: -6.0126 - type: nauc_recall_at_1_diff1 value: 37.9216 - type: nauc_recall_at_3_max value: 34.2599 - type: nauc_recall_at_3_std value: -5.5474000000000006 - type: nauc_recall_at_3_diff1 value: 30.7103 - type: nauc_recall_at_5_max value: 41.6689 - type: nauc_recall_at_5_std value: -0.7705 - type: nauc_recall_at_5_diff1 value: 32.6001 - type: nauc_recall_at_10_max value: 47.236200000000004 - type: nauc_recall_at_10_std value: 3.9309999999999996 - type: nauc_recall_at_10_diff1 value: 29.277199999999997 - type: nauc_recall_at_20_max value: 53.957100000000004 - type: nauc_recall_at_20_std value: 11.282499999999999 - type: nauc_recall_at_20_diff1 
value: 29.7674 - type: nauc_recall_at_100_max value: 66.87039999999999 - type: nauc_recall_at_100_std value: 46.8733 - type: nauc_recall_at_100_diff1 value: 30.0249 - type: nauc_recall_at_1000_max value: 88.33670000000001 - type: nauc_recall_at_1000_std value: 77.0724 - type: nauc_recall_at_1000_diff1 value: 34.0192 - type: nauc_precision_at_1_max value: 23.8396 - type: nauc_precision_at_1_std value: -3.8885000000000005 - type: nauc_precision_at_1_diff1 value: 37.971500000000006 - type: nauc_precision_at_3_max value: 31.053399999999996 - type: nauc_precision_at_3_std value: 0.3766 - type: nauc_precision_at_3_diff1 value: 21.5732 - type: nauc_precision_at_5_max value: 30.816100000000002 - type: nauc_precision_at_5_std value: 5.3659 - type: nauc_precision_at_5_diff1 value: 17.4728 - type: nauc_precision_at_10_max value: 25.204300000000003 - type: nauc_precision_at_10_std value: 10.6652 - type: nauc_precision_at_10_diff1 value: 7.7665 - type: nauc_precision_at_20_max value: 20.3015 - type: nauc_precision_at_20_std value: 14.1789 - type: nauc_precision_at_20_diff1 value: 3.2251000000000003 - type: nauc_precision_at_100_max value: 9.709 - type: nauc_precision_at_100_std value: 17.7706 - type: nauc_precision_at_100_diff1 value: -5.5258 - type: nauc_precision_at_1000_max value: 4.5083 - type: nauc_precision_at_1000_std value: 14.754900000000001 - type: nauc_precision_at_1000_diff1 value: -8.1761 - type: nauc_mrr_at_1_max value: 23.8396 - type: nauc_mrr_at_1_std value: -3.8885000000000005 - type: nauc_mrr_at_1_diff1 value: 37.971500000000006 - type: nauc_mrr_at_3_max value: 28.9257 - type: nauc_mrr_at_3_std value: -3.6295 - type: nauc_mrr_at_3_diff1 value: 35.390100000000004 - type: nauc_mrr_at_5_max value: 29.8503 - type: nauc_mrr_at_5_std value: -2.8144 - type: nauc_mrr_at_5_diff1 value: 35.8786 - type: nauc_mrr_at_10_max value: 29.662899999999997 - type: nauc_mrr_at_10_std value: -2.6432 - type: nauc_mrr_at_10_diff1 value: 35.708400000000005 - type: nauc_mrr_at_20_max value: 29.5659 - type: nauc_mrr_at_20_std value: -2.6337 - type: nauc_mrr_at_20_diff1 value: 35.761900000000004 - type: nauc_mrr_at_100_max value: 29.432399999999998 - type: nauc_mrr_at_100_std value: -2.6328 - type: nauc_mrr_at_100_diff1 value: 35.8182 - type: nauc_mrr_at_1000_max value: 29.4234 - type: nauc_mrr_at_1000_std value: -2.6451 - type: nauc_mrr_at_1000_diff1 value: 35.8215 - type: main_score value: 63.673 - task: type: Retrieval dataset: name: MTEB QuoraRetrieval (default) type: mteb/quora config: default split: test revision: e4e08e0b7dbe3c8700f0daef558ff32256715259 metrics: - type: ndcg_at_1 value: 82.27 - type: ndcg_at_3 value: 86.28099999999999 - type: ndcg_at_5 value: 87.81400000000001 - type: ndcg_at_10 value: 89.021 - type: ndcg_at_20 value: 89.643 - type: ndcg_at_100 value: 90.13 - type: ndcg_at_1000 value: 90.226 - type: map_at_1 value: 71.43599999999999 - type: map_at_3 value: 82.49 - type: map_at_5 value: 84.331 - type: map_at_10 value: 85.416 - type: map_at_20 value: 85.827 - type: map_at_100 value: 86.024 - type: map_at_1000 value: 86.039 - type: recall_at_1 value: 71.43599999999999 - type: recall_at_3 value: 87.912 - type: recall_at_5 value: 92.30000000000001 - type: recall_at_10 value: 95.814 - type: recall_at_20 value: 97.80799999999999 - type: recall_at_100 value: 99.551 - type: recall_at_1000 value: 99.97 - type: precision_at_1 value: 82.27 - type: precision_at_3 value: 37.747 - type: precision_at_5 value: 24.782 - type: precision_at_10 value: 13.497 - type: precision_at_20 value: 7.147 - type: 
precision_at_100 value: 1.529 - type: precision_at_1000 value: 0.157 - type: mrr_at_1 value: 82.23 - type: mrr_at_3 value: 87.26 - type: mrr_at_5 value: 87.9305 - type: mrr_at_10 value: 88.20949999999999 - type: mrr_at_20 value: 88.2764 - type: mrr_at_100 value: 88.2967 - type: mrr_at_1000 value: 88.2976 - type: nauc_ndcg_at_1_max value: 37.0736 - type: nauc_ndcg_at_1_std value: -43.2326 - type: nauc_ndcg_at_1_diff1 value: 77.9945 - type: nauc_ndcg_at_3_max value: 33.9426 - type: nauc_ndcg_at_3_std value: -51.3108 - type: nauc_ndcg_at_3_diff1 value: 76.2559 - type: nauc_ndcg_at_5_max value: 34.927 - type: nauc_ndcg_at_5_std value: -52.50749999999999 - type: nauc_ndcg_at_5_diff1 value: 76.578 - type: nauc_ndcg_at_10_max value: 35.9905 - type: nauc_ndcg_at_10_std value: -51.808699999999995 - type: nauc_ndcg_at_10_diff1 value: 76.6957 - type: nauc_ndcg_at_20_max value: 36.119299999999996 - type: nauc_ndcg_at_20_std value: -50.1628 - type: nauc_ndcg_at_20_diff1 value: 76.6659 - type: nauc_ndcg_at_100_max value: 36.4315 - type: nauc_ndcg_at_100_std value: -48.0358 - type: nauc_ndcg_at_100_diff1 value: 76.5866 - type: nauc_ndcg_at_1000_max value: 36.459399999999995 - type: nauc_ndcg_at_1000_std value: -47.834199999999996 - type: nauc_ndcg_at_1000_diff1 value: 76.5791 - type: nauc_map_at_1_max value: 25.902199999999997 - type: nauc_map_at_1_std value: -44.6605 - type: nauc_map_at_1_diff1 value: 80.78070000000001 - type: nauc_map_at_3_max value: 31.3371 - type: nauc_map_at_3_std value: -53.9334 - type: nauc_map_at_3_diff1 value: 77.7089 - type: nauc_map_at_5_max value: 33.1663 - type: nauc_map_at_5_std value: -53.86919999999999 - type: nauc_map_at_5_diff1 value: 77.32430000000001 - type: nauc_map_at_10_max value: 34.4253 - type: nauc_map_at_10_std value: -52.423500000000004 - type: nauc_map_at_10_diff1 value: 77.0479 - type: nauc_map_at_20_max value: 34.6738 - type: nauc_map_at_20_std value: -51.095400000000005 - type: nauc_map_at_20_diff1 value: 76.88810000000001 - type: nauc_map_at_100_max value: 34.7984 - type: nauc_map_at_100_std value: -50.2705 - type: nauc_map_at_100_diff1 value: 76.8083 - type: nauc_map_at_1000_max value: 34.8162 - type: nauc_map_at_1000_std value: -50.211600000000004 - type: nauc_map_at_1000_diff1 value: 76.8047 - type: nauc_recall_at_1_max value: 25.902199999999997 - type: nauc_recall_at_1_std value: -44.6605 - type: nauc_recall_at_1_diff1 value: 80.78070000000001 - type: nauc_recall_at_3_max value: 27.693 - type: nauc_recall_at_3_std value: -61.799400000000006 - type: nauc_recall_at_3_diff1 value: 74.25 - type: nauc_recall_at_5_max value: 30.216700000000003 - type: nauc_recall_at_5_std value: -68.2919 - type: nauc_recall_at_5_diff1 value: 72.8613 - type: nauc_recall_at_10_max value: 34.4765 - type: nauc_recall_at_10_std value: -74.3633 - type: nauc_recall_at_10_diff1 value: 73.0316 - type: nauc_recall_at_20_max value: 33.812 - type: nauc_recall_at_20_std value: -72.8956 - type: nauc_recall_at_20_diff1 value: 73.4475 - type: nauc_recall_at_100_max value: 39.0326 - type: nauc_recall_at_100_std value: -42.9628 - type: nauc_recall_at_100_diff1 value: 72.66669999999999 - type: nauc_recall_at_1000_max value: 16.4069 - type: nauc_recall_at_1000_std value: 20.353099999999998 - type: nauc_recall_at_1000_diff1 value: 72.6857 - type: nauc_precision_at_1_max value: 37.0736 - type: nauc_precision_at_1_std value: -43.2326 - type: nauc_precision_at_1_diff1 value: 77.9945 - type: nauc_precision_at_3_max value: 7.225099999999999 - type: nauc_precision_at_3_std value: 5.4519 - type: 
nauc_precision_at_3_diff1 value: -20.1979 - type: nauc_precision_at_5_max value: 3.1125 - type: nauc_precision_at_5_std value: 17.542099999999998 - type: nauc_precision_at_5_diff1 value: -32.5768 - type: nauc_precision_at_10_max value: -0.3758 - type: nauc_precision_at_10_std value: 27.9681 - type: nauc_precision_at_10_diff1 value: -39.8065 - type: nauc_precision_at_20_max value: -2.7107 - type: nauc_precision_at_20_std value: 34.9186 - type: nauc_precision_at_20_diff1 value: -42.686800000000005 - type: nauc_precision_at_100_max value: -4.587 - type: nauc_precision_at_100_std value: 41.415600000000005 - type: nauc_precision_at_100_diff1 value: -44.357 - type: nauc_precision_at_1000_max value: -5.003 - type: nauc_precision_at_1000_std value: 42.5355 - type: nauc_precision_at_1000_diff1 value: -44.5697 - type: nauc_mrr_at_1_max value: 37.1298 - type: nauc_mrr_at_1_std value: -43.2774 - type: nauc_mrr_at_1_diff1 value: 78.0714 - type: nauc_mrr_at_3_max value: 37.644800000000004 - type: nauc_mrr_at_3_std value: -46.231 - type: nauc_mrr_at_3_diff1 value: 77.0599 - type: nauc_mrr_at_5_max value: 37.994299999999996 - type: nauc_mrr_at_5_std value: -46.0511 - type: nauc_mrr_at_5_diff1 value: 77.1377 - type: nauc_mrr_at_10_max value: 37.9206 - type: nauc_mrr_at_10_std value: -45.8065 - type: nauc_mrr_at_10_diff1 value: 77.1994 - type: nauc_mrr_at_20_max value: 37.8028 - type: nauc_mrr_at_20_std value: -45.7095 - type: nauc_mrr_at_20_diff1 value: 77.2152 - type: nauc_mrr_at_100_max value: 37.7912 - type: nauc_mrr_at_100_std value: -45.6767 - type: nauc_mrr_at_100_diff1 value: 77.2139 - type: nauc_mrr_at_1000_max value: 37.79 - type: nauc_mrr_at_1000_std value: -45.6766 - type: nauc_mrr_at_1000_diff1 value: 77.2145 - type: main_score value: 89.021 - task: type: Clustering dataset: name: MTEB RedditClustering (default) type: mteb/reddit-clustering config: default split: test revision: 24640382cdbf8abc73003fb0fa6d111a705499eb metrics: - type: v_measure value: 51.208600000000004 - type: v_measure_std value: 4.2761000000000005 - type: main_score value: 51.208600000000004 - task: type: Clustering dataset: name: MTEB RedditClusteringP2P (default) type: mteb/reddit-clustering-p2p config: default split: test revision: 385e3cb46b4cfa89021f56c4380204149d0efe33 metrics: - type: v_measure value: 60.372899999999994 - type: v_measure_std value: 12.0829 - type: main_score value: 60.372899999999994 - task: type: Retrieval dataset: name: MTEB SCIDOCS (default) type: mteb/scidocs config: default split: test revision: f8c2fcf00f625baaa80f62ec5bd9e1fff3b8ae88 metrics: - type: ndcg_at_1 value: 22.400000000000002 - type: ndcg_at_3 value: 19.192 - type: ndcg_at_5 value: 16.767000000000003 - type: ndcg_at_10 value: 20.238999999999997 - type: ndcg_at_20 value: 22.720000000000002 - type: ndcg_at_100 value: 27.567999999999998 - type: ndcg_at_1000 value: 32.535 - type: map_at_1 value: 4.552 - type: map_at_3 value: 8.495999999999999 - type: map_at_5 value: 10.213999999999999 - type: map_at_10 value: 11.985 - type: map_at_20 value: 12.937000000000001 - type: map_at_100 value: 13.885 - type: map_at_1000 value: 14.155999999999999 - type: recall_at_1 value: 4.552 - type: recall_at_3 value: 11.067 - type: recall_at_5 value: 15.052 - type: recall_at_10 value: 21.422 - type: recall_at_20 value: 27.279999999999998 - type: recall_at_100 value: 42.968 - type: recall_at_1000 value: 67.232 - type: precision_at_1 value: 22.400000000000002 - type: precision_at_3 value: 18.2 - type: precision_at_5 value: 14.860000000000001 - type: 
precision_at_10 value: 10.58 - type: precision_at_20 value: 6.715 - type: precision_at_100 value: 2.114 - type: precision_at_1000 value: 0.331 - type: mrr_at_1 value: 22.400000000000002 - type: mrr_at_3 value: 31.0833 - type: mrr_at_5 value: 32.853300000000004 - type: mrr_at_10 value: 34.2814 - type: mrr_at_20 value: 34.814 - type: mrr_at_100 value: 35.2576 - type: mrr_at_1000 value: 35.322199999999995 - type: nauc_ndcg_at_1_max value: 23.7575 - type: nauc_ndcg_at_1_std value: 4.1697 - type: nauc_ndcg_at_1_diff1 value: 28.3995 - type: nauc_ndcg_at_3_max value: 27.5517 - type: nauc_ndcg_at_3_std value: 8.8005 - type: nauc_ndcg_at_3_diff1 value: 22.334799999999998 - type: nauc_ndcg_at_5_max value: 28.607599999999998 - type: nauc_ndcg_at_5_std value: 10.0785 - type: nauc_ndcg_at_5_diff1 value: 21.4713 - type: nauc_ndcg_at_10_max value: 30.812099999999997 - type: nauc_ndcg_at_10_std value: 14.4374 - type: nauc_ndcg_at_10_diff1 value: 20.5304 - type: nauc_ndcg_at_20_max value: 32.3888 - type: nauc_ndcg_at_20_std value: 17.8152 - type: nauc_ndcg_at_20_diff1 value: 20.2815 - type: nauc_ndcg_at_100_max value: 34.402100000000004 - type: nauc_ndcg_at_100_std value: 22.3694 - type: nauc_ndcg_at_100_diff1 value: 20.9422 - type: nauc_ndcg_at_1000_max value: 33.7269 - type: nauc_ndcg_at_1000_std value: 23.646700000000003 - type: nauc_ndcg_at_1000_diff1 value: 19.7226 - type: nauc_map_at_1_max value: 23.5069 - type: nauc_map_at_1_std value: 3.8736 - type: nauc_map_at_1_diff1 value: 28.231 - type: nauc_map_at_3_max value: 27.293 - type: nauc_map_at_3_std value: 6.9329 - type: nauc_map_at_3_diff1 value: 21.8664 - type: nauc_map_at_5_max value: 28.591100000000004 - type: nauc_map_at_5_std value: 8.2248 - type: nauc_map_at_5_diff1 value: 21.4395 - type: nauc_map_at_10_max value: 30.417300000000004 - type: nauc_map_at_10_std value: 11.615300000000001 - type: nauc_map_at_10_diff1 value: 20.624000000000002 - type: nauc_map_at_20_max value: 31.479200000000002 - type: nauc_map_at_20_std value: 13.808699999999998 - type: nauc_map_at_20_diff1 value: 20.413 - type: nauc_map_at_100_max value: 32.2613 - type: nauc_map_at_100_std value: 15.5692 - type: nauc_map_at_100_diff1 value: 20.5465 - type: nauc_map_at_1000_max value: 32.2476 - type: nauc_map_at_1000_std value: 15.7471 - type: nauc_map_at_1000_diff1 value: 20.4622 - type: nauc_recall_at_1_max value: 23.5069 - type: nauc_recall_at_1_std value: 3.8736 - type: nauc_recall_at_1_diff1 value: 28.231 - type: nauc_recall_at_3_max value: 27.970299999999998 - type: nauc_recall_at_3_std value: 10.2171 - type: nauc_recall_at_3_diff1 value: 19.403699999999997 - type: nauc_recall_at_5_max value: 28.4521 - type: nauc_recall_at_5_std value: 12.2105 - type: nauc_recall_at_5_diff1 value: 17.5747 - type: nauc_recall_at_10_max value: 30.6955 - type: nauc_recall_at_10_std value: 19.096 - type: nauc_recall_at_10_diff1 value: 15.3116 - type: nauc_recall_at_20_max value: 32.1047 - type: nauc_recall_at_20_std value: 24.823600000000003 - type: nauc_recall_at_20_diff1 value: 14.257700000000002 - type: nauc_recall_at_100_max value: 33.6062 - type: nauc_recall_at_100_std value: 33.8641 - type: nauc_recall_at_100_diff1 value: 14.5145 - type: nauc_recall_at_1000_max value: 26.848300000000002 - type: nauc_recall_at_1000_std value: 38.5884 - type: nauc_recall_at_1000_diff1 value: 5.6408 - type: nauc_precision_at_1_max value: 23.7575 - type: nauc_precision_at_1_std value: 4.1697 - type: nauc_precision_at_1_diff1 value: 28.3995 - type: nauc_precision_at_3_max value: 28.2504 - type: 
nauc_precision_at_3_std value: 10.6227 - type: nauc_precision_at_3_diff1 value: 19.5683 - type: nauc_precision_at_5_max value: 28.8134 - type: nauc_precision_at_5_std value: 12.518899999999999 - type: nauc_precision_at_5_diff1 value: 17.8036 - type: nauc_precision_at_10_max value: 30.9813 - type: nauc_precision_at_10_std value: 19.3506 - type: nauc_precision_at_10_diff1 value: 15.512 - type: nauc_precision_at_20_max value: 32.6743 - type: nauc_precision_at_20_std value: 24.9974 - type: nauc_precision_at_20_diff1 value: 14.794099999999998 - type: nauc_precision_at_100_max value: 34.413700000000006 - type: nauc_precision_at_100_std value: 34.0889 - type: nauc_precision_at_100_diff1 value: 15.252699999999999 - type: nauc_precision_at_1000_max value: 27.3954 - type: nauc_precision_at_1000_std value: 37.8895 - type: nauc_precision_at_1000_diff1 value: 6.587999999999999 - type: nauc_mrr_at_1_max value: 23.7575 - type: nauc_mrr_at_1_std value: 4.1697 - type: nauc_mrr_at_1_diff1 value: 28.3995 - type: nauc_mrr_at_3_max value: 26.8324 - type: nauc_mrr_at_3_std value: 8.646700000000001 - type: nauc_mrr_at_3_diff1 value: 25.5754 - type: nauc_mrr_at_5_max value: 26.8274 - type: nauc_mrr_at_5_std value: 8.911 - type: nauc_mrr_at_5_diff1 value: 25.106 - type: nauc_mrr_at_10_max value: 27.073399999999996 - type: nauc_mrr_at_10_std value: 9.7624 - type: nauc_mrr_at_10_diff1 value: 24.9405 - type: nauc_mrr_at_20_max value: 27.1229 - type: nauc_mrr_at_20_std value: 10.0676 - type: nauc_mrr_at_20_diff1 value: 24.8122 - type: nauc_mrr_at_100_max value: 27.1391 - type: nauc_mrr_at_100_std value: 9.9628 - type: nauc_mrr_at_100_diff1 value: 24.9507 - type: nauc_mrr_at_1000_max value: 27.114 - type: nauc_mrr_at_1000_std value: 9.9537 - type: nauc_mrr_at_1000_diff1 value: 24.9421 - type: main_score value: 20.238999999999997 - task: type: STS dataset: name: MTEB SICK-R (default) type: mteb/sickr-sts config: default split: test revision: 20a6d6f312dd54037fe07a32d58e5e168867909d metrics: - type: pearson value: 79.5908 - type: spearman value: 73.9888 - type: cosine_pearson value: 79.5908 - type: cosine_spearman value: 73.9888 - type: manhattan_pearson value: 77.0623 - type: manhattan_spearman value: 73.7724 - type: euclidean_pearson value: 77.30890000000001 - type: euclidean_spearman value: 73.9888 - type: main_score value: 73.9888 - task: type: STS dataset: name: MTEB STS12 (default) type: mteb/sts12-sts config: default split: test revision: a0d554a64d88156834ff5ae9920b964011b16384 metrics: - type: pearson value: 74.0752 - type: spearman value: 71.22699999999999 - type: cosine_pearson value: 74.0752 - type: cosine_spearman value: 71.22699999999999 - type: manhattan_pearson value: 70.6037 - type: manhattan_spearman value: 70.9916 - type: euclidean_pearson value: 70.922 - type: euclidean_spearman value: 71.22699999999999 - type: main_score value: 71.22699999999999 - task: type: STS dataset: name: MTEB STS13 (default) type: mteb/sts13-sts config: default split: test revision: 7e90230a92c190f1bf69ae9002b8cea547a64cca metrics: - type: pearson value: 77.8946 - type: spearman value: 80.4405 - type: cosine_pearson value: 77.8946 - type: cosine_spearman value: 80.4405 - type: manhattan_pearson value: 79.6856 - type: manhattan_spearman value: 80.1236 - type: euclidean_pearson value: 80.0315 - type: euclidean_spearman value: 80.44059999999999 - type: main_score value: 80.4405 - task: type: STS dataset: name: MTEB STS14 (default) type: mteb/sts14-sts config: default split: test revision: 6031580fec1f6af667f0bd2da0a551cf4f0b2375 
metrics: - type: pearson value: 76.2196 - type: spearman value: 75.10419999999999 - type: cosine_pearson value: 76.2196 - type: cosine_spearman value: 75.10419999999999 - type: manhattan_pearson value: 75.4647 - type: manhattan_spearman value: 74.81179999999999 - type: euclidean_pearson value: 75.8091 - type: euclidean_spearman value: 75.10419999999999 - type: main_score value: 75.10419999999999 - task: type: STS dataset: name: MTEB STS15 (default) type: mteb/sts15-sts config: default split: test revision: ae752c7c21bf194d8b67fd573edf7ae58183cbe3 metrics: - type: pearson value: 81.2455 - type: spearman value: 82.8681 - type: cosine_pearson value: 81.2455 - type: cosine_spearman value: 82.8681 - type: manhattan_pearson value: 82.4327 - type: manhattan_spearman value: 82.7513 - type: euclidean_pearson value: 82.5635 - type: euclidean_spearman value: 82.8681 - type: main_score value: 82.8681 - task: type: STS dataset: name: MTEB STS16 (default) type: mteb/sts16-sts config: default split: test revision: 4d8694f8f0e0100860b497b999b3dbed754a0513 metrics: - type: pearson value: 81.6322 - type: spearman value: 83.487 - type: cosine_pearson value: 81.6322 - type: cosine_spearman value: 83.487 - type: manhattan_pearson value: 83.0048 - type: manhattan_spearman value: 83.4064 - type: euclidean_pearson value: 83.0938 - type: euclidean_spearman value: 83.487 - type: main_score value: 83.487 - task: type: STS dataset: name: MTEB STS17 (en-en) type: mteb/sts17-crosslingual-sts config: en-en split: test revision: faeb762787bd10488a50c8b5be4a3b82e411949c metrics: - type: pearson value: 81.1124 - type: spearman value: 84.5436 - type: cosine_pearson value: 81.1124 - type: cosine_spearman value: 84.5436 - type: manhattan_pearson value: 83.5158 - type: manhattan_spearman value: 84.596 - type: euclidean_pearson value: 83.4429 - type: euclidean_spearman value: 84.5436 - type: main_score value: 84.5436 - task: type: STS dataset: name: MTEB STS17 (en-tr) type: mteb/sts17-crosslingual-sts config: en-tr split: test revision: faeb762787bd10488a50c8b5be4a3b82e411949c metrics: - type: pearson value: 62.0001 - type: spearman value: 63.631099999999996 - type: cosine_pearson value: 62.0001 - type: cosine_spearman value: 63.631099999999996 - type: manhattan_pearson value: 62.239599999999996 - type: manhattan_spearman value: 62.892199999999995 - type: euclidean_pearson value: 62.9809 - type: euclidean_spearman value: 63.631099999999996 - type: main_score value: 63.631099999999996 - task: type: STS dataset: name: MTEB STS17 (it-en) type: mteb/sts17-crosslingual-sts config: it-en split: test revision: faeb762787bd10488a50c8b5be4a3b82e411949c metrics: - type: pearson value: 75.1556 - type: spearman value: 76.8807 - type: cosine_pearson value: 75.1556 - type: cosine_spearman value: 76.8807 - type: manhattan_pearson value: 76.2428 - type: manhattan_spearman value: 76.8101 - type: euclidean_pearson value: 76.107 - type: euclidean_spearman value: 76.8807 - type: main_score value: 76.8807 - task: type: STS dataset: name: MTEB STS17 (es-en) type: mteb/sts17-crosslingual-sts config: es-en split: test revision: faeb762787bd10488a50c8b5be4a3b82e411949c metrics: - type: pearson value: 69.85719999999999 - type: spearman value: 71.0489 - type: cosine_pearson value: 69.85719999999999 - type: cosine_spearman value: 71.0489 - type: manhattan_pearson value: 71.08449999999999 - type: manhattan_spearman value: 71.0051 - type: euclidean_pearson value: 71.19760000000001 - type: euclidean_spearman value: 71.0489 - type: main_score value: 71.0489 - 
task: type: STS dataset: name: MTEB STS17 (nl-en) type: mteb/sts17-crosslingual-sts config: nl-en split: test revision: faeb762787bd10488a50c8b5be4a3b82e411949c metrics: - type: pearson value: 76.1131 - type: spearman value: 78.2714 - type: cosine_pearson value: 76.1131 - type: cosine_spearman value: 78.2714 - type: manhattan_pearson value: 76.70270000000001 - type: manhattan_spearman value: 77.7803 - type: euclidean_pearson value: 77.14269999999999 - type: euclidean_spearman value: 78.2714 - type: main_score value: 78.2714 - task: type: STS dataset: name: MTEB STS17 (fr-en) type: mteb/sts17-crosslingual-sts config: fr-en split: test revision: faeb762787bd10488a50c8b5be4a3b82e411949c metrics: - type: pearson value: 74.49719999999999 - type: spearman value: 76.2747 - type: cosine_pearson value: 74.49719999999999 - type: cosine_spearman value: 76.2747 - type: manhattan_pearson value: 75.071 - type: manhattan_spearman value: 75.8969 - type: euclidean_pearson value: 75.289 - type: euclidean_spearman value: 76.2747 - type: main_score value: 76.2747 - task: type: STS dataset: name: MTEB STS17 (en-de) type: mteb/sts17-crosslingual-sts config: en-de split: test revision: faeb762787bd10488a50c8b5be4a3b82e411949c metrics: - type: pearson value: 76.7073 - type: spearman value: 79.3107 - type: cosine_pearson value: 76.7073 - type: cosine_spearman value: 79.3107 - type: manhattan_pearson value: 77.9578 - type: manhattan_spearman value: 79.3195 - type: euclidean_pearson value: 77.7386 - type: euclidean_spearman value: 79.3107 - type: main_score value: 79.3107 - task: type: STS dataset: name: MTEB STS17 (en-ar) type: mteb/sts17-crosslingual-sts config: en-ar split: test revision: faeb762787bd10488a50c8b5be4a3b82e411949c metrics: - type: pearson value: 60.5826 - type: spearman value: 61.0502 - type: cosine_pearson value: 60.5826 - type: cosine_spearman value: 61.0502 - type: manhattan_pearson value: 61.202 - type: manhattan_spearman value: 61.2039 - type: euclidean_pearson value: 61.1915 - type: euclidean_spearman value: 61.0502 - type: main_score value: 61.0502 - task: type: STS dataset: name: MTEB STS22 (en) type: mteb/sts22-crosslingual-sts config: en split: test revision: de9d86b3b84231dc21f76c7b7af1f28e2f57f6e3 metrics: - type: pearson value: 69.2521 - type: spearman value: 68.06219999999999 - type: cosine_pearson value: 69.2521 - type: cosine_spearman value: 68.06219999999999 - type: manhattan_pearson value: 70.5115 - type: manhattan_spearman value: 67.8705 - type: euclidean_pearson value: 70.68480000000001 - type: euclidean_spearman value: 68.06219999999999 - type: main_score value: 68.06219999999999 - task: type: STS dataset: name: MTEB STS22 (pl-en) type: mteb/sts22-crosslingual-sts config: pl-en split: test revision: de9d86b3b84231dc21f76c7b7af1f28e2f57f6e3 metrics: - type: pearson value: 77.97500000000001 - type: spearman value: 76.848 - type: cosine_pearson value: 77.97500000000001 - type: cosine_spearman value: 76.848 - type: manhattan_pearson value: 76.4098 - type: manhattan_spearman value: 76.6188 - type: euclidean_pearson value: 77.17500000000001 - type: euclidean_spearman value: 76.848 - type: main_score value: 76.848 - task: type: STS dataset: name: MTEB STS22 (zh-en) type: mteb/sts22-crosslingual-sts config: zh-en split: test revision: de9d86b3b84231dc21f76c7b7af1f28e2f57f6e3 metrics: - type: pearson value: 71.3604 - type: spearman value: 70.7891 - type: cosine_pearson value: 71.3604 - type: cosine_spearman value: 70.7891 - type: manhattan_pearson value: 73.0185 - type: 
manhattan_spearman value: 70.79299999999999 - type: euclidean_pearson value: 73.17620000000001 - type: euclidean_spearman value: 70.7891 - type: main_score value: 70.7891 - task: type: STS dataset: name: MTEB STS22 (es-en) type: mteb/sts22-crosslingual-sts config: es-en split: test revision: de9d86b3b84231dc21f76c7b7af1f28e2f57f6e3 metrics: - type: pearson value: 77.58239999999999 - type: spearman value: 78.5907 - type: cosine_pearson value: 77.58239999999999 - type: cosine_spearman value: 78.5907 - type: manhattan_pearson value: 79.25720000000001 - type: manhattan_spearman value: 78.6249 - type: euclidean_pearson value: 79.3724 - type: euclidean_spearman value: 78.5907 - type: main_score value: 78.5907 - task: type: STS dataset: name: MTEB STS22 (de-en) type: mteb/sts22-crosslingual-sts config: de-en split: test revision: de9d86b3b84231dc21f76c7b7af1f28e2f57f6e3 metrics: - type: pearson value: 63.324000000000005 - type: spearman value: 55.1099 - type: cosine_pearson value: 63.324000000000005 - type: cosine_spearman value: 55.1099 - type: manhattan_pearson value: 67.3128 - type: manhattan_spearman value: 56.340199999999996 - type: euclidean_pearson value: 67.12089999999999 - type: euclidean_spearman value: 55.1099 - type: main_score value: 55.1099 - task: type: STS dataset: name: MTEB STSBenchmark (default) type: mteb/stsbenchmark-sts config: default split: test revision: b0fddb56ed78048fa8b90373c8a3cfc37b684831 metrics: - type: pearson value: 78.02329999999999 - type: spearman value: 79.1887 - type: cosine_pearson value: 78.02329999999999 - type: cosine_spearman value: 79.1887 - type: manhattan_pearson value: 78.8951 - type: manhattan_spearman value: 78.9444 - type: euclidean_pearson value: 79.1499 - type: euclidean_spearman value: 79.1888 - type: main_score value: 79.1887 - task: type: Reranking dataset: name: MTEB SciDocsRR (default) type: mteb/scidocs-reranking config: default split: test revision: d3c5e1fc0b855ab6097bf1cda04dd73947d7caab metrics: - type: map value: 78.7501 - type: mrr value: 93.9748 - type: nAUC_map_max value: 54.495599999999996 - type: nAUC_map_std value: 70.0377 - type: nAUC_map_diff1 value: 6.0146999999999995 - type: nAUC_mrr_max value: 81.1486 - type: nAUC_mrr_std value: 78.3478 - type: nAUC_mrr_diff1 value: 50.7613 - type: main_score value: 78.7501 - task: type: Retrieval dataset: name: MTEB SciFact (default) type: mteb/scifact config: default split: test revision: 0228b52cf27578f30900b9e5271d331663a030d7 metrics: - type: ndcg_at_1 value: 58.667 - type: ndcg_at_3 value: 66.022 - type: ndcg_at_5 value: 68.508 - type: ndcg_at_10 value: 70.586 - type: ndcg_at_20 value: 71.714 - type: ndcg_at_100 value: 72.81 - type: ndcg_at_1000 value: 73.482 - type: map_at_1 value: 55.594 - type: map_at_3 value: 63.2 - type: map_at_5 value: 64.996 - type: map_at_10 value: 65.988 - type: map_at_20 value: 66.347 - type: map_at_100 value: 66.526 - type: map_at_1000 value: 66.547 - type: recall_at_1 value: 55.594 - type: recall_at_3 value: 71.22800000000001 - type: recall_at_5 value: 77.078 - type: recall_at_10 value: 83.172 - type: recall_at_20 value: 87.422 - type: recall_at_100 value: 93.167 - type: recall_at_1000 value: 98.667 - type: precision_at_1 value: 58.667 - type: precision_at_3 value: 25.778000000000002 - type: precision_at_5 value: 17.333000000000002 - type: precision_at_10 value: 9.433 - type: precision_at_20 value: 4.967 - type: precision_at_100 value: 1.06 - type: precision_at_1000 value: 0.11199999999999999 - type: mrr_at_1 value: 58.666700000000006 - type: mrr_at_3 
value: 65.3889 - type: mrr_at_5 value: 66.62219999999999 - type: mrr_at_10 value: 67.3364 - type: mrr_at_20 value: 67.6046 - type: mrr_at_100 value: 67.73320000000001 - type: mrr_at_1000 value: 67.7526 - type: nauc_ndcg_at_1_max value: 60.2511 - type: nauc_ndcg_at_1_std value: 12.422 - type: nauc_ndcg_at_1_diff1 value: 74.4289 - type: nauc_ndcg_at_3_max value: 60.2109 - type: nauc_ndcg_at_3_std value: 11.0152 - type: nauc_ndcg_at_3_diff1 value: 71.0436 - type: nauc_ndcg_at_5_max value: 62.690999999999995 - type: nauc_ndcg_at_5_std value: 13.585700000000001 - type: nauc_ndcg_at_5_diff1 value: 70.4007 - type: nauc_ndcg_at_10_max value: 62.740899999999996 - type: nauc_ndcg_at_10_std value: 13.980400000000001 - type: nauc_ndcg_at_10_diff1 value: 70.0506 - type: nauc_ndcg_at_20_max value: 62.271699999999996 - type: nauc_ndcg_at_20_std value: 15.9756 - type: nauc_ndcg_at_20_diff1 value: 70.3237 - type: nauc_ndcg_at_100_max value: 62.125 - type: nauc_ndcg_at_100_std value: 15.5809 - type: nauc_ndcg_at_100_diff1 value: 70.4151 - type: nauc_ndcg_at_1000_max value: 61.9259 - type: nauc_ndcg_at_1000_std value: 15.3462 - type: nauc_ndcg_at_1000_diff1 value: 70.7346 - type: nauc_map_at_1_max value: 53.6767 - type: nauc_map_at_1_std value: 3.7751 - type: nauc_map_at_1_diff1 value: 74.60329999999999 - type: nauc_map_at_3_max value: 57.0403 - type: nauc_map_at_3_std value: 8.2272 - type: nauc_map_at_3_diff1 value: 71.7906 - type: nauc_map_at_5_max value: 59.6713 - type: nauc_map_at_5_std value: 10.8346 - type: nauc_map_at_5_diff1 value: 71.3356 - type: nauc_map_at_10_max value: 60.0086 - type: nauc_map_at_10_std value: 11.4394 - type: nauc_map_at_10_diff1 value: 71.14869999999999 - type: nauc_map_at_20_max value: 59.940599999999996 - type: nauc_map_at_20_std value: 12.0728 - type: nauc_map_at_20_diff1 value: 71.31 - type: nauc_map_at_100_max value: 59.95589999999999 - type: nauc_map_at_100_std value: 12.148299999999999 - type: nauc_map_at_100_diff1 value: 71.2142 - type: nauc_map_at_1000_max value: 59.9486 - type: nauc_map_at_1000_std value: 12.139 - type: nauc_map_at_1000_diff1 value: 71.2225 - type: nauc_recall_at_1_max value: 53.6767 - type: nauc_recall_at_1_std value: 3.7751 - type: nauc_recall_at_1_diff1 value: 74.60329999999999 - type: nauc_recall_at_3_max value: 60.4078 - type: nauc_recall_at_3_std value: 9.038300000000001 - type: nauc_recall_at_3_diff1 value: 67.60119999999999 - type: nauc_recall_at_5_max value: 68.0179 - type: nauc_recall_at_5_std value: 16.061600000000002 - type: nauc_recall_at_5_diff1 value: 65.54759999999999 - type: nauc_recall_at_10_max value: 68.7372 - type: nauc_recall_at_10_std value: 16.8637 - type: nauc_recall_at_10_diff1 value: 62.7613 - type: nauc_recall_at_20_max value: 67.1403 - type: nauc_recall_at_20_std value: 31.3919 - type: nauc_recall_at_20_diff1 value: 62.66929999999999 - type: nauc_recall_at_100_max value: 68.6366 - type: nauc_recall_at_100_std value: 32.4577 - type: nauc_recall_at_100_diff1 value: 64.52029999999999 - type: nauc_recall_at_1000_max value: 70.7166 - type: nauc_recall_at_1000_std value: 70.47149999999999 - type: nauc_recall_at_1000_diff1 value: 85.58590000000001 - type: nauc_precision_at_1_max value: 60.2511 - type: nauc_precision_at_1_std value: 12.422 - type: nauc_precision_at_1_diff1 value: 74.4289 - type: nauc_precision_at_3_max value: 58.75280000000001 - type: nauc_precision_at_3_std value: 27.605400000000003 - type: nauc_precision_at_3_diff1 value: 49.1523 - type: nauc_precision_at_5_max value: 56.4694 - type: nauc_precision_at_5_std 
value: 39.080799999999996 - type: nauc_precision_at_5_diff1 value: 28.8162 - type: nauc_precision_at_10_max value: 48.8894 - type: nauc_precision_at_10_std value: 43.8149 - type: nauc_precision_at_10_diff1 value: 15.0093 - type: nauc_precision_at_20_max value: 41.4059 - type: nauc_precision_at_20_std value: 50.7143 - type: nauc_precision_at_20_diff1 value: 8.3552 - type: nauc_precision_at_100_max value: 33.5064 - type: nauc_precision_at_100_std value: 52.8775 - type: nauc_precision_at_100_diff1 value: -5.0870999999999995 - type: nauc_precision_at_1000_max value: 23.9064 - type: nauc_precision_at_1000_std value: 57.784800000000004 - type: nauc_precision_at_1000_diff1 value: -20.1246 - type: nauc_mrr_at_1_max value: 60.2511 - type: nauc_mrr_at_1_std value: 12.422 - type: nauc_mrr_at_1_diff1 value: 74.4289 - type: nauc_mrr_at_3_max value: 62.663199999999996 - type: nauc_mrr_at_3_std value: 14.7348 - type: nauc_mrr_at_3_diff1 value: 72.1185 - type: nauc_mrr_at_5_max value: 63.3871 - type: nauc_mrr_at_5_std value: 15.773000000000001 - type: nauc_mrr_at_5_diff1 value: 71.6722 - type: nauc_mrr_at_10_max value: 62.8474 - type: nauc_mrr_at_10_std value: 15.1896 - type: nauc_mrr_at_10_diff1 value: 71.64110000000001 - type: nauc_mrr_at_20_max value: 62.699400000000004 - type: nauc_mrr_at_20_std value: 15.554499999999999 - type: nauc_mrr_at_20_diff1 value: 71.6049 - type: nauc_mrr_at_100_max value: 62.6665 - type: nauc_mrr_at_100_std value: 15.4586 - type: nauc_mrr_at_100_diff1 value: 71.6217 - type: nauc_mrr_at_1000_max value: 62.6641 - type: nauc_mrr_at_1000_std value: 15.4535 - type: nauc_mrr_at_1000_diff1 value: 71.6307 - type: main_score value: 70.586 - task: type: PairClassification dataset: name: MTEB SprintDuplicateQuestions (default) type: mteb/sprintduplicatequestions-pairclassification config: default split: test revision: d66bd1f72af766a5cc4b0ca5e00c162f89e8cc46 metrics: - type: similarity_accuracy value: 99.8416 - type: similarity_accuracy_threshold value: 74.52069999999999 - type: similarity_f1 value: 92.008 - type: similarity_f1_threshold value: 74.4529 - type: similarity_precision value: 91.9162 - type: similarity_recall value: 92.10000000000001 - type: similarity_ap value: 96.54209999999999 - type: cosine_accuracy value: 99.8416 - type: cosine_accuracy_threshold value: 74.52069999999999 - type: cosine_f1 value: 92.008 - type: cosine_f1_threshold value: 74.4529 - type: cosine_precision value: 91.9162 - type: cosine_recall value: 92.10000000000001 - type: cosine_ap value: 96.54209999999999 - type: manhattan_accuracy value: 99.8446 - type: manhattan_accuracy_threshold value: 1784.866 - type: manhattan_f1 value: 92.1539 - type: manhattan_f1_threshold value: 1787.6774 - type: manhattan_precision value: 92.1079 - type: manhattan_recall value: 92.2 - type: manhattan_ap value: 96.5207 - type: euclidean_accuracy value: 99.8416 - type: euclidean_accuracy_threshold value: 71.3853 - type: euclidean_f1 value: 92.008 - type: euclidean_f1_threshold value: 71.4803 - type: euclidean_precision value: 91.9162 - type: euclidean_recall value: 92.10000000000001 - type: euclidean_ap value: 96.54209999999999 - type: dot_accuracy value: 99.8416 - type: dot_accuracy_threshold value: 74.52069999999999 - type: dot_f1 value: 92.008 - type: dot_f1_threshold value: 74.4528 - type: dot_precision value: 91.9162 - type: dot_recall value: 92.10000000000001 - type: dot_ap value: 96.54209999999999 - type: max_accuracy value: 99.8446 - type: max_f1 value: 92.1539 - type: max_precision value: 92.1079 - type: max_recall 
value: 92.2 - type: max_ap value: 96.54209999999999 - type: main_score value: 96.54209999999999 - task: type: Clustering dataset: name: MTEB StackExchangeClustering (default) type: mteb/stackexchange-clustering config: default split: test revision: 6cbc1f7b2bc0622f2e39d2c77fa502909748c259 metrics: - type: v_measure value: 63.4035 - type: v_measure_std value: 4.758 - type: main_score value: 63.4035 - task: type: Clustering dataset: name: MTEB StackExchangeClusteringP2P (default) type: mteb/stackexchange-clustering-p2p config: default split: test revision: 815ca46b2622cec33ccafc3735d572c266efdb44 metrics: - type: v_measure value: 36.288599999999995 - type: v_measure_std value: 1.3107 - type: main_score value: 36.288599999999995 - task: type: Reranking dataset: name: MTEB StackOverflowDupQuestions (default) type: mteb/stackoverflowdupquestions-reranking config: default split: test revision: e185fbe320c72810689fc5848eb6114e1ef5ec69 metrics: - type: map value: 51.457699999999996 - type: mrr value: 52.374500000000005 - type: nAUC_map_max value: 12.912399999999998 - type: nAUC_map_std value: 6.4524 - type: nAUC_map_diff1 value: 37.2785 - type: nAUC_mrr_max value: 13.333999999999998 - type: nAUC_mrr_std value: 7.0440000000000005 - type: nAUC_mrr_diff1 value: 37.2993 - type: main_score value: 51.457699999999996 - task: type: Summarization dataset: name: MTEB SummEval (default) type: mteb/summeval config: default split: test revision: cda12ad7615edc362dbf25a00fdd61d3b1eaf93c metrics: - type: pearson value: 29.7101 - type: spearman value: 30.514200000000002 - type: cosine_spearman value: 30.514200000000002 - type: cosine_pearson value: 29.7101 - type: dot_spearman value: 30.514200000000002 - type: dot_pearson value: 29.7101 - type: main_score value: 30.514200000000002 - task: type: Retrieval dataset: name: MTEB TRECCOVID (default) type: mteb/trec-covid config: default split: test revision: bb9466bac8153a0349341eb1b22e06409e78ef4e metrics: - type: ndcg_at_1 value: 86.0 - type: ndcg_at_3 value: 86.542 - type: ndcg_at_5 value: 85.297 - type: ndcg_at_10 value: 83.866 - type: ndcg_at_20 value: 80.553 - type: ndcg_at_100 value: 65.091 - type: ndcg_at_1000 value: 57.86900000000001 - type: map_at_1 value: 0.23500000000000001 - type: map_at_3 value: 0.7100000000000001 - type: map_at_5 value: 1.1440000000000001 - type: map_at_10 value: 2.185 - type: map_at_20 value: 4.004 - type: map_at_100 value: 13.25 - type: map_at_1000 value: 32.668 - type: recall_at_1 value: 0.23500000000000001 - type: recall_at_3 value: 0.736 - type: recall_at_5 value: 1.191 - type: recall_at_10 value: 2.323 - type: recall_at_20 value: 4.390000000000001 - type: recall_at_100 value: 15.962000000000002 - type: recall_at_1000 value: 54.290000000000006 - type: precision_at_1 value: 90.0 - type: precision_at_3 value: 92.0 - type: precision_at_5 value: 90.0 - type: precision_at_10 value: 88.6 - type: precision_at_20 value: 85.5 - type: precision_at_100 value: 67.14 - type: precision_at_1000 value: 25.81 - type: mrr_at_1 value: 90.0 - type: mrr_at_3 value: 94.6667 - type: mrr_at_5 value: 94.6667 - type: mrr_at_10 value: 94.6667 - type: mrr_at_20 value: 94.6667 - type: mrr_at_100 value: 94.6667 - type: mrr_at_1000 value: 94.6667 - type: nauc_ndcg_at_1_max value: -0.0208 - type: nauc_ndcg_at_1_std value: 9.228200000000001 - type: nauc_ndcg_at_1_diff1 value: -7.4962 - type: nauc_ndcg_at_3_max value: 16.5755 - type: nauc_ndcg_at_3_std value: 39.0511 - type: nauc_ndcg_at_3_diff1 value: -14.5975 - type: nauc_ndcg_at_5_max value: 15.326799999999999 - 
type: nauc_ndcg_at_5_std value: 44.2523 - type: nauc_ndcg_at_5_diff1 value: -15.004600000000002 - type: nauc_ndcg_at_10_max value: 34.5609 - type: nauc_ndcg_at_10_std value: 62.8752 - type: nauc_ndcg_at_10_diff1 value: -22.9907 - type: nauc_ndcg_at_20_max value: 35.7633 - type: nauc_ndcg_at_20_std value: 74.1826 - type: nauc_ndcg_at_20_diff1 value: -26.3264 - type: nauc_ndcg_at_100_max value: 36.939499999999995 - type: nauc_ndcg_at_100_std value: 80.702 - type: nauc_ndcg_at_100_diff1 value: -41.7784 - type: nauc_ndcg_at_1000_max value: 41.3313 - type: nauc_ndcg_at_1000_std value: 68.0671 - type: nauc_ndcg_at_1000_diff1 value: -14.6009 - type: nauc_map_at_1_max value: -15.2873 - type: nauc_map_at_1_std value: -24.4781 - type: nauc_map_at_1_diff1 value: 35.4803 - type: nauc_map_at_3_max value: -14.107700000000001 - type: nauc_map_at_3_std value: -23.197699999999998 - type: nauc_map_at_3_diff1 value: 37.8596 - type: nauc_map_at_5_max value: -12.7588 - type: nauc_map_at_5_std value: -20.174400000000002 - type: nauc_map_at_5_diff1 value: 39.575700000000005 - type: nauc_map_at_10_max value: -4.8804 - type: nauc_map_at_10_std value: -11.0753 - type: nauc_map_at_10_diff1 value: 38.2457 - type: nauc_map_at_20_max value: 0.7396 - type: nauc_map_at_20_std value: 0.3599 - type: nauc_map_at_20_diff1 value: 35.4735 - type: nauc_map_at_100_max value: 20.011000000000003 - type: nauc_map_at_100_std value: 45.2654 - type: nauc_map_at_100_diff1 value: 3.6394 - type: nauc_map_at_1000_max value: 43.317099999999996 - type: nauc_map_at_1000_std value: 74.6629 - type: nauc_map_at_1000_diff1 value: -22.509 - type: nauc_recall_at_1_max value: -15.2873 - type: nauc_recall_at_1_std value: -24.4781 - type: nauc_recall_at_1_diff1 value: 35.4803 - type: nauc_recall_at_3_max value: -14.1509 - type: nauc_recall_at_3_std value: -24.7684 - type: nauc_recall_at_3_diff1 value: 40.6736 - type: nauc_recall_at_5_max value: -13.053899999999999 - type: nauc_recall_at_5_std value: -21.7134 - type: nauc_recall_at_5_diff1 value: 42.4446 - type: nauc_recall_at_10_max value: -7.3492 - type: nauc_recall_at_10_std value: -15.7989 - type: nauc_recall_at_10_diff1 value: 41.6543 - type: nauc_recall_at_20_max value: -4.8004 - type: nauc_recall_at_20_std value: -9.6834 - type: nauc_recall_at_20_diff1 value: 41.7323 - type: nauc_recall_at_100_max value: 11.3356 - type: nauc_recall_at_100_std value: 28.1118 - type: nauc_recall_at_100_diff1 value: 15.6166 - type: nauc_recall_at_1000_max value: 39.9341 - type: nauc_recall_at_1000_std value: 54.15410000000001 - type: nauc_recall_at_1000_diff1 value: -2.0016 - type: nauc_precision_at_1_max value: 12.2035 - type: nauc_precision_at_1_std value: 24.1923 - type: nauc_precision_at_1_diff1 value: -25.368800000000004 - type: nauc_precision_at_3_max value: 31.019600000000004 - type: nauc_precision_at_3_std value: 56.08539999999999 - type: nauc_precision_at_3_diff1 value: -33.821600000000004 - type: nauc_precision_at_5_max value: 26.127699999999997 - type: nauc_precision_at_5_std value: 52.8458 - type: nauc_precision_at_5_diff1 value: -22.24 - type: nauc_precision_at_10_max value: 45.8122 - type: nauc_precision_at_10_std value: 71.9086 - type: nauc_precision_at_10_diff1 value: -28.500700000000002 - type: nauc_precision_at_20_max value: 44.2567 - type: nauc_precision_at_20_std value: 80.86410000000001 - type: nauc_precision_at_20_diff1 value: -28.518 - type: nauc_precision_at_100_max value: 42.8044 - type: nauc_precision_at_100_std value: 84.13669999999999 - type: nauc_precision_at_100_diff1 value: -47.1098 
- type: nauc_precision_at_1000_max value: 40.260200000000005 - type: nauc_precision_at_1000_std value: 53.53059999999999 - type: nauc_precision_at_1000_diff1 value: -41.2652 - type: nauc_mrr_at_1_max value: 12.2035 - type: nauc_mrr_at_1_std value: 24.1923 - type: nauc_mrr_at_1_diff1 value: -25.368800000000004 - type: nauc_mrr_at_3_max value: 16.8738 - type: nauc_mrr_at_3_std value: 28.113300000000002 - type: nauc_mrr_at_3_diff1 value: -20.3198 - type: nauc_mrr_at_5_max value: 16.8738 - type: nauc_mrr_at_5_std value: 28.113300000000002 - type: nauc_mrr_at_5_diff1 value: -20.3198 - type: nauc_mrr_at_10_max value: 16.8738 - type: nauc_mrr_at_10_std value: 28.113300000000002 - type: nauc_mrr_at_10_diff1 value: -20.3198 - type: nauc_mrr_at_20_max value: 16.8738 - type: nauc_mrr_at_20_std value: 28.113300000000002 - type: nauc_mrr_at_20_diff1 value: -20.3198 - type: nauc_mrr_at_100_max value: 16.8738 - type: nauc_mrr_at_100_std value: 28.113300000000002 - type: nauc_mrr_at_100_diff1 value: -20.3198 - type: nauc_mrr_at_1000_max value: 16.8738 - type: nauc_mrr_at_1000_std value: 28.113300000000002 - type: nauc_mrr_at_1000_diff1 value: -20.3198 - type: main_score value: 83.866 - task: type: Retrieval dataset: name: MTEB Touche2020 (default) type: mteb/touche2020 config: default split: test revision: a34f9a33db75fa0cbb21bb5cfc3dae8dc8bec93f metrics: - type: ndcg_at_1 value: 38.775999999999996 - type: ndcg_at_3 value: 33.664 - type: ndcg_at_5 value: 31.61 - type: ndcg_at_10 value: 29.499 - type: ndcg_at_20 value: 29.772 - type: ndcg_at_100 value: 39.845000000000006 - type: ndcg_at_1000 value: 51.141999999999996 - type: map_at_1 value: 3.004 - type: map_at_3 value: 6.027 - type: map_at_5 value: 7.993 - type: map_at_10 value: 11.546 - type: map_at_20 value: 14.185 - type: map_at_100 value: 17.698 - type: map_at_1000 value: 19.364 - type: recall_at_1 value: 3.004 - type: recall_at_3 value: 7.178 - type: recall_at_5 value: 11.196 - type: recall_at_10 value: 18.584999999999997 - type: recall_at_20 value: 26.845999999999997 - type: recall_at_100 value: 49.025 - type: recall_at_1000 value: 82.884 - type: precision_at_1 value: 40.816 - type: precision_at_3 value: 33.333 - type: precision_at_5 value: 30.612000000000002 - type: precision_at_10 value: 25.714 - type: precision_at_20 value: 19.387999999999998 - type: precision_at_100 value: 7.939 - type: precision_at_1000 value: 1.545 - type: mrr_at_1 value: 40.8163 - type: mrr_at_3 value: 53.401399999999995 - type: mrr_at_5 value: 56.7687 - type: mrr_at_10 value: 57.5421 - type: mrr_at_20 value: 58.142 - type: mrr_at_100 value: 58.2307 - type: mrr_at_1000 value: 58.2307 - type: nauc_ndcg_at_1_max value: -18.0584 - type: nauc_ndcg_at_1_std value: -25.634600000000002 - type: nauc_ndcg_at_1_diff1 value: -1.7021000000000002 - type: nauc_ndcg_at_3_max value: -17.8622 - type: nauc_ndcg_at_3_std value: -20.119799999999998 - type: nauc_ndcg_at_3_diff1 value: -2.399 - type: nauc_ndcg_at_5_max value: -22.0829 - type: nauc_ndcg_at_5_std value: -22.841 - type: nauc_ndcg_at_5_diff1 value: -12.350200000000001 - type: nauc_ndcg_at_10_max value: -17.858999999999998 - type: nauc_ndcg_at_10_std value: -17.9067 - type: nauc_ndcg_at_10_diff1 value: -9.3129 - type: nauc_ndcg_at_20_max value: -24.479400000000002 - type: nauc_ndcg_at_20_std value: -16.06 - type: nauc_ndcg_at_20_diff1 value: -10.57 - type: nauc_ndcg_at_100_max value: -20.9167 - type: nauc_ndcg_at_100_std value: 9.6051 - type: nauc_ndcg_at_100_diff1 value: -0.2363 - type: nauc_ndcg_at_1000_max value: -13.6708 - type: 
nauc_ndcg_at_1000_std value: 17.956 - type: nauc_ndcg_at_1000_diff1 value: -2.5696 - type: nauc_map_at_1_max value: -14.276900000000001 - type: nauc_map_at_1_std value: -31.3091 - type: nauc_map_at_1_diff1 value: -1.4354 - type: nauc_map_at_3_max value: -21.7098 - type: nauc_map_at_3_std value: -32.112899999999996 - type: nauc_map_at_3_diff1 value: -8.846 - type: nauc_map_at_5_max value: -16.700200000000002 - type: nauc_map_at_5_std value: -32.643499999999996 - type: nauc_map_at_5_diff1 value: -13.9766 - type: nauc_map_at_10_max value: -13.415199999999999 - type: nauc_map_at_10_std value: -28.459200000000003 - type: nauc_map_at_10_diff1 value: -12.4042 - type: nauc_map_at_20_max value: -17.8629 - type: nauc_map_at_20_std value: -24.5837 - type: nauc_map_at_20_diff1 value: -14.9642 - type: nauc_map_at_100_max value: -15.6478 - type: nauc_map_at_100_std value: -11.4237 - type: nauc_map_at_100_diff1 value: -11.542 - type: nauc_map_at_1000_max value: -15.2149 - type: nauc_map_at_1000_std value: -8.0384 - type: nauc_map_at_1000_diff1 value: -12.984000000000002 - type: nauc_recall_at_1_max value: -14.276900000000001 - type: nauc_recall_at_1_std value: -31.3091 - type: nauc_recall_at_1_diff1 value: -1.4354 - type: nauc_recall_at_3_max value: -23.021900000000002 - type: nauc_recall_at_3_std value: -30.2834 - type: nauc_recall_at_3_diff1 value: -11.4226 - type: nauc_recall_at_5_max value: -20.596600000000002 - type: nauc_recall_at_5_std value: -33.219300000000004 - type: nauc_recall_at_5_diff1 value: -17.718999999999998 - type: nauc_recall_at_10_max value: -16.1214 - type: nauc_recall_at_10_std value: -23.9041 - type: nauc_recall_at_10_diff1 value: -11.047 - type: nauc_recall_at_20_max value: -25.603399999999997 - type: nauc_recall_at_20_std value: -15.8105 - type: nauc_recall_at_20_diff1 value: -14.546000000000001 - type: nauc_recall_at_100_max value: -16.389400000000002 - type: nauc_recall_at_100_std value: 28.5141 - type: nauc_recall_at_100_diff1 value: 6.1868 - type: nauc_recall_at_1000_max value: 11.022 - type: nauc_recall_at_1000_std value: 68.0021 - type: nauc_recall_at_1000_diff1 value: 8.426 - type: nauc_precision_at_1_max value: -17.1625 - type: nauc_precision_at_1_std value: -27.9451 - type: nauc_precision_at_1_diff1 value: 1.0831 - type: nauc_precision_at_3_max value: -17.2798 - type: nauc_precision_at_3_std value: -20.347199999999997 - type: nauc_precision_at_3_diff1 value: -5.2689 - type: nauc_precision_at_5_max value: -19.6408 - type: nauc_precision_at_5_std value: -24.157 - type: nauc_precision_at_5_diff1 value: -20.274900000000002 - type: nauc_precision_at_10_max value: -11.8033 - type: nauc_precision_at_10_std value: -7.2727 - type: nauc_precision_at_10_diff1 value: -9.3776 - type: nauc_precision_at_20_max value: -20.1541 - type: nauc_precision_at_20_std value: 9.0645 - type: nauc_precision_at_20_diff1 value: -16.1323 - type: nauc_precision_at_100_max value: 0.3701 - type: nauc_precision_at_100_std value: 67.6941 - type: nauc_precision_at_100_diff1 value: 8.0336 - type: nauc_precision_at_1000_max value: 38.8632 - type: nauc_precision_at_1000_std value: 38.0504 - type: nauc_precision_at_1000_diff1 value: 0.5907 - type: nauc_mrr_at_1_max value: -17.1625 - type: nauc_mrr_at_1_std value: -27.9451 - type: nauc_mrr_at_1_diff1 value: 1.0831 - type: nauc_mrr_at_3_max value: -20.479300000000002 - type: nauc_mrr_at_3_std value: -21.9225 - type: nauc_mrr_at_3_diff1 value: -1.5211000000000001 - type: nauc_mrr_at_5_max value: -24.8175 - type: nauc_mrr_at_5_std value: -23.805 - type: 
nauc_mrr_at_5_diff1 value: -7.9258 - type: nauc_mrr_at_10_max value: -22.53 - type: nauc_mrr_at_10_std value: -21.9391 - type: nauc_mrr_at_10_diff1 value: -5.7533 - type: nauc_mrr_at_20_max value: -22.7064 - type: nauc_mrr_at_20_std value: -22.4697 - type: nauc_mrr_at_20_diff1 value: -5.7068 - type: nauc_mrr_at_100_max value: -23.0016 - type: nauc_mrr_at_100_std value: -22.488 - type: nauc_mrr_at_100_diff1 value: -5.3738 - type: nauc_mrr_at_1000_max value: -23.0016 - type: nauc_mrr_at_1000_std value: -22.488 - type: nauc_mrr_at_1000_diff1 value: -5.3738 - type: main_score value: 29.499 - task: type: Classification dataset: name: MTEB ToxicConversationsClassification (default) type: mteb/toxic_conversations_50k config: default split: test revision: edfaf9da55d3dd50d43143d90c1ac476895ae6de metrics: - type: accuracy value: 65.8643 - type: f1 value: 50.6764 - type: f1_weighted value: 73.2472 - type: ap value: 12.2658 - type: ap_weighted value: 12.2658 - type: main_score value: 65.8643 - task: type: Classification dataset: name: MTEB TweetSentimentExtractionClassification (default) type: mteb/tweet_sentiment_extraction config: default split: test revision: d604517c81ca91fe16a244d1248fc021f9ecee7a metrics: - type: accuracy value: 59.6633 - type: f1 value: 59.935700000000004 - type: f1_weighted value: 59.0249 - type: main_score value: 59.6633 - task: type: Clustering dataset: name: MTEB TwentyNewsgroupsClustering (default) type: mteb/twentynewsgroups-clustering config: default split: test revision: 6125ec4e24fa026cec8a478383ee943acfbd5449 metrics: - type: v_measure value: 43.2311 - type: v_measure_std value: 2.3994999999999997 - type: main_score value: 43.2311 - task: type: PairClassification dataset: name: MTEB TwitterSemEval2015 (default) type: mteb/twittersemeval2015-pairclassification config: default split: test revision: 70970daeab8776df92f5ea462b6173c0b46fd2d1 metrics: - type: similarity_accuracy value: 83.8469 - type: similarity_accuracy_threshold value: 77.6695 - type: similarity_f1 value: 62.3159 - type: similarity_f1_threshold value: 71.6554 - type: similarity_precision value: 59.114599999999996 - type: similarity_recall value: 65.8839 - type: similarity_ap value: 67.00930000000001 - type: cosine_accuracy value: 83.8469 - type: cosine_accuracy_threshold value: 77.6695 - type: cosine_f1 value: 62.3159 - type: cosine_f1_threshold value: 71.6554 - type: cosine_precision value: 59.114599999999996 - type: cosine_recall value: 65.8839 - type: cosine_ap value: 67.00930000000001 - type: manhattan_accuracy value: 83.7694 - type: manhattan_accuracy_threshold value: 1677.8293999999999 - type: manhattan_f1 value: 62.1324 - type: manhattan_f1_threshold value: 1848.6641 - type: manhattan_precision value: 61.839999999999996 - type: manhattan_recall value: 62.4274 - type: manhattan_ap value: 66.8849 - type: euclidean_accuracy value: 83.8469 - type: euclidean_accuracy_threshold value: 66.8288 - type: euclidean_f1 value: 62.3159 - type: euclidean_f1_threshold value: 75.2922 - type: euclidean_precision value: 59.114599999999996 - type: euclidean_recall value: 65.8839 - type: euclidean_ap value: 67.00930000000001 - type: dot_accuracy value: 83.8469 - type: dot_accuracy_threshold value: 77.6695 - type: dot_f1 value: 62.3159 - type: dot_f1_threshold value: 71.6554 - type: dot_precision value: 59.114599999999996 - type: dot_recall value: 65.8839 - type: dot_ap value: 67.00930000000001 - type: max_accuracy value: 83.8469 - type: max_f1 value: 62.3159 - type: max_precision value: 61.839999999999996 - type: 
max_recall value: 65.8839 - type: max_ap value: 67.00930000000001 - type: main_score value: 67.00930000000001 - task: type: PairClassification dataset: name: MTEB TwitterURLCorpus (default) type: mteb/twitterurlcorpus-pairclassification config: default split: test revision: 8b6510b0b1fa4e4c4f879467980e9be563ec1cdf metrics: - type: similarity_accuracy value: 88.8811 - type: similarity_accuracy_threshold value: 71.1053 - type: similarity_f1 value: 77.9005 - type: similarity_f1_threshold value: 67.5068 - type: similarity_precision value: 75.5609 - type: similarity_recall value: 80.3896 - type: similarity_ap value: 85.459 - type: cosine_accuracy value: 88.8811 - type: cosine_accuracy_threshold value: 71.1053 - type: cosine_f1 value: 77.9005 - type: cosine_f1_threshold value: 67.5068 - type: cosine_precision value: 75.5609 - type: cosine_recall value: 80.3896 - type: cosine_ap value: 85.459 - type: manhattan_accuracy value: 88.8598 - type: manhattan_accuracy_threshold value: 1928.9173 - type: manhattan_f1 value: 77.9172 - type: manhattan_f1_threshold value: 2007.8883999999998 - type: manhattan_precision value: 76.29310000000001 - type: manhattan_recall value: 79.6119 - type: manhattan_ap value: 85.4464 - type: euclidean_accuracy value: 88.8811 - type: euclidean_accuracy_threshold value: 76.0193 - type: euclidean_f1 value: 77.9005 - type: euclidean_f1_threshold value: 80.6141 - type: euclidean_precision value: 75.5609 - type: euclidean_recall value: 80.3896 - type: euclidean_ap value: 85.459 - type: dot_accuracy value: 88.8811 - type: dot_accuracy_threshold value: 71.1053 - type: dot_f1 value: 77.9005 - type: dot_f1_threshold value: 67.5068 - type: dot_precision value: 75.5609 - type: dot_recall value: 80.3896 - type: dot_ap value: 85.459 - type: max_accuracy value: 88.8811 - type: max_f1 value: 77.9172 - type: max_precision value: 76.29310000000001 - type: max_recall value: 80.3896 - type: max_ap value: 85.459 - type: main_score value: 85.459 --- <h1 align="center">Snowflake's Arctic-embed-l-v2.0</h1> <h4 align="center"> <p> <a href=#news>News</a> | <a href=#models>Models</a> | <a href=#usage>Usage</a> | <a href="#evaluation">Evaluation</a> | <a href="#contact">Contact</a> | <a href="#faq">FAQ</a> <a href="#license">License</a> | <a href="#acknowledgement">Acknowledgement</a> <p> </h4> <img referrerpolicy="no-referrer-when-downgrade" src="https://static.scarf.sh/a.png?x-pxid=18f5b1a3-da66-4f25-92d3-21da829509c3" /> ## News - 12/11/2024: Release of [Technical Report](https://arxiv.org/abs/2412.04506) - 12/04/2024: Release of [snowflake-arctic-embed-l-v2.0](https://huggingface.co/Snowflake/snowflake-arctic-embed-l-v2.0) and [snowflake-arctic-embed-m-v2.0](https://huggingface.co/Snowflake/snowflake-arctic-embed-m-v2.0) our newest models with multilingual workloads in mind. ## Models Snowflake arctic-embed-l-v2.0 is the newest addition to the suite of embedding models Snowflake has released optimizing for retrieval performance and inference efficiency. Arctic Embed 2.0 introduces a new standard for multilingual embedding models, combining high-quality multilingual text retrieval without sacrificing performance in English. Released under the permissive Apache 2.0 license, Arctic Embed 2.0 is ideal for applications that demand reliable, enterprise-grade multilingual search and retrieval at scale. Key Features: 1. Multilingual without compromise: Excels in English and non-English retrieval, outperforming leading open-source and proprietary models on benchmarks like MTEB Retrieval, CLEF, and MIRACL. 2. 
Inference efficiency: With only 303M non-embedding parameters, inference is fast and efficient at any scale. 3. Compression-friendly: Achieves high-quality retrieval with embeddings as small as 128 bytes/vector using Matryoshka Representation Learning (MRL) and quantization-aware embedding training. 4. Drop-In Replacement: arctic-embed-l-v2.0 builds on [BAAI/bge-m3-retromae](https://huggingface.co/BAAI/bge-m3-retromae), which allows it to serve as a direct drop-in replacement for inference with existing libraries, kernels, inference engines, etc. 5. Long Context Support: arctic-embed-l-v2.0 builds on [BAAI/bge-m3-retromae](https://huggingface.co/BAAI/bge-m3-retromae), which can support a context window of up to 8192 tokens via the use of RoPE. ### Quality Benchmarks Unlike most other open-source models, Arctic-embed-l-v2.0 excels across English (via MTEB Retrieval) and multilingual (via MIRACL and CLEF) retrieval. You no longer need multiple models to deliver high-quality English and multilingual retrieval. All numbers mentioned below are the average NDCG@10 across the datasets being discussed. | Model Name | # params | # non-emb params | # dimensions | BEIR (15) | MIRACL (4) | CLEF (Focused) | CLEF (Full) | |---|:---:|:---:|:---:|:---:|:---:|:---:|:---:| | **snowflake-arctic-l-v2.0** | 568M | 303M | 1024 | **55.6** | 55.8 | **52.9** | **54.3** | | snowflake-arctic-m | 109M | 86M | 768 | 54.9 | 24.9 | 34.4 | 29.1 | | snowflake-arctic-l | 335M | 303M | 1024 | 56.0 | 34.8 | 38.2 | 33.7 | | me5 base | 560M | 303M | 1024 | 51.4 | 54.0 | 43.0 | 34.6 | | bge-m3 (BAAI) | 568M | 303M | 1024 | 48.8 | **56.8** | 40.8 | 41.3 | | gte (Alibaba) | 305M | 113M | 768 | 51.1 | 52.3 | 47.7 | 53.1 | Aside from high-quality retrieval, Arctic delivers embeddings that are easily compressible. Leverage vector truncation via MRL to decrease vector size by 4x with less than 3% degradation in quality. Combine MRL-truncated vectors with vector compression (Int4) to power retrieval in 128 bytes per doc (see the truncation sketch at the end of this card). | Model | # dimensions | BEIR (15) | Relative Performance | MIRACL (4) | Relative Performance | CLEF (5) | Relative Performance | CLEF (Full) | Relative Performance | |---|:---:|:---:|:---:|:---:|:---:|:---:|:---:|:---:|:---:| | snowflake-arctic-l-v2.0 | 1024 | 55.6 | N/A | 55.8 | N/A | 52.9 | N/A | 54.3 | N/A | | snowflake-arctic-l-v2.0 | 256 | 54.3 | -0.18% | 54.3 | -2.70% | 51.9 | -1.81% | 53.4 | -1.53% | ## Usage ### Using Sentence Transformers ```python from sentence_transformers import SentenceTransformer # Load the model model_name = 'Snowflake/snowflake-arctic-embed-l-v2.0' model = SentenceTransformer(model_name) # Define the queries and documents queries = ['what is snowflake?', 'Where can I get the best tacos?'] documents = ['The Data Cloud!', 'Mexico City of Course!'] # Compute embeddings: use `prompt_name="query"` to encode queries! query_embeddings = model.encode(queries, prompt_name="query") document_embeddings = model.encode(documents) # Compute cosine similarity scores scores = model.similarity(query_embeddings, document_embeddings) # Output the results for query, query_scores in zip(queries, scores): doc_score_pairs = list(zip(documents, query_scores)) doc_score_pairs = sorted(doc_score_pairs, key=lambda x: x[1], reverse=True) print("Query:", query) for document, score in doc_score_pairs: print(score, document) ``` ### Using Huggingface Transformers You can use the transformers package to use Snowflake's arctic-embed model, as shown below. For optimal retrieval quality, use the CLS token to embed each text portion and use the query prefix below (just on the query).
```python import torch from transformers import AutoModel, AutoTokenizer model_name = 'Snowflake/snowflake-arctic-embed-l-v2.0' tokenizer = AutoTokenizer.from_pretrained(model_name) model = AutoModel.from_pretrained(model_name, add_pooling_layer=False) model.eval() query_prefix = 'query: ' queries = ['what is snowflake?', 'Where can I get the best tacos?'] queries_with_prefix = ["{}{}".format(query_prefix, i) for i in queries] query_tokens = tokenizer(queries_with_prefix, padding=True, truncation=True, return_tensors='pt', max_length=8192) documents = ['The Data Cloud!', 'Mexico City of Course!'] document_tokens = tokenizer(documents, padding=True, truncation=True, return_tensors='pt', max_length=8192) # Compute token embeddings with torch.no_grad(): query_embeddings = model(**query_tokens)[0][:, 0] document_embeddings = model(**document_tokens)[0][:, 0] # normalize embeddings query_embeddings = torch.nn.functional.normalize(query_embeddings, p=2, dim=1) document_embeddings = torch.nn.functional.normalize(document_embeddings, p=2, dim=1) scores = torch.mm(query_embeddings, document_embeddings.transpose(0, 1)) for query, query_scores in zip(queries, scores): doc_score_pairs = list(zip(documents, query_scores)) doc_score_pairs = sorted(doc_score_pairs, key=lambda x: x[1], reverse=True) #Output passages & scores print("Query:", query) for document, score in doc_score_pairs: print(score, document) ``` This should produce the following scores ``` Query: what is snowflake? tensor(0.2715) The Data Cloud! tensor(0.0661) Mexico City of Course! Query: Where can I get the best tacos? tensor(0.2797) Mexico City of Course! tensor(0.1250) The Data Cloud! ``` ### Using Huggingface Transformers.js If you haven't already, you can install the [Transformers.js](https://huggingface.co/docs/transformers.js) JavaScript library from [NPM](https://www.npmjs.com/package/@huggingface/transformers) using: ```bash npm i @huggingface/transformers ``` You can then use the model for retrieval, as follows: ```js import { pipeline, dot } from '@huggingface/transformers'; // Create feature extraction pipeline const extractor = await pipeline('feature-extraction', 'Snowflake/snowflake-arctic-embed-m-v2.0', { dtype: 'q8', }); // Generate sentence embeddings const sentences = [ 'query: what is snowflake?', 'The Data Cloud!', 'Mexico City of Course!', ] const output = await extractor(sentences, { normalize: true, pooling: 'cls' }); // Compute similarity scores const [source_embeddings, ...document_embeddings ] = output.tolist(); const similarities = document_embeddings.map(x => dot(source_embeddings, x)); console.log(similarities); // [0.24783534471401417, 0.05313122704326892] ``` ## Contact Feel free to open an issue or pull request if you have any questions or suggestions about this project. You also can email Daniel Campos([email protected]). ## License Arctic is licensed under the [Apache-2](https://www.apache.org/licenses/LICENSE-2.0). The released models can be used for commercial purposes free of charge.
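## Appendix: MRL Truncation Sketch

The compression figures above come from keeping only the first 256 Matryoshka dimensions of the 1024-dimensional embeddings and then quantizing them. The snippet below is a minimal, illustrative sketch of that recipe, reusing the sentence-transformers setup from the Usage section; the symmetric int8 quantization step and the variable names are assumptions made for illustration and are not the exact Int4 scheme behind the 128-bytes-per-vector figure.

```python
import numpy as np
from sentence_transformers import SentenceTransformer

# Same model as in the Usage section above.
model = SentenceTransformer('Snowflake/snowflake-arctic-embed-l-v2.0')

documents = ['The Data Cloud!', 'Mexico City of Course!']
embeddings = model.encode(documents)  # float32 array of shape (2, 1024)

# MRL truncation: keep the first 256 dimensions, then re-normalize so that
# dot products remain cosine similarities.
truncated = embeddings[:, :256]
truncated = truncated / np.linalg.norm(truncated, axis=1, keepdims=True)

# Illustrative symmetric int8 quantization (256 bytes per vector). Packing two
# 4-bit values per byte, as in the Int4 setting above, would halve this to 128 bytes.
scale = np.abs(truncated).max()
quantized = np.round(truncated / scale * 127).astype(np.int8)

# Approximate cosine similarities can still be recovered from the compressed vectors.
approx_scores = (quantized.astype(np.float32) @ quantized.astype(np.float32).T) * (scale / 127) ** 2
print(quantized.shape, quantized.dtype)  # (2, 256) int8
print(approx_scores)
```

At query time, queries would be embedded with `prompt_name="query"` as in the Usage section, then truncated and scaled the same way before scoring against the compressed document vectors.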
[ "BIOSSES", "SCIFACT" ]
minishlab/M2V_base_output
minishlab
null
[ "model2vec", "onnx", "safetensors", "embeddings", "static-embeddings", "mteb", "sentence-transformers", "en", "base_model:BAAI/bge-base-en-v1.5", "base_model:quantized:BAAI/bge-base-en-v1.5", "license:mit", "model-index", "region:us" ]
"2024-09-19T18:03:34Z"
2025-01-21T19:16:22+00:00
76,963
10
--- base_model: - BAAI/bge-base-en-v1.5 language: - en library_name: model2vec license: mit tags: - embeddings - static-embeddings - mteb - sentence-transformers model-index: - name: M2V_base_output results: - task: type: Classification dataset: name: MTEB AmazonCounterfactualClassification (en-ext) type: mteb/amazon_counterfactual config: en-ext split: test revision: e8379541af4e31359cca9fbcf4b00f2671dba205 metrics: - type: accuracy value: 69.1904047976012 - type: ap value: 19.610682715583142 - type: ap_weighted value: 19.610682715583142 - type: f1 value: 57.14831247701502 - type: f1_weighted value: 75.0407024695743 - type: main_score value: 69.1904047976012 - task: type: Classification dataset: name: MTEB AmazonCounterfactualClassification (en) type: mteb/amazon_counterfactual config: en split: test revision: e8379541af4e31359cca9fbcf4b00f2671dba205 metrics: - type: accuracy value: 71.1044776119403 - type: ap value: 33.83428171392154 - type: ap_weighted value: 33.83428171392154 - type: f1 value: 65.18431700199532 - type: f1_weighted value: 73.90467162513829 - type: main_score value: 71.1044776119403 - task: type: Classification dataset: name: MTEB AmazonPolarityClassification (default) type: mteb/amazon_polarity config: default split: test revision: e2d317d38cd51312af73b3d32a06d1a08b442046 metrics: - type: accuracy value: 67.328075 - type: ap value: 62.26238067958846 - type: ap_weighted value: 62.26238067958846 - type: f1 value: 66.93195816551996 - type: f1_weighted value: 66.93195816551996 - type: main_score value: 67.328075 - task: type: Classification dataset: name: MTEB AmazonReviewsClassification (en) type: mteb/amazon_reviews_multi config: en split: test revision: 1399c76144fd37290681b995c656ef9b2e06e26d metrics: - type: accuracy value: 32.589999999999996 - type: f1 value: 32.11760053698346 - type: f1_weighted value: 32.11760053698346 - type: main_score value: 32.589999999999996 - task: type: Retrieval dataset: name: MTEB ArguAna (default) type: mteb/arguana config: default split: test revision: c22ab2a51041ffd869aaddef7af8d8215647e41a metrics: - type: main_score value: 29.183999999999997 - type: map_at_1 value: 14.011000000000001 - type: map_at_10 value: 23.748 - type: map_at_100 value: 24.808 - type: map_at_1000 value: 24.89 - type: map_at_20 value: 24.354 - type: map_at_3 value: 20.721 - type: map_at_5 value: 22.509 - type: mrr_at_1 value: 14.509246088193455 - type: mrr_at_10 value: 23.930067285330413 - type: mrr_at_100 value: 24.990313023015393 - type: mrr_at_1000 value: 25.071881804001343 - type: mrr_at_20 value: 24.53573559987519 - type: mrr_at_3 value: 20.88667614983403 - type: mrr_at_5 value: 22.7038880986249 - type: nauc_map_at_1000_diff1 value: 10.066441521146057 - type: nauc_map_at_1000_max value: -0.5837671794505647 - type: nauc_map_at_1000_std value: 12.356714430015906 - type: nauc_map_at_100_diff1 value: 10.076633271522182 - type: nauc_map_at_100_max value: -0.5731496124067438 - type: nauc_map_at_100_std value: 12.415984202967115 - type: nauc_map_at_10_diff1 value: 9.867302245745831 - type: nauc_map_at_10_max value: -0.8261964947948097 - type: nauc_map_at_10_std value: 11.57502900905332 - type: nauc_map_at_1_diff1 value: 10.389795558592775 - type: nauc_map_at_1_max value: -4.511506238918001 - type: nauc_map_at_1_std value: 9.62435943787401 - type: nauc_map_at_20_diff1 value: 10.114926370948476 - type: nauc_map_at_20_max value: -0.38257232900731064 - type: nauc_map_at_20_std value: 12.070421408069302 - type: nauc_map_at_3_diff1 value: 8.840416555242445 - type: 
nauc_map_at_3_max value: -2.284214343720665 - type: nauc_map_at_3_std value: 9.41211373407306 - type: nauc_map_at_5_diff1 value: 9.4616046565665 - type: nauc_map_at_5_max value: -1.8580221033457682 - type: nauc_map_at_5_std value: 10.252697423331279 - type: nauc_mrr_at_1000_diff1 value: 8.50590042077137 - type: nauc_mrr_at_1000_max value: -0.9532348980220058 - type: nauc_mrr_at_1000_std value: 11.917718432821042 - type: nauc_mrr_at_100_diff1 value: 8.519603663729045 - type: nauc_mrr_at_100_max value: -0.941843377489153 - type: nauc_mrr_at_100_std value: 11.977460275257405 - type: nauc_mrr_at_10_diff1 value: 8.324129262175067 - type: nauc_mrr_at_10_max value: -1.1819451563051036 - type: nauc_mrr_at_10_std value: 11.143112974385687 - type: nauc_mrr_at_1_diff1 value: 7.923019186157461 - type: nauc_mrr_at_1_max value: -3.8622428906009336 - type: nauc_mrr_at_1_std value: 8.574254762702411 - type: nauc_mrr_at_20_diff1 value: 8.57172824197632 - type: nauc_mrr_at_20_max value: -0.7479018550868611 - type: nauc_mrr_at_20_std value: 11.638538106885681 - type: nauc_mrr_at_3_diff1 value: 7.176947665978892 - type: nauc_mrr_at_3_max value: -2.8140949706898937 - type: nauc_mrr_at_3_std value: 8.966233266672026 - type: nauc_mrr_at_5_diff1 value: 7.921651668561097 - type: nauc_mrr_at_5_max value: -2.1687598838347353 - type: nauc_mrr_at_5_std value: 9.810384238460967 - type: nauc_ndcg_at_1000_diff1 value: 11.09862326017166 - type: nauc_ndcg_at_1000_max value: 1.6567266738852608 - type: nauc_ndcg_at_1000_std value: 16.06391490264334 - type: nauc_ndcg_at_100_diff1 value: 11.372692796637454 - type: nauc_ndcg_at_100_max value: 1.8759976608604172 - type: nauc_ndcg_at_100_std value: 17.653326421438013 - type: nauc_ndcg_at_10_diff1 value: 10.629937509771837 - type: nauc_ndcg_at_10_max value: 1.3739681707601088 - type: nauc_ndcg_at_10_std value: 13.688730163159986 - type: nauc_ndcg_at_1_diff1 value: 10.389795558592775 - type: nauc_ndcg_at_1_max value: -4.511506238918001 - type: nauc_ndcg_at_1_std value: 9.62435943787401 - type: nauc_ndcg_at_20_diff1 value: 11.486521194068173 - type: nauc_ndcg_at_20_max value: 2.855255358038754 - type: nauc_ndcg_at_20_std value: 15.394981206314688 - type: nauc_ndcg_at_3_diff1 value: 8.680000272030385 - type: nauc_ndcg_at_3_max value: -1.6634044566640975 - type: nauc_ndcg_at_3_std value: 9.268472321517171 - type: nauc_ndcg_at_5_diff1 value: 9.711071086647511 - type: nauc_ndcg_at_5_max value: -0.9491120105126298 - type: nauc_ndcg_at_5_std value: 10.68847112511071 - type: nauc_precision_at_1000_diff1 value: 20.67453341943155 - type: nauc_precision_at_1000_max value: 21.6433346658854 - type: nauc_precision_at_1000_std value: 50.563552510430355 - type: nauc_precision_at_100_diff1 value: 17.05138860576984 - type: nauc_precision_at_100_max value: 10.671778777967742 - type: nauc_precision_at_100_std value: 42.815464007080514 - type: nauc_precision_at_10_diff1 value: 12.834245751753656 - type: nauc_precision_at_10_max value: 7.237728992777975 - type: nauc_precision_at_10_std value: 19.637476638724 - type: nauc_precision_at_1_diff1 value: 10.389795558592775 - type: nauc_precision_at_1_max value: -4.511506238918001 - type: nauc_precision_at_1_std value: 9.62435943787401 - type: nauc_precision_at_20_diff1 value: 15.960793242410434 - type: nauc_precision_at_20_max value: 12.642865380113017 - type: nauc_precision_at_20_std value: 25.900201704789065 - type: nauc_precision_at_3_diff1 value: 8.364265704499747 - type: nauc_precision_at_3_max value: -0.20060414550763578 - type: nauc_precision_at_3_std 
value: 8.910638511394128 - type: nauc_precision_at_5_diff1 value: 10.43686249937682 - type: nauc_precision_at_5_max value: 1.2061629814752834 - type: nauc_precision_at_5_std value: 11.812984132266987 - type: nauc_recall_at_1000_diff1 value: 20.674533419431576 - type: nauc_recall_at_1000_max value: 21.643334665885174 - type: nauc_recall_at_1000_std value: 50.563552510430256 - type: nauc_recall_at_100_diff1 value: 17.05138860576987 - type: nauc_recall_at_100_max value: 10.671778777967747 - type: nauc_recall_at_100_std value: 42.81546400708045 - type: nauc_recall_at_10_diff1 value: 12.83424575175363 - type: nauc_recall_at_10_max value: 7.237728992777978 - type: nauc_recall_at_10_std value: 19.637476638724007 - type: nauc_recall_at_1_diff1 value: 10.389795558592775 - type: nauc_recall_at_1_max value: -4.511506238918001 - type: nauc_recall_at_1_std value: 9.62435943787401 - type: nauc_recall_at_20_diff1 value: 15.960793242410464 - type: nauc_recall_at_20_max value: 12.642865380113033 - type: nauc_recall_at_20_std value: 25.900201704789094 - type: nauc_recall_at_3_diff1 value: 8.364265704499777 - type: nauc_recall_at_3_max value: -0.2006041455076358 - type: nauc_recall_at_3_std value: 8.910638511394144 - type: nauc_recall_at_5_diff1 value: 10.436862499376828 - type: nauc_recall_at_5_max value: 1.2061629814752328 - type: nauc_recall_at_5_std value: 11.81298413226698 - type: ndcg_at_1 value: 14.011000000000001 - type: ndcg_at_10 value: 29.183999999999997 - type: ndcg_at_100 value: 34.618 - type: ndcg_at_1000 value: 37.006 - type: ndcg_at_20 value: 31.371 - type: ndcg_at_3 value: 22.991 - type: ndcg_at_5 value: 26.244 - type: precision_at_1 value: 14.011000000000001 - type: precision_at_10 value: 4.651000000000001 - type: precision_at_100 value: 0.7250000000000001 - type: precision_at_1000 value: 0.092 - type: precision_at_20 value: 2.7560000000000002 - type: precision_at_3 value: 9.862 - type: precision_at_5 value: 7.510999999999999 - type: recall_at_1 value: 14.011000000000001 - type: recall_at_10 value: 46.515 - type: recall_at_100 value: 72.54599999999999 - type: recall_at_1000 value: 91.821 - type: recall_at_20 value: 55.120999999999995 - type: recall_at_3 value: 29.587000000000003 - type: recall_at_5 value: 37.553 - task: type: Clustering dataset: name: MTEB ArxivClusteringP2P (default) type: mteb/arxiv-clustering-p2p config: default split: test revision: a122ad7f3f0291bf49cc6f4d32aa80929df69d5d metrics: - type: main_score value: 31.259738106366225 - type: v_measure value: 31.259738106366225 - type: v_measure_std value: 14.320141623571129 - task: type: Clustering dataset: name: MTEB ArxivClusteringS2S (default) type: mteb/arxiv-clustering-s2s config: default split: test revision: f910caf1a6075f7329cdf8c1a6135696f37dbd53 metrics: - type: main_score value: 20.744213693691467 - type: v_measure value: 20.744213693691467 - type: v_measure_std value: 15.404721116239472 - task: type: Reranking dataset: name: MTEB AskUbuntuDupQuestions (default) type: mteb/askubuntudupquestions-reranking config: default split: test revision: 2000358ca161889fa9c082cb41daa8dcfb161a54 metrics: - type: main_score value: 51.62795895312553 - type: map value: 51.62795895312553 - type: mrr value: 65.83135470254582 - type: nAUC_map_diff1 value: 14.141914127697058 - type: nAUC_map_max value: 15.463053892954765 - type: nAUC_map_std value: 6.690591989325812 - type: nAUC_mrr_diff1 value: 17.935217602773022 - type: nAUC_mrr_max value: 20.50394658394339 - type: nAUC_mrr_std value: 11.867431280645176 - task: type: STS dataset: name: 
MTEB BIOSSES (default) type: mteb/biosses-sts config: default split: test revision: d3fb88f8f02e40887cd149695127462bbcf29b4a metrics: - type: cosine_pearson value: 73.32741772202057 - type: cosine_spearman value: 73.42938398170034 - type: euclidean_pearson value: 52.53960842495785 - type: euclidean_spearman value: 55.20186022147138 - type: main_score value: 73.42938398170034 - type: manhattan_pearson value: 51.2857441475548 - type: manhattan_spearman value: 53.75062233475454 - type: pearson value: 73.32741772202057 - type: spearman value: 73.42938398170034 - task: type: Classification dataset: name: MTEB Banking77Classification (default) type: mteb/banking77 config: default split: test revision: 0fd18e25b25c072e09e0d92ab615fda904d66300 metrics: - type: accuracy value: 71.90909090909092 - type: f1 value: 71.98225635322173 - type: f1_weighted value: 71.98225635322173 - type: main_score value: 71.90909090909092 - task: type: Clustering dataset: name: MTEB BiorxivClusteringP2P (default) type: mteb/biorxiv-clustering-p2p config: default split: test revision: 65b79d1d13f80053f67aca9498d9402c2d9f1f40 metrics: - type: main_score value: 26.532893125445977 - type: v_measure value: 26.532893125445977 - type: v_measure_std value: 0.6837586171917341 - task: type: Clustering dataset: name: MTEB BiorxivClusteringS2S (default) type: mteb/biorxiv-clustering-s2s config: default split: test revision: 258694dd0231531bc1fd9de6ceb52a0853c6d908 metrics: - type: main_score value: 14.036948167749145 - type: v_measure value: 14.036948167749145 - type: v_measure_std value: 0.5714236374163745 - task: type: Retrieval dataset: name: MTEB CQADupstackAndroidRetrieval (default) type: mteb/cqadupstack-android config: default split: test revision: f46a197baaae43b4f621051089b82a364682dfeb metrics: - type: main_score value: 28.679 - type: map_at_1 value: 18.546000000000003 - type: map_at_10 value: 24.42 - type: map_at_100 value: 25.495 - type: map_at_1000 value: 25.633 - type: map_at_20 value: 24.967 - type: map_at_3 value: 22.375 - type: map_at_5 value: 23.369999999999997 - type: mrr_at_1 value: 23.74821173104435 - type: mrr_at_10 value: 29.62997025228784 - type: mrr_at_100 value: 30.509005070582297 - type: mrr_at_1000 value: 30.57992301494201 - type: mrr_at_20 value: 30.087957677199494 - type: mrr_at_3 value: 27.944682880305205 - type: mrr_at_5 value: 28.70290891750119 - type: nauc_map_at_1000_diff1 value: 41.91741127467118 - type: nauc_map_at_1000_max value: 29.343811648500857 - type: nauc_map_at_1000_std value: -10.94124792488155 - type: nauc_map_at_100_diff1 value: 41.9257059722684 - type: nauc_map_at_100_max value: 29.312977236968447 - type: nauc_map_at_100_std value: -10.964994215476203 - type: nauc_map_at_10_diff1 value: 42.23276701935884 - type: nauc_map_at_10_max value: 28.927475882624865 - type: nauc_map_at_10_std value: -11.387774428133683 - type: nauc_map_at_1_diff1 value: 47.30172597053699 - type: nauc_map_at_1_max value: 29.662552695406873 - type: nauc_map_at_1_std value: -11.737219447429663 - type: nauc_map_at_20_diff1 value: 41.92458662433504 - type: nauc_map_at_20_max value: 29.174781873350845 - type: nauc_map_at_20_std value: -11.124043543527577 - type: nauc_map_at_3_diff1 value: 43.129372455872165 - type: nauc_map_at_3_max value: 28.848842418769422 - type: nauc_map_at_3_std value: -12.285962277168842 - type: nauc_map_at_5_diff1 value: 42.83044499601317 - type: nauc_map_at_5_max value: 28.98993975777227 - type: nauc_map_at_5_std value: -11.92018253024468 - type: nauc_mrr_at_1000_diff1 value: 
40.82041172984889 - type: nauc_mrr_at_1000_max value: 30.480885490296473 - type: nauc_mrr_at_1000_std value: -12.106796913247855 - type: nauc_mrr_at_100_diff1 value: 40.80133713998306 - type: nauc_mrr_at_100_max value: 30.47453951479006 - type: nauc_mrr_at_100_std value: -12.124703479791053 - type: nauc_mrr_at_10_diff1 value: 41.09211981274445 - type: nauc_mrr_at_10_max value: 30.497262535612556 - type: nauc_mrr_at_10_std value: -12.563263045952947 - type: nauc_mrr_at_1_diff1 value: 45.0389906310178 - type: nauc_mrr_at_1_max value: 32.16914824564583 - type: nauc_mrr_at_1_std value: -13.19897745721674 - type: nauc_mrr_at_20_diff1 value: 40.821901422240764 - type: nauc_mrr_at_20_max value: 30.545295646645254 - type: nauc_mrr_at_20_std value: -12.196074023168364 - type: nauc_mrr_at_3_diff1 value: 41.57196675439484 - type: nauc_mrr_at_3_max value: 30.700923825692193 - type: nauc_mrr_at_3_std value: -13.269209066277213 - type: nauc_mrr_at_5_diff1 value: 41.591753620602994 - type: nauc_mrr_at_5_max value: 30.63135138641901 - type: nauc_mrr_at_5_std value: -12.87020601984748 - type: nauc_ndcg_at_1000_diff1 value: 38.92537692516828 - type: nauc_ndcg_at_1000_max value: 29.68260722943582 - type: nauc_ndcg_at_1000_std value: -8.602092840233484 - type: nauc_ndcg_at_100_diff1 value: 38.64203362764584 - type: nauc_ndcg_at_100_max value: 29.393224511276372 - type: nauc_ndcg_at_100_std value: -9.191485720275928 - type: nauc_ndcg_at_10_diff1 value: 39.88534566732229 - type: nauc_ndcg_at_10_max value: 28.986279143641227 - type: nauc_ndcg_at_10_std value: -11.318342616747607 - type: nauc_ndcg_at_1_diff1 value: 45.0389906310178 - type: nauc_ndcg_at_1_max value: 32.16914824564583 - type: nauc_ndcg_at_1_std value: -13.19897745721674 - type: nauc_ndcg_at_20_diff1 value: 38.94952491835268 - type: nauc_ndcg_at_20_max value: 29.206603792767904 - type: nauc_ndcg_at_20_std value: -10.304566017193741 - type: nauc_ndcg_at_3_diff1 value: 40.7977929353434 - type: nauc_ndcg_at_3_max value: 29.580955663728076 - type: nauc_ndcg_at_3_std value: -12.648223472095015 - type: nauc_ndcg_at_5_diff1 value: 40.74984554791671 - type: nauc_ndcg_at_5_max value: 29.59605805593679 - type: nauc_ndcg_at_5_std value: -12.139160076565458 - type: nauc_precision_at_1000_diff1 value: 4.7568680155941925 - type: nauc_precision_at_1000_max value: 7.5355032131826984 - type: nauc_precision_at_1000_std value: -2.0414131984483914 - type: nauc_precision_at_100_diff1 value: 11.527472092658552 - type: nauc_precision_at_100_max value: 21.514326888623554 - type: nauc_precision_at_100_std value: -2.625060194142745 - type: nauc_precision_at_10_diff1 value: 24.503150439921896 - type: nauc_precision_at_10_max value: 28.670536590094265 - type: nauc_precision_at_10_std value: -8.197131538769034 - type: nauc_precision_at_1_diff1 value: 45.0389906310178 - type: nauc_precision_at_1_max value: 32.16914824564583 - type: nauc_precision_at_1_std value: -13.19897745721674 - type: nauc_precision_at_20_diff1 value: 17.864116269261178 - type: nauc_precision_at_20_max value: 27.6641030785838 - type: nauc_precision_at_20_std value: -7.076744708977724 - type: nauc_precision_at_3_diff1 value: 33.5854284842399 - type: nauc_precision_at_3_max value: 29.14301466077523 - type: nauc_precision_at_3_std value: -13.269490261877111 - type: nauc_precision_at_5_diff1 value: 29.98097033677175 - type: nauc_precision_at_5_max value: 29.294311210263995 - type: nauc_precision_at_5_std value: -10.994820836992847 - type: nauc_recall_at_1000_diff1 value: 23.22014562996405 - type: 
nauc_recall_at_1000_max value: 27.193319559932988 - type: nauc_recall_at_1000_std value: 12.472685466473857 - type: nauc_recall_at_100_diff1 value: 25.23024173971804 - type: nauc_recall_at_100_max value: 25.082403028027738 - type: nauc_recall_at_100_std value: -0.052423861070247414 - type: nauc_recall_at_10_diff1 value: 33.12106610160164 - type: nauc_recall_at_10_max value: 24.918229663001544 - type: nauc_recall_at_10_std value: -8.549535177480411 - type: nauc_recall_at_1_diff1 value: 47.30172597053699 - type: nauc_recall_at_1_max value: 29.662552695406873 - type: nauc_recall_at_1_std value: -11.737219447429663 - type: nauc_recall_at_20_diff1 value: 28.81435708597515 - type: nauc_recall_at_20_max value: 25.47943694144538 - type: nauc_recall_at_20_std value: -5.307500208427278 - type: nauc_recall_at_3_diff1 value: 36.830405146866575 - type: nauc_recall_at_3_max value: 26.435300017685588 - type: nauc_recall_at_3_std value: -12.224084159115286 - type: nauc_recall_at_5_diff1 value: 36.17592797525086 - type: nauc_recall_at_5_max value: 26.135745335293564 - type: nauc_recall_at_5_std value: -10.854448931576895 - type: ndcg_at_1 value: 23.748 - type: ndcg_at_10 value: 28.679 - type: ndcg_at_100 value: 33.849000000000004 - type: ndcg_at_1000 value: 36.903999999999996 - type: ndcg_at_20 value: 30.389 - type: ndcg_at_3 value: 25.602999999999998 - type: ndcg_at_5 value: 26.66 - type: precision_at_1 value: 23.748 - type: precision_at_10 value: 5.479 - type: precision_at_100 value: 1.0070000000000001 - type: precision_at_1000 value: 0.156 - type: precision_at_20 value: 3.3689999999999998 - type: precision_at_3 value: 12.303 - type: precision_at_5 value: 8.784 - type: recall_at_1 value: 18.546000000000003 - type: recall_at_10 value: 36.062 - type: recall_at_100 value: 59.622 - type: recall_at_1000 value: 80.49199999999999 - type: recall_at_20 value: 42.459 - type: recall_at_3 value: 26.346000000000004 - type: recall_at_5 value: 29.685 - task: type: Retrieval dataset: name: MTEB CQADupstackEnglishRetrieval (default) type: mteb/cqadupstack-english config: default split: test revision: ad9991cb51e31e31e430383c75ffb2885547b5f0 metrics: - type: main_score value: 24.201 - type: map_at_1 value: 15.659 - type: map_at_10 value: 20.72 - type: map_at_100 value: 21.494 - type: map_at_1000 value: 21.61 - type: map_at_20 value: 21.118000000000002 - type: map_at_3 value: 19.112000000000002 - type: map_at_5 value: 20.018 - type: mrr_at_1 value: 20.191082802547772 - type: mrr_at_10 value: 25.214639571327467 - type: mrr_at_100 value: 25.923135895788356 - type: mrr_at_1000 value: 25.99481688491863 - type: mrr_at_20 value: 25.587003181612815 - type: mrr_at_3 value: 23.736730360934178 - type: mrr_at_5 value: 24.590233545647543 - type: nauc_map_at_1000_diff1 value: 43.16887932091616 - type: nauc_map_at_1000_max value: 13.001793350069521 - type: nauc_map_at_1000_std value: -3.240745072009945 - type: nauc_map_at_100_diff1 value: 43.186513856436335 - type: nauc_map_at_100_max value: 12.974985819420635 - type: nauc_map_at_100_std value: -3.2702208916272513 - type: nauc_map_at_10_diff1 value: 43.564640578903344 - type: nauc_map_at_10_max value: 13.229537802390597 - type: nauc_map_at_10_std value: -3.7960991209188033 - type: nauc_map_at_1_diff1 value: 49.188047470455324 - type: nauc_map_at_1_max value: 12.622228914711336 - type: nauc_map_at_1_std value: -5.079814609778495 - type: nauc_map_at_20_diff1 value: 43.34504671504679 - type: nauc_map_at_20_max value: 13.053303288029316 - type: nauc_map_at_20_std value: -3.53357011925504 - 
type: nauc_map_at_3_diff1 value: 44.804892782636394 - type: nauc_map_at_3_max value: 13.58725707185815 - type: nauc_map_at_3_std value: -3.8777357887480894 - type: nauc_map_at_5_diff1 value: 43.72391951178523 - type: nauc_map_at_5_max value: 13.568707067556259 - type: nauc_map_at_5_std value: -4.038106969015966 - type: nauc_mrr_at_1000_diff1 value: 40.667038144431636 - type: nauc_mrr_at_1000_max value: 14.384125598011202 - type: nauc_mrr_at_1000_std value: -2.444399832932607 - type: nauc_mrr_at_100_diff1 value: 40.65910143040065 - type: nauc_mrr_at_100_max value: 14.375036584618234 - type: nauc_mrr_at_100_std value: -2.4274195136508547 - type: nauc_mrr_at_10_diff1 value: 40.89131817246553 - type: nauc_mrr_at_10_max value: 14.581024560636887 - type: nauc_mrr_at_10_std value: -2.703373098942388 - type: nauc_mrr_at_1_diff1 value: 45.09051009190851 - type: nauc_mrr_at_1_max value: 15.831915244565245 - type: nauc_mrr_at_1_std value: -4.310101948715212 - type: nauc_mrr_at_20_diff1 value: 40.78860474631307 - type: nauc_mrr_at_20_max value: 14.4782017138514 - type: nauc_mrr_at_20_std value: -2.5161572751678998 - type: nauc_mrr_at_3_diff1 value: 41.68191255304641 - type: nauc_mrr_at_3_max value: 15.041970652494102 - type: nauc_mrr_at_3_std value: -2.865017831776156 - type: nauc_mrr_at_5_diff1 value: 40.93732895812152 - type: nauc_mrr_at_5_max value: 14.810999495708327 - type: nauc_mrr_at_5_std value: -2.922166723623921 - type: nauc_ndcg_at_1000_diff1 value: 39.4110066143245 - type: nauc_ndcg_at_1000_max value: 12.821827433441005 - type: nauc_ndcg_at_1000_std value: -0.8108384214632934 - type: nauc_ndcg_at_100_diff1 value: 39.62118270064326 - type: nauc_ndcg_at_100_max value: 12.037720650973109 - type: nauc_ndcg_at_100_std value: -0.9362771831617082 - type: nauc_ndcg_at_10_diff1 value: 40.95447674096302 - type: nauc_ndcg_at_10_max value: 13.154418607273124 - type: nauc_ndcg_at_10_std value: -2.8988540864843886 - type: nauc_ndcg_at_1_diff1 value: 45.09051009190851 - type: nauc_ndcg_at_1_max value: 15.831915244565245 - type: nauc_ndcg_at_1_std value: -4.310101948715212 - type: nauc_ndcg_at_20_diff1 value: 40.63851149738437 - type: nauc_ndcg_at_20_max value: 12.604171957141656 - type: nauc_ndcg_at_20_std value: -2.1910058415334763 - type: nauc_ndcg_at_3_diff1 value: 42.10101502571804 - type: nauc_ndcg_at_3_max value: 14.519710397645364 - type: nauc_ndcg_at_3_std value: -3.1565026643410667 - type: nauc_ndcg_at_5_diff1 value: 40.94273285512494 - type: nauc_ndcg_at_5_max value: 14.054440556480834 - type: nauc_ndcg_at_5_std value: -3.442189925092899 - type: nauc_precision_at_1000_diff1 value: -0.9565223011446182 - type: nauc_precision_at_1000_max value: 11.675006301584128 - type: nauc_precision_at_1000_std value: 8.093690013766537 - type: nauc_precision_at_100_diff1 value: 11.288302809626888 - type: nauc_precision_at_100_max value: 10.960387422561148 - type: nauc_precision_at_100_std value: 8.591223668593777 - type: nauc_precision_at_10_diff1 value: 25.64615042863472 - type: nauc_precision_at_10_max value: 14.069756217267985 - type: nauc_precision_at_10_std value: 0.08978592105584715 - type: nauc_precision_at_1_diff1 value: 45.09051009190851 - type: nauc_precision_at_1_max value: 15.831915244565245 - type: nauc_precision_at_1_std value: -4.310101948715212 - type: nauc_precision_at_20_diff1 value: 22.097468653407866 - type: nauc_precision_at_20_max value: 12.949212539250343 - type: nauc_precision_at_20_std value: 2.868048305908803 - type: nauc_precision_at_3_diff1 value: 33.24608090774321 - type: 
nauc_precision_at_3_max value: 16.588047560522053 - type: nauc_precision_at_3_std value: -1.2432725324047462 - type: nauc_precision_at_5_diff1 value: 28.89668943912206 - type: nauc_precision_at_5_max value: 16.25456580555215 - type: nauc_precision_at_5_std value: -2.0273998006444134 - type: nauc_recall_at_1000_diff1 value: 24.86548627119768 - type: nauc_recall_at_1000_max value: 10.68002967962002 - type: nauc_recall_at_1000_std value: 8.076769436730153 - type: nauc_recall_at_100_diff1 value: 28.204939299147387 - type: nauc_recall_at_100_max value: 6.159717806964745 - type: nauc_recall_at_100_std value: 6.145682430435217 - type: nauc_recall_at_10_diff1 value: 35.339197660807436 - type: nauc_recall_at_10_max value: 10.955842694171421 - type: nauc_recall_at_10_std value: -2.050234322464136 - type: nauc_recall_at_1_diff1 value: 49.188047470455324 - type: nauc_recall_at_1_max value: 12.622228914711336 - type: nauc_recall_at_1_std value: -5.079814609778495 - type: nauc_recall_at_20_diff1 value: 33.66153319489103 - type: nauc_recall_at_20_max value: 9.045136466332934 - type: nauc_recall_at_20_std value: 0.6362560055945043 - type: nauc_recall_at_3_diff1 value: 39.33078959934067 - type: nauc_recall_at_3_max value: 12.943838756532871 - type: nauc_recall_at_3_std value: -2.617759316161476 - type: nauc_recall_at_5_diff1 value: 36.121619339589245 - type: nauc_recall_at_5_max value: 12.417874949270544 - type: nauc_recall_at_5_std value: -3.091748807456823 - type: ndcg_at_1 value: 20.191 - type: ndcg_at_10 value: 24.201 - type: ndcg_at_100 value: 27.955999999999996 - type: ndcg_at_1000 value: 30.773 - type: ndcg_at_20 value: 25.44 - type: ndcg_at_3 value: 21.806 - type: ndcg_at_5 value: 22.905 - type: precision_at_1 value: 20.191 - type: precision_at_10 value: 4.573 - type: precision_at_100 value: 0.8059999999999999 - type: precision_at_1000 value: 0.13 - type: precision_at_20 value: 2.7449999999999997 - type: precision_at_3 value: 10.679 - type: precision_at_5 value: 7.580000000000001 - type: recall_at_1 value: 15.659 - type: recall_at_10 value: 29.968 - type: recall_at_100 value: 46.98 - type: recall_at_1000 value: 66.286 - type: recall_at_20 value: 34.621 - type: recall_at_3 value: 22.572 - type: recall_at_5 value: 25.787 - task: type: Retrieval dataset: name: MTEB CQADupstackGamingRetrieval (default) type: mteb/cqadupstack-gaming config: default split: test revision: 4885aa143210c98657558c04aaf3dc47cfb54340 metrics: - type: main_score value: 34.102 - type: map_at_1 value: 22.269 - type: map_at_10 value: 29.754 - type: map_at_100 value: 30.692999999999998 - type: map_at_1000 value: 30.786 - type: map_at_20 value: 30.225 - type: map_at_3 value: 27.392 - type: map_at_5 value: 28.831 - type: mrr_at_1 value: 25.956112852664575 - type: mrr_at_10 value: 32.77869831318104 - type: mrr_at_100 value: 33.60378795088834 - type: mrr_at_1000 value: 33.66340064366992 - type: mrr_at_20 value: 33.18375173610909 - type: mrr_at_3 value: 30.647857889237173 - type: mrr_at_5 value: 31.980146290491067 - type: nauc_map_at_1000_diff1 value: 42.023422411516016 - type: nauc_map_at_1000_max value: 24.046890902960552 - type: nauc_map_at_1000_std value: -6.94632372002679 - type: nauc_map_at_100_diff1 value: 42.00488415137851 - type: nauc_map_at_100_max value: 24.029258386148577 - type: nauc_map_at_100_std value: -7.013947866427552 - type: nauc_map_at_10_diff1 value: 42.060086712211344 - type: nauc_map_at_10_max value: 23.998218675756625 - type: nauc_map_at_10_std value: -7.599227449673994 - type: nauc_map_at_1_diff1 value: 
45.27837491202271 - type: nauc_map_at_1_max value: 23.873436707472766 - type: nauc_map_at_1_std value: -10.458746042802577 - type: nauc_map_at_20_diff1 value: 41.98597500237269 - type: nauc_map_at_20_max value: 24.07819180945319 - type: nauc_map_at_20_std value: -7.320963413971682 - type: nauc_map_at_3_diff1 value: 42.69809960018882 - type: nauc_map_at_3_max value: 23.63846349891855 - type: nauc_map_at_3_std value: -8.732892056046317 - type: nauc_map_at_5_diff1 value: 42.23446934702989 - type: nauc_map_at_5_max value: 23.905384542219803 - type: nauc_map_at_5_std value: -7.643670989026166 - type: nauc_mrr_at_1000_diff1 value: 42.122071790378016 - type: nauc_mrr_at_1000_max value: 25.86760736591077 - type: nauc_mrr_at_1000_std value: -5.266317827181621 - type: nauc_mrr_at_100_diff1 value: 42.10647973553166 - type: nauc_mrr_at_100_max value: 25.85687545921025 - type: nauc_mrr_at_100_std value: -5.270766368901785 - type: nauc_mrr_at_10_diff1 value: 42.24735092990674 - type: nauc_mrr_at_10_max value: 25.994930434678004 - type: nauc_mrr_at_10_std value: -5.6601281070075355 - type: nauc_mrr_at_1_diff1 value: 46.582933896071864 - type: nauc_mrr_at_1_max value: 27.228911381467753 - type: nauc_mrr_at_1_std value: -8.734962232415343 - type: nauc_mrr_at_20_diff1 value: 42.07873815943869 - type: nauc_mrr_at_20_max value: 25.963756082386645 - type: nauc_mrr_at_20_std value: -5.478617831866867 - type: nauc_mrr_at_3_diff1 value: 42.98246412395152 - type: nauc_mrr_at_3_max value: 26.158635453239686 - type: nauc_mrr_at_3_std value: -6.3931010500997125 - type: nauc_mrr_at_5_diff1 value: 42.43712298159192 - type: nauc_mrr_at_5_max value: 26.20143695371023 - type: nauc_mrr_at_5_std value: -5.622650253873388 - type: nauc_ndcg_at_1000_diff1 value: 40.40682446150754 - type: nauc_ndcg_at_1000_max value: 23.975034312446894 - type: nauc_ndcg_at_1000_std value: -2.645144894917121 - type: nauc_ndcg_at_100_diff1 value: 39.96263062735843 - type: nauc_ndcg_at_100_max value: 23.583706441511858 - type: nauc_ndcg_at_100_std value: -3.3869912444384114 - type: nauc_ndcg_at_10_diff1 value: 40.39533814272208 - type: nauc_ndcg_at_10_max value: 24.293062837455782 - type: nauc_ndcg_at_10_std value: -6.100075124875855 - type: nauc_ndcg_at_1_diff1 value: 46.582933896071864 - type: nauc_ndcg_at_1_max value: 27.228911381467753 - type: nauc_ndcg_at_1_std value: -8.734962232415343 - type: nauc_ndcg_at_20_diff1 value: 39.9687058773172 - type: nauc_ndcg_at_20_max value: 24.316546572139725 - type: nauc_ndcg_at_20_std value: -5.284472590592323 - type: nauc_ndcg_at_3_diff1 value: 41.76544027471963 - type: nauc_ndcg_at_3_max value: 24.275838336051923 - type: nauc_ndcg_at_3_std value: -7.5019513901932715 - type: nauc_ndcg_at_5_diff1 value: 40.90262427804706 - type: nauc_ndcg_at_5_max value: 24.491396294279173 - type: nauc_ndcg_at_5_std value: -6.148208697652546 - type: nauc_precision_at_1000_diff1 value: 8.310979675445102 - type: nauc_precision_at_1000_max value: 10.177503506631384 - type: nauc_precision_at_1000_std value: 27.06496193087599 - type: nauc_precision_at_100_diff1 value: 19.055469058991463 - type: nauc_precision_at_100_max value: 15.143082019798745 - type: nauc_precision_at_100_std value: 17.5613526737176 - type: nauc_precision_at_10_diff1 value: 30.60558520635145 - type: nauc_precision_at_10_max value: 23.899102367494276 - type: nauc_precision_at_10_std value: 1.49034477139435 - type: nauc_precision_at_1_diff1 value: 46.582933896071864 - type: nauc_precision_at_1_max value: 27.228911381467753 - type: nauc_precision_at_1_std value: 
-8.734962232415343 - type: nauc_precision_at_20_diff1 value: 27.34257473822076 - type: nauc_precision_at_20_max value: 23.166488954967583 - type: nauc_precision_at_20_std value: 5.306163418928192 - type: nauc_precision_at_3_diff1 value: 36.77034283418537 - type: nauc_precision_at_3_max value: 24.9271454504654 - type: nauc_precision_at_3_std value: -3.396946642230245 - type: nauc_precision_at_5_diff1 value: 34.27058913291088 - type: nauc_precision_at_5_max value: 24.976805100785057 - type: nauc_precision_at_5_std value: 0.7181940371896616 - type: nauc_recall_at_1000_diff1 value: 30.723778900213063 - type: nauc_recall_at_1000_max value: 18.638473722404548 - type: nauc_recall_at_1000_std value: 24.489955439065092 - type: nauc_recall_at_100_diff1 value: 29.354618599167313 - type: nauc_recall_at_100_max value: 16.731640777347838 - type: nauc_recall_at_100_std value: 9.835673177366234 - type: nauc_recall_at_10_diff1 value: 33.89120435058154 - type: nauc_recall_at_10_max value: 22.177696671277435 - type: nauc_recall_at_10_std value: -3.7985335869625865 - type: nauc_recall_at_1_diff1 value: 45.27837491202271 - type: nauc_recall_at_1_max value: 23.873436707472766 - type: nauc_recall_at_1_std value: -10.458746042802577 - type: nauc_recall_at_20_diff1 value: 31.924635267680696 - type: nauc_recall_at_20_max value: 22.051909242943513 - type: nauc_recall_at_20_std value: -0.8097713224498396 - type: nauc_recall_at_3_diff1 value: 38.150042036072456 - type: nauc_recall_at_3_max value: 22.400370920900837 - type: nauc_recall_at_3_std value: -6.80660585255143 - type: nauc_recall_at_5_diff1 value: 36.054052572950056 - type: nauc_recall_at_5_max value: 23.311864516504208 - type: nauc_recall_at_5_std value: -3.9369960666204302 - type: ndcg_at_1 value: 25.956000000000003 - type: ndcg_at_10 value: 34.102 - type: ndcg_at_100 value: 38.815 - type: ndcg_at_1000 value: 41.091 - type: ndcg_at_20 value: 35.616 - type: ndcg_at_3 value: 29.757 - type: ndcg_at_5 value: 32.054 - type: precision_at_1 value: 25.956000000000003 - type: precision_at_10 value: 5.5489999999999995 - type: precision_at_100 value: 0.8699999999999999 - type: precision_at_1000 value: 0.11499999999999999 - type: precision_at_20 value: 3.1850000000000005 - type: precision_at_3 value: 13.375 - type: precision_at_5 value: 9.492 - type: recall_at_1 value: 22.269 - type: recall_at_10 value: 44.487 - type: recall_at_100 value: 66.065 - type: recall_at_1000 value: 82.711 - type: recall_at_20 value: 50.002 - type: recall_at_3 value: 32.769999999999996 - type: recall_at_5 value: 38.411 - task: type: Retrieval dataset: name: MTEB CQADupstackGisRetrieval (default) type: mteb/cqadupstack-gis config: default split: test revision: 5003b3064772da1887988e05400cf3806fe491f2 metrics: - type: main_score value: 15.504999999999999 - type: map_at_1 value: 9.322 - type: map_at_10 value: 13.215 - type: map_at_100 value: 13.995 - type: map_at_1000 value: 14.088999999999999 - type: map_at_20 value: 13.669 - type: map_at_3 value: 11.939 - type: map_at_5 value: 12.809000000000001 - type: mrr_at_1 value: 9.830508474576272 - type: mrr_at_10 value: 14.040713837324004 - type: mrr_at_100 value: 14.834528048636288 - type: mrr_at_1000 value: 14.922257469530024 - type: mrr_at_20 value: 14.506623042541603 - type: mrr_at_3 value: 12.749529190207154 - type: mrr_at_5 value: 13.574387947269303 - type: nauc_map_at_1000_diff1 value: 30.529210655419025 - type: nauc_map_at_1000_max value: 17.46667029242714 - type: nauc_map_at_1000_std value: -14.147892794296949 - type: nauc_map_at_100_diff1 
value: 30.556449269344828 - type: nauc_map_at_100_max value: 17.43087617092459 - type: nauc_map_at_100_std value: -14.144527288140976 - type: nauc_map_at_10_diff1 value: 31.239447478533798 - type: nauc_map_at_10_max value: 18.11892096077464 - type: nauc_map_at_10_std value: -14.62039767845138 - type: nauc_map_at_1_diff1 value: 37.4160233030578 - type: nauc_map_at_1_max value: 17.016815986268263 - type: nauc_map_at_1_std value: -17.425864228691612 - type: nauc_map_at_20_diff1 value: 30.719067002092494 - type: nauc_map_at_20_max value: 17.39019156487201 - type: nauc_map_at_20_std value: -14.270868979007783 - type: nauc_map_at_3_diff1 value: 32.92206407639439 - type: nauc_map_at_3_max value: 17.835953557611468 - type: nauc_map_at_3_std value: -16.031495528857608 - type: nauc_map_at_5_diff1 value: 31.584976124274416 - type: nauc_map_at_5_max value: 18.197826240384625 - type: nauc_map_at_5_std value: -15.441753419032448 - type: nauc_mrr_at_1000_diff1 value: 30.046493575807283 - type: nauc_mrr_at_1000_max value: 19.498590306501473 - type: nauc_mrr_at_1000_std value: -13.402207800669682 - type: nauc_mrr_at_100_diff1 value: 30.05195056678437 - type: nauc_mrr_at_100_max value: 19.47592918375251 - type: nauc_mrr_at_100_std value: -13.39845392251263 - type: nauc_mrr_at_10_diff1 value: 30.791850768635605 - type: nauc_mrr_at_10_max value: 20.313444672627128 - type: nauc_mrr_at_10_std value: -13.935914370733792 - type: nauc_mrr_at_1_diff1 value: 37.50338859979288 - type: nauc_mrr_at_1_max value: 19.34649504621331 - type: nauc_mrr_at_1_std value: -17.116234597672054 - type: nauc_mrr_at_20_diff1 value: 30.18891093563501 - type: nauc_mrr_at_20_max value: 19.5511248509084 - type: nauc_mrr_at_20_std value: -13.5427820185682 - type: nauc_mrr_at_3_diff1 value: 32.07301084185587 - type: nauc_mrr_at_3_max value: 20.191966663668733 - type: nauc_mrr_at_3_std value: -15.04405001225193 - type: nauc_mrr_at_5_diff1 value: 31.086216757720575 - type: nauc_mrr_at_5_max value: 20.277903224593523 - type: nauc_mrr_at_5_std value: -14.65307477545357 - type: nauc_ndcg_at_1000_diff1 value: 25.991511686309938 - type: nauc_ndcg_at_1000_max value: 16.945396948437562 - type: nauc_ndcg_at_1000_std value: -11.694443736831037 - type: nauc_ndcg_at_100_diff1 value: 25.980124756057325 - type: nauc_ndcg_at_100_max value: 15.99158676356653 - type: nauc_ndcg_at_100_std value: -11.398279572216548 - type: nauc_ndcg_at_10_diff1 value: 28.892093125361416 - type: nauc_ndcg_at_10_max value: 18.71513717736543 - type: nauc_ndcg_at_10_std value: -12.779856403033296 - type: nauc_ndcg_at_1_diff1 value: 37.50338859979288 - type: nauc_ndcg_at_1_max value: 19.34649504621331 - type: nauc_ndcg_at_1_std value: -17.116234597672054 - type: nauc_ndcg_at_20_diff1 value: 27.25547422800403 - type: nauc_ndcg_at_20_max value: 16.331067486313643 - type: nauc_ndcg_at_20_std value: -11.8817415790308 - type: nauc_ndcg_at_3_diff1 value: 31.29985811872621 - type: nauc_ndcg_at_3_max value: 18.454751997552098 - type: nauc_ndcg_at_3_std value: -15.465471013016707 - type: nauc_ndcg_at_5_diff1 value: 29.493811341594938 - type: nauc_ndcg_at_5_max value: 18.707691943439258 - type: nauc_ndcg_at_5_std value: -14.433807530159697 - type: nauc_precision_at_1000_diff1 value: 3.2218446674363315 - type: nauc_precision_at_1000_max value: 17.12404764623586 - type: nauc_precision_at_1000_std value: -2.3264575552583064 - type: nauc_precision_at_100_diff1 value: 11.214321152283215 - type: nauc_precision_at_100_max value: 13.446815680526637 - type: nauc_precision_at_100_std value: 
-4.987683964997388 - type: nauc_precision_at_10_diff1 value: 21.60303059518948 - type: nauc_precision_at_10_max value: 21.596284543805293 - type: nauc_precision_at_10_std value: -8.082031256092737 - type: nauc_precision_at_1_diff1 value: 37.50338859979288 - type: nauc_precision_at_1_max value: 19.34649504621331 - type: nauc_precision_at_1_std value: -17.116234597672054 - type: nauc_precision_at_20_diff1 value: 17.14261533352261 - type: nauc_precision_at_20_max value: 15.233652532515793 - type: nauc_precision_at_20_std value: -6.647366892331011 - type: nauc_precision_at_3_diff1 value: 27.367720137858804 - type: nauc_precision_at_3_max value: 21.090504124786534 - type: nauc_precision_at_3_std value: -13.306298994563972 - type: nauc_precision_at_5_diff1 value: 23.025359281940467 - type: nauc_precision_at_5_max value: 21.211267803594005 - type: nauc_precision_at_5_std value: -11.168476315146755 - type: nauc_recall_at_1000_diff1 value: 14.071492953686532 - type: nauc_recall_at_1000_max value: 13.218836154700018 - type: nauc_recall_at_1000_std value: -7.084593581668587 - type: nauc_recall_at_100_diff1 value: 15.116097572344975 - type: nauc_recall_at_100_max value: 9.307648936748858 - type: nauc_recall_at_100_std value: -5.895474365275694 - type: nauc_recall_at_10_diff1 value: 23.78022904015662 - type: nauc_recall_at_10_max value: 17.85241980538583 - type: nauc_recall_at_10_std value: -8.86829209909465 - type: nauc_recall_at_1_diff1 value: 37.4160233030578 - type: nauc_recall_at_1_max value: 17.016815986268263 - type: nauc_recall_at_1_std value: -17.425864228691612 - type: nauc_recall_at_20_diff1 value: 19.986562587692415 - type: nauc_recall_at_20_max value: 11.164823338332948 - type: nauc_recall_at_20_std value: -6.930345717632131 - type: nauc_recall_at_3_diff1 value: 28.54476711344997 - type: nauc_recall_at_3_max value: 17.72982315112114 - type: nauc_recall_at_3_std value: -14.767629842747809 - type: nauc_recall_at_5_diff1 value: 25.47233991251944 - type: nauc_recall_at_5_max value: 17.87587712457761 - type: nauc_recall_at_5_std value: -12.93959613517254 - type: ndcg_at_1 value: 9.831 - type: ndcg_at_10 value: 15.504999999999999 - type: ndcg_at_100 value: 19.721 - type: ndcg_at_1000 value: 22.746 - type: ndcg_at_20 value: 17.177 - type: ndcg_at_3 value: 13.020999999999999 - type: ndcg_at_5 value: 14.517 - type: precision_at_1 value: 9.831 - type: precision_at_10 value: 2.475 - type: precision_at_100 value: 0.484 - type: precision_at_1000 value: 0.079 - type: precision_at_20 value: 1.6099999999999999 - type: precision_at_3 value: 5.612 - type: precision_at_5 value: 4.202999999999999 - type: recall_at_1 value: 9.322 - type: recall_at_10 value: 21.706 - type: recall_at_100 value: 41.837 - type: recall_at_1000 value: 65.78500000000001 - type: recall_at_20 value: 28.173 - type: recall_at_3 value: 15.167 - type: recall_at_5 value: 18.765 - task: type: Retrieval dataset: name: MTEB CQADupstackMathematicaRetrieval (default) type: mteb/cqadupstack-mathematica config: default split: test revision: 90fceea13679c63fe563ded68f3b6f06e50061de metrics: - type: main_score value: 10.506 - type: map_at_1 value: 5.097 - type: map_at_10 value: 8.123 - type: map_at_100 value: 8.81 - type: map_at_1000 value: 8.921999999999999 - type: map_at_20 value: 8.445 - type: map_at_3 value: 7.058000000000001 - type: map_at_5 value: 7.509 - type: mrr_at_1 value: 6.343283582089552 - type: mrr_at_10 value: 9.93554055121219 - type: mrr_at_100 value: 10.666175665806625 - type: mrr_at_1000 value: 10.766018190354108 - type: mrr_at_20 
value: 10.29873635991652 - type: mrr_at_3 value: 8.706467661691544 - type: mrr_at_5 value: 9.20398009950249 - type: nauc_map_at_1000_diff1 value: 16.71617773929961 - type: nauc_map_at_1000_max value: 12.836522190717654 - type: nauc_map_at_1000_std value: -1.5931324340434574 - type: nauc_map_at_100_diff1 value: 16.708450779332075 - type: nauc_map_at_100_max value: 12.864987872701173 - type: nauc_map_at_100_std value: -1.7749974459670648 - type: nauc_map_at_10_diff1 value: 16.889847999156434 - type: nauc_map_at_10_max value: 13.821580163360652 - type: nauc_map_at_10_std value: -1.5413513436151478 - type: nauc_map_at_1_diff1 value: 27.620803823439566 - type: nauc_map_at_1_max value: 9.946991002221708 - type: nauc_map_at_1_std value: -2.4262680356943087 - type: nauc_map_at_20_diff1 value: 16.674045845919565 - type: nauc_map_at_20_max value: 13.011303701592054 - type: nauc_map_at_20_std value: -1.6544743278320506 - type: nauc_map_at_3_diff1 value: 17.421817131869087 - type: nauc_map_at_3_max value: 13.332677540146523 - type: nauc_map_at_3_std value: -3.3965199354497257 - type: nauc_map_at_5_diff1 value: 16.472303139269965 - type: nauc_map_at_5_max value: 12.957712628879412 - type: nauc_map_at_5_std value: -2.5301777339577662 - type: nauc_mrr_at_1000_diff1 value: 16.593077375065533 - type: nauc_mrr_at_1000_max value: 15.24914299560567 - type: nauc_mrr_at_1000_std value: -0.8209741469268466 - type: nauc_mrr_at_100_diff1 value: 16.526988857467 - type: nauc_mrr_at_100_max value: 15.282403514335552 - type: nauc_mrr_at_100_std value: -0.9336495531936128 - type: nauc_mrr_at_10_diff1 value: 17.112649116787765 - type: nauc_mrr_at_10_max value: 15.998287559296745 - type: nauc_mrr_at_10_std value: -0.4972479310956255 - type: nauc_mrr_at_1_diff1 value: 26.390769480452008 - type: nauc_mrr_at_1_max value: 12.666086436004754 - type: nauc_mrr_at_1_std value: -0.8290506693110757 - type: nauc_mrr_at_20_diff1 value: 16.570263118716873 - type: nauc_mrr_at_20_max value: 15.41609638468375 - type: nauc_mrr_at_20_std value: -0.7638194854818602 - type: nauc_mrr_at_3_diff1 value: 17.337541518148672 - type: nauc_mrr_at_3_max value: 16.054253099766 - type: nauc_mrr_at_3_std value: -2.4609668986558098 - type: nauc_mrr_at_5_diff1 value: 16.631292764650276 - type: nauc_mrr_at_5_max value: 15.260426520080111 - type: nauc_mrr_at_5_std value: -1.7287159836379042 - type: nauc_ndcg_at_1000_diff1 value: 14.52634093171492 - type: nauc_ndcg_at_1000_max value: 12.171421632471498 - type: nauc_ndcg_at_1000_std value: 2.162794094660827 - type: nauc_ndcg_at_100_diff1 value: 13.83799208322184 - type: nauc_ndcg_at_100_max value: 12.724714757328384 - type: nauc_ndcg_at_100_std value: -0.6192472371565176 - type: nauc_ndcg_at_10_diff1 value: 14.905057185135432 - type: nauc_ndcg_at_10_max value: 15.671185607261256 - type: nauc_ndcg_at_10_std value: 0.4794457018671312 - type: nauc_ndcg_at_1_diff1 value: 26.390769480452008 - type: nauc_ndcg_at_1_max value: 12.666086436004754 - type: nauc_ndcg_at_1_std value: -0.8290506693110757 - type: nauc_ndcg_at_20_diff1 value: 14.177586694378425 - type: nauc_ndcg_at_20_max value: 13.309923186895894 - type: nauc_ndcg_at_20_std value: 0.08485334685153047 - type: nauc_ndcg_at_3_diff1 value: 14.464832485633236 - type: nauc_ndcg_at_3_max value: 15.376082832680266 - type: nauc_ndcg_at_3_std value: -3.4289150318270947 - type: nauc_ndcg_at_5_diff1 value: 13.479314775515663 - type: nauc_ndcg_at_5_max value: 14.170795142756146 - type: nauc_ndcg_at_5_std value: -1.8374279611217414 - type: nauc_precision_at_1000_diff1 
value: 5.5461386139543984 - type: nauc_precision_at_1000_max value: 8.173550020362248 - type: nauc_precision_at_1000_std value: 4.711143690664535 - type: nauc_precision_at_100_diff1 value: 5.8834541815278945 - type: nauc_precision_at_100_max value: 11.091665205495271 - type: nauc_precision_at_100_std value: -2.7393617901866123 - type: nauc_precision_at_10_diff1 value: 10.751011614623913 - type: nauc_precision_at_10_max value: 17.777588721031616 - type: nauc_precision_at_10_std value: 3.707970494956657 - type: nauc_precision_at_1_diff1 value: 26.390769480452008 - type: nauc_precision_at_1_max value: 12.666086436004754 - type: nauc_precision_at_1_std value: -0.8290506693110757 - type: nauc_precision_at_20_diff1 value: 8.974996734936457 - type: nauc_precision_at_20_max value: 12.402565300274947 - type: nauc_precision_at_20_std value: 0.937988804595429 - type: nauc_precision_at_3_diff1 value: 8.383569631006118 - type: nauc_precision_at_3_max value: 18.173716740568526 - type: nauc_precision_at_3_std value: -3.3910150432001407 - type: nauc_precision_at_5_diff1 value: 6.544996015375691 - type: nauc_precision_at_5_max value: 16.558965673469203 - type: nauc_precision_at_5_std value: -0.21836542541876836 - type: nauc_recall_at_1000_diff1 value: 12.181437110921324 - type: nauc_recall_at_1000_max value: 6.492207340417336 - type: nauc_recall_at_1000_std value: 12.361749723553077 - type: nauc_recall_at_100_diff1 value: 9.509974939632396 - type: nauc_recall_at_100_max value: 9.708468343147787 - type: nauc_recall_at_100_std value: 0.8884655075061652 - type: nauc_recall_at_10_diff1 value: 11.525566414075342 - type: nauc_recall_at_10_max value: 17.809901918053168 - type: nauc_recall_at_10_std value: 3.6346449630612487 - type: nauc_recall_at_1_diff1 value: 27.620803823439566 - type: nauc_recall_at_1_max value: 9.946991002221708 - type: nauc_recall_at_1_std value: -2.4262680356943087 - type: nauc_recall_at_20_diff1 value: 10.29372538890332 - type: nauc_recall_at_20_max value: 11.376497210412108 - type: nauc_recall_at_20_std value: 2.7197830344340495 - type: nauc_recall_at_3_diff1 value: 8.828406903593496 - type: nauc_recall_at_3_max value: 16.884533927784844 - type: nauc_recall_at_3_std value: -2.9509441866607156 - type: nauc_recall_at_5_diff1 value: 7.436661727727917 - type: nauc_recall_at_5_max value: 14.355008485341797 - type: nauc_recall_at_5_std value: -0.38874284690465266 - type: ndcg_at_1 value: 6.343 - type: ndcg_at_10 value: 10.506 - type: ndcg_at_100 value: 14.41 - type: ndcg_at_1000 value: 17.698 - type: ndcg_at_20 value: 11.73 - type: ndcg_at_3 value: 8.257 - type: ndcg_at_5 value: 8.996 - type: precision_at_1 value: 6.343 - type: precision_at_10 value: 2.114 - type: precision_at_100 value: 0.48 - type: precision_at_1000 value: 0.08800000000000001 - type: precision_at_20 value: 1.393 - type: precision_at_3 value: 4.146 - type: precision_at_5 value: 3.0349999999999997 - type: recall_at_1 value: 5.097 - type: recall_at_10 value: 16.204 - type: recall_at_100 value: 34.223 - type: recall_at_1000 value: 58.553999999999995 - type: recall_at_20 value: 20.76 - type: recall_at_3 value: 9.809 - type: recall_at_5 value: 11.623 - task: type: Retrieval dataset: name: MTEB CQADupstackPhysicsRetrieval (default) type: mteb/cqadupstack-physics config: default split: test revision: 79531abbd1fb92d06c6d6315a0cbbbf5bb247ea4 metrics: - type: main_score value: 22.942999999999998 - type: map_at_1 value: 14.446 - type: map_at_10 value: 19.377 - type: map_at_100 value: 20.482 - type: map_at_1000 value: 20.626 - type: 
map_at_20 value: 19.991 - type: map_at_3 value: 17.714 - type: map_at_5 value: 18.665000000000003 - type: mrr_at_1 value: 17.613089509143407 - type: mrr_at_10 value: 23.370533327222446 - type: mrr_at_100 value: 24.317061726895137 - type: mrr_at_1000 value: 24.406259672604996 - type: mrr_at_20 value: 23.908661597522798 - type: mrr_at_3 value: 21.607314725697798 - type: mrr_at_5 value: 22.632338787295485 - type: nauc_map_at_1000_diff1 value: 34.036197058710755 - type: nauc_map_at_1000_max value: 22.301224424803703 - type: nauc_map_at_1000_std value: -2.9723475399352406 - type: nauc_map_at_100_diff1 value: 34.0267334259839 - type: nauc_map_at_100_max value: 22.263450935087985 - type: nauc_map_at_100_std value: -3.0314992417234246 - type: nauc_map_at_10_diff1 value: 34.242902742320005 - type: nauc_map_at_10_max value: 21.903826727642166 - type: nauc_map_at_10_std value: -3.542446159080337 - type: nauc_map_at_1_diff1 value: 42.19086537913616 - type: nauc_map_at_1_max value: 25.7185835567139 - type: nauc_map_at_1_std value: -5.103024290179066 - type: nauc_map_at_20_diff1 value: 34.125155980117164 - type: nauc_map_at_20_max value: 22.087294494234595 - type: nauc_map_at_20_std value: -3.36424948880508 - type: nauc_map_at_3_diff1 value: 35.64286121660639 - type: nauc_map_at_3_max value: 22.590710495131557 - type: nauc_map_at_3_std value: -4.624441576366177 - type: nauc_map_at_5_diff1 value: 34.14898021930562 - type: nauc_map_at_5_max value: 22.026354986165444 - type: nauc_map_at_5_std value: -3.783104198258874 - type: nauc_mrr_at_1000_diff1 value: 32.070052005179754 - type: nauc_mrr_at_1000_max value: 25.5635144887676 - type: nauc_mrr_at_1000_std value: -2.5922525361206037 - type: nauc_mrr_at_100_diff1 value: 32.02165293253879 - type: nauc_mrr_at_100_max value: 25.569836435013784 - type: nauc_mrr_at_100_std value: -2.598052553655546 - type: nauc_mrr_at_10_diff1 value: 32.11316242036246 - type: nauc_mrr_at_10_max value: 25.54775740017834 - type: nauc_mrr_at_10_std value: -2.9438839044554044 - type: nauc_mrr_at_1_diff1 value: 40.40685592638284 - type: nauc_mrr_at_1_max value: 30.0134595827404 - type: nauc_mrr_at_1_std value: -3.9985970334007477 - type: nauc_mrr_at_20_diff1 value: 32.120466540461 - type: nauc_mrr_at_20_max value: 25.549273895185305 - type: nauc_mrr_at_20_std value: -2.6763999823702553 - type: nauc_mrr_at_3_diff1 value: 33.66614272732434 - type: nauc_mrr_at_3_max value: 26.430879923148343 - type: nauc_mrr_at_3_std value: -4.0205614730618215 - type: nauc_mrr_at_5_diff1 value: 32.166578904190416 - type: nauc_mrr_at_5_max value: 25.776645936774095 - type: nauc_mrr_at_5_std value: -3.302080351323094 - type: nauc_ndcg_at_1000_diff1 value: 30.266233773630375 - type: nauc_ndcg_at_1000_max value: 22.08745825058941 - type: nauc_ndcg_at_1000_std value: 0.7729160122149865 - type: nauc_ndcg_at_100_diff1 value: 29.84343294166904 - type: nauc_ndcg_at_100_max value: 21.578448258757316 - type: nauc_ndcg_at_100_std value: 0.11264370081458419 - type: nauc_ndcg_at_10_diff1 value: 31.11895748690149 - type: nauc_ndcg_at_10_max value: 20.84767764918772 - type: nauc_ndcg_at_10_std value: -2.325520203137333 - type: nauc_ndcg_at_1_diff1 value: 40.40685592638284 - type: nauc_ndcg_at_1_max value: 30.0134595827404 - type: nauc_ndcg_at_1_std value: -3.9985970334007477 - type: nauc_ndcg_at_20_diff1 value: 30.76844239689582 - type: nauc_ndcg_at_20_max value: 21.158453354191884 - type: nauc_ndcg_at_20_std value: -1.6168879431876966 - type: nauc_ndcg_at_3_diff1 value: 33.47831071332028 - type: nauc_ndcg_at_3_max 
value: 23.430301462229234 - type: nauc_ndcg_at_3_std value: -4.236230987770694 - type: nauc_ndcg_at_5_diff1 value: 31.118155990902537 - type: nauc_ndcg_at_5_max value: 21.836987185909415 - type: nauc_ndcg_at_5_std value: -2.9140434980631045 - type: nauc_precision_at_1000_diff1 value: 0.9998952314883321 - type: nauc_precision_at_1000_max value: 15.224526827087908 - type: nauc_precision_at_1000_std value: 12.857731911721679 - type: nauc_precision_at_100_diff1 value: 9.206315178802491 - type: nauc_precision_at_100_max value: 23.2931840220031 - type: nauc_precision_at_100_std value: 10.762622088086484 - type: nauc_precision_at_10_diff1 value: 21.76866798095069 - type: nauc_precision_at_10_max value: 22.882457871450608 - type: nauc_precision_at_10_std value: 1.4688800239255935 - type: nauc_precision_at_1_diff1 value: 40.40685592638284 - type: nauc_precision_at_1_max value: 30.0134595827404 - type: nauc_precision_at_1_std value: -3.9985970334007477 - type: nauc_precision_at_20_diff1 value: 18.273394428921403 - type: nauc_precision_at_20_max value: 24.006501989084022 - type: nauc_precision_at_20_std value: 3.992091565975308 - type: nauc_precision_at_3_diff1 value: 27.442581369093507 - type: nauc_precision_at_3_max value: 24.691098910221115 - type: nauc_precision_at_3_std value: -2.5539232493084634 - type: nauc_precision_at_5_diff1 value: 22.309274572791644 - type: nauc_precision_at_5_max value: 23.275965057073243 - type: nauc_precision_at_5_std value: -0.30106646052885566 - type: nauc_recall_at_1000_diff1 value: 15.627777661804606 - type: nauc_recall_at_1000_max value: 12.338415154004217 - type: nauc_recall_at_1000_std value: 19.86929715112502 - type: nauc_recall_at_100_diff1 value: 16.96732400913716 - type: nauc_recall_at_100_max value: 12.701326286720368 - type: nauc_recall_at_100_std value: 9.758216731399271 - type: nauc_recall_at_10_diff1 value: 23.87551744396225 - type: nauc_recall_at_10_max value: 14.166646301822277 - type: nauc_recall_at_10_std value: 0.1988619766549251 - type: nauc_recall_at_1_diff1 value: 42.19086537913616 - type: nauc_recall_at_1_max value: 25.7185835567139 - type: nauc_recall_at_1_std value: -5.103024290179066 - type: nauc_recall_at_20_diff1 value: 22.82940257737179 - type: nauc_recall_at_20_max value: 14.380915615760875 - type: nauc_recall_at_20_std value: 2.254636975248318 - type: nauc_recall_at_3_diff1 value: 28.766021778938168 - type: nauc_recall_at_3_max value: 17.976609326976067 - type: nauc_recall_at_3_std value: -3.702494785254991 - type: nauc_recall_at_5_diff1 value: 23.908633564651726 - type: nauc_recall_at_5_max value: 15.914031250219566 - type: nauc_recall_at_5_std value: -1.1174655358936727 - type: ndcg_at_1 value: 17.613 - type: ndcg_at_10 value: 22.942999999999998 - type: ndcg_at_100 value: 28.433999999999997 - type: ndcg_at_1000 value: 31.757 - type: ndcg_at_20 value: 24.98 - type: ndcg_at_3 value: 20.048 - type: ndcg_at_5 value: 21.477 - type: precision_at_1 value: 17.613 - type: precision_at_10 value: 4.196 - type: precision_at_100 value: 0.857 - type: precision_at_1000 value: 0.134 - type: precision_at_20 value: 2.738 - type: precision_at_3 value: 9.4 - type: precision_at_5 value: 6.795 - type: recall_at_1 value: 14.446 - type: recall_at_10 value: 29.834 - type: recall_at_100 value: 54.201 - type: recall_at_1000 value: 77.404 - type: recall_at_20 value: 37.076 - type: recall_at_3 value: 21.634 - type: recall_at_5 value: 25.354 - task: type: Retrieval dataset: name: MTEB CQADupstackProgrammersRetrieval (default) type: mteb/cqadupstack-programmers 
config: default split: test revision: 6184bc1440d2dbc7612be22b50686b8826d22b32 metrics: - type: main_score value: 16.134999999999998 - type: map_at_1 value: 9.081 - type: map_at_10 value: 13.055 - type: map_at_100 value: 13.983 - type: map_at_1000 value: 14.121 - type: map_at_20 value: 13.572999999999999 - type: map_at_3 value: 11.356 - type: map_at_5 value: 12.374 - type: mrr_at_1 value: 11.643835616438356 - type: mrr_at_10 value: 16.14947995941146 - type: mrr_at_100 value: 16.997282843006918 - type: mrr_at_1000 value: 17.102618713993394 - type: mrr_at_20 value: 16.618159190946415 - type: mrr_at_3 value: 14.193302891933024 - type: mrr_at_5 value: 15.431887366818875 - type: nauc_map_at_1000_diff1 value: 32.88432231391895 - type: nauc_map_at_1000_max value: 21.28374560046097 - type: nauc_map_at_1000_std value: 0.7870656250135109 - type: nauc_map_at_100_diff1 value: 32.88983126213636 - type: nauc_map_at_100_max value: 21.22458501691853 - type: nauc_map_at_100_std value: 0.7745608524382627 - type: nauc_map_at_10_diff1 value: 33.3173574829305 - type: nauc_map_at_10_max value: 21.367334815121904 - type: nauc_map_at_10_std value: -0.2609532870073169 - type: nauc_map_at_1_diff1 value: 41.49865569300206 - type: nauc_map_at_1_max value: 23.806102705106763 - type: nauc_map_at_1_std value: -1.0768247293103315 - type: nauc_map_at_20_diff1 value: 32.88467310993085 - type: nauc_map_at_20_max value: 21.327573738016785 - type: nauc_map_at_20_std value: 0.316045501648052 - type: nauc_map_at_3_diff1 value: 35.81885176476419 - type: nauc_map_at_3_max value: 22.563058822026658 - type: nauc_map_at_3_std value: -0.8297325146016894 - type: nauc_map_at_5_diff1 value: 33.77483401790263 - type: nauc_map_at_5_max value: 22.13376627990081 - type: nauc_map_at_5_std value: -1.298858688329888 - type: nauc_mrr_at_1000_diff1 value: 30.18097667250221 - type: nauc_mrr_at_1000_max value: 23.047341870142613 - type: nauc_mrr_at_1000_std value: -0.7406764235969188 - type: nauc_mrr_at_100_diff1 value: 30.137374263969996 - type: nauc_mrr_at_100_max value: 23.00586275774131 - type: nauc_mrr_at_100_std value: -0.7248089045016322 - type: nauc_mrr_at_10_diff1 value: 30.5170004176012 - type: nauc_mrr_at_10_max value: 23.164562505110673 - type: nauc_mrr_at_10_std value: -1.337649573306133 - type: nauc_mrr_at_1_diff1 value: 37.46155722071317 - type: nauc_mrr_at_1_max value: 25.00725832122006 - type: nauc_mrr_at_1_std value: -1.0496408564552728 - type: nauc_mrr_at_20_diff1 value: 30.072298950513115 - type: nauc_mrr_at_20_max value: 23.12382481107441 - type: nauc_mrr_at_20_std value: -1.0529732263666112 - type: nauc_mrr_at_3_diff1 value: 33.48101600704272 - type: nauc_mrr_at_3_max value: 24.320755907805154 - type: nauc_mrr_at_3_std value: -1.1307908969215423 - type: nauc_mrr_at_5_diff1 value: 31.18888034831575 - type: nauc_mrr_at_5_max value: 24.06227117989202 - type: nauc_mrr_at_5_std value: -1.6797432122873692 - type: nauc_ndcg_at_1000_diff1 value: 28.432037664372412 - type: nauc_ndcg_at_1000_max value: 20.70102200502625 - type: nauc_ndcg_at_1000_std value: 4.336326682724843 - type: nauc_ndcg_at_100_diff1 value: 28.34454571794967 - type: nauc_ndcg_at_100_max value: 19.24223569564877 - type: nauc_ndcg_at_100_std value: 4.362280599906417 - type: nauc_ndcg_at_10_diff1 value: 29.501926407603296 - type: nauc_ndcg_at_10_max value: 20.201609309464548 - type: nauc_ndcg_at_10_std value: 0.24089058436514194 - type: nauc_ndcg_at_1_diff1 value: 37.46155722071317 - type: nauc_ndcg_at_1_max value: 25.00725832122006 - type: nauc_ndcg_at_1_std value: 
-1.0496408564552728 - type: nauc_ndcg_at_20_diff1 value: 28.16170312615381 - type: nauc_ndcg_at_20_max value: 19.972996583494282 - type: nauc_ndcg_at_20_std value: 1.7952491904498078 - type: nauc_ndcg_at_3_diff1 value: 33.81225087762225 - type: nauc_ndcg_at_3_max value: 22.806027738516985 - type: nauc_ndcg_at_3_std value: -0.3936571571120077 - type: nauc_ndcg_at_5_diff1 value: 30.443042638323213 - type: nauc_ndcg_at_5_max value: 22.16102145420267 - type: nauc_ndcg_at_5_std value: -1.406251026694119 - type: nauc_precision_at_1000_diff1 value: 0.4741273357484423 - type: nauc_precision_at_1000_max value: 11.280228116288542 - type: nauc_precision_at_1000_std value: 2.6901820584724363 - type: nauc_precision_at_100_diff1 value: 12.332309998132743 - type: nauc_precision_at_100_max value: 13.961289532548982 - type: nauc_precision_at_100_std value: 11.085111649559586 - type: nauc_precision_at_10_diff1 value: 19.283822581631675 - type: nauc_precision_at_10_max value: 18.7473146500872 - type: nauc_precision_at_10_std value: 0.35093524054436415 - type: nauc_precision_at_1_diff1 value: 37.46155722071317 - type: nauc_precision_at_1_max value: 25.00725832122006 - type: nauc_precision_at_1_std value: -1.0496408564552728 - type: nauc_precision_at_20_diff1 value: 16.254451730745757 - type: nauc_precision_at_20_max value: 17.364228546817166 - type: nauc_precision_at_20_std value: 5.773553761500332 - type: nauc_precision_at_3_diff1 value: 28.259681463765514 - type: nauc_precision_at_3_max value: 22.873732037017984 - type: nauc_precision_at_3_std value: -0.8527795522416294 - type: nauc_precision_at_5_diff1 value: 21.21485623284622 - type: nauc_precision_at_5_max value: 23.001097117924832 - type: nauc_precision_at_5_std value: -3.108687061513337 - type: nauc_recall_at_1000_diff1 value: 18.73323624525636 - type: nauc_recall_at_1000_max value: 18.04287551295194 - type: nauc_recall_at_1000_std value: 17.786418942777992 - type: nauc_recall_at_100_diff1 value: 19.919117945258254 - type: nauc_recall_at_100_max value: 11.16087760657872 - type: nauc_recall_at_100_std value: 14.566488048537535 - type: nauc_recall_at_10_diff1 value: 22.078090142518782 - type: nauc_recall_at_10_max value: 14.941344831772128 - type: nauc_recall_at_10_std value: 2.4737147250843186 - type: nauc_recall_at_1_diff1 value: 41.49865569300206 - type: nauc_recall_at_1_max value: 23.806102705106763 - type: nauc_recall_at_1_std value: -1.0768247293103315 - type: nauc_recall_at_20_diff1 value: 18.725160188681265 - type: nauc_recall_at_20_max value: 14.46073981800366 - type: nauc_recall_at_20_std value: 6.133778343325667 - type: nauc_recall_at_3_diff1 value: 30.297623023451 - type: nauc_recall_at_3_max value: 20.126404905370183 - type: nauc_recall_at_3_std value: -0.03367599947778304 - type: nauc_recall_at_5_diff1 value: 24.35960314497861 - type: nauc_recall_at_5_max value: 19.26030564870987 - type: nauc_recall_at_5_std value: -1.3740373839056597 - type: ndcg_at_1 value: 11.644 - type: ndcg_at_10 value: 16.134999999999998 - type: ndcg_at_100 value: 20.696 - type: ndcg_at_1000 value: 24.43 - type: ndcg_at_20 value: 17.861 - type: ndcg_at_3 value: 12.842999999999998 - type: ndcg_at_5 value: 14.618 - type: precision_at_1 value: 11.644 - type: precision_at_10 value: 3.116 - type: precision_at_100 value: 0.6459999999999999 - type: precision_at_1000 value: 0.11499999999999999 - type: precision_at_20 value: 2.032 - type: precision_at_3 value: 6.012 - type: precision_at_5 value: 4.84 - type: recall_at_1 value: 9.081 - type: recall_at_10 value: 22.554 - type: 
recall_at_100 value: 42.531 - type: recall_at_1000 value: 69.706 - type: recall_at_20 value: 28.743999999999996 - type: recall_at_3 value: 13.977 - type: recall_at_5 value: 18.169 - task: type: Retrieval dataset: name: MTEB CQADupstackRetrieval (default) type: CQADupstackRetrieval_is_a_combined_dataset config: default split: test revision: CQADupstackRetrieval_is_a_combined_dataset metrics: - type: main_score value: 19.09025 - type: ndcg_at_10 value: 19.09025 - task: type: Retrieval dataset: name: MTEB CQADupstackStatsRetrieval (default) type: mteb/cqadupstack-stats config: default split: test revision: 65ac3a16b8e91f9cee4c9828cc7c335575432a2a metrics: - type: main_score value: 14.333000000000002 - type: map_at_1 value: 8.547 - type: map_at_10 value: 11.93 - type: map_at_100 value: 12.684000000000001 - type: map_at_1000 value: 12.78 - type: map_at_20 value: 12.337 - type: map_at_3 value: 10.588000000000001 - type: map_at_5 value: 11.323 - type: mrr_at_1 value: 10.122699386503067 - type: mrr_at_10 value: 13.70039682539683 - type: mrr_at_100 value: 14.482783387597786 - type: mrr_at_1000 value: 14.570668290032126 - type: mrr_at_20 value: 14.13394542086446 - type: mrr_at_3 value: 12.295501022494888 - type: mrr_at_5 value: 13.02402862985685 - type: nauc_map_at_1000_diff1 value: 29.11861352274418 - type: nauc_map_at_1000_max value: 22.56892542189849 - type: nauc_map_at_1000_std value: -4.54004259281444 - type: nauc_map_at_100_diff1 value: 29.19511912412574 - type: nauc_map_at_100_max value: 22.603999738779653 - type: nauc_map_at_100_std value: -4.52144665211894 - type: nauc_map_at_10_diff1 value: 29.29630212567994 - type: nauc_map_at_10_max value: 23.196908629656825 - type: nauc_map_at_10_std value: -5.360014721454885 - type: nauc_map_at_1_diff1 value: 35.230641193187914 - type: nauc_map_at_1_max value: 24.41472808203692 - type: nauc_map_at_1_std value: -5.821919201147339 - type: nauc_map_at_20_diff1 value: 29.12584787266228 - type: nauc_map_at_20_max value: 22.709732890112623 - type: nauc_map_at_20_std value: -4.817143968480547 - type: nauc_map_at_3_diff1 value: 31.57056394160851 - type: nauc_map_at_3_max value: 25.825155522604348 - type: nauc_map_at_3_std value: -6.542610906472262 - type: nauc_map_at_5_diff1 value: 29.231256950912947 - type: nauc_map_at_5_max value: 24.156600281291105 - type: nauc_map_at_5_std value: -5.997363843824488 - type: nauc_mrr_at_1000_diff1 value: 28.820090702123135 - type: nauc_mrr_at_1000_max value: 24.263679220309907 - type: nauc_mrr_at_1000_std value: -2.0607371843303057 - type: nauc_mrr_at_100_diff1 value: 28.847376535658203 - type: nauc_mrr_at_100_max value: 24.272497169069386 - type: nauc_mrr_at_100_std value: -2.03120306488999 - type: nauc_mrr_at_10_diff1 value: 28.911942194319707 - type: nauc_mrr_at_10_max value: 25.035362298602738 - type: nauc_mrr_at_10_std value: -2.5392409774079616 - type: nauc_mrr_at_1_diff1 value: 34.94101066582577 - type: nauc_mrr_at_1_max value: 26.610522376067564 - type: nauc_mrr_at_1_std value: -2.6534637926597697 - type: nauc_mrr_at_20_diff1 value: 28.743299849543636 - type: nauc_mrr_at_20_max value: 24.32003719178884 - type: nauc_mrr_at_20_std value: -2.3279080552115117 - type: nauc_mrr_at_3_diff1 value: 31.04805054489257 - type: nauc_mrr_at_3_max value: 27.616725290738337 - type: nauc_mrr_at_3_std value: -2.9076820433664667 - type: nauc_mrr_at_5_diff1 value: 28.865005001724242 - type: nauc_mrr_at_5_max value: 26.103439275448775 - type: nauc_mrr_at_5_std value: -3.0680396311703184 - type: nauc_ndcg_at_1000_diff1 value: 
25.539587234316798 - type: nauc_ndcg_at_1000_max value: 18.820788497321356 - type: nauc_ndcg_at_1000_std value: -1.9960462357498938 - type: nauc_ndcg_at_100_diff1 value: 27.14578048184198 - type: nauc_ndcg_at_100_max value: 19.4851567283788 - type: nauc_ndcg_at_100_std value: -1.3749625199938715 - type: nauc_ndcg_at_10_diff1 value: 26.87364502097816 - type: nauc_ndcg_at_10_max value: 21.485890355501102 - type: nauc_ndcg_at_10_std value: -3.894125413998458 - type: nauc_ndcg_at_1_diff1 value: 34.94101066582577 - type: nauc_ndcg_at_1_max value: 26.610522376067564 - type: nauc_ndcg_at_1_std value: -2.6534637926597697 - type: nauc_ndcg_at_20_diff1 value: 26.341280976454417 - type: nauc_ndcg_at_20_max value: 19.721515866258724 - type: nauc_ndcg_at_20_std value: -2.9319224524709053 - type: nauc_ndcg_at_3_diff1 value: 30.74558316757148 - type: nauc_ndcg_at_3_max value: 26.40338736609146 - type: nauc_ndcg_at_3_std value: -5.561920759375321 - type: nauc_ndcg_at_5_diff1 value: 26.881257783685893 - type: nauc_ndcg_at_5_max value: 23.650417322561335 - type: nauc_ndcg_at_5_std value: -5.175161111887432 - type: nauc_precision_at_1000_diff1 value: 10.873007976618808 - type: nauc_precision_at_1000_max value: 12.030734880352934 - type: nauc_precision_at_1000_std value: 6.381355734825803 - type: nauc_precision_at_100_diff1 value: 22.401720874248873 - type: nauc_precision_at_100_max value: 16.830307472250432 - type: nauc_precision_at_100_std value: 8.759369364769308 - type: nauc_precision_at_10_diff1 value: 22.232090186151392 - type: nauc_precision_at_10_max value: 21.76539255838225 - type: nauc_precision_at_10_std value: 0.8339023471047621 - type: nauc_precision_at_1_diff1 value: 34.94101066582577 - type: nauc_precision_at_1_max value: 26.610522376067564 - type: nauc_precision_at_1_std value: -2.6534637926597697 - type: nauc_precision_at_20_diff1 value: 20.89118655581876 - type: nauc_precision_at_20_max value: 17.90131790057651 - type: nauc_precision_at_20_std value: 2.415900033993976 - type: nauc_precision_at_3_diff1 value: 29.199646185693584 - type: nauc_precision_at_3_max value: 28.71542597359216 - type: nauc_precision_at_3_std value: -3.324231635814199 - type: nauc_precision_at_5_diff1 value: 21.160582630949655 - type: nauc_precision_at_5_max value: 25.038152290365755 - type: nauc_precision_at_5_std value: -1.5349945089073667 - type: nauc_recall_at_1000_diff1 value: 12.802585970938592 - type: nauc_recall_at_1000_max value: 4.416753173413089 - type: nauc_recall_at_1000_std value: 0.3242070873030706 - type: nauc_recall_at_100_diff1 value: 23.177406841712212 - type: nauc_recall_at_100_max value: 10.168312031699122 - type: nauc_recall_at_100_std value: 3.5202491358448733 - type: nauc_recall_at_10_diff1 value: 21.179523585586825 - type: nauc_recall_at_10_max value: 14.696510947366045 - type: nauc_recall_at_10_std value: -2.77056987911708 - type: nauc_recall_at_1_diff1 value: 35.230641193187914 - type: nauc_recall_at_1_max value: 24.41472808203692 - type: nauc_recall_at_1_std value: -5.821919201147339 - type: nauc_recall_at_20_diff1 value: 19.96336555562722 - type: nauc_recall_at_20_max value: 10.265926044858517 - type: nauc_recall_at_20_std value: -0.785259475171776 - type: nauc_recall_at_3_diff1 value: 27.2666741731745 - type: nauc_recall_at_3_max value: 24.921261035370843 - type: nauc_recall_at_3_std value: -6.520343024542523 - type: nauc_recall_at_5_diff1 value: 20.830145482657233 - type: nauc_recall_at_5_max value: 19.70605027355368 - type: nauc_recall_at_5_std value: -5.524187480821078 - type: ndcg_at_1 
value: 10.123 - type: ndcg_at_10 value: 14.333000000000002 - type: ndcg_at_100 value: 18.242 - type: ndcg_at_1000 value: 21.185000000000002 - type: ndcg_at_20 value: 15.795 - type: ndcg_at_3 value: 11.737 - type: ndcg_at_5 value: 12.875 - type: precision_at_1 value: 10.123 - type: precision_at_10 value: 2.5 - type: precision_at_100 value: 0.48 - type: precision_at_1000 value: 0.079 - type: precision_at_20 value: 1.595 - type: precision_at_3 value: 5.164 - type: precision_at_5 value: 3.8649999999999998 - type: recall_at_1 value: 8.547 - type: recall_at_10 value: 20.152 - type: recall_at_100 value: 38.274 - type: recall_at_1000 value: 61.097 - type: recall_at_20 value: 25.672 - type: recall_at_3 value: 12.866 - type: recall_at_5 value: 15.717999999999998 - task: type: Retrieval dataset: name: MTEB CQADupstackTexRetrieval (default) type: mteb/cqadupstack-tex config: default split: test revision: 46989137a86843e03a6195de44b09deda022eec7 metrics: - type: main_score value: 10.942 - type: map_at_1 value: 5.989 - type: map_at_10 value: 8.927 - type: map_at_100 value: 9.539 - type: map_at_1000 value: 9.649000000000001 - type: map_at_20 value: 9.225 - type: map_at_3 value: 8.126 - type: map_at_5 value: 8.541 - type: mrr_at_1 value: 7.5361321403991735 - type: mrr_at_10 value: 10.92452397338838 - type: mrr_at_100 value: 11.592043752074652 - type: mrr_at_1000 value: 11.685734091169346 - type: mrr_at_20 value: 11.258548816571706 - type: mrr_at_3 value: 10.002294104152327 - type: mrr_at_5 value: 10.47373250745583 - type: nauc_map_at_1000_diff1 value: 29.70785865819864 - type: nauc_map_at_1000_max value: 15.814071189887855 - type: nauc_map_at_1000_std value: -5.60669413451568 - type: nauc_map_at_100_diff1 value: 29.74148513545459 - type: nauc_map_at_100_max value: 15.779846885727725 - type: nauc_map_at_100_std value: -5.76117228213773 - type: nauc_map_at_10_diff1 value: 30.592884755466276 - type: nauc_map_at_10_max value: 16.2821343956555 - type: nauc_map_at_10_std value: -6.31218509821274 - type: nauc_map_at_1_diff1 value: 40.50426071077868 - type: nauc_map_at_1_max value: 17.251422635161674 - type: nauc_map_at_1_std value: -7.3319940985741505 - type: nauc_map_at_20_diff1 value: 30.161839436701044 - type: nauc_map_at_20_max value: 15.822488590611552 - type: nauc_map_at_20_std value: -6.216851050714664 - type: nauc_map_at_3_diff1 value: 32.80171391466759 - type: nauc_map_at_3_max value: 16.819928931516028 - type: nauc_map_at_3_std value: -6.8482887648089354 - type: nauc_map_at_5_diff1 value: 31.68769336125935 - type: nauc_map_at_5_max value: 16.551544446521724 - type: nauc_map_at_5_std value: -6.610571158449323 - type: nauc_mrr_at_1000_diff1 value: 30.032074926432294 - type: nauc_mrr_at_1000_max value: 17.31359992105279 - type: nauc_mrr_at_1000_std value: -5.051808537297404 - type: nauc_mrr_at_100_diff1 value: 30.04561152587057 - type: nauc_mrr_at_100_max value: 17.31510991897843 - type: nauc_mrr_at_100_std value: -5.112171695450907 - type: nauc_mrr_at_10_diff1 value: 30.7919588089329 - type: nauc_mrr_at_10_max value: 17.747307159609047 - type: nauc_mrr_at_10_std value: -5.620809668288058 - type: nauc_mrr_at_1_diff1 value: 40.37820294671241 - type: nauc_mrr_at_1_max value: 19.515958567233955 - type: nauc_mrr_at_1_std value: -7.6128600910040385 - type: nauc_mrr_at_20_diff1 value: 30.43076360217774 - type: nauc_mrr_at_20_max value: 17.42098412102074 - type: nauc_mrr_at_20_std value: -5.502999723295499 - type: nauc_mrr_at_3_diff1 value: 33.35073752739181 - type: nauc_mrr_at_3_max value: 18.381225141274406 - 
type: nauc_mrr_at_3_std value: -6.281341542296808 - type: nauc_mrr_at_5_diff1 value: 32.01975103837218 - type: nauc_mrr_at_5_max value: 18.248575553624875 - type: nauc_mrr_at_5_std value: -5.975629240088075 - type: nauc_ndcg_at_1000_diff1 value: 23.480424968554473 - type: nauc_ndcg_at_1000_max value: 14.422280226661046 - type: nauc_ndcg_at_1000_std value: 0.037198763992900716 - type: nauc_ndcg_at_100_diff1 value: 23.74556359447292 - type: nauc_ndcg_at_100_max value: 14.02306375423822 - type: nauc_ndcg_at_100_std value: -2.7832737496474014 - type: nauc_ndcg_at_10_diff1 value: 27.151201274571196 - type: nauc_ndcg_at_10_max value: 15.7704175776716 - type: nauc_ndcg_at_10_std value: -5.561215786484417 - type: nauc_ndcg_at_1_diff1 value: 40.37820294671241 - type: nauc_ndcg_at_1_max value: 19.515958567233955 - type: nauc_ndcg_at_1_std value: -7.6128600910040385 - type: nauc_ndcg_at_20_diff1 value: 26.066768096454577 - type: nauc_ndcg_at_20_max value: 14.454961291556554 - type: nauc_ndcg_at_20_std value: -5.335984929547714 - type: nauc_ndcg_at_3_diff1 value: 31.42782503500614 - type: nauc_ndcg_at_3_max value: 17.29083202850581 - type: nauc_ndcg_at_3_std value: -6.593661694626304 - type: nauc_ndcg_at_5_diff1 value: 29.47414868567076 - type: nauc_ndcg_at_5_max value: 16.6743658195434 - type: nauc_ndcg_at_5_std value: -6.167442909277885 - type: nauc_precision_at_1000_diff1 value: 11.55307594597712 - type: nauc_precision_at_1000_max value: 16.664194862533392 - type: nauc_precision_at_1000_std value: 15.574570590140713 - type: nauc_precision_at_100_diff1 value: 14.135107624877794 - type: nauc_precision_at_100_max value: 15.965921007390795 - type: nauc_precision_at_100_std value: 5.476527761120489 - type: nauc_precision_at_10_diff1 value: 20.463049463514587 - type: nauc_precision_at_10_max value: 17.478921279030477 - type: nauc_precision_at_10_std value: -3.491880161936641 - type: nauc_precision_at_1_diff1 value: 40.37820294671241 - type: nauc_precision_at_1_max value: 19.515958567233955 - type: nauc_precision_at_1_std value: -7.6128600910040385 - type: nauc_precision_at_20_diff1 value: 19.35511923391385 - type: nauc_precision_at_20_max value: 16.263201003355583 - type: nauc_precision_at_20_std value: -2.9385217665021464 - type: nauc_precision_at_3_diff1 value: 28.582340371655384 - type: nauc_precision_at_3_max value: 18.900608189348805 - type: nauc_precision_at_3_std value: -6.399404023285527 - type: nauc_precision_at_5_diff1 value: 24.787631563267876 - type: nauc_precision_at_5_max value: 18.566603285207357 - type: nauc_precision_at_5_std value: -4.640787291262861 - type: nauc_recall_at_1000_diff1 value: 10.018323646441084 - type: nauc_recall_at_1000_max value: 8.971012069559492 - type: nauc_recall_at_1000_std value: 14.894521585422476 - type: nauc_recall_at_100_diff1 value: 11.541962873194024 - type: nauc_recall_at_100_max value: 8.63730681762965 - type: nauc_recall_at_100_std value: 3.1924288769214333 - type: nauc_recall_at_10_diff1 value: 18.978099632146687 - type: nauc_recall_at_10_max value: 12.346880908932615 - type: nauc_recall_at_10_std value: -4.380348720563592 - type: nauc_recall_at_1_diff1 value: 40.50426071077868 - type: nauc_recall_at_1_max value: 17.251422635161674 - type: nauc_recall_at_1_std value: -7.3319940985741505 - type: nauc_recall_at_20_diff1 value: 17.100697713020164 - type: nauc_recall_at_20_max value: 9.171514181265934 - type: nauc_recall_at_20_std value: -3.88622450782914 - type: nauc_recall_at_3_diff1 value: 26.205669264205483 - type: nauc_recall_at_3_max value: 
15.070286201579593 - type: nauc_recall_at_3_std value: -6.677898784102143 - type: nauc_recall_at_5_diff1 value: 23.246637868945513 - type: nauc_recall_at_5_max value: 13.77133525419923 - type: nauc_recall_at_5_std value: -6.145815156982035 - type: ndcg_at_1 value: 7.536 - type: ndcg_at_10 value: 10.942 - type: ndcg_at_100 value: 14.376 - type: ndcg_at_1000 value: 17.66 - type: ndcg_at_20 value: 12.012 - type: ndcg_at_3 value: 9.39 - type: ndcg_at_5 value: 10.036000000000001 - type: precision_at_1 value: 7.536 - type: precision_at_10 value: 2.051 - type: precision_at_100 value: 0.455 - type: precision_at_1000 value: 0.08800000000000001 - type: precision_at_20 value: 1.321 - type: precision_at_3 value: 4.565 - type: precision_at_5 value: 3.2620000000000005 - type: recall_at_1 value: 5.989 - type: recall_at_10 value: 15.112 - type: recall_at_100 value: 31.176 - type: recall_at_1000 value: 55.789 - type: recall_at_20 value: 19.139 - type: recall_at_3 value: 10.610999999999999 - type: recall_at_5 value: 12.302 - task: type: Retrieval dataset: name: MTEB CQADupstackUnixRetrieval (default) type: mteb/cqadupstack-unix config: default split: test revision: 6c6430d3a6d36f8d2a829195bc5dc94d7e063e53 metrics: - type: main_score value: 17.887 - type: map_at_1 value: 10.67 - type: map_at_10 value: 14.939 - type: map_at_100 value: 15.573999999999998 - type: map_at_1000 value: 15.684999999999999 - type: map_at_20 value: 15.223999999999998 - type: map_at_3 value: 13.799 - type: map_at_5 value: 14.332 - type: mrr_at_1 value: 12.87313432835821 - type: mrr_at_10 value: 17.731469142383315 - type: mrr_at_100 value: 18.402040803136185 - type: mrr_at_1000 value: 18.494079224741526 - type: mrr_at_20 value: 18.03607538846422 - type: mrr_at_3 value: 16.52674129353234 - type: mrr_at_5 value: 17.067786069651746 - type: nauc_map_at_1000_diff1 value: 36.76823844116977 - type: nauc_map_at_1000_max value: 27.94572035162958 - type: nauc_map_at_1000_std value: -4.388884836241252 - type: nauc_map_at_100_diff1 value: 36.79665558424098 - type: nauc_map_at_100_max value: 27.941945040312184 - type: nauc_map_at_100_std value: -4.490876272760181 - type: nauc_map_at_10_diff1 value: 37.41372310249964 - type: nauc_map_at_10_max value: 27.990160762136163 - type: nauc_map_at_10_std value: -4.795820943975423 - type: nauc_map_at_1_diff1 value: 49.98661453454098 - type: nauc_map_at_1_max value: 31.818725397600282 - type: nauc_map_at_1_std value: -5.925260594592982 - type: nauc_map_at_20_diff1 value: 36.92254435057548 - type: nauc_map_at_20_max value: 28.05940570041997 - type: nauc_map_at_20_std value: -4.780726661563721 - type: nauc_map_at_3_diff1 value: 38.36192130512335 - type: nauc_map_at_3_max value: 28.858454287203205 - type: nauc_map_at_3_std value: -5.755340070358489 - type: nauc_map_at_5_diff1 value: 37.9499946188146 - type: nauc_map_at_5_max value: 28.32627898637564 - type: nauc_map_at_5_std value: -4.900005922457714 - type: nauc_mrr_at_1000_diff1 value: 34.880119065480486 - type: nauc_mrr_at_1000_max value: 29.378414409768304 - type: nauc_mrr_at_1000_std value: -3.704128162920646 - type: nauc_mrr_at_100_diff1 value: 34.89626548489115 - type: nauc_mrr_at_100_max value: 29.389675765322654 - type: nauc_mrr_at_100_std value: -3.7494327131452114 - type: nauc_mrr_at_10_diff1 value: 35.32636142642244 - type: nauc_mrr_at_10_max value: 29.471010072155597 - type: nauc_mrr_at_10_std value: -3.996358264478763 - type: nauc_mrr_at_1_diff1 value: 47.90151427745923 - type: nauc_mrr_at_1_max value: 34.205811607428565 - type: nauc_mrr_at_1_std 
value: -4.355308541351635 - type: nauc_mrr_at_20_diff1 value: 34.939420448282924 - type: nauc_mrr_at_20_max value: 29.514028377508296 - type: nauc_mrr_at_20_std value: -4.031940468517912 - type: nauc_mrr_at_3_diff1 value: 36.37081496865121 - type: nauc_mrr_at_3_max value: 30.668982407799405 - type: nauc_mrr_at_3_std value: -4.8471296781069 - type: nauc_mrr_at_5_diff1 value: 35.88679509747269 - type: nauc_mrr_at_5_max value: 29.912822412299523 - type: nauc_mrr_at_5_std value: -4.38559630771512 - type: nauc_ndcg_at_1000_diff1 value: 30.958587739289474 - type: nauc_ndcg_at_1000_max value: 25.494852381225762 - type: nauc_ndcg_at_1000_std value: -0.1616411394145601 - type: nauc_ndcg_at_100_diff1 value: 31.25494182437728 - type: nauc_ndcg_at_100_max value: 25.646920642171217 - type: nauc_ndcg_at_100_std value: -2.4513554125960777 - type: nauc_ndcg_at_10_diff1 value: 33.12811377055696 - type: nauc_ndcg_at_10_max value: 26.464639927253046 - type: nauc_ndcg_at_10_std value: -4.2881824335959395 - type: nauc_ndcg_at_1_diff1 value: 47.90151427745923 - type: nauc_ndcg_at_1_max value: 34.205811607428565 - type: nauc_ndcg_at_1_std value: -4.355308541351635 - type: nauc_ndcg_at_20_diff1 value: 31.675421489903904 - type: nauc_ndcg_at_20_max value: 26.522154184809644 - type: nauc_ndcg_at_20_std value: -4.284414659369125 - type: nauc_ndcg_at_3_diff1 value: 34.46164089418861 - type: nauc_ndcg_at_3_max value: 28.686854091455782 - type: nauc_ndcg_at_3_std value: -5.695127299581537 - type: nauc_ndcg_at_5_diff1 value: 34.06268264335981 - type: nauc_ndcg_at_5_max value: 27.41462353998668 - type: nauc_ndcg_at_5_std value: -4.615408130218053 - type: nauc_precision_at_1000_diff1 value: 6.773761974306285 - type: nauc_precision_at_1000_max value: 13.042896531679865 - type: nauc_precision_at_1000_std value: 18.281508859789664 - type: nauc_precision_at_100_diff1 value: 15.924429001932866 - type: nauc_precision_at_100_max value: 20.457743309047 - type: nauc_precision_at_100_std value: 6.6080283991640005 - type: nauc_precision_at_10_diff1 value: 22.315260994226936 - type: nauc_precision_at_10_max value: 25.482668659182643 - type: nauc_precision_at_10_std value: -2.8176143138195253 - type: nauc_precision_at_1_diff1 value: 47.90151427745923 - type: nauc_precision_at_1_max value: 34.205811607428565 - type: nauc_precision_at_1_std value: -4.355308541351635 - type: nauc_precision_at_20_diff1 value: 18.144557360729884 - type: nauc_precision_at_20_max value: 25.204053277572914 - type: nauc_precision_at_20_std value: -1.7144259829502586 - type: nauc_precision_at_3_diff1 value: 25.226557583051363 - type: nauc_precision_at_3_max value: 28.206981851597934 - type: nauc_precision_at_3_std value: -6.113164957842488 - type: nauc_precision_at_5_diff1 value: 24.106572466155253 - type: nauc_precision_at_5_max value: 26.92943346868415 - type: nauc_precision_at_5_std value: -3.554052142651989 - type: nauc_recall_at_1000_diff1 value: 14.589862576218504 - type: nauc_recall_at_1000_max value: 12.701428869835372 - type: nauc_recall_at_1000_std value: 17.690486024774614 - type: nauc_recall_at_100_diff1 value: 18.41380873008093 - type: nauc_recall_at_100_max value: 16.44325189297761 - type: nauc_recall_at_100_std value: 2.5676496026723403 - type: nauc_recall_at_10_diff1 value: 23.836353717326148 - type: nauc_recall_at_10_max value: 20.488533672940427 - type: nauc_recall_at_10_std value: -3.6412149465751766 - type: nauc_recall_at_1_diff1 value: 49.98661453454098 - type: nauc_recall_at_1_max value: 31.818725397600282 - type: nauc_recall_at_1_std 
value: -5.925260594592982 - type: nauc_recall_at_20_diff1 value: 19.95415850300592 - type: nauc_recall_at_20_max value: 20.479489049982334 - type: nauc_recall_at_20_std value: -3.748045332843713 - type: nauc_recall_at_3_diff1 value: 27.51330288671981 - type: nauc_recall_at_3_max value: 25.233366697694947 - type: nauc_recall_at_3_std value: -6.416003335135423 - type: nauc_recall_at_5_diff1 value: 25.92079220648793 - type: nauc_recall_at_5_max value: 22.63598503654417 - type: nauc_recall_at_5_std value: -4.241913243082138 - type: ndcg_at_1 value: 12.873000000000001 - type: ndcg_at_10 value: 17.887 - type: ndcg_at_100 value: 21.487000000000002 - type: ndcg_at_1000 value: 24.596 - type: ndcg_at_20 value: 18.891 - type: ndcg_at_3 value: 15.65 - type: ndcg_at_5 value: 16.438 - type: precision_at_1 value: 12.873000000000001 - type: precision_at_10 value: 3.06 - type: precision_at_100 value: 0.549 - type: precision_at_1000 value: 0.091 - type: precision_at_20 value: 1.81 - type: precision_at_3 value: 7.338 - type: precision_at_5 value: 4.925 - type: recall_at_1 value: 10.67 - type: recall_at_10 value: 24.332 - type: recall_at_100 value: 41.046 - type: recall_at_1000 value: 64.17399999999999 - type: recall_at_20 value: 27.894999999999996 - type: recall_at_3 value: 17.807000000000002 - type: recall_at_5 value: 20.003 - task: type: Retrieval dataset: name: MTEB CQADupstackWebmastersRetrieval (default) type: mteb/cqadupstack-webmasters config: default split: test revision: 160c094312a0e1facb97e55eeddb698c0abe3571 metrics: - type: main_score value: 20.995 - type: map_at_1 value: 12.667 - type: map_at_10 value: 17.408 - type: map_at_100 value: 18.318 - type: map_at_1000 value: 18.512 - type: map_at_20 value: 17.829 - type: map_at_3 value: 15.676000000000002 - type: map_at_5 value: 16.799 - type: mrr_at_1 value: 16.007905138339922 - type: mrr_at_10 value: 20.940460505677894 - type: mrr_at_100 value: 21.69143441032759 - type: mrr_at_1000 value: 21.80204722464657 - type: mrr_at_20 value: 21.26643246728539 - type: mrr_at_3 value: 19.400527009222657 - type: mrr_at_5 value: 20.25032938076416 - type: nauc_map_at_1000_diff1 value: 32.47046839846832 - type: nauc_map_at_1000_max value: 27.66353273195947 - type: nauc_map_at_1000_std value: -8.343684598764236 - type: nauc_map_at_100_diff1 value: 32.50616188089244 - type: nauc_map_at_100_max value: 27.73747518277514 - type: nauc_map_at_100_std value: -8.490341693700493 - type: nauc_map_at_10_diff1 value: 33.1288027398037 - type: nauc_map_at_10_max value: 28.241224798228394 - type: nauc_map_at_10_std value: -9.02951560885345 - type: nauc_map_at_1_diff1 value: 40.80511648851529 - type: nauc_map_at_1_max value: 30.162361476463918 - type: nauc_map_at_1_std value: -9.17387155813208 - type: nauc_map_at_20_diff1 value: 32.48172255854906 - type: nauc_map_at_20_max value: 27.83782940642731 - type: nauc_map_at_20_std value: -9.00070653423497 - type: nauc_map_at_3_diff1 value: 35.33220107117886 - type: nauc_map_at_3_max value: 28.50773685929629 - type: nauc_map_at_3_std value: -9.774728282721654 - type: nauc_map_at_5_diff1 value: 33.86289360627357 - type: nauc_map_at_5_max value: 28.381915762969662 - type: nauc_map_at_5_std value: -8.724453393790323 - type: nauc_mrr_at_1000_diff1 value: 34.76838356916287 - type: nauc_mrr_at_1000_max value: 25.973225541804773 - type: nauc_mrr_at_1000_std value: -7.858456592729439 - type: nauc_mrr_at_100_diff1 value: 34.75307312922653 - type: nauc_mrr_at_100_max value: 25.954557036353204 - type: nauc_mrr_at_100_std value: -7.871516537547969 - 
type: nauc_mrr_at_10_diff1 value: 35.358922810256225 - type: nauc_mrr_at_10_max value: 26.210843274361505 - type: nauc_mrr_at_10_std value: -8.214417147884735 - type: nauc_mrr_at_1_diff1 value: 43.64078480508238 - type: nauc_mrr_at_1_max value: 29.370755170129257 - type: nauc_mrr_at_1_std value: -9.550993425629777 - type: nauc_mrr_at_20_diff1 value: 34.78113781898558 - type: nauc_mrr_at_20_max value: 25.928518389732314 - type: nauc_mrr_at_20_std value: -8.239044555830915 - type: nauc_mrr_at_3_diff1 value: 37.21568768239239 - type: nauc_mrr_at_3_max value: 26.65360940386168 - type: nauc_mrr_at_3_std value: -8.62478361674192 - type: nauc_mrr_at_5_diff1 value: 35.82015322695793 - type: nauc_mrr_at_5_max value: 26.501248513872365 - type: nauc_mrr_at_5_std value: -8.032258195748593 - type: nauc_ndcg_at_1000_diff1 value: 28.090450093791965 - type: nauc_ndcg_at_1000_max value: 25.0164560812251 - type: nauc_ndcg_at_1000_std value: -4.751492810050399 - type: nauc_ndcg_at_100_diff1 value: 28.006371102918905 - type: nauc_ndcg_at_100_max value: 24.96330175676876 - type: nauc_ndcg_at_100_std value: -5.191836591721473 - type: nauc_ndcg_at_10_diff1 value: 29.742355909419842 - type: nauc_ndcg_at_10_max value: 25.560202565798097 - type: nauc_ndcg_at_10_std value: -8.454314293252533 - type: nauc_ndcg_at_1_diff1 value: 43.64078480508238 - type: nauc_ndcg_at_1_max value: 29.370755170129257 - type: nauc_ndcg_at_1_std value: -9.550993425629777 - type: nauc_ndcg_at_20_diff1 value: 27.8673417820888 - type: nauc_ndcg_at_20_max value: 24.752636378864707 - type: nauc_ndcg_at_20_std value: -8.142335775441563 - type: nauc_ndcg_at_3_diff1 value: 34.12983925479383 - type: nauc_ndcg_at_3_max value: 25.383414977354768 - type: nauc_ndcg_at_3_std value: -9.314125871147313 - type: nauc_ndcg_at_5_diff1 value: 31.25517374528034 - type: nauc_ndcg_at_5_max value: 25.48971100051719 - type: nauc_ndcg_at_5_std value: -7.833719979552164 - type: nauc_precision_at_1000_diff1 value: 3.50440341668897 - type: nauc_precision_at_1000_max value: -3.087157205676272 - type: nauc_precision_at_1000_std value: 15.957120100116718 - type: nauc_precision_at_100_diff1 value: 8.365977140200012 - type: nauc_precision_at_100_max value: 5.265673590955992 - type: nauc_precision_at_100_std value: 10.094715416109812 - type: nauc_precision_at_10_diff1 value: 20.119798486812844 - type: nauc_precision_at_10_max value: 16.84346414214358 - type: nauc_precision_at_10_std value: -6.134362396350626 - type: nauc_precision_at_1_diff1 value: 43.64078480508238 - type: nauc_precision_at_1_max value: 29.370755170129257 - type: nauc_precision_at_1_std value: -9.550993425629777 - type: nauc_precision_at_20_diff1 value: 16.458988589576688 - type: nauc_precision_at_20_max value: 13.882029306822776 - type: nauc_precision_at_20_std value: -2.349385052523666 - type: nauc_precision_at_3_diff1 value: 30.425979660093866 - type: nauc_precision_at_3_max value: 20.960392518113437 - type: nauc_precision_at_3_std value: -9.122507391265795 - type: nauc_precision_at_5_diff1 value: 23.711336481938176 - type: nauc_precision_at_5_max value: 17.785091688656124 - type: nauc_precision_at_5_std value: -5.953830939145774 - type: nauc_recall_at_1000_diff1 value: 4.652161838596426 - type: nauc_recall_at_1000_max value: 13.427480897667563 - type: nauc_recall_at_1000_std value: 13.162281305962134 - type: nauc_recall_at_100_diff1 value: 12.940141763574056 - type: nauc_recall_at_100_max value: 17.133363434036806 - type: nauc_recall_at_100_std value: 5.929516195308144 - type: nauc_recall_at_10_diff1 
value: 19.28025711155888 - type: nauc_recall_at_10_max value: 21.611600359640324 - type: nauc_recall_at_10_std value: -7.3225233954700055 - type: nauc_recall_at_1_diff1 value: 40.80511648851529 - type: nauc_recall_at_1_max value: 30.162361476463918 - type: nauc_recall_at_1_std value: -9.17387155813208 - type: nauc_recall_at_20_diff1 value: 13.45385867706307 - type: nauc_recall_at_20_max value: 17.79542474505384 - type: nauc_recall_at_20_std value: -6.804718967301025 - type: nauc_recall_at_3_diff1 value: 28.00830001315124 - type: nauc_recall_at_3_max value: 23.903118754205703 - type: nauc_recall_at_3_std value: -9.446774660465353 - type: nauc_recall_at_5_diff1 value: 22.969782395962362 - type: nauc_recall_at_5_max value: 23.293961742969984 - type: nauc_recall_at_5_std value: -5.613990704144268 - type: ndcg_at_1 value: 16.008 - type: ndcg_at_10 value: 20.995 - type: ndcg_at_100 value: 25.146 - type: ndcg_at_1000 value: 29.032999999999998 - type: ndcg_at_20 value: 22.149 - type: ndcg_at_3 value: 18.285999999999998 - type: ndcg_at_5 value: 19.725 - type: precision_at_1 value: 16.008 - type: precision_at_10 value: 4.15 - type: precision_at_100 value: 0.881 - type: precision_at_1000 value: 0.18 - type: precision_at_20 value: 2.549 - type: precision_at_3 value: 8.827 - type: precision_at_5 value: 6.561 - type: recall_at_1 value: 12.667 - type: recall_at_10 value: 27.334999999999997 - type: recall_at_100 value: 47.504999999999995 - type: recall_at_1000 value: 74.20400000000001 - type: recall_at_20 value: 32.223 - type: recall_at_3 value: 18.855 - type: recall_at_5 value: 23.031 - task: type: Retrieval dataset: name: MTEB CQADupstackWordpressRetrieval (default) type: mteb/cqadupstack-wordpress config: default split: test revision: 4ffe81d471b1924886b33c7567bfb200e9eec5c4 metrics: - type: main_score value: 12.855 - type: map_at_1 value: 5.952 - type: map_at_10 value: 10.112 - type: map_at_100 value: 10.841000000000001 - type: map_at_1000 value: 10.952 - type: map_at_20 value: 10.485 - type: map_at_3 value: 8.61 - type: map_at_5 value: 9.39 - type: mrr_at_1 value: 6.654343807763401 - type: mrr_at_10 value: 11.145512425549404 - type: mrr_at_100 value: 11.890834621929745 - type: mrr_at_1000 value: 11.98935500535199 - type: mrr_at_20 value: 11.514438747582577 - type: mrr_at_3 value: 9.519408502772645 - type: mrr_at_5 value: 10.35120147874307 - type: nauc_map_at_1000_diff1 value: 28.118324967619934 - type: nauc_map_at_1000_max value: 33.4955396363861 - type: nauc_map_at_1000_std value: -7.124620464882072 - type: nauc_map_at_100_diff1 value: 28.139542997566775 - type: nauc_map_at_100_max value: 33.43234157469899 - type: nauc_map_at_100_std value: -7.243361520044231 - type: nauc_map_at_10_diff1 value: 28.847631060466366 - type: nauc_map_at_10_max value: 33.901023417079976 - type: nauc_map_at_10_std value: -7.925183564546207 - type: nauc_map_at_1_diff1 value: 50.30049199397133 - type: nauc_map_at_1_max value: 51.25572555439384 - type: nauc_map_at_1_std value: -11.015024646365136 - type: nauc_map_at_20_diff1 value: 28.28927518524186 - type: nauc_map_at_20_max value: 33.660096742342866 - type: nauc_map_at_20_std value: -7.818967310155242 - type: nauc_map_at_3_diff1 value: 31.240569537452505 - type: nauc_map_at_3_max value: 36.287067586021514 - type: nauc_map_at_3_std value: -9.672050554710639 - type: nauc_map_at_5_diff1 value: 29.22151929708611 - type: nauc_map_at_5_max value: 35.151163055940096 - type: nauc_map_at_5_std value: -8.08857100327833 - type: nauc_mrr_at_1000_diff1 value: 28.584908252510456 - type: 
nauc_mrr_at_1000_max value: 32.74580077107513 - type: nauc_mrr_at_1000_std value: -5.4545568607489425 - type: nauc_mrr_at_100_diff1 value: 28.590158729530756 - type: nauc_mrr_at_100_max value: 32.648265058397314 - type: nauc_mrr_at_100_std value: -5.497715715850587 - type: nauc_mrr_at_10_diff1 value: 29.13862424713755 - type: nauc_mrr_at_10_max value: 33.040759537886785 - type: nauc_mrr_at_10_std value: -5.9147477002669815 - type: nauc_mrr_at_1_diff1 value: 50.27722302230953 - type: nauc_mrr_at_1_max value: 49.25905641972045 - type: nauc_mrr_at_1_std value: -8.480294289311937 - type: nauc_mrr_at_20_diff1 value: 28.638536503165835 - type: nauc_mrr_at_20_max value: 32.88659954102282 - type: nauc_mrr_at_20_std value: -5.981819963535813 - type: nauc_mrr_at_3_diff1 value: 31.18159924468885 - type: nauc_mrr_at_3_max value: 35.21727856087969 - type: nauc_mrr_at_3_std value: -7.572707528554651 - type: nauc_mrr_at_5_diff1 value: 29.565525076928186 - type: nauc_mrr_at_5_max value: 34.266818009562066 - type: nauc_mrr_at_5_std value: -6.198634245500832 - type: nauc_ndcg_at_1000_diff1 value: 21.992225333730815 - type: nauc_ndcg_at_1000_max value: 28.17427028625173 - type: nauc_ndcg_at_1000_std value: -1.5499706000360816 - type: nauc_ndcg_at_100_diff1 value: 22.207779666352856 - type: nauc_ndcg_at_100_max value: 27.049600613849627 - type: nauc_ndcg_at_100_std value: -3.3145082009255664 - type: nauc_ndcg_at_10_diff1 value: 23.689293335278357 - type: nauc_ndcg_at_10_max value: 29.430164805550735 - type: nauc_ndcg_at_10_std value: -6.7008075430059915 - type: nauc_ndcg_at_1_diff1 value: 50.27722302230953 - type: nauc_ndcg_at_1_max value: 49.25905641972045 - type: nauc_ndcg_at_1_std value: -8.480294289311937 - type: nauc_ndcg_at_20_diff1 value: 22.2925895362134 - type: nauc_ndcg_at_20_max value: 28.844919103532163 - type: nauc_ndcg_at_20_std value: -6.594295088509034 - type: nauc_ndcg_at_3_diff1 value: 27.12317888260658 - type: nauc_ndcg_at_3_max value: 32.93206493058083 - type: nauc_ndcg_at_3_std value: -8.832021517864137 - type: nauc_ndcg_at_5_diff1 value: 24.20043773979843 - type: nauc_ndcg_at_5_max value: 31.54380198974836 - type: nauc_ndcg_at_5_std value: -6.807495457594366 - type: nauc_precision_at_1000_diff1 value: 5.581741511647604 - type: nauc_precision_at_1000_max value: 4.703458505931627 - type: nauc_precision_at_1000_std value: 10.657124449862566 - type: nauc_precision_at_100_diff1 value: 10.883192976516437 - type: nauc_precision_at_100_max value: 12.752909725063391 - type: nauc_precision_at_100_std value: 5.477310651451066 - type: nauc_precision_at_10_diff1 value: 13.750559486735126 - type: nauc_precision_at_10_max value: 21.16487005730127 - type: nauc_precision_at_10_std value: -4.531709245413559 - type: nauc_precision_at_1_diff1 value: 50.27722302230953 - type: nauc_precision_at_1_max value: 49.25905641972045 - type: nauc_precision_at_1_std value: -8.480294289311937 - type: nauc_precision_at_20_diff1 value: 11.29346713230963 - type: nauc_precision_at_20_max value: 20.31140492811378 - type: nauc_precision_at_20_std value: -3.028932222489695 - type: nauc_precision_at_3_diff1 value: 18.64174123411719 - type: nauc_precision_at_3_max value: 26.389733577145407 - type: nauc_precision_at_3_std value: -7.942974687482611 - type: nauc_precision_at_5_diff1 value: 14.483776598926971 - type: nauc_precision_at_5_max value: 24.48041754152907 - type: nauc_precision_at_5_std value: -3.7226914654635195 - type: nauc_recall_at_1000_diff1 value: 9.291280130218974 - type: nauc_recall_at_1000_max value: 
17.433646542112527 - type: nauc_recall_at_1000_std value: 15.008011633433348 - type: nauc_recall_at_100_diff1 value: 12.803561963798474 - type: nauc_recall_at_100_max value: 14.512899220841478 - type: nauc_recall_at_100_std value: 4.5635363743743405 - type: nauc_recall_at_10_diff1 value: 14.619920557797897 - type: nauc_recall_at_10_max value: 21.580422687110726 - type: nauc_recall_at_10_std value: -5.609327303626449 - type: nauc_recall_at_1_diff1 value: 50.30049199397133 - type: nauc_recall_at_1_max value: 51.25572555439384 - type: nauc_recall_at_1_std value: -11.015024646365136 - type: nauc_recall_at_20_diff1 value: 11.964600349614422 - type: nauc_recall_at_20_max value: 20.588461630785062 - type: nauc_recall_at_20_std value: -5.702130226450261 - type: nauc_recall_at_3_diff1 value: 17.5253237828965 - type: nauc_recall_at_3_max value: 26.185608151458005 - type: nauc_recall_at_3_std value: -9.159514017216269 - type: nauc_recall_at_5_diff1 value: 14.004716307559587 - type: nauc_recall_at_5_max value: 24.584165770910065 - type: nauc_recall_at_5_std value: -5.221167835710616 - type: ndcg_at_1 value: 6.654 - type: ndcg_at_10 value: 12.855 - type: ndcg_at_100 value: 17.012 - type: ndcg_at_1000 value: 20.252 - type: ndcg_at_20 value: 14.161999999999999 - type: ndcg_at_3 value: 9.703000000000001 - type: ndcg_at_5 value: 11.091 - type: precision_at_1 value: 6.654 - type: precision_at_10 value: 2.366 - type: precision_at_100 value: 0.488 - type: precision_at_1000 value: 0.082 - type: precision_at_20 value: 1.488 - type: precision_at_3 value: 4.436 - type: precision_at_5 value: 3.4750000000000005 - type: recall_at_1 value: 5.952 - type: recall_at_10 value: 20.434 - type: recall_at_100 value: 40.579 - type: recall_at_1000 value: 65.872 - type: recall_at_20 value: 25.302000000000003 - type: recall_at_3 value: 11.873000000000001 - type: recall_at_5 value: 15.206 - task: type: Retrieval dataset: name: MTEB ClimateFEVER (default) type: mteb/climate-fever config: default split: test revision: 47f2ac6acb640fc46020b02a5b59fdda04d39380 metrics: - type: main_score value: 12.684000000000001 - type: map_at_1 value: 4.893 - type: map_at_10 value: 8.362 - type: map_at_100 value: 9.366 - type: map_at_1000 value: 9.51 - type: map_at_20 value: 8.89 - type: map_at_3 value: 6.922000000000001 - type: map_at_5 value: 7.55 - type: mrr_at_1 value: 10.879478827361563 - type: mrr_at_10 value: 17.425107285042124 - type: mrr_at_100 value: 18.451707469189756 - type: mrr_at_1000 value: 18.52126392525071 - type: mrr_at_20 value: 18.048165607672363 - type: mrr_at_3 value: 14.820846905537465 - type: mrr_at_5 value: 16.114006514657984 - type: nauc_map_at_1000_diff1 value: 23.84006264934629 - type: nauc_map_at_1000_max value: 4.910831067499504 - type: nauc_map_at_1000_std value: 21.87335313820886 - type: nauc_map_at_100_diff1 value: 23.778524081332208 - type: nauc_map_at_100_max value: 4.859800424394481 - type: nauc_map_at_100_std value: 21.531388522921386 - type: nauc_map_at_10_diff1 value: 23.87487289096816 - type: nauc_map_at_10_max value: 4.401846074458388 - type: nauc_map_at_10_std value: 18.73423239612392 - type: nauc_map_at_1_diff1 value: 35.758137361986876 - type: nauc_map_at_1_max value: 6.168314369703521 - type: nauc_map_at_1_std value: 16.65271803089269 - type: nauc_map_at_20_diff1 value: 24.039157788253018 - type: nauc_map_at_20_max value: 4.643267356141666 - type: nauc_map_at_20_std value: 20.226255685740064 - type: nauc_map_at_3_diff1 value: 26.37074453742451 - type: nauc_map_at_3_max value: 4.413149278221099 - type: 
nauc_map_at_3_std value: 15.63665704099623 - type: nauc_map_at_5_diff1 value: 25.039889457178184 - type: nauc_map_at_5_max value: 4.116798702870248 - type: nauc_map_at_5_std value: 16.483869796310607 - type: nauc_mrr_at_1000_diff1 value: 22.547131264817562 - type: nauc_mrr_at_1000_max value: 6.808785488910689 - type: nauc_mrr_at_1000_std value: 21.975345310790907 - type: nauc_mrr_at_100_diff1 value: 22.549641093971683 - type: nauc_mrr_at_100_max value: 6.81168274530983 - type: nauc_mrr_at_100_std value: 21.974004923300384 - type: nauc_mrr_at_10_diff1 value: 22.312721592779024 - type: nauc_mrr_at_10_max value: 6.679578080791881 - type: nauc_mrr_at_10_std value: 21.07740647837007 - type: nauc_mrr_at_1_diff1 value: 28.173217916679285 - type: nauc_mrr_at_1_max value: 7.01737786335727 - type: nauc_mrr_at_1_std value: 17.343185290003337 - type: nauc_mrr_at_20_diff1 value: 22.453660248688838 - type: nauc_mrr_at_20_max value: 6.729779393524216 - type: nauc_mrr_at_20_std value: 21.7064323041105 - type: nauc_mrr_at_3_diff1 value: 23.318290537345305 - type: nauc_mrr_at_3_max value: 6.85330097005025 - type: nauc_mrr_at_3_std value: 18.579666587768532 - type: nauc_mrr_at_5_diff1 value: 22.776029548753908 - type: nauc_mrr_at_5_max value: 6.774526393574311 - type: nauc_mrr_at_5_std value: 19.493348467671495 - type: nauc_ndcg_at_1000_diff1 value: 21.24051083897338 - type: nauc_ndcg_at_1000_max value: 5.47512915602942 - type: nauc_ndcg_at_1000_std value: 33.891319842379175 - type: nauc_ndcg_at_100_diff1 value: 20.419435436717333 - type: nauc_ndcg_at_100_max value: 5.39782089893606 - type: nauc_ndcg_at_100_std value: 30.159229347157506 - type: nauc_ndcg_at_10_diff1 value: 20.733063242245937 - type: nauc_ndcg_at_10_max value: 4.730118140766257 - type: nauc_ndcg_at_10_std value: 22.15611978743939 - type: nauc_ndcg_at_1_diff1 value: 28.173217916679285 - type: nauc_ndcg_at_1_max value: 7.01737786335727 - type: nauc_ndcg_at_1_std value: 17.343185290003337 - type: nauc_ndcg_at_20_diff1 value: 21.193054968270157 - type: nauc_ndcg_at_20_max value: 5.042507849955366 - type: nauc_ndcg_at_20_std value: 25.574905139811683 - type: nauc_ndcg_at_3_diff1 value: 23.84494482915719 - type: nauc_ndcg_at_3_max value: 5.487614479078213 - type: nauc_ndcg_at_3_std value: 17.257041665670382 - type: nauc_ndcg_at_5_diff1 value: 22.335981246975596 - type: nauc_ndcg_at_5_max value: 4.51930579751092 - type: nauc_ndcg_at_5_std value: 18.146324563164686 - type: nauc_precision_at_1000_diff1 value: 10.732854051903242 - type: nauc_precision_at_1000_max value: 6.906169025482474 - type: nauc_precision_at_1000_std value: 48.29990501127646 - type: nauc_precision_at_100_diff1 value: 11.335367835686048 - type: nauc_precision_at_100_max value: 8.33486931679638 - type: nauc_precision_at_100_std value: 44.02335918155949 - type: nauc_precision_at_10_diff1 value: 12.734140898903185 - type: nauc_precision_at_10_max value: 7.345403114877788 - type: nauc_precision_at_10_std value: 29.786495191603628 - type: nauc_precision_at_1_diff1 value: 28.173217916679285 - type: nauc_precision_at_1_max value: 7.01737786335727 - type: nauc_precision_at_1_std value: 17.343185290003337 - type: nauc_precision_at_20_diff1 value: 14.578686218208455 - type: nauc_precision_at_20_max value: 8.31600884554527 - type: nauc_precision_at_20_std value: 35.57944755395991 - type: nauc_precision_at_3_diff1 value: 17.424902975218114 - type: nauc_precision_at_3_max value: 7.173711594974116 - type: nauc_precision_at_3_std value: 18.881971193903073 - type: nauc_precision_at_5_diff1 value: 
14.71989380091471 - type: nauc_precision_at_5_max value: 6.747106177114406 - type: nauc_precision_at_5_std value: 22.565140813543476 - type: nauc_recall_at_1000_diff1 value: 14.018742326454056 - type: nauc_recall_at_1000_max value: 1.5532125941851942 - type: nauc_recall_at_1000_std value: 48.0359073551386 - type: nauc_recall_at_100_diff1 value: 11.782399018197935 - type: nauc_recall_at_100_max value: 2.2870655024097513 - type: nauc_recall_at_100_std value: 37.97352959084523 - type: nauc_recall_at_10_diff1 value: 14.345879239147546 - type: nauc_recall_at_10_max value: 2.0087919399778515 - type: nauc_recall_at_10_std value: 24.59372608521495 - type: nauc_recall_at_1_diff1 value: 35.758137361986876 - type: nauc_recall_at_1_max value: 6.168314369703521 - type: nauc_recall_at_1_std value: 16.65271803089269 - type: nauc_recall_at_20_diff1 value: 14.6032045058713 - type: nauc_recall_at_20_max value: 2.192258051272998 - type: nauc_recall_at_20_std value: 30.200979930961648 - type: nauc_recall_at_3_diff1 value: 21.450459178725765 - type: nauc_recall_at_3_max value: 2.6687225558746217 - type: nauc_recall_at_3_std value: 15.62001953924645 - type: nauc_recall_at_5_diff1 value: 17.872642384652647 - type: nauc_recall_at_5_max value: 1.7062840921304248 - type: nauc_recall_at_5_std value: 17.238197751224522 - type: ndcg_at_1 value: 10.879 - type: ndcg_at_10 value: 12.684000000000001 - type: ndcg_at_100 value: 17.636 - type: ndcg_at_1000 value: 20.931 - type: ndcg_at_20 value: 14.557999999999998 - type: ndcg_at_3 value: 9.666 - type: ndcg_at_5 value: 10.592 - type: precision_at_1 value: 10.879 - type: precision_at_10 value: 4.215 - type: precision_at_100 value: 0.935 - type: precision_at_1000 value: 0.154 - type: precision_at_20 value: 2.8930000000000002 - type: precision_at_3 value: 7.166 - type: precision_at_5 value: 5.694 - type: recall_at_1 value: 4.893 - type: recall_at_10 value: 16.148 - type: recall_at_100 value: 33.826 - type: recall_at_1000 value: 52.91400000000001 - type: recall_at_20 value: 21.568 - type: recall_at_3 value: 8.984 - type: recall_at_5 value: 11.417 - task: type: Retrieval dataset: name: MTEB DBPedia (default) type: mteb/dbpedia config: default split: test revision: c0f706b76e590d620bd6618b3ca8efdd34e2d659 metrics: - type: main_score value: 18.714 - type: map_at_1 value: 3.6290000000000004 - type: map_at_10 value: 7.344 - type: map_at_100 value: 10.174999999999999 - type: map_at_1000 value: 10.89 - type: map_at_20 value: 8.439 - type: map_at_3 value: 5.609999999999999 - type: map_at_5 value: 6.337 - type: mrr_at_1 value: 37.0 - type: mrr_at_10 value: 46.09295634920637 - type: mrr_at_100 value: 46.88963947930081 - type: mrr_at_1000 value: 46.921566120401955 - type: mrr_at_20 value: 46.52364089084293 - type: mrr_at_3 value: 43.33333333333334 - type: mrr_at_5 value: 44.90833333333335 - type: nauc_map_at_1000_diff1 value: 15.332578307626383 - type: nauc_map_at_1000_max value: 19.591409700798067 - type: nauc_map_at_1000_std value: 26.787357729943086 - type: nauc_map_at_100_diff1 value: 15.241772873921782 - type: nauc_map_at_100_max value: 18.342574948282497 - type: nauc_map_at_100_std value: 23.631531457963924 - type: nauc_map_at_10_diff1 value: 17.295256116074693 - type: nauc_map_at_10_max value: 10.62161320889349 - type: nauc_map_at_10_std value: 9.528015695519017 - type: nauc_map_at_1_diff1 value: 16.446542483531125 - type: nauc_map_at_1_max value: 4.979934347581338 - type: nauc_map_at_1_std value: 0.8028896220717383 - type: nauc_map_at_20_diff1 value: 16.81602502338933 - type: 
nauc_map_at_20_max value: 13.113289648729024 - type: nauc_map_at_20_std value: 14.351215296062362 - type: nauc_map_at_3_diff1 value: 14.907096937119139 - type: nauc_map_at_3_max value: 7.35444839341772 - type: nauc_map_at_3_std value: 3.56181101379306 - type: nauc_map_at_5_diff1 value: 17.310165177414458 - type: nauc_map_at_5_max value: 9.029713690770615 - type: nauc_map_at_5_std value: 5.483712783452527 - type: nauc_mrr_at_1000_diff1 value: 21.637726685501068 - type: nauc_mrr_at_1000_max value: 30.207538155542647 - type: nauc_mrr_at_1000_std value: 23.29384324216765 - type: nauc_mrr_at_100_diff1 value: 21.635718406960365 - type: nauc_mrr_at_100_max value: 30.21626999781084 - type: nauc_mrr_at_100_std value: 23.315552275404077 - type: nauc_mrr_at_10_diff1 value: 21.63149126393632 - type: nauc_mrr_at_10_max value: 30.19460995864985 - type: nauc_mrr_at_10_std value: 23.162647549161143 - type: nauc_mrr_at_1_diff1 value: 23.364434113790995 - type: nauc_mrr_at_1_max value: 29.16236827328641 - type: nauc_mrr_at_1_std value: 20.444573577612672 - type: nauc_mrr_at_20_diff1 value: 21.500850583557057 - type: nauc_mrr_at_20_max value: 30.20831775659985 - type: nauc_mrr_at_20_std value: 23.200255998287243 - type: nauc_mrr_at_3_diff1 value: 21.12636914240847 - type: nauc_mrr_at_3_max value: 28.8554344421751 - type: nauc_mrr_at_3_std value: 22.971981931510907 - type: nauc_mrr_at_5_diff1 value: 21.25759448565056 - type: nauc_mrr_at_5_max value: 29.949582847543653 - type: nauc_mrr_at_5_std value: 22.60218450418408 - type: nauc_ndcg_at_1000_diff1 value: 18.808237293933672 - type: nauc_ndcg_at_1000_max value: 21.383496457619863 - type: nauc_ndcg_at_1000_std value: 41.576194502603904 - type: nauc_ndcg_at_100_diff1 value: 17.221887092074635 - type: nauc_ndcg_at_100_max value: 17.701739166467814 - type: nauc_ndcg_at_100_std value: 32.68960425363178 - type: nauc_ndcg_at_10_diff1 value: 18.532709672848732 - type: nauc_ndcg_at_10_max value: 17.09971249017414 - type: nauc_ndcg_at_10_std value: 24.640964891301568 - type: nauc_ndcg_at_1_diff1 value: 20.909544791732714 - type: nauc_ndcg_at_1_max value: 19.966081278133522 - type: nauc_ndcg_at_1_std value: 16.467816838901918 - type: nauc_ndcg_at_20_diff1 value: 17.17581137257012 - type: nauc_ndcg_at_20_max value: 15.286085887063514 - type: nauc_ndcg_at_20_std value: 24.382832522939328 - type: nauc_ndcg_at_3_diff1 value: 16.33752617073797 - type: nauc_ndcg_at_3_max value: 17.80070987939365 - type: nauc_ndcg_at_3_std value: 21.901487508713668 - type: nauc_ndcg_at_5_diff1 value: 17.66213503429926 - type: nauc_ndcg_at_5_max value: 18.315036078788523 - type: nauc_ndcg_at_5_std value: 22.196869148981882 - type: nauc_precision_at_1000_diff1 value: 3.153755654841115 - type: nauc_precision_at_1000_max value: 23.826422759712194 - type: nauc_precision_at_1000_std value: 38.32310024626058 - type: nauc_precision_at_100_diff1 value: 5.254703196587399 - type: nauc_precision_at_100_max value: 31.23694387267914 - type: nauc_precision_at_100_std value: 46.615222544239785 - type: nauc_precision_at_10_diff1 value: 9.171988505302384 - type: nauc_precision_at_10_max value: 26.89906129794692 - type: nauc_precision_at_10_std value: 36.25236215404761 - type: nauc_precision_at_1_diff1 value: 23.364434113790995 - type: nauc_precision_at_1_max value: 29.16236827328641 - type: nauc_precision_at_1_std value: 20.444573577612672 - type: nauc_precision_at_20_diff1 value: 6.816222235055836 - type: nauc_precision_at_20_max value: 28.05552431582458 - type: nauc_precision_at_20_std value: 
39.041946684417596 - type: nauc_precision_at_3_diff1 value: 12.440898759477614 - type: nauc_precision_at_3_max value: 25.53095697663368 - type: nauc_precision_at_3_std value: 26.29306114437138 - type: nauc_precision_at_5_diff1 value: 12.961933144163579 - type: nauc_precision_at_5_max value: 28.8551662840494 - type: nauc_precision_at_5_std value: 28.98920116163561 - type: nauc_recall_at_1000_diff1 value: 10.46665439274001 - type: nauc_recall_at_1000_max value: 9.12732640867415 - type: nauc_recall_at_1000_std value: 42.420396816639986 - type: nauc_recall_at_100_diff1 value: 7.630795440733252 - type: nauc_recall_at_100_max value: 9.497703777492731 - type: nauc_recall_at_100_std value: 30.3239668986987 - type: nauc_recall_at_10_diff1 value: 15.472483341738865 - type: nauc_recall_at_10_max value: 3.6641891638054798 - type: nauc_recall_at_10_std value: 4.57953087809313 - type: nauc_recall_at_1_diff1 value: 16.446542483531125 - type: nauc_recall_at_1_max value: 4.979934347581338 - type: nauc_recall_at_1_std value: 0.8028896220717383 - type: nauc_recall_at_20_diff1 value: 9.043285621876421 - type: nauc_recall_at_20_max value: 2.799814278881547 - type: nauc_recall_at_20_std value: 9.488589268742839 - type: nauc_recall_at_3_diff1 value: 11.070041224495936 - type: nauc_recall_at_3_max value: 3.058997523275269 - type: nauc_recall_at_3_std value: -0.31088660397764756 - type: nauc_recall_at_5_diff1 value: 15.280147039490439 - type: nauc_recall_at_5_max value: 3.8735984736389604 - type: nauc_recall_at_5_std value: -0.03652249815937461 - type: ndcg_at_1 value: 26.0 - type: ndcg_at_10 value: 18.714 - type: ndcg_at_100 value: 21.972 - type: ndcg_at_1000 value: 27.908 - type: ndcg_at_20 value: 18.666 - type: ndcg_at_3 value: 21.593 - type: ndcg_at_5 value: 19.89 - type: precision_at_1 value: 37.0 - type: precision_at_10 value: 16.175 - type: precision_at_100 value: 5.405 - type: precision_at_1000 value: 1.1119999999999999 - type: precision_at_20 value: 12.45 - type: precision_at_3 value: 26.25 - type: precision_at_5 value: 21.3 - type: recall_at_1 value: 3.6290000000000004 - type: recall_at_10 value: 11.074 - type: recall_at_100 value: 27.508 - type: recall_at_1000 value: 48.478 - type: recall_at_20 value: 15.765 - type: recall_at_3 value: 6.679 - type: recall_at_5 value: 8.272 - task: type: Classification dataset: name: MTEB EmotionClassification (default) type: mteb/emotion config: default split: test revision: 4f58c6b202a23cf9a4da393831edf4f9183cad37 metrics: - type: accuracy value: 37.085 - type: f1 value: 33.85927583699898 - type: f1_weighted value: 39.200474117393966 - type: main_score value: 37.085 - task: type: Retrieval dataset: name: MTEB FEVER (default) type: mteb/fever config: default split: test revision: bea83ef9e8fb933d90a2f1d5515737465d613e12 metrics: - type: main_score value: 22.016 - type: map_at_1 value: 12.193 - type: map_at_10 value: 18.082 - type: map_at_100 value: 19.041 - type: map_at_1000 value: 19.127 - type: map_at_20 value: 18.614 - type: map_at_3 value: 15.791 - type: map_at_5 value: 17.074 - type: mrr_at_1 value: 12.946294629462946 - type: mrr_at_10 value: 19.172619642916665 - type: mrr_at_100 value: 20.154909631396883 - type: mrr_at_1000 value: 20.23555740317628 - type: mrr_at_20 value: 19.71354143370259 - type: mrr_at_3 value: 16.76167616761678 - type: mrr_at_5 value: 18.12756275627569 - type: nauc_map_at_1000_diff1 value: 20.290997144547806 - type: nauc_map_at_1000_max value: 11.450991275708125 - type: nauc_map_at_1000_std value: -10.04517962568564 - type: 
nauc_map_at_100_diff1 value: 20.286419962395446 - type: nauc_map_at_100_max value: 11.425096874032468 - type: nauc_map_at_100_std value: -10.065217561013961 - type: nauc_map_at_10_diff1 value: 20.352678660604802 - type: nauc_map_at_10_max value: 11.01767996890229 - type: nauc_map_at_10_std value: -10.707087936088575 - type: nauc_map_at_1_diff1 value: 25.032419107186094 - type: nauc_map_at_1_max value: 12.369813614872736 - type: nauc_map_at_1_std value: -14.118939916139569 - type: nauc_map_at_20_diff1 value: 20.389612922681682 - type: nauc_map_at_20_max value: 11.353929159428661 - type: nauc_map_at_20_std value: -10.297859728424513 - type: nauc_map_at_3_diff1 value: 21.10704599787224 - type: nauc_map_at_3_max value: 10.930500499862571 - type: nauc_map_at_3_std value: -12.150965535677678 - type: nauc_map_at_5_diff1 value: 20.842777278284128 - type: nauc_map_at_5_max value: 10.827383306142737 - type: nauc_map_at_5_std value: -11.221709408333618 - type: nauc_mrr_at_1000_diff1 value: 20.318256054389476 - type: nauc_mrr_at_1000_max value: 11.796117937558172 - type: nauc_mrr_at_1000_std value: -10.287039413450211 - type: nauc_mrr_at_100_diff1 value: 20.30841620615174 - type: nauc_mrr_at_100_max value: 11.779189553888532 - type: nauc_mrr_at_100_std value: -10.294866807046127 - type: nauc_mrr_at_10_diff1 value: 20.374243995449877 - type: nauc_mrr_at_10_max value: 11.378404399185833 - type: nauc_mrr_at_10_std value: -10.875685274480453 - type: nauc_mrr_at_1_diff1 value: 25.100637371748824 - type: nauc_mrr_at_1_max value: 12.75349173425225 - type: nauc_mrr_at_1_std value: -14.395108761279237 - type: nauc_mrr_at_20_diff1 value: 20.39503308580974 - type: nauc_mrr_at_20_max value: 11.68589575755117 - type: nauc_mrr_at_20_std value: -10.492915215640092 - type: nauc_mrr_at_3_diff1 value: 21.15981004354575 - type: nauc_mrr_at_3_max value: 11.28231678901033 - type: nauc_mrr_at_3_std value: -12.354174511822121 - type: nauc_mrr_at_5_diff1 value: 20.799863945954275 - type: nauc_mrr_at_5_max value: 11.185632335820825 - type: nauc_mrr_at_5_std value: -11.469723683281297 - type: nauc_ndcg_at_1000_diff1 value: 18.464587317922547 - type: nauc_ndcg_at_1000_max value: 13.008062904816914 - type: nauc_ndcg_at_1000_std value: -5.664914582345968 - type: nauc_ndcg_at_100_diff1 value: 18.16644191513211 - type: nauc_ndcg_at_100_max value: 12.562444143891966 - type: nauc_ndcg_at_100_std value: -6.1441260439999 - type: nauc_ndcg_at_10_diff1 value: 18.686352401538496 - type: nauc_ndcg_at_10_max value: 10.869744096886084 - type: nauc_ndcg_at_10_std value: -8.944207877220036 - type: nauc_ndcg_at_1_diff1 value: 25.100637371748824 - type: nauc_ndcg_at_1_max value: 12.75349173425225 - type: nauc_ndcg_at_1_std value: -14.395108761279237 - type: nauc_ndcg_at_20_diff1 value: 18.771980400862198 - type: nauc_ndcg_at_20_max value: 11.905846688294329 - type: nauc_ndcg_at_20_std value: -7.692989490709515 - type: nauc_ndcg_at_3_diff1 value: 20.08654674967674 - type: nauc_ndcg_at_3_max value: 10.663033509421721 - type: nauc_ndcg_at_3_std value: -11.574039012307594 - type: nauc_ndcg_at_5_diff1 value: 19.6605128392337 - type: nauc_ndcg_at_5_max value: 10.508598217516415 - type: nauc_ndcg_at_5_std value: -10.065510128768713 - type: nauc_precision_at_1000_diff1 value: 7.843686893129402 - type: nauc_precision_at_1000_max value: 21.12867481889994 - type: nauc_precision_at_1000_std value: 17.397771341896146 - type: nauc_precision_at_100_diff1 value: 10.964367718664041 - type: nauc_precision_at_100_max value: 18.134742533867346 - type: 
nauc_precision_at_100_std value: 7.826000941250076 - type: nauc_precision_at_10_diff1 value: 15.105380802537063 - type: nauc_precision_at_10_max value: 11.285261334237703 - type: nauc_precision_at_10_std value: -4.37944714089422 - type: nauc_precision_at_1_diff1 value: 25.100637371748824 - type: nauc_precision_at_1_max value: 12.75349173425225 - type: nauc_precision_at_1_std value: -14.395108761279237 - type: nauc_precision_at_20_diff1 value: 15.077505620030765 - type: nauc_precision_at_20_max value: 14.539549230107863 - type: nauc_precision_at_20_std value: -0.3542706803956202 - type: nauc_precision_at_3_diff1 value: 17.885365023585084 - type: nauc_precision_at_3_max value: 10.292960240507334 - type: nauc_precision_at_3_std value: -10.022232347175288 - type: nauc_precision_at_5_diff1 value: 17.139957329877934 - type: nauc_precision_at_5_max value: 10.26986709887834 - type: nauc_precision_at_5_std value: -7.222300752002702 - type: nauc_recall_at_1000_diff1 value: 10.939852794630156 - type: nauc_recall_at_1000_max value: 20.445200176227928 - type: nauc_recall_at_1000_std value: 17.423637451714775 - type: nauc_recall_at_100_diff1 value: 11.453503005311378 - type: nauc_recall_at_100_max value: 15.652758603853172 - type: nauc_recall_at_100_std value: 6.527801869334319 - type: nauc_recall_at_10_diff1 value: 14.432828795666774 - type: nauc_recall_at_10_max value: 9.917611920139953 - type: nauc_recall_at_10_std value: -4.640402932242214 - type: nauc_recall_at_1_diff1 value: 25.032419107186094 - type: nauc_recall_at_1_max value: 12.369813614872736 - type: nauc_recall_at_1_std value: -14.118939916139569 - type: nauc_recall_at_20_diff1 value: 14.649940175342705 - type: nauc_recall_at_20_max value: 12.839139966470082 - type: nauc_recall_at_20_std value: -1.1007068094900396 - type: nauc_recall_at_3_diff1 value: 17.369984220537575 - type: nauc_recall_at_3_max value: 9.706157288728694 - type: nauc_recall_at_3_std value: -9.933996418659476 - type: nauc_recall_at_5_diff1 value: 16.73461655268465 - type: nauc_recall_at_5_max value: 9.307482112802237 - type: nauc_recall_at_5_std value: -7.03240216549824 - type: ndcg_at_1 value: 12.946 - type: ndcg_at_10 value: 22.016 - type: ndcg_at_100 value: 27.1 - type: ndcg_at_1000 value: 29.608 - type: ndcg_at_20 value: 23.949 - type: ndcg_at_3 value: 17.254 - type: ndcg_at_5 value: 19.572 - type: precision_at_1 value: 12.946 - type: precision_at_10 value: 3.614 - type: precision_at_100 value: 0.632 - type: precision_at_1000 value: 0.086 - type: precision_at_20 value: 2.2190000000000003 - type: precision_at_3 value: 7.335999999999999 - type: precision_at_5 value: 5.62 - type: recall_at_1 value: 12.193 - type: recall_at_10 value: 33.477000000000004 - type: recall_at_100 value: 57.653 - type: recall_at_1000 value: 77.331 - type: recall_at_20 value: 40.967 - type: recall_at_3 value: 20.524 - type: recall_at_5 value: 26.049 - task: type: Retrieval dataset: name: MTEB FiQA2018 (default) type: mteb/fiqa config: default split: test revision: 27a168819829fe9bcd655c2df245fb19452e8e06 metrics: - type: main_score value: 10.489999999999998 - type: map_at_1 value: 4.324999999999999 - type: map_at_10 value: 7.2620000000000005 - type: map_at_100 value: 8.049000000000001 - type: map_at_1000 value: 8.219999999999999 - type: map_at_20 value: 7.61 - type: map_at_3 value: 5.973 - type: map_at_5 value: 6.691 - type: mrr_at_1 value: 8.950617283950617 - type: mrr_at_10 value: 13.708602782676858 - type: mrr_at_100 value: 14.590661251603459 - type: mrr_at_1000 value: 14.700261572617254 - type: 
mrr_at_20 value: 14.11123716025319 - type: mrr_at_3 value: 12.062757201646086 - type: mrr_at_5 value: 13.127572016460906 - type: nauc_map_at_1000_diff1 value: 29.868612329177928 - type: nauc_map_at_1000_max value: 1.8204575427341532 - type: nauc_map_at_1000_std value: -4.185357333535049 - type: nauc_map_at_100_diff1 value: 29.946178213759282 - type: nauc_map_at_100_max value: 1.610360929666458 - type: nauc_map_at_100_std value: -4.324079540013444 - type: nauc_map_at_10_diff1 value: 30.399813155198824 - type: nauc_map_at_10_max value: 1.8115464824069072 - type: nauc_map_at_10_std value: -4.737607209968629 - type: nauc_map_at_1_diff1 value: 37.53493767190502 - type: nauc_map_at_1_max value: 6.343933558239079 - type: nauc_map_at_1_std value: -8.230966082922905 - type: nauc_map_at_20_diff1 value: 30.308094557427058 - type: nauc_map_at_20_max value: 1.7031539908608901 - type: nauc_map_at_20_std value: -4.596734035205173 - type: nauc_map_at_3_diff1 value: 32.8951312020134 - type: nauc_map_at_3_max value: 1.5535854126023998 - type: nauc_map_at_3_std value: -4.539910426062374 - type: nauc_map_at_5_diff1 value: 30.438220232065543 - type: nauc_map_at_5_max value: 2.0380362092746083 - type: nauc_map_at_5_std value: -4.716253038875689 - type: nauc_mrr_at_1000_diff1 value: 26.097087362103995 - type: nauc_mrr_at_1000_max value: 6.377351302196768 - type: nauc_mrr_at_1000_std value: -8.980609641309028 - type: nauc_mrr_at_100_diff1 value: 26.0420700495144 - type: nauc_mrr_at_100_max value: 6.3133809175339755 - type: nauc_mrr_at_100_std value: -9.000162649179808 - type: nauc_mrr_at_10_diff1 value: 26.535507660887507 - type: nauc_mrr_at_10_max value: 6.381465133195606 - type: nauc_mrr_at_10_std value: -9.191571489530038 - type: nauc_mrr_at_1_diff1 value: 33.21219729698373 - type: nauc_mrr_at_1_max value: 8.117452072894173 - type: nauc_mrr_at_1_std value: -12.844056505931412 - type: nauc_mrr_at_20_diff1 value: 26.119432629408944 - type: nauc_mrr_at_20_max value: 6.142130397600541 - type: nauc_mrr_at_20_std value: -8.969120848763918 - type: nauc_mrr_at_3_diff1 value: 29.213633065227913 - type: nauc_mrr_at_3_max value: 6.158454748584739 - type: nauc_mrr_at_3_std value: -9.312167992788329 - type: nauc_mrr_at_5_diff1 value: 26.853690010476384 - type: nauc_mrr_at_5_max value: 6.607630323087147 - type: nauc_mrr_at_5_std value: -9.16727089175747 - type: nauc_ndcg_at_1000_diff1 value: 24.608991804968696 - type: nauc_ndcg_at_1000_max value: 5.359080584203262 - type: nauc_ndcg_at_1000_std value: -1.4847472953357936 - type: nauc_ndcg_at_100_diff1 value: 24.648632317746273 - type: nauc_ndcg_at_100_max value: 2.1712898966851113 - type: nauc_ndcg_at_100_std value: -3.5369260708070107 - type: nauc_ndcg_at_10_diff1 value: 27.014604913486856 - type: nauc_ndcg_at_10_max value: 2.4695161721048713 - type: nauc_ndcg_at_10_std value: -5.3598766328112735 - type: nauc_ndcg_at_1_diff1 value: 33.21219729698373 - type: nauc_ndcg_at_1_max value: 8.117452072894173 - type: nauc_ndcg_at_1_std value: -12.844056505931412 - type: nauc_ndcg_at_20_diff1 value: 26.348030975637954 - type: nauc_ndcg_at_20_max value: 1.76798660214836 - type: nauc_ndcg_at_20_std value: -4.752973355036493 - type: nauc_ndcg_at_3_diff1 value: 30.08569857797367 - type: nauc_ndcg_at_3_max value: 3.8922869178252917 - type: nauc_ndcg_at_3_std value: -5.983540710713673 - type: nauc_ndcg_at_5_diff1 value: 27.00404833916418 - type: nauc_ndcg_at_5_max value: 3.5093481086647174 - type: nauc_ndcg_at_5_std value: -5.594177739447796 - type: nauc_precision_at_1000_diff1 value: 
6.90213731255884 - type: nauc_precision_at_1000_max value: 22.546962761447155 - type: nauc_precision_at_1000_std value: -4.411259743880491 - type: nauc_precision_at_100_diff1 value: 14.110688584366798 - type: nauc_precision_at_100_max value: 10.545246972283675 - type: nauc_precision_at_100_std value: -5.013842584740609 - type: nauc_precision_at_10_diff1 value: 20.259939679291286 - type: nauc_precision_at_10_max value: 6.864599576255598 - type: nauc_precision_at_10_std value: -6.629146983652406 - type: nauc_precision_at_1_diff1 value: 33.21219729698373 - type: nauc_precision_at_1_max value: 8.117452072894173 - type: nauc_precision_at_1_std value: -12.844056505931412 - type: nauc_precision_at_20_diff1 value: 19.290649186490967 - type: nauc_precision_at_20_max value: 5.972515078212738 - type: nauc_precision_at_20_std value: -5.429565238738726 - type: nauc_precision_at_3_diff1 value: 26.615348561686524 - type: nauc_precision_at_3_max value: 4.303529688113032 - type: nauc_precision_at_3_std value: -6.3859133152717575 - type: nauc_precision_at_5_diff1 value: 20.15741104687489 - type: nauc_precision_at_5_max value: 5.829980153393318 - type: nauc_precision_at_5_std value: -7.303750048891929 - type: nauc_recall_at_1000_diff1 value: 12.433553342367036 - type: nauc_recall_at_1000_max value: 4.468200721496133 - type: nauc_recall_at_1000_std value: 14.900182633571784 - type: nauc_recall_at_100_diff1 value: 14.0062702129626 - type: nauc_recall_at_100_max value: -1.7131702012948224 - type: nauc_recall_at_100_std value: 2.2633308267962704 - type: nauc_recall_at_10_diff1 value: 21.690668515787653 - type: nauc_recall_at_10_max value: -0.6937364802491892 - type: nauc_recall_at_10_std value: -3.082925088768182 - type: nauc_recall_at_1_diff1 value: 37.53493767190502 - type: nauc_recall_at_1_max value: 6.343933558239079 - type: nauc_recall_at_1_std value: -8.230966082922905 - type: nauc_recall_at_20_diff1 value: 19.77931628522879 - type: nauc_recall_at_20_max value: -1.8891310482328967 - type: nauc_recall_at_20_std value: -2.116148089873719 - type: nauc_recall_at_3_diff1 value: 29.51744746509749 - type: nauc_recall_at_3_max value: -1.5430112189485936 - type: nauc_recall_at_3_std value: -1.655207409284257 - type: nauc_recall_at_5_diff1 value: 21.71469884887553 - type: nauc_recall_at_5_max value: 0.7546577860370985 - type: nauc_recall_at_5_std value: -1.8445545818566638 - type: ndcg_at_1 value: 8.951 - type: ndcg_at_10 value: 10.489999999999998 - type: ndcg_at_100 value: 15.051 - type: ndcg_at_1000 value: 19.479 - type: ndcg_at_20 value: 11.73 - type: ndcg_at_3 value: 8.407 - type: ndcg_at_5 value: 9.382 - type: precision_at_1 value: 8.951 - type: precision_at_10 value: 3.056 - type: precision_at_100 value: 0.761 - type: precision_at_1000 value: 0.151 - type: precision_at_20 value: 1.991 - type: precision_at_3 value: 5.813 - type: precision_at_5 value: 4.7219999999999995 - type: recall_at_1 value: 4.324999999999999 - type: recall_at_10 value: 13.963999999999999 - type: recall_at_100 value: 32.568999999999996 - type: recall_at_1000 value: 60.873999999999995 - type: recall_at_20 value: 18.044 - type: recall_at_3 value: 7.863 - type: recall_at_5 value: 10.741 - task: type: Retrieval dataset: name: MTEB HotpotQA (default) type: mteb/hotpotqa config: default split: test revision: ab518f4d6fcca38d87c25209f94beba119d02014 metrics: - type: main_score value: 28.296 - type: map_at_1 value: 16.124 - type: map_at_10 value: 22.006999999999998 - type: map_at_100 value: 22.739 - type: map_at_1000 value: 22.831000000000003 - 
type: map_at_20 value: 22.397 - type: map_at_3 value: 20.343 - type: map_at_5 value: 21.273 - type: mrr_at_1 value: 32.248480756245776 - type: mrr_at_10 value: 38.63598169405064 - type: mrr_at_100 value: 39.30912106800413 - type: mrr_at_1000 value: 39.36706737124047 - type: mrr_at_20 value: 39.01889362753551 - type: mrr_at_3 value: 36.90524420436645 - type: mrr_at_5 value: 37.876884987621 - type: nauc_map_at_1000_diff1 value: 52.56275733949851 - type: nauc_map_at_1000_max value: 15.678119273683258 - type: nauc_map_at_1000_std value: 21.94442763793275 - type: nauc_map_at_100_diff1 value: 52.57779873054535 - type: nauc_map_at_100_max value: 15.675547534713088 - type: nauc_map_at_100_std value: 21.86210645684129 - type: nauc_map_at_10_diff1 value: 53.016128486745004 - type: nauc_map_at_10_max value: 15.782677582200714 - type: nauc_map_at_10_std value: 20.895601911314472 - type: nauc_map_at_1_diff1 value: 62.39324742344811 - type: nauc_map_at_1_max value: 18.922278332305293 - type: nauc_map_at_1_std value: 15.431990044458088 - type: nauc_map_at_20_diff1 value: 52.66735350527932 - type: nauc_map_at_20_max value: 15.720152193572472 - type: nauc_map_at_20_std value: 21.43058845996297 - type: nauc_map_at_3_diff1 value: 54.6666892102859 - type: nauc_map_at_3_max value: 16.731046525278487 - type: nauc_map_at_3_std value: 19.200351760472845 - type: nauc_map_at_5_diff1 value: 53.67302712440124 - type: nauc_map_at_5_max value: 16.14212699563179 - type: nauc_map_at_5_std value: 20.109580390507958 - type: nauc_mrr_at_1000_diff1 value: 57.590587384091286 - type: nauc_mrr_at_1000_max value: 16.955585029521554 - type: nauc_mrr_at_1000_std value: 18.940765599942846 - type: nauc_mrr_at_100_diff1 value: 57.57727053172551 - type: nauc_mrr_at_100_max value: 16.95237066457576 - type: nauc_mrr_at_100_std value: 18.940796857284766 - type: nauc_mrr_at_10_diff1 value: 57.71480130493494 - type: nauc_mrr_at_10_max value: 17.047197537035274 - type: nauc_mrr_at_10_std value: 18.60310516808845 - type: nauc_mrr_at_1_diff1 value: 62.39324742344811 - type: nauc_mrr_at_1_max value: 18.922278332305293 - type: nauc_mrr_at_1_std value: 15.431990044458088 - type: nauc_mrr_at_20_diff1 value: 57.59068015425055 - type: nauc_mrr_at_20_max value: 16.98394919583758 - type: nauc_mrr_at_20_std value: 18.81315221111426 - type: nauc_mrr_at_3_diff1 value: 58.67948717756185 - type: nauc_mrr_at_3_max value: 17.68777692655858 - type: nauc_mrr_at_3_std value: 17.53265364680353 - type: nauc_mrr_at_5_diff1 value: 58.139101763281666 - type: nauc_mrr_at_5_max value: 17.270925196457462 - type: nauc_mrr_at_5_std value: 18.056055685643045 - type: nauc_ndcg_at_1000_diff1 value: 50.592269072101516 - type: nauc_ndcg_at_1000_max value: 14.524760647752915 - type: nauc_ndcg_at_1000_std value: 26.838335704567463 - type: nauc_ndcg_at_100_diff1 value: 50.77465151278066 - type: nauc_ndcg_at_100_max value: 14.54429816135242 - type: nauc_ndcg_at_100_std value: 25.550144005876646 - type: nauc_ndcg_at_10_diff1 value: 52.196099719654995 - type: nauc_ndcg_at_10_max value: 15.021941288342521 - type: nauc_ndcg_at_10_std value: 22.17407528719642 - type: nauc_ndcg_at_1_diff1 value: 62.39324742344811 - type: nauc_ndcg_at_1_max value: 18.922278332305293 - type: nauc_ndcg_at_1_std value: 15.431990044458088 - type: nauc_ndcg_at_20_diff1 value: 51.30002836393829 - type: nauc_ndcg_at_20_max value: 14.814680820356232 - type: nauc_ndcg_at_20_std value: 23.506479941769733 - type: nauc_ndcg_at_3_diff1 value: 54.90780405878355 - type: nauc_ndcg_at_3_max value: 16.648637328318923 - 
type: nauc_ndcg_at_3_std value: 19.30934390416425 - type: nauc_ndcg_at_5_diff1 value: 53.479799880106086 - type: nauc_ndcg_at_5_max value: 15.738363325622498 - type: nauc_ndcg_at_5_std value: 20.58963012081015 - type: nauc_precision_at_1000_diff1 value: 24.304482939944215 - type: nauc_precision_at_1000_max value: 5.650518835490494 - type: nauc_precision_at_1000_std value: 41.977320321177345 - type: nauc_precision_at_100_diff1 value: 31.210792569116773 - type: nauc_precision_at_100_max value: 7.568305897193786 - type: nauc_precision_at_100_std value: 35.39707853767338 - type: nauc_precision_at_10_diff1 value: 41.43987014969449 - type: nauc_precision_at_10_max value: 10.60950763673837 - type: nauc_precision_at_10_std value: 26.62624496899695 - type: nauc_precision_at_1_diff1 value: 62.39324742344811 - type: nauc_precision_at_1_max value: 18.922278332305293 - type: nauc_precision_at_1_std value: 15.431990044458088 - type: nauc_precision_at_20_diff1 value: 37.555981094379796 - type: nauc_precision_at_20_max value: 9.733917395724056 - type: nauc_precision_at_20_std value: 29.976963378218098 - type: nauc_precision_at_3_diff1 value: 50.27466251846394 - type: nauc_precision_at_3_max value: 15.137975562897834 - type: nauc_precision_at_3_std value: 21.385116394323468 - type: nauc_precision_at_5_diff1 value: 46.22016922464899 - type: nauc_precision_at_5_max value: 12.884011400229156 - type: nauc_precision_at_5_std value: 23.551280371239656 - type: nauc_recall_at_1000_diff1 value: 24.30448293994435 - type: nauc_recall_at_1000_max value: 5.650518835490617 - type: nauc_recall_at_1000_std value: 41.97732032117746 - type: nauc_recall_at_100_diff1 value: 31.21079256911678 - type: nauc_recall_at_100_max value: 7.56830589719377 - type: nauc_recall_at_100_std value: 35.397078537673345 - type: nauc_recall_at_10_diff1 value: 41.43987014969447 - type: nauc_recall_at_10_max value: 10.609507636738407 - type: nauc_recall_at_10_std value: 26.626244968996925 - type: nauc_recall_at_1_diff1 value: 62.39324742344811 - type: nauc_recall_at_1_max value: 18.922278332305293 - type: nauc_recall_at_1_std value: 15.431990044458088 - type: nauc_recall_at_20_diff1 value: 37.5559810943798 - type: nauc_recall_at_20_max value: 9.733917395724083 - type: nauc_recall_at_20_std value: 29.976963378218112 - type: nauc_recall_at_3_diff1 value: 50.27466251846396 - type: nauc_recall_at_3_max value: 15.13797556289784 - type: nauc_recall_at_3_std value: 21.38511639432347 - type: nauc_recall_at_5_diff1 value: 46.220169224649 - type: nauc_recall_at_5_max value: 12.88401140022913 - type: nauc_recall_at_5_std value: 23.551280371239613 - type: ndcg_at_1 value: 32.248 - type: ndcg_at_10 value: 28.296 - type: ndcg_at_100 value: 31.830000000000002 - type: ndcg_at_1000 value: 34.182 - type: ndcg_at_20 value: 29.593000000000004 - type: ndcg_at_3 value: 25.080000000000002 - type: ndcg_at_5 value: 26.641 - type: precision_at_1 value: 32.248 - type: precision_at_10 value: 6.151 - type: precision_at_100 value: 0.898 - type: precision_at_1000 value: 0.121 - type: precision_at_20 value: 3.4939999999999998 - type: precision_at_3 value: 15.665000000000001 - type: precision_at_5 value: 10.633 - type: recall_at_1 value: 16.124 - type: recall_at_10 value: 30.756 - type: recall_at_100 value: 44.895 - type: recall_at_1000 value: 60.655 - type: recall_at_20 value: 34.936 - type: recall_at_3 value: 23.498 - type: recall_at_5 value: 26.583000000000002 - task: type: Classification dataset: name: MTEB ImdbClassification (default) type: mteb/imdb config: default split: 
test revision: 3d86128a09e091d6018b6d26cad27f2739fc2db7 metrics: - type: accuracy value: 65.11479999999999 - type: ap value: 60.16054663114752 - type: ap_weighted value: 60.16054663114752 - type: f1 value: 64.58602077899722 - type: f1_weighted value: 64.58602077899724 - type: main_score value: 65.11479999999999 - task: type: Retrieval dataset: name: MTEB MSMARCO (default) type: mteb/msmarco config: default split: test revision: c5a29a104738b98a9e76336939199e264163d4a0 metrics: - type: main_score value: 27.705000000000002 - type: map_at_1 value: 0.777 - type: map_at_10 value: 4.274 - type: map_at_100 value: 10.459 - type: map_at_1000 value: 12.995000000000001 - type: map_at_20 value: 6.47 - type: map_at_3 value: 1.8610000000000002 - type: map_at_5 value: 2.606 - type: mrr_at_1 value: 46.51162790697674 - type: mrr_at_10 value: 58.708010335917315 - type: mrr_at_100 value: 59.00751703077284 - type: mrr_at_1000 value: 59.02276496514652 - type: mrr_at_20 value: 58.90180878552971 - type: mrr_at_3 value: 55.81395348837209 - type: mrr_at_5 value: 57.44186046511628 - type: nauc_map_at_1000_diff1 value: 37.892094182998136 - type: nauc_map_at_1000_max value: 61.74117112323522 - type: nauc_map_at_1000_std value: 58.58032442470286 - type: nauc_map_at_100_diff1 value: 40.49245812562701 - type: nauc_map_at_100_max value: 57.01499706917439 - type: nauc_map_at_100_std value: 51.72298891596721 - type: nauc_map_at_10_diff1 value: 38.194743917917116 - type: nauc_map_at_10_max value: 28.735417026530364 - type: nauc_map_at_10_std value: 31.023879510246598 - type: nauc_map_at_1_diff1 value: 32.49931114906685 - type: nauc_map_at_1_max value: 17.671517789719864 - type: nauc_map_at_1_std value: 16.99861035727389 - type: nauc_map_at_20_diff1 value: 36.32556775140449 - type: nauc_map_at_20_max value: 34.68159609940747 - type: nauc_map_at_20_std value: 38.40576232270393 - type: nauc_map_at_3_diff1 value: 28.749285903216972 - type: nauc_map_at_3_max value: 22.471665405120152 - type: nauc_map_at_3_std value: 24.69853700687298 - type: nauc_map_at_5_diff1 value: 31.853910704413547 - type: nauc_map_at_5_max value: 24.263061493565555 - type: nauc_map_at_5_std value: 28.612970147886262 - type: nauc_mrr_at_1000_diff1 value: 38.28674723804615 - type: nauc_mrr_at_1000_max value: 65.31128352347841 - type: nauc_mrr_at_1000_std value: 60.74832369191216 - type: nauc_mrr_at_100_diff1 value: 38.31302530772531 - type: nauc_mrr_at_100_max value: 65.33138728948728 - type: nauc_mrr_at_100_std value: 60.756072020421946 - type: nauc_mrr_at_10_diff1 value: 38.407877536524715 - type: nauc_mrr_at_10_max value: 64.69187029537487 - type: nauc_mrr_at_10_std value: 60.99973125836723 - type: nauc_mrr_at_1_diff1 value: 33.86818356255958 - type: nauc_mrr_at_1_max value: 63.497988338553334 - type: nauc_mrr_at_1_std value: 57.319330794169545 - type: nauc_mrr_at_20_diff1 value: 38.548064176888836 - type: nauc_mrr_at_20_max value: 65.17230095066438 - type: nauc_mrr_at_20_std value: 60.876500917878865 - type: nauc_mrr_at_3_diff1 value: 33.6890627338303 - type: nauc_mrr_at_3_max value: 64.82321215840447 - type: nauc_mrr_at_3_std value: 61.26157086058862 - type: nauc_mrr_at_5_diff1 value: 37.49455502289622 - type: nauc_mrr_at_5_max value: 65.53530465417907 - type: nauc_mrr_at_5_std value: 61.02287299328536 - type: nauc_ndcg_at_1000_diff1 value: 49.55226865832326 - type: nauc_ndcg_at_1000_max value: 61.12649206783223 - type: nauc_ndcg_at_1000_std value: 57.53286905675567 - type: nauc_ndcg_at_100_diff1 value: 45.73981167442622 - type: nauc_ndcg_at_100_max 
value: 64.82900696367803 - type: nauc_ndcg_at_100_std value: 48.49824360353255 - type: nauc_ndcg_at_10_diff1 value: 44.58241602640944 - type: nauc_ndcg_at_10_max value: 62.58045432730028 - type: nauc_ndcg_at_10_std value: 44.00810752260865 - type: nauc_ndcg_at_1_diff1 value: 35.224578682142635 - type: nauc_ndcg_at_1_max value: 44.63222303780071 - type: nauc_ndcg_at_1_std value: 22.087936224074618 - type: nauc_ndcg_at_20_diff1 value: 41.64314419662495 - type: nauc_ndcg_at_20_max value: 65.3789962064312 - type: nauc_ndcg_at_20_std value: 47.213428209069924 - type: nauc_ndcg_at_3_diff1 value: 36.95443124125196 - type: nauc_ndcg_at_3_max value: 56.10236595509034 - type: nauc_ndcg_at_3_std value: 38.53747582748712 - type: nauc_ndcg_at_5_diff1 value: 39.85878950415295 - type: nauc_ndcg_at_5_max value: 61.567975785495534 - type: nauc_ndcg_at_5_std value: 42.480532442232764 - type: nauc_precision_at_1000_diff1 value: 9.463162430234085 - type: nauc_precision_at_1000_max value: 61.7012187403225 - type: nauc_precision_at_1000_std value: 53.356643761687806 - type: nauc_precision_at_100_diff1 value: 22.507457849227073 - type: nauc_precision_at_100_max value: 74.14227941923573 - type: nauc_precision_at_100_std value: 56.66415918103874 - type: nauc_precision_at_10_diff1 value: 37.11634706297281 - type: nauc_precision_at_10_max value: 64.70246260978291 - type: nauc_precision_at_10_std value: 52.076370670842195 - type: nauc_precision_at_1_diff1 value: 33.86818356255958 - type: nauc_precision_at_1_max value: 63.497988338553334 - type: nauc_precision_at_1_std value: 57.319330794169545 - type: nauc_precision_at_20_diff1 value: 30.464024743782335 - type: nauc_precision_at_20_max value: 67.25613806762661 - type: nauc_precision_at_20_std value: 52.950474527983495 - type: nauc_precision_at_3_diff1 value: 25.67014245501591 - type: nauc_precision_at_3_max value: 64.64109190221811 - type: nauc_precision_at_3_std value: 61.79128083613472 - type: nauc_precision_at_5_diff1 value: 30.728206847540683 - type: nauc_precision_at_5_max value: 63.132851485096175 - type: nauc_precision_at_5_std value: 53.934810596223336 - type: nauc_recall_at_1000_diff1 value: 44.772142334722375 - type: nauc_recall_at_1000_max value: 52.83460479783461 - type: nauc_recall_at_1000_std value: 58.70222029972984 - type: nauc_recall_at_100_diff1 value: 48.17949191462816 - type: nauc_recall_at_100_max value: 51.837404933039686 - type: nauc_recall_at_100_std value: 46.57038195442946 - type: nauc_recall_at_10_diff1 value: 44.70152550284119 - type: nauc_recall_at_10_max value: 25.41255284271965 - type: nauc_recall_at_10_std value: 26.05400058770887 - type: nauc_recall_at_1_diff1 value: 32.49931114906685 - type: nauc_recall_at_1_max value: 17.671517789719864 - type: nauc_recall_at_1_std value: 16.99861035727389 - type: nauc_recall_at_20_diff1 value: 41.61632802348345 - type: nauc_recall_at_20_max value: 29.22885033770648 - type: nauc_recall_at_20_std value: 29.70591175740895 - type: nauc_recall_at_3_diff1 value: 25.408832214219373 - type: nauc_recall_at_3_max value: 20.110088341846414 - type: nauc_recall_at_3_std value: 27.5814549517511 - type: nauc_recall_at_5_diff1 value: 33.87726583518953 - type: nauc_recall_at_5_max value: 21.44640652682217 - type: nauc_recall_at_5_std value: 28.68467500448753 - type: ndcg_at_1 value: 31.008000000000003 - type: ndcg_at_10 value: 27.705000000000002 - type: ndcg_at_100 value: 25.61 - type: ndcg_at_1000 value: 32.81 - type: ndcg_at_20 value: 26.617 - type: ndcg_at_3 value: 29.476000000000003 - type: ndcg_at_5 value: 
27.461999999999996 - type: precision_at_1 value: 46.512 - type: precision_at_10 value: 36.047000000000004 - type: precision_at_100 value: 15.86 - type: precision_at_1000 value: 3.519 - type: precision_at_20 value: 31.163 - type: precision_at_3 value: 43.411 - type: precision_at_5 value: 39.07 - type: recall_at_1 value: 0.777 - type: recall_at_10 value: 5.749 - type: recall_at_100 value: 20.636 - type: recall_at_1000 value: 41.509 - type: recall_at_20 value: 9.689 - type: recall_at_3 value: 2.125 - type: recall_at_5 value: 3.1809999999999996 - task: type: Classification dataset: name: MTEB MTOPDomainClassification (en) type: mteb/mtop_domain config: en split: test revision: d80d48c1eb48d3562165c59d59d0034df9fff0bf metrics: - type: accuracy value: 84.75604195166439 - type: f1 value: 83.95972384901661 - type: f1_weighted value: 84.89916018023138 - type: main_score value: 84.75604195166439 - task: type: Classification dataset: name: MTEB MTOPIntentClassification (en) type: mteb/mtop_intent config: en split: test revision: ae001d0e6b1228650b7bd1c2c65fb50ad11a8aba metrics: - type: accuracy value: 63.25809393524852 - type: f1 value: 45.891660110133806 - type: f1_weighted value: 67.20838453908303 - type: main_score value: 63.25809393524852 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (en) type: mteb/amazon_massive_intent config: en split: test revision: 4672e20407010da34463acc759c162ca9734bca6 metrics: - type: accuracy value: 62.66980497646267 - type: f1 value: 60.96054297925082 - type: f1_weighted value: 62.97616683347667 - type: main_score value: 62.66980497646267 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (en) type: mteb/amazon_massive_scenario config: en split: test revision: fad2c6e8459f9e1c45d9315f4953d921437d70f8 metrics: - type: accuracy value: 66.69804976462676 - type: f1 value: 65.66281437950263 - type: f1_weighted value: 66.80017206918848 - type: main_score value: 66.69804976462676 - task: type: Clustering dataset: name: MTEB MedrxivClusteringP2P (default) type: mteb/medrxiv-clustering-p2p config: default split: test revision: e7a26af6f3ae46b30dde8737f02c07b1505bcc73 metrics: - type: main_score value: 24.995363084202875 - type: v_measure value: 24.995363084202875 - type: v_measure_std value: 1.5274247452970715 - task: type: Clustering dataset: name: MTEB MedrxivClusteringS2S (default) type: mteb/medrxiv-clustering-s2s config: default split: test revision: 35191c8c0dca72d8ff3efcd72aa802307d469663 metrics: - type: main_score value: 20.260962789850833 - type: v_measure value: 20.260962789850833 - type: v_measure_std value: 1.5612389984116821 - task: type: Reranking dataset: name: MTEB MindSmallReranking (default) type: mteb/mind_small config: default split: test revision: 59042f120c80e8afa9cdbb224f67076cec0fc9a7 metrics: - type: main_score value: 26.982693878333546 - type: map value: 26.982693878333546 - type: mrr value: 27.234304648772216 - type: nAUC_map_diff1 value: 15.483599146095642 - type: nAUC_map_max value: -31.954865506309687 - type: nAUC_map_std value: -19.352114548188798 - type: nAUC_mrr_diff1 value: 14.897752061307749 - type: nAUC_mrr_max value: -25.96940014108176 - type: nAUC_mrr_std value: -16.128495128181108 - task: type: Retrieval dataset: name: MTEB NFCorpus (default) type: mteb/nfcorpus config: default split: test revision: ec0fa4fe99da2ff19ca1214b7966684033a58814 metrics: - type: main_score value: 19.475 - type: map_at_1 value: 2.673 - type: map_at_10 value: 5.7860000000000005 - type: map_at_100 value: 
7.434 - type: map_at_1000 value: 8.429 - type: map_at_20 value: 6.394 - type: map_at_3 value: 4.352 - type: map_at_5 value: 5.013999999999999 - type: mrr_at_1 value: 27.24458204334365 - type: mrr_at_10 value: 36.41702294953069 - type: mrr_at_100 value: 37.2489840100607 - type: mrr_at_1000 value: 37.3170804962274 - type: mrr_at_20 value: 36.81253770554204 - type: mrr_at_3 value: 33.797729618163046 - type: mrr_at_5 value: 35.577915376676984 - type: nauc_map_at_1000_diff1 value: 29.712895586376238 - type: nauc_map_at_1000_max value: 26.118684880596003 - type: nauc_map_at_1000_std value: 24.766880316423457 - type: nauc_map_at_100_diff1 value: 31.159834051695544 - type: nauc_map_at_100_max value: 26.800206575448644 - type: nauc_map_at_100_std value: 20.993328557808237 - type: nauc_map_at_10_diff1 value: 34.34909074479394 - type: nauc_map_at_10_max value: 25.23888585073763 - type: nauc_map_at_10_std value: 15.666191671894675 - type: nauc_map_at_1_diff1 value: 54.40851531063473 - type: nauc_map_at_1_max value: 25.79812290419997 - type: nauc_map_at_1_std value: 9.490593216131844 - type: nauc_map_at_20_diff1 value: 32.98428104841538 - type: nauc_map_at_20_max value: 26.274463522342213 - type: nauc_map_at_20_std value: 17.768552660498734 - type: nauc_map_at_3_diff1 value: 40.97296071677192 - type: nauc_map_at_3_max value: 24.256933079739213 - type: nauc_map_at_3_std value: 12.605367264265299 - type: nauc_map_at_5_diff1 value: 39.35136745378991 - type: nauc_map_at_5_max value: 25.24732157901422 - type: nauc_map_at_5_std value: 14.346530622570702 - type: nauc_mrr_at_1000_diff1 value: 25.381479004777763 - type: nauc_mrr_at_1000_max value: 23.575087021020536 - type: nauc_mrr_at_1000_std value: 23.472005406321436 - type: nauc_mrr_at_100_diff1 value: 25.3574395673177 - type: nauc_mrr_at_100_max value: 23.583049296879377 - type: nauc_mrr_at_100_std value: 23.456570812574856 - type: nauc_mrr_at_10_diff1 value: 25.689849758337413 - type: nauc_mrr_at_10_max value: 23.617681843801964 - type: nauc_mrr_at_10_std value: 24.075405363094195 - type: nauc_mrr_at_1_diff1 value: 26.641133846014746 - type: nauc_mrr_at_1_max value: 19.62245594877117 - type: nauc_mrr_at_1_std value: 15.81592525325739 - type: nauc_mrr_at_20_diff1 value: 25.156433096912977 - type: nauc_mrr_at_20_max value: 23.580922123726676 - type: nauc_mrr_at_20_std value: 23.553425708985458 - type: nauc_mrr_at_3_diff1 value: 25.92080426032495 - type: nauc_mrr_at_3_max value: 22.38972437925532 - type: nauc_mrr_at_3_std value: 23.868512198894585 - type: nauc_mrr_at_5_diff1 value: 26.231411975409568 - type: nauc_mrr_at_5_max value: 22.763533805080037 - type: nauc_mrr_at_5_std value: 23.774766628068885 - type: nauc_ndcg_at_1000_diff1 value: 23.768885727339356 - type: nauc_ndcg_at_1000_max value: 29.247599007631937 - type: nauc_ndcg_at_1000_std value: 28.022344377335152 - type: nauc_ndcg_at_100_diff1 value: 23.85335949897677 - type: nauc_ndcg_at_100_max value: 25.697407111528147 - type: nauc_ndcg_at_100_std value: 27.07625187183171 - type: nauc_ndcg_at_10_diff1 value: 20.50707532119363 - type: nauc_ndcg_at_10_max value: 20.857784625493622 - type: nauc_ndcg_at_10_std value: 31.239220591583607 - type: nauc_ndcg_at_1_diff1 value: 26.802222119437737 - type: nauc_ndcg_at_1_max value: 17.38626435465188 - type: nauc_ndcg_at_1_std value: 18.543036819776866 - type: nauc_ndcg_at_20_diff1 value: 22.68036110236631 - type: nauc_ndcg_at_20_max value: 22.127685695906415 - type: nauc_ndcg_at_20_std value: 31.38065283673992 - type: nauc_ndcg_at_3_diff1 value: 
21.126779548662377 - type: nauc_ndcg_at_3_max value: 21.257256258583762 - type: nauc_ndcg_at_3_std value: 30.38520412268269 - type: nauc_ndcg_at_5_diff1 value: 20.728997790365923 - type: nauc_ndcg_at_5_max value: 21.136871113511706 - type: nauc_ndcg_at_5_std value: 30.103036943833878 - type: nauc_precision_at_1000_diff1 value: -0.9684850979991009 - type: nauc_precision_at_1000_max value: -1.910073925377927 - type: nauc_precision_at_1000_std value: 42.445075721709244 - type: nauc_precision_at_100_diff1 value: 2.553047959683974 - type: nauc_precision_at_100_max value: 6.706578335145517 - type: nauc_precision_at_100_std value: 42.677614016114795 - type: nauc_precision_at_10_diff1 value: 6.908721977279816 - type: nauc_precision_at_10_max value: 18.524181494610247 - type: nauc_precision_at_10_std value: 38.513766049365444 - type: nauc_precision_at_1_diff1 value: 26.641133846014746 - type: nauc_precision_at_1_max value: 19.62245594877117 - type: nauc_precision_at_1_std value: 15.81592525325739 - type: nauc_precision_at_20_diff1 value: 6.5698441504079135 - type: nauc_precision_at_20_max value: 16.36401526243144 - type: nauc_precision_at_20_std value: 42.15246597563734 - type: nauc_precision_at_3_diff1 value: 13.746590558925318 - type: nauc_precision_at_3_max value: 24.471712487836307 - type: nauc_precision_at_3_std value: 35.07796641303652 - type: nauc_precision_at_5_diff1 value: 10.024055178218116 - type: nauc_precision_at_5_max value: 21.70563811077537 - type: nauc_precision_at_5_std value: 33.549334119957294 - type: nauc_recall_at_1000_diff1 value: 15.516112454483574 - type: nauc_recall_at_1000_max value: 12.812602971232662 - type: nauc_recall_at_1000_std value: 4.9745377100353645 - type: nauc_recall_at_100_diff1 value: 15.727471787207076 - type: nauc_recall_at_100_max value: 14.07072041204842 - type: nauc_recall_at_100_std value: 5.280256534913133 - type: nauc_recall_at_10_diff1 value: 23.54021143821257 - type: nauc_recall_at_10_max value: 16.21143367909769 - type: nauc_recall_at_10_std value: 10.742397069751759 - type: nauc_recall_at_1_diff1 value: 54.40851531063473 - type: nauc_recall_at_1_max value: 25.79812290419997 - type: nauc_recall_at_1_std value: 9.490593216131844 - type: nauc_recall_at_20_diff1 value: 20.56588979224455 - type: nauc_recall_at_20_max value: 19.004784742942014 - type: nauc_recall_at_20_std value: 9.966568259612574 - type: nauc_recall_at_3_diff1 value: 33.468878145564304 - type: nauc_recall_at_3_max value: 18.73787633759768 - type: nauc_recall_at_3_std value: 12.353055019568094 - type: nauc_recall_at_5_diff1 value: 32.89494204767019 - type: nauc_recall_at_5_max value: 19.01998117178556 - type: nauc_recall_at_5_std value: 13.737801318037624 - type: ndcg_at_1 value: 25.541999999999998 - type: ndcg_at_10 value: 19.475 - type: ndcg_at_100 value: 18.815 - type: ndcg_at_1000 value: 27.71 - type: ndcg_at_20 value: 18.212999999999997 - type: ndcg_at_3 value: 22.651 - type: ndcg_at_5 value: 21.516 - type: precision_at_1 value: 27.245 - type: precision_at_10 value: 14.365 - type: precision_at_100 value: 5.384 - type: precision_at_1000 value: 1.772 - type: precision_at_20 value: 11.006 - type: precision_at_3 value: 21.569 - type: precision_at_5 value: 18.947 - type: recall_at_1 value: 2.673 - type: recall_at_10 value: 9.212 - type: recall_at_100 value: 21.549 - type: recall_at_1000 value: 52.617999999999995 - type: recall_at_20 value: 11.705 - type: recall_at_3 value: 5.313 - type: recall_at_5 value: 6.869 - task: type: Retrieval dataset: name: MTEB NQ (default) type: mteb/nq 
config: default split: test revision: b774495ed302d8c44a3a7ea25c90dbce03968f31 metrics: - type: main_score value: 16.991 - type: map_at_1 value: 7.414 - type: map_at_10 value: 13.291 - type: map_at_100 value: 14.295 - type: map_at_1000 value: 14.389 - type: map_at_20 value: 13.876 - type: map_at_3 value: 11.262 - type: map_at_5 value: 12.339 - type: mrr_at_1 value: 8.516801853997682 - type: mrr_at_10 value: 14.731154242307184 - type: mrr_at_100 value: 15.694198665655856 - type: mrr_at_1000 value: 15.77486181874144 - type: mrr_at_20 value: 15.298086694879798 - type: mrr_at_3 value: 12.659327925840078 - type: mrr_at_5 value: 13.768829663962883 - type: nauc_map_at_1000_diff1 value: 20.28889762069646 - type: nauc_map_at_1000_max value: 11.368502727824952 - type: nauc_map_at_1000_std value: 10.077176659068975 - type: nauc_map_at_100_diff1 value: 20.285666016924328 - type: nauc_map_at_100_max value: 11.352497499093694 - type: nauc_map_at_100_std value: 9.98136423017311 - type: nauc_map_at_10_diff1 value: 20.335416558539237 - type: nauc_map_at_10_max value: 11.091563979136637 - type: nauc_map_at_10_std value: 8.745901277549152 - type: nauc_map_at_1_diff1 value: 24.979719230754476 - type: nauc_map_at_1_max value: 10.972032990843237 - type: nauc_map_at_1_std value: 4.7964267266650955 - type: nauc_map_at_20_diff1 value: 20.302803697684848 - type: nauc_map_at_20_max value: 11.159589961608782 - type: nauc_map_at_20_std value: 9.360825884036176 - type: nauc_map_at_3_diff1 value: 19.863972188782967 - type: nauc_map_at_3_max value: 10.898818486894147 - type: nauc_map_at_3_std value: 6.97496787073755 - type: nauc_map_at_5_diff1 value: 20.44569321324553 - type: nauc_map_at_5_max value: 10.722482919334105 - type: nauc_map_at_5_std value: 7.787226185137379 - type: nauc_mrr_at_1000_diff1 value: 19.746039395864496 - type: nauc_mrr_at_1000_max value: 10.495187770800463 - type: nauc_mrr_at_1000_std value: 10.284862758352 - type: nauc_mrr_at_100_diff1 value: 19.743060052871396 - type: nauc_mrr_at_100_max value: 10.484702853211761 - type: nauc_mrr_at_100_std value: 10.220220019367744 - type: nauc_mrr_at_10_diff1 value: 19.747518214214974 - type: nauc_mrr_at_10_max value: 10.1823356525796 - type: nauc_mrr_at_10_std value: 9.25568601945109 - type: nauc_mrr_at_1_diff1 value: 24.040270890346534 - type: nauc_mrr_at_1_max value: 9.900172534036168 - type: nauc_mrr_at_1_std value: 5.7354869310700245 - type: nauc_mrr_at_20_diff1 value: 19.75060956163397 - type: nauc_mrr_at_20_max value: 10.31776046090269 - type: nauc_mrr_at_20_std value: 9.770741755791374 - type: nauc_mrr_at_3_diff1 value: 19.4775451565507 - type: nauc_mrr_at_3_max value: 9.804429146930495 - type: nauc_mrr_at_3_std value: 7.931570036855481 - type: nauc_mrr_at_5_diff1 value: 19.806308832458882 - type: nauc_mrr_at_5_max value: 9.77292617618666 - type: nauc_mrr_at_5_std value: 8.55195259630072 - type: nauc_ndcg_at_1000_diff1 value: 19.375648509077983 - type: nauc_ndcg_at_1000_max value: 12.688796294165622 - type: nauc_ndcg_at_1000_std value: 17.80793230435146 - type: nauc_ndcg_at_100_diff1 value: 19.343394443678996 - type: nauc_ndcg_at_100_max value: 12.520511876585841 - type: nauc_ndcg_at_100_std value: 15.978861606925918 - type: nauc_ndcg_at_10_diff1 value: 19.42682468753324 - type: nauc_ndcg_at_10_max value: 11.10087572901484 - type: nauc_ndcg_at_10_std value: 10.54992883803028 - type: nauc_ndcg_at_1_diff1 value: 24.318414546738026 - type: nauc_ndcg_at_1_max value: 9.82349827107002 - type: nauc_ndcg_at_1_std value: 5.951156922071484 - type: 
nauc_ndcg_at_20_diff1 value: 19.41464830610135 - type: nauc_ndcg_at_20_max value: 11.344469897954262 - type: nauc_ndcg_at_20_std value: 12.221787446241533 - type: nauc_ndcg_at_3_diff1 value: 18.641316759283264 - type: nauc_ndcg_at_3_max value: 10.543844267142214 - type: nauc_ndcg_at_3_std value: 7.687890803254003 - type: nauc_ndcg_at_5_diff1 value: 19.45986949428097 - type: nauc_ndcg_at_5_max value: 10.375727437812799 - type: nauc_ndcg_at_5_std value: 8.85624541644588 - type: nauc_precision_at_1000_diff1 value: 11.066860853955465 - type: nauc_precision_at_1000_max value: 12.190880720909412 - type: nauc_precision_at_1000_std value: 35.834721766648705 - type: nauc_precision_at_100_diff1 value: 15.633579933121927 - type: nauc_precision_at_100_max value: 13.900393333698496 - type: nauc_precision_at_100_std value: 30.435998605665272 - type: nauc_precision_at_10_diff1 value: 18.321561255328813 - type: nauc_precision_at_10_max value: 10.71704151142003 - type: nauc_precision_at_10_std value: 14.681070391575767 - type: nauc_precision_at_1_diff1 value: 24.318414546738026 - type: nauc_precision_at_1_max value: 9.82349827107002 - type: nauc_precision_at_1_std value: 5.951156922071484 - type: nauc_precision_at_20_diff1 value: 17.897250659867172 - type: nauc_precision_at_20_max value: 11.178073596260878 - type: nauc_precision_at_20_std value: 18.922339798822485 - type: nauc_precision_at_3_diff1 value: 16.247029796437438 - type: nauc_precision_at_3_max value: 9.403033789602311 - type: nauc_precision_at_3_std value: 9.396827994803164 - type: nauc_precision_at_5_diff1 value: 18.40723036139704 - type: nauc_precision_at_5_max value: 8.984724544333158 - type: nauc_precision_at_5_std value: 11.190725807701849 - type: nauc_recall_at_1000_diff1 value: 17.125181724831485 - type: nauc_recall_at_1000_max value: 17.738235803420288 - type: nauc_recall_at_1000_std value: 47.4670421060216 - type: nauc_recall_at_100_diff1 value: 17.27215401019124 - type: nauc_recall_at_100_max value: 16.00490577182562 - type: nauc_recall_at_100_std value: 30.65356324274426 - type: nauc_recall_at_10_diff1 value: 17.554785599875217 - type: nauc_recall_at_10_max value: 11.381345798386317 - type: nauc_recall_at_10_std value: 13.34173170828859 - type: nauc_recall_at_1_diff1 value: 24.979719230754476 - type: nauc_recall_at_1_max value: 10.972032990843237 - type: nauc_recall_at_1_std value: 4.7964267266650955 - type: nauc_recall_at_20_diff1 value: 17.507273879317893 - type: nauc_recall_at_20_max value: 11.772238504003177 - type: nauc_recall_at_20_std value: 17.00496015114505 - type: nauc_recall_at_3_diff1 value: 15.718069166841971 - type: nauc_recall_at_3_max value: 10.507841411541175 - type: nauc_recall_at_3_std value: 8.362642856838368 - type: nauc_recall_at_5_diff1 value: 17.39920934041924 - type: nauc_recall_at_5_max value: 10.10162321958792 - type: nauc_recall_at_5_std value: 10.260318695226664 - type: ndcg_at_1 value: 8.488 - type: ndcg_at_10 value: 16.991 - type: ndcg_at_100 value: 22.103 - type: ndcg_at_1000 value: 24.708 - type: ndcg_at_20 value: 19.086 - type: ndcg_at_3 value: 12.803999999999998 - type: ndcg_at_5 value: 14.727 - type: precision_at_1 value: 8.488 - type: precision_at_10 value: 3.1780000000000004 - type: precision_at_100 value: 0.607 - type: precision_at_1000 value: 0.086 - type: precision_at_20 value: 2.0650000000000004 - type: precision_at_3 value: 6.151 - type: precision_at_5 value: 4.7620000000000005 - type: recall_at_1 value: 7.414 - type: recall_at_10 value: 27.105 - type: recall_at_100 value: 50.782000000000004 
- type: recall_at_1000 value: 70.77799999999999 - type: recall_at_20 value: 35.105 - type: recall_at_3 value: 15.901000000000002 - type: recall_at_5 value: 20.399 - task: type: Retrieval dataset: name: MTEB QuoraRetrieval (default) type: mteb/quora config: default split: test revision: e4e08e0b7dbe3c8700f0daef558ff32256715259 metrics: - type: main_score value: 74.388 - type: map_at_1 value: 57.594 - type: map_at_10 value: 69.411 - type: map_at_100 value: 70.197 - type: map_at_1000 value: 70.23899999999999 - type: map_at_20 value: 69.896 - type: map_at_3 value: 66.50500000000001 - type: map_at_5 value: 68.199 - type: mrr_at_1 value: 66.34 - type: mrr_at_10 value: 74.12798015872983 - type: mrr_at_100 value: 74.45813156051709 - type: mrr_at_1000 value: 74.47054611594581 - type: mrr_at_20 value: 74.34983075339647 - type: mrr_at_3 value: 72.47666666666632 - type: mrr_at_5 value: 73.4861666666661 - type: nauc_map_at_1000_diff1 value: 69.23574495855162 - type: nauc_map_at_1000_max value: 38.326344115314825 - type: nauc_map_at_1000_std value: -9.69190621889919 - type: nauc_map_at_100_diff1 value: 69.23018899929654 - type: nauc_map_at_100_max value: 38.32200052980655 - type: nauc_map_at_100_std value: -9.709873607585722 - type: nauc_map_at_10_diff1 value: 69.11881416442584 - type: nauc_map_at_10_max value: 37.80595474994142 - type: nauc_map_at_10_std value: -10.460350770888079 - type: nauc_map_at_1_diff1 value: 71.29617122119095 - type: nauc_map_at_1_max value: 32.80205937689043 - type: nauc_map_at_1_std value: -13.444125573046852 - type: nauc_map_at_20_diff1 value: 69.19096974069583 - type: nauc_map_at_20_max value: 38.15987972416603 - type: nauc_map_at_20_std value: -10.020269369800706 - type: nauc_map_at_3_diff1 value: 69.12951153560108 - type: nauc_map_at_3_max value: 36.52459750894883 - type: nauc_map_at_3_std value: -12.174854661737818 - type: nauc_map_at_5_diff1 value: 69.0264228661453 - type: nauc_map_at_5_max value: 37.166727350784164 - type: nauc_map_at_5_std value: -11.493776844406158 - type: nauc_mrr_at_1000_diff1 value: 70.68150057700754 - type: nauc_mrr_at_1000_max value: 41.0178466695076 - type: nauc_mrr_at_1000_std value: -8.021358816489824 - type: nauc_mrr_at_100_diff1 value: 70.67856380420632 - type: nauc_mrr_at_100_max value: 41.02236359207632 - type: nauc_mrr_at_100_std value: -8.004727052332067 - type: nauc_mrr_at_10_diff1 value: 70.57476646749362 - type: nauc_mrr_at_10_max value: 40.98353008138954 - type: nauc_mrr_at_10_std value: -8.035083785813892 - type: nauc_mrr_at_1_diff1 value: 72.83106243448691 - type: nauc_mrr_at_1_max value: 40.497226437078496 - type: nauc_mrr_at_1_std value: -10.545921253601675 - type: nauc_mrr_at_20_diff1 value: 70.64698930715971 - type: nauc_mrr_at_20_max value: 41.01991026936206 - type: nauc_mrr_at_20_std value: -8.019248560369828 - type: nauc_mrr_at_3_diff1 value: 70.48136695574067 - type: nauc_mrr_at_3_max value: 40.83575836332353 - type: nauc_mrr_at_3_std value: -8.80652589242081 - type: nauc_mrr_at_5_diff1 value: 70.52447208499292 - type: nauc_mrr_at_5_max value: 40.95085309489185 - type: nauc_mrr_at_5_std value: -8.35502569521486 - type: nauc_ndcg_at_1000_diff1 value: 69.2418574551877 - type: nauc_ndcg_at_1000_max value: 39.85962706323504 - type: nauc_ndcg_at_1000_std value: -6.479667269089863 - type: nauc_ndcg_at_100_diff1 value: 69.13381091149564 - type: nauc_ndcg_at_100_max value: 39.902530291451974 - type: nauc_ndcg_at_100_std value: -6.19261331168395 - type: nauc_ndcg_at_10_diff1 value: 68.49804618931282 - type: nauc_ndcg_at_10_max 
value: 38.95870794043419 - type: nauc_ndcg_at_10_std value: -7.9554943741526465 - type: nauc_ndcg_at_1_diff1 value: 72.74562116035368 - type: nauc_ndcg_at_1_max value: 40.59003854736593 - type: nauc_ndcg_at_1_std value: -10.371154250660494 - type: nauc_ndcg_at_20_diff1 value: 68.81744480185341 - type: nauc_ndcg_at_20_max value: 39.48036257511071 - type: nauc_ndcg_at_20_std value: -7.288863470178731 - type: nauc_ndcg_at_3_diff1 value: 68.31977162714793 - type: nauc_ndcg_at_3_max value: 38.31785051573491 - type: nauc_ndcg_at_3_std value: -10.002238766651905 - type: nauc_ndcg_at_5_diff1 value: 68.34693163150705 - type: nauc_ndcg_at_5_max value: 38.384529237292085 - type: nauc_ndcg_at_5_std value: -9.504613414918412 - type: nauc_precision_at_1000_diff1 value: -27.886662167224248 - type: nauc_precision_at_1000_max value: -1.2099912726932696 - type: nauc_precision_at_1000_std value: 22.918146835627798 - type: nauc_precision_at_100_diff1 value: -22.32582293591269 - type: nauc_precision_at_100_max value: 4.238909760244244 - type: nauc_precision_at_100_std value: 23.62131900536325 - type: nauc_precision_at_10_diff1 value: -4.400459668224666 - type: nauc_precision_at_10_max value: 14.825184001294167 - type: nauc_precision_at_10_std value: 15.417646122517157 - type: nauc_precision_at_1_diff1 value: 72.74562116035368 - type: nauc_precision_at_1_max value: 40.59003854736593 - type: nauc_precision_at_1_std value: -10.371154250660494 - type: nauc_precision_at_20_diff1 value: -12.423098453024796 - type: nauc_precision_at_20_max value: 11.415547902904635 - type: nauc_precision_at_20_std value: 19.489921263698616 - type: nauc_precision_at_3_diff1 value: 22.682624176435127 - type: nauc_precision_at_3_max value: 25.682155720802452 - type: nauc_precision_at_3_std value: 2.6084400354215935 - type: nauc_precision_at_5_diff1 value: 9.272509130152006 - type: nauc_precision_at_5_max value: 20.36818990716189 - type: nauc_precision_at_5_std value: 8.054265889323238 - type: nauc_recall_at_1000_diff1 value: 60.88815464763635 - type: nauc_recall_at_1000_max value: 43.112146232617725 - type: nauc_recall_at_1000_std value: 50.36464338810094 - type: nauc_recall_at_100_diff1 value: 59.928500788144376 - type: nauc_recall_at_100_max value: 41.21981278373438 - type: nauc_recall_at_100_std value: 24.89653567034821 - type: nauc_recall_at_10_diff1 value: 60.89345811958783 - type: nauc_recall_at_10_max value: 36.2662873716048 - type: nauc_recall_at_10_std value: -1.7478273979841499 - type: nauc_recall_at_1_diff1 value: 71.29617122119095 - type: nauc_recall_at_1_max value: 32.80205937689043 - type: nauc_recall_at_1_std value: -13.444125573046852 - type: nauc_recall_at_20_diff1 value: 60.72735270299192 - type: nauc_recall_at_20_max value: 38.02822016647552 - type: nauc_recall_at_20_std value: 3.7019564772205054 - type: nauc_recall_at_3_diff1 value: 64.16899635037826 - type: nauc_recall_at_3_max value: 34.697022598257874 - type: nauc_recall_at_3_std value: -10.894218643842715 - type: nauc_recall_at_5_diff1 value: 62.56790753908123 - type: nauc_recall_at_5_max value: 35.18512660768109 - type: nauc_recall_at_5_std value: -8.518825484008714 - type: ndcg_at_1 value: 66.38 - type: ndcg_at_10 value: 74.388 - type: ndcg_at_100 value: 76.889 - type: ndcg_at_1000 value: 77.518 - type: ndcg_at_20 value: 75.548 - type: ndcg_at_3 value: 70.513 - type: ndcg_at_5 value: 72.406 - type: precision_at_1 value: 66.38 - type: precision_at_10 value: 11.274000000000001 - type: precision_at_100 value: 1.373 - type: precision_at_1000 value: 0.149 - type: 
precision_at_20 value: 6.095 - type: precision_at_3 value: 30.42 - type: precision_at_5 value: 20.174 - type: recall_at_1 value: 57.594 - type: recall_at_10 value: 84.09 - type: recall_at_100 value: 94.035 - type: recall_at_1000 value: 97.914 - type: recall_at_20 value: 88.13600000000001 - type: recall_at_3 value: 73.074 - type: recall_at_5 value: 78.29599999999999 - task: type: Clustering dataset: name: MTEB RedditClustering (default) type: mteb/reddit-clustering config: default split: test revision: 24640382cdbf8abc73003fb0fa6d111a705499eb metrics: - type: main_score value: 23.878842199856606 - type: v_measure value: 23.878842199856606 - type: v_measure_std value: 4.578743173985467 - task: type: Clustering dataset: name: MTEB RedditClusteringP2P (default) type: mteb/reddit-clustering-p2p config: default split: test revision: 385e3cb46b4cfa89021f56c4380204149d0efe33 metrics: - type: main_score value: 37.76655625558288 - type: v_measure value: 37.76655625558288 - type: v_measure_std value: 9.302167236222553 - task: type: Retrieval dataset: name: MTEB SCIDOCS (default) type: mteb/scidocs config: default split: test revision: f8c2fcf00f625baaa80f62ec5bd9e1fff3b8ae88 metrics: - type: main_score value: 9.668000000000001 - type: map_at_1 value: 2.395 - type: map_at_10 value: 5.237 - type: map_at_100 value: 6.311999999999999 - type: map_at_1000 value: 6.529 - type: map_at_20 value: 5.742 - type: map_at_3 value: 3.827 - type: map_at_5 value: 4.54 - type: mrr_at_1 value: 11.799999999999999 - type: mrr_at_10 value: 18.01527777777777 - type: mrr_at_100 value: 19.170155944203785 - type: mrr_at_1000 value: 19.281296973485173 - type: mrr_at_20 value: 18.67572073480355 - type: mrr_at_3 value: 15.549999999999988 - type: mrr_at_5 value: 16.92999999999999 - type: nauc_map_at_1000_diff1 value: 15.362749019317306 - type: nauc_map_at_1000_max value: 13.84696529256478 - type: nauc_map_at_1000_std value: 11.013607523301609 - type: nauc_map_at_100_diff1 value: 15.41591399608084 - type: nauc_map_at_100_max value: 13.730140090589293 - type: nauc_map_at_100_std value: 10.455348719140309 - type: nauc_map_at_10_diff1 value: 15.834686627354852 - type: nauc_map_at_10_max value: 13.28911184808523 - type: nauc_map_at_10_std value: 7.254487702527721 - type: nauc_map_at_1_diff1 value: 20.822383776341656 - type: nauc_map_at_1_max value: 9.583343414892674 - type: nauc_map_at_1_std value: 2.8889126256334383 - type: nauc_map_at_20_diff1 value: 15.522358238447422 - type: nauc_map_at_20_max value: 13.479963494201828 - type: nauc_map_at_20_std value: 8.76740668066124 - type: nauc_map_at_3_diff1 value: 18.748084536735927 - type: nauc_map_at_3_max value: 10.620059279509105 - type: nauc_map_at_3_std value: 4.337679139867589 - type: nauc_map_at_5_diff1 value: 17.345202973256 - type: nauc_map_at_5_max value: 12.452658321525504 - type: nauc_map_at_5_std value: 5.549910657395744 - type: nauc_mrr_at_1000_diff1 value: 15.377808587249769 - type: nauc_mrr_at_1000_max value: 10.04139543851182 - type: nauc_mrr_at_1000_std value: 5.4677890792436274 - type: nauc_mrr_at_100_diff1 value: 15.362987006646186 - type: nauc_mrr_at_100_max value: 10.041646833263774 - type: nauc_mrr_at_100_std value: 5.45421536846783 - type: nauc_mrr_at_10_diff1 value: 15.195360862950183 - type: nauc_mrr_at_10_max value: 9.93445070582588 - type: nauc_mrr_at_10_std value: 5.052925884003134 - type: nauc_mrr_at_1_diff1 value: 20.78440492344873 - type: nauc_mrr_at_1_max value: 9.65366117965217 - type: nauc_mrr_at_1_std value: 3.4370160103187177 - type: nauc_mrr_at_20_diff1 
value: 15.367072076987753 - type: nauc_mrr_at_20_max value: 9.944084606452824 - type: nauc_mrr_at_20_std value: 5.1697642130127885 - type: nauc_mrr_at_3_diff1 value: 17.1065083677322 - type: nauc_mrr_at_3_max value: 9.730529319874428 - type: nauc_mrr_at_3_std value: 4.274768582707443 - type: nauc_mrr_at_5_diff1 value: 15.781360738081599 - type: nauc_mrr_at_5_max value: 10.189809550324469 - type: nauc_mrr_at_5_std value: 4.45427477219345 - type: nauc_ndcg_at_1000_diff1 value: 12.133137994513579 - type: nauc_ndcg_at_1000_max value: 14.593507049508561 - type: nauc_ndcg_at_1000_std value: 17.11300477285902 - type: nauc_ndcg_at_100_diff1 value: 12.768847933024317 - type: nauc_ndcg_at_100_max value: 13.62157103798925 - type: nauc_ndcg_at_100_std value: 13.97874886533375 - type: nauc_ndcg_at_10_diff1 value: 13.192522371369787 - type: nauc_ndcg_at_10_max value: 12.795709547611608 - type: nauc_ndcg_at_10_std value: 8.102799683454048 - type: nauc_ndcg_at_1_diff1 value: 20.78440492344873 - type: nauc_ndcg_at_1_max value: 9.65366117965217 - type: nauc_ndcg_at_1_std value: 3.4370160103187177 - type: nauc_ndcg_at_20_diff1 value: 13.10893336294196 - type: nauc_ndcg_at_20_max value: 12.87552853654183 - type: nauc_ndcg_at_20_std value: 10.673587471258529 - type: nauc_ndcg_at_3_diff1 value: 17.44757983297746 - type: nauc_ndcg_at_3_max value: 10.4479529428812 - type: nauc_ndcg_at_3_std value: 4.926065165471736 - type: nauc_ndcg_at_5_diff1 value: 15.131431597511005 - type: nauc_ndcg_at_5_max value: 12.138370476656045 - type: nauc_ndcg_at_5_std value: 5.747804810875746 - type: nauc_precision_at_1000_diff1 value: 4.651545309113199 - type: nauc_precision_at_1000_max value: 14.534556833197726 - type: nauc_precision_at_1000_std value: 25.883957300866957 - type: nauc_precision_at_100_diff1 value: 8.103597756413784 - type: nauc_precision_at_100_max value: 13.914816649477062 - type: nauc_precision_at_100_std value: 20.148598895345536 - type: nauc_precision_at_10_diff1 value: 8.606065646275212 - type: nauc_precision_at_10_max value: 14.068776248492663 - type: nauc_precision_at_10_std value: 11.140890379112346 - type: nauc_precision_at_1_diff1 value: 20.78440492344873 - type: nauc_precision_at_1_max value: 9.65366117965217 - type: nauc_precision_at_1_std value: 3.4370160103187177 - type: nauc_precision_at_20_diff1 value: 8.704973032555928 - type: nauc_precision_at_20_max value: 13.437392449115665 - type: nauc_precision_at_20_std value: 15.65525714739556 - type: nauc_precision_at_3_diff1 value: 15.796711189581933 - type: nauc_precision_at_3_max value: 10.514163928603118 - type: nauc_precision_at_3_std value: 5.788980186693269 - type: nauc_precision_at_5_diff1 value: 11.878373012657411 - type: nauc_precision_at_5_max value: 13.465410920052506 - type: nauc_precision_at_5_std value: 7.369374260570812 - type: nauc_recall_at_1000_diff1 value: 4.54914455375335 - type: nauc_recall_at_1000_max value: 15.398087677716521 - type: nauc_recall_at_1000_std value: 25.99787873557512 - type: nauc_recall_at_100_diff1 value: 7.937303192890431 - type: nauc_recall_at_100_max value: 14.280466786048457 - type: nauc_recall_at_100_std value: 19.989053944649168 - type: nauc_recall_at_10_diff1 value: 8.569047949172177 - type: nauc_recall_at_10_max value: 13.885951056418197 - type: nauc_recall_at_10_std value: 10.963367786952073 - type: nauc_recall_at_1_diff1 value: 20.822383776341656 - type: nauc_recall_at_1_max value: 9.583343414892674 - type: nauc_recall_at_1_std value: 2.8889126256334383 - type: nauc_recall_at_20_diff1 value: 8.683232232799698 
- type: nauc_recall_at_20_max value: 13.336768111236735 - type: nauc_recall_at_20_std value: 15.457170894067298 - type: nauc_recall_at_3_diff1 value: 15.745448840185977 - type: nauc_recall_at_3_max value: 10.317079087586992 - type: nauc_recall_at_3_std value: 5.450728079255462 - type: nauc_recall_at_5_diff1 value: 11.800239024102154 - type: nauc_recall_at_5_max value: 13.175274608964674 - type: nauc_recall_at_5_std value: 7.016480519402965 - type: ndcg_at_1 value: 11.799999999999999 - type: ndcg_at_10 value: 9.668000000000001 - type: ndcg_at_100 value: 15.015999999999998 - type: ndcg_at_1000 value: 20.015 - type: ndcg_at_20 value: 11.436 - type: ndcg_at_3 value: 8.924 - type: ndcg_at_5 value: 7.911 - type: precision_at_1 value: 11.799999999999999 - type: precision_at_10 value: 5.050000000000001 - type: precision_at_100 value: 1.291 - type: precision_at_1000 value: 0.251 - type: precision_at_20 value: 3.56 - type: precision_at_3 value: 8.133 - type: precision_at_5 value: 6.88 - type: recall_at_1 value: 2.395 - type: recall_at_10 value: 10.232 - type: recall_at_100 value: 26.172 - type: recall_at_1000 value: 50.917 - type: recall_at_20 value: 14.421999999999999 - type: recall_at_3 value: 4.935 - type: recall_at_5 value: 6.973 - task: type: STS dataset: name: MTEB SICK-R (default) type: mteb/sickr-sts config: default split: test revision: 20a6d6f312dd54037fe07a32d58e5e168867909d metrics: - type: cosine_pearson value: 73.8523071648734 - type: cosine_spearman value: 65.43442849067297 - type: euclidean_pearson value: 66.70464173822097 - type: euclidean_spearman value: 60.82604439637834 - type: main_score value: 65.43442849067297 - type: manhattan_pearson value: 66.58172841322595 - type: manhattan_spearman value: 61.202424661616796 - type: pearson value: 73.8523071648734 - type: spearman value: 65.43442849067297 - task: type: STS dataset: name: MTEB STS12 (default) type: mteb/sts12-sts config: default split: test revision: a0d554a64d88156834ff5ae9920b964011b16384 metrics: - type: cosine_pearson value: 66.23949905692108 - type: cosine_spearman value: 59.97334423570035 - type: euclidean_pearson value: 53.93367474754671 - type: euclidean_spearman value: 49.65643891073131 - type: main_score value: 59.97334423570035 - type: manhattan_pearson value: 52.50090747870868 - type: manhattan_spearman value: 48.726772969833064 - type: pearson value: 66.23949905692108 - type: spearman value: 59.97334423570035 - task: type: STS dataset: name: MTEB STS13 (default) type: mteb/sts13-sts config: default split: test revision: 7e90230a92c190f1bf69ae9002b8cea547a64cca metrics: - type: cosine_pearson value: 70.87351220452432 - type: cosine_spearman value: 71.81863685179427 - type: euclidean_pearson value: 59.249945757203946 - type: euclidean_spearman value: 60.053057494316796 - type: main_score value: 71.81863685179427 - type: manhattan_pearson value: 59.798731614026714 - type: manhattan_spearman value: 60.31075071097369 - type: pearson value: 70.87351220452432 - type: spearman value: 71.81863685179427 - task: type: STS dataset: name: MTEB STS14 (default) type: mteb/sts14-sts config: default split: test revision: 6031580fec1f6af667f0bd2da0a551cf4f0b2375 metrics: - type: cosine_pearson value: 69.03600787240593 - type: cosine_spearman value: 66.99860396187162 - type: euclidean_pearson value: 58.61094669791067 - type: euclidean_spearman value: 58.286341788544995 - type: main_score value: 66.99860396187162 - type: manhattan_pearson value: 58.665872206618964 - type: manhattan_spearman value: 58.30408154246083 - type: pearson 
value: 69.03600787240593 - type: spearman value: 66.99860396187162 - task: type: STS dataset: name: MTEB STS15 (default) type: mteb/sts15-sts config: default split: test revision: ae752c7c21bf194d8b67fd573edf7ae58183cbe3 metrics: - type: cosine_pearson value: 74.45269985909863 - type: cosine_spearman value: 75.4907813361932 - type: euclidean_pearson value: 58.68237542933832 - type: euclidean_spearman value: 61.08891047408572 - type: main_score value: 75.4907813361932 - type: manhattan_pearson value: 59.32028954908928 - type: manhattan_spearman value: 61.38980243849822 - type: pearson value: 74.45269985909863 - type: spearman value: 75.4907813361932 - task: type: STS dataset: name: MTEB STS16 (default) type: mteb/sts16-sts config: default split: test revision: 4d8694f8f0e0100860b497b999b3dbed754a0513 metrics: - type: cosine_pearson value: 64.2309456558779 - type: cosine_spearman value: 66.97205823920407 - type: euclidean_pearson value: 52.471209393825134 - type: euclidean_spearman value: 55.05667213079255 - type: main_score value: 66.97205823920407 - type: manhattan_pearson value: 52.4566691722933 - type: manhattan_spearman value: 54.98149865449457 - type: pearson value: 64.2309456558779 - type: spearman value: 66.97205823920407 - task: type: STS dataset: name: MTEB STS17 (en-de) type: mteb/sts17-crosslingual-sts config: en-de split: test revision: faeb762787bd10488a50c8b5be4a3b82e411949c metrics: - type: cosine_pearson value: 21.06202710190164 - type: cosine_spearman value: 18.26963771909619 - type: euclidean_pearson value: -10.937704538162821 - type: euclidean_spearman value: -13.838045200730331 - type: main_score value: 18.26963771909619 - type: manhattan_pearson value: -9.194548970239005 - type: manhattan_spearman value: -12.642533487235347 - type: pearson value: 21.06202710190164 - type: spearman value: 18.26963771909619 - task: type: STS dataset: name: MTEB STS17 (es-en) type: mteb/sts17-crosslingual-sts config: es-en split: test revision: faeb762787bd10488a50c8b5be4a3b82e411949c metrics: - type: cosine_pearson value: 9.974655940103192 - type: cosine_spearman value: 6.625332823012507 - type: euclidean_pearson value: -6.193994464373409 - type: euclidean_spearman value: -13.09777719442545 - type: main_score value: 6.625332823012507 - type: manhattan_pearson value: -7.596649200902214 - type: manhattan_spearman value: -14.341067466786914 - type: pearson value: 9.974655940103192 - type: spearman value: 6.625332823012507 - task: type: STS dataset: name: MTEB STS17 (en-ar) type: mteb/sts17-crosslingual-sts config: en-ar split: test revision: faeb762787bd10488a50c8b5be4a3b82e411949c metrics: - type: cosine_pearson value: 3.939829923076509 - type: cosine_spearman value: 1.5988688581594497 - type: euclidean_pearson value: -10.456279294578557 - type: euclidean_spearman value: -9.811244215059508 - type: main_score value: 1.5988688581594497 - type: manhattan_pearson value: -10.913654400994407 - type: manhattan_spearman value: -8.604616012491228 - type: pearson value: 3.939829923076509 - type: spearman value: 1.5988688581594497 - task: type: STS dataset: name: MTEB STS17 (it-en) type: mteb/sts17-crosslingual-sts config: it-en split: test revision: faeb762787bd10488a50c8b5be4a3b82e411949c metrics: - type: cosine_pearson value: 17.28499679216241 - type: cosine_spearman value: 14.621483811474079 - type: euclidean_pearson value: -16.874097134885233 - type: euclidean_spearman value: -16.68311783384881 - type: main_score value: 14.621483811474079 - type: manhattan_pearson value: -17.639738926102574 - 
type: manhattan_spearman value: -16.66416708388087 - type: pearson value: 17.28499679216241 - type: spearman value: 14.621483811474079 - task: type: STS dataset: name: MTEB STS17 (en-en) type: mteb/sts17-crosslingual-sts config: en-en split: test revision: faeb762787bd10488a50c8b5be4a3b82e411949c metrics: - type: cosine_pearson value: 78.99251283215277 - type: cosine_spearman value: 80.61049377743727 - type: euclidean_pearson value: 66.17827666954877 - type: euclidean_spearman value: 67.45271515314245 - type: main_score value: 80.61049377743727 - type: manhattan_pearson value: 66.23284409257823 - type: manhattan_spearman value: 67.666247437264 - type: pearson value: 78.99251283215277 - type: spearman value: 80.61049377743727 - task: type: STS dataset: name: MTEB STS17 (en-tr) type: mteb/sts17-crosslingual-sts config: en-tr split: test revision: faeb762787bd10488a50c8b5be4a3b82e411949c metrics: - type: cosine_pearson value: -1.931391285281735 - type: cosine_spearman value: -3.321078837897458 - type: euclidean_pearson value: -21.683857378409378 - type: euclidean_spearman value: -24.244038106560804 - type: main_score value: -3.321078837897458 - type: manhattan_pearson value: -22.19415161015049 - type: manhattan_spearman value: -22.71872700697092 - type: pearson value: -1.931391285281735 - type: spearman value: -3.321078837897458 - task: type: STS dataset: name: MTEB STS17 (nl-en) type: mteb/sts17-crosslingual-sts config: nl-en split: test revision: faeb762787bd10488a50c8b5be4a3b82e411949c metrics: - type: cosine_pearson value: 21.215714201927316 - type: cosine_spearman value: 16.647983989080657 - type: euclidean_pearson value: -17.529579365480654 - type: euclidean_spearman value: -17.98599150405874 - type: main_score value: 16.647983989080657 - type: manhattan_pearson value: -17.041217222851987 - type: manhattan_spearman value: -17.099688376247617 - type: pearson value: 21.215714201927316 - type: spearman value: 16.647983989080657 - task: type: STS dataset: name: MTEB STS17 (fr-en) type: mteb/sts17-crosslingual-sts config: fr-en split: test revision: faeb762787bd10488a50c8b5be4a3b82e411949c metrics: - type: cosine_pearson value: 25.55717236376004 - type: cosine_spearman value: 21.120437860825668 - type: euclidean_pearson value: -13.532867255677811 - type: euclidean_spearman value: -14.067414622756136 - type: main_score value: 21.120437860825668 - type: manhattan_pearson value: -14.812251264524642 - type: manhattan_spearman value: -14.777202854314126 - type: pearson value: 25.55717236376004 - type: spearman value: 21.120437860825668 - task: type: STS dataset: name: MTEB STS22 (en) type: mteb/sts22-crosslingual-sts config: en split: test revision: de9d86b3b84231dc21f76c7b7af1f28e2f57f6e3 metrics: - type: cosine_pearson value: 45.445485581559176 - type: cosine_spearman value: 57.81995941896327 - type: euclidean_pearson value: 46.45758835829159 - type: euclidean_spearman value: 57.15291591278634 - type: main_score value: 57.81995941896327 - type: manhattan_pearson value: 45.38976415067536 - type: manhattan_spearman value: 56.412461810883244 - type: pearson value: 45.445485581559176 - type: spearman value: 57.81995941896327 - task: type: STS dataset: name: MTEB STS22 (es-en) type: mteb/sts22-crosslingual-sts config: es-en split: test revision: de9d86b3b84231dc21f76c7b7af1f28e2f57f6e3 metrics: - type: cosine_pearson value: 9.618696238808342 - type: cosine_spearman value: 11.05047267189447 - type: euclidean_pearson value: 10.475166065910297 - type: euclidean_spearman value: 11.515497306325212 - 
type: main_score value: 11.05047267189447 - type: manhattan_pearson value: 11.677707905016238 - type: manhattan_spearman value: 13.47068609853333 - type: pearson value: 9.618696238808342 - type: spearman value: 11.05047267189447 - task: type: STS dataset: name: MTEB STS22 (pl-en) type: mteb/sts22-crosslingual-sts config: pl-en split: test revision: de9d86b3b84231dc21f76c7b7af1f28e2f57f6e3 metrics: - type: cosine_pearson value: 9.219640350559175 - type: cosine_spearman value: 15.424812621979203 - type: euclidean_pearson value: 27.079648075136692 - type: euclidean_spearman value: 15.127881072012025 - type: main_score value: 15.424812621979203 - type: manhattan_pearson value: 29.948405026370768 - type: manhattan_spearman value: 11.450097312769431 - type: pearson value: 9.219640350559175 - type: spearman value: 15.424812621979203 - task: type: STS dataset: name: MTEB STS22 (zh-en) type: mteb/sts22-crosslingual-sts config: zh-en split: test revision: de9d86b3b84231dc21f76c7b7af1f28e2f57f6e3 metrics: - type: cosine_pearson value: 2.016891027432069 - type: cosine_spearman value: 9.065694923749145 - type: euclidean_pearson value: -0.2317575485284492 - type: euclidean_spearman value: 1.478447144326562 - type: main_score value: 9.065694923749145 - type: manhattan_pearson value: 1.2210552984769953 - type: manhattan_spearman value: 1.0797490938939034 - type: pearson value: 2.016891027432069 - type: spearman value: 9.065694923749145 - task: type: STS dataset: name: MTEB STS22 (de-en) type: mteb/sts22-crosslingual-sts config: de-en split: test revision: de9d86b3b84231dc21f76c7b7af1f28e2f57f6e3 metrics: - type: cosine_pearson value: 20.30265778022666 - type: cosine_spearman value: 27.04088495025885 - type: euclidean_pearson value: 21.92624711333554 - type: euclidean_spearman value: 30.314966090982715 - type: main_score value: 27.04088495025885 - type: manhattan_pearson value: 22.449954374970556 - type: manhattan_spearman value: 33.98792612061501 - type: pearson value: 20.30265778022666 - type: spearman value: 27.04088495025885 - task: type: STS dataset: name: MTEB STSBenchmark (default) type: mteb/stsbenchmark-sts config: default split: test revision: b0fddb56ed78048fa8b90373c8a3cfc37b684831 metrics: - type: cosine_pearson value: 67.58098869120114 - type: cosine_spearman value: 67.2453123773366 - type: euclidean_pearson value: 58.23603604808463 - type: euclidean_spearman value: 58.623631847217 - type: main_score value: 67.2453123773366 - type: manhattan_pearson value: 58.368136302971195 - type: manhattan_spearman value: 58.837841919175105 - type: pearson value: 67.58098869120114 - type: spearman value: 67.2453123773366 - task: type: Reranking dataset: name: MTEB SciDocsRR (default) type: mteb/scidocs-reranking config: default split: test revision: d3c5e1fc0b855ab6097bf1cda04dd73947d7caab metrics: - type: main_score value: 68.53428785087402 - type: map value: 68.53428785087402 - type: mrr value: 88.53875880836665 - type: nAUC_map_diff1 value: 11.778449408360105 - type: nAUC_map_max value: 55.710378394122195 - type: nAUC_map_std value: 66.15614923206279 - type: nAUC_mrr_diff1 value: 47.35327285304558 - type: nAUC_mrr_max value: 74.15113781105075 - type: nAUC_mrr_std value: 70.40747046150474 - task: type: Retrieval dataset: name: MTEB SciFact (default) type: mteb/scifact config: default split: test revision: 0228b52cf27578f30900b9e5271d331663a030d7 metrics: - type: main_score value: 44.018 - type: map_at_1 value: 30.778 - type: map_at_10 value: 39.095 - type: map_at_100 value: 40.136 - type: map_at_1000 
value: 40.19 - type: map_at_20 value: 39.695 - type: map_at_3 value: 36.25 - type: map_at_5 value: 37.942 - type: mrr_at_1 value: 32.33333333333333 - type: mrr_at_10 value: 40.46640211640211 - type: mrr_at_100 value: 41.3527413808237 - type: mrr_at_1000 value: 41.402308015811776 - type: mrr_at_20 value: 40.9920777608471 - type: mrr_at_3 value: 37.999999999999986 - type: mrr_at_5 value: 39.46666666666666 - type: nauc_map_at_1000_diff1 value: 51.57525678345129 - type: nauc_map_at_1000_max value: 35.72906391653508 - type: nauc_map_at_1000_std value: -1.672862325664642 - type: nauc_map_at_100_diff1 value: 51.57482414972323 - type: nauc_map_at_100_max value: 35.714681767398474 - type: nauc_map_at_100_std value: -1.6459806802624475 - type: nauc_map_at_10_diff1 value: 51.142890340689064 - type: nauc_map_at_10_max value: 35.78128552943207 - type: nauc_map_at_10_std value: -2.1957957240897907 - type: nauc_map_at_1_diff1 value: 57.59762900453854 - type: nauc_map_at_1_max value: 36.479602157030534 - type: nauc_map_at_1_std value: -4.834289532948042 - type: nauc_map_at_20_diff1 value: 51.47980323079124 - type: nauc_map_at_20_max value: 35.585900524174406 - type: nauc_map_at_20_std value: -1.7680354064625985 - type: nauc_map_at_3_diff1 value: 51.012766710346625 - type: nauc_map_at_3_max value: 34.8262662118054 - type: nauc_map_at_3_std value: -2.8168593560801045 - type: nauc_map_at_5_diff1 value: 50.836092917622864 - type: nauc_map_at_5_max value: 35.32174769825645 - type: nauc_map_at_5_std value: -3.113242921586995 - type: nauc_mrr_at_1000_diff1 value: 53.10217120766699 - type: nauc_mrr_at_1000_max value: 37.46657201878918 - type: nauc_mrr_at_1000_std value: 1.9085047586195323 - type: nauc_mrr_at_100_diff1 value: 53.10038602820947 - type: nauc_mrr_at_100_max value: 37.461065885458225 - type: nauc_mrr_at_100_std value: 1.9403756850021763 - type: nauc_mrr_at_10_diff1 value: 52.71420660954082 - type: nauc_mrr_at_10_max value: 37.62806428278671 - type: nauc_mrr_at_10_std value: 1.9517437711674281 - type: nauc_mrr_at_1_diff1 value: 59.730007702616675 - type: nauc_mrr_at_1_max value: 38.85146416502298 - type: nauc_mrr_at_1_std value: -0.46260223776596965 - type: nauc_mrr_at_20_diff1 value: 53.041376670418906 - type: nauc_mrr_at_20_max value: 37.45508852907037 - type: nauc_mrr_at_20_std value: 1.9843723810434797 - type: nauc_mrr_at_3_diff1 value: 52.716388196194494 - type: nauc_mrr_at_3_max value: 36.76096106397856 - type: nauc_mrr_at_3_std value: 1.716782555536502 - type: nauc_mrr_at_5_diff1 value: 52.61598345028188 - type: nauc_mrr_at_5_max value: 37.26316036644959 - type: nauc_mrr_at_5_std value: 1.3757366695050894 - type: nauc_ndcg_at_1000_diff1 value: 51.342395628428314 - type: nauc_ndcg_at_1000_max value: 37.22548194348463 - type: nauc_ndcg_at_1000_std value: 1.6360986297119697 - type: nauc_ndcg_at_100_diff1 value: 51.12772923293346 - type: nauc_ndcg_at_100_max value: 37.08162525770745 - type: nauc_ndcg_at_100_std value: 2.1437445417460146 - type: nauc_ndcg_at_10_diff1 value: 49.48104920841383 - type: nauc_ndcg_at_10_max value: 36.98553295749576 - type: nauc_ndcg_at_10_std value: 0.7074029546666143 - type: nauc_ndcg_at_1_diff1 value: 59.730007702616675 - type: nauc_ndcg_at_1_max value: 38.85146416502298 - type: nauc_ndcg_at_1_std value: -0.46260223776596965 - type: nauc_ndcg_at_20_diff1 value: 50.63630218240983 - type: nauc_ndcg_at_20_max value: 36.29047254679528 - type: nauc_ndcg_at_20_std value: 1.3772144888034745 - type: nauc_ndcg_at_3_diff1 value: 49.382153963236625 - type: nauc_ndcg_at_3_max 
value: 35.22306811742639 - type: nauc_ndcg_at_3_std value: -0.8877334603608296 - type: nauc_ndcg_at_5_diff1 value: 49.05555691688766 - type: nauc_ndcg_at_5_max value: 36.00098364740635 - type: nauc_ndcg_at_5_std value: -1.5274960265115565 - type: nauc_precision_at_1000_diff1 value: 12.30933370851068 - type: nauc_precision_at_1000_max value: 24.80977336944425 - type: nauc_precision_at_1000_std value: 42.85052700690557 - type: nauc_precision_at_100_diff1 value: 26.185494481397587 - type: nauc_precision_at_100_max value: 31.155891382208928 - type: nauc_precision_at_100_std value: 35.608690885169295 - type: nauc_precision_at_10_diff1 value: 36.27376093062482 - type: nauc_precision_at_10_max value: 36.42692892209515 - type: nauc_precision_at_10_std value: 16.967432904462893 - type: nauc_precision_at_1_diff1 value: 59.730007702616675 - type: nauc_precision_at_1_max value: 38.85146416502298 - type: nauc_precision_at_1_std value: -0.46260223776596965 - type: nauc_precision_at_20_diff1 value: 37.622482136709785 - type: nauc_precision_at_20_max value: 31.21688679166065 - type: nauc_precision_at_20_std value: 23.221017808713682 - type: nauc_precision_at_3_diff1 value: 42.340206572143984 - type: nauc_precision_at_3_max value: 36.3442813514268 - type: nauc_precision_at_3_std value: 7.592922050055632 - type: nauc_precision_at_5_diff1 value: 38.17808235542409 - type: nauc_precision_at_5_max value: 35.09801657302365 - type: nauc_precision_at_5_std value: 8.398007414457009 - type: nauc_recall_at_1000_diff1 value: 55.841144651529085 - type: nauc_recall_at_1000_max value: 56.572722198749226 - type: nauc_recall_at_1000_std value: 31.84957409406956 - type: nauc_recall_at_100_diff1 value: 48.328441413096336 - type: nauc_recall_at_100_max value: 42.071227967505166 - type: nauc_recall_at_100_std value: 18.845456547380337 - type: nauc_recall_at_10_diff1 value: 42.32690986833832 - type: nauc_recall_at_10_max value: 38.657602228864995 - type: nauc_recall_at_10_std value: 5.742422923256993 - type: nauc_recall_at_1_diff1 value: 57.59762900453854 - type: nauc_recall_at_1_max value: 36.479602157030534 - type: nauc_recall_at_1_std value: -4.834289532948042 - type: nauc_recall_at_20_diff1 value: 46.280085660215995 - type: nauc_recall_at_20_max value: 35.65299771551237 - type: nauc_recall_at_20_std value: 8.057327587598591 - type: nauc_recall_at_3_diff1 value: 42.84012935628984 - type: nauc_recall_at_3_max value: 33.69290527723077 - type: nauc_recall_at_3_std value: -0.9503712670051102 - type: nauc_recall_at_5_diff1 value: 42.1137382698146 - type: nauc_recall_at_5_max value: 36.12494070598603 - type: nauc_recall_at_5_std value: -1.394936950543654 - type: ndcg_at_1 value: 32.333 - type: ndcg_at_10 value: 44.018 - type: ndcg_at_100 value: 49.089 - type: ndcg_at_1000 value: 50.651 - type: ndcg_at_20 value: 46.089 - type: ndcg_at_3 value: 38.499 - type: ndcg_at_5 value: 41.297 - type: precision_at_1 value: 32.333 - type: precision_at_10 value: 6.4 - type: precision_at_100 value: 0.923 - type: precision_at_1000 value: 0.106 - type: precision_at_20 value: 3.6670000000000003 - type: precision_at_3 value: 15.443999999999999 - type: precision_at_5 value: 10.867 - type: recall_at_1 value: 30.778 - type: recall_at_10 value: 57.99999999999999 - type: recall_at_100 value: 81.722 - type: recall_at_1000 value: 94.033 - type: recall_at_20 value: 66.02799999999999 - type: recall_at_3 value: 43.056 - type: recall_at_5 value: 49.694 - task: type: PairClassification dataset: name: MTEB SprintDuplicateQuestions (default) type: 
mteb/sprintduplicatequestions-pairclassification config: default split: test revision: d66bd1f72af766a5cc4b0ca5e00c162f89e8cc46 metrics: - type: cosine_accuracy value: 99.6 - type: cosine_accuracy_threshold value: 72.43388891220093 - type: cosine_ap value: 85.05626292429993 - type: cosine_f1 value: 78.94211576846308 - type: cosine_f1_threshold value: 70.86913585662842 - type: cosine_precision value: 78.78486055776892 - type: cosine_recall value: 79.10000000000001 - type: dot_accuracy value: 99.06534653465346 - type: dot_accuracy_threshold value: 76633.75244140625 - type: dot_ap value: 35.63520526748108 - type: dot_f1 value: 40.297274979355905 - type: dot_f1_threshold value: 46533.13903808594 - type: dot_precision value: 34.31786216596343 - type: dot_recall value: 48.8 - type: euclidean_accuracy value: 99.38217821782177 - type: euclidean_accuracy_threshold value: 1529.2129516601562 - type: euclidean_ap value: 65.66713048050076 - type: euclidean_f1 value: 63.702056698165656 - type: euclidean_f1_threshold value: 1659.9403381347656 - type: euclidean_precision value: 71.71464330413016 - type: euclidean_recall value: 57.3 - type: main_score value: 85.05626292429993 - type: manhattan_accuracy value: 99.36633663366337 - type: manhattan_accuracy_threshold value: 19134.791564941406 - type: manhattan_ap value: 64.327573756549 - type: manhattan_f1 value: 62.878385554965476 - type: manhattan_f1_threshold value: 20997.62725830078 - type: manhattan_precision value: 67.04416761041902 - type: manhattan_recall value: 59.199999999999996 - type: max_accuracy value: 99.6 - type: max_ap value: 85.05626292429993 - type: max_f1 value: 78.94211576846308 - type: max_precision value: 78.78486055776892 - type: max_recall value: 79.10000000000001 - type: similarity_accuracy value: 99.6 - type: similarity_accuracy_threshold value: 72.43388891220093 - type: similarity_ap value: 85.05626292429993 - type: similarity_f1 value: 78.94211576846308 - type: similarity_f1_threshold value: 70.86913585662842 - type: similarity_precision value: 78.78486055776892 - type: similarity_recall value: 79.10000000000001 - task: type: Clustering dataset: name: MTEB StackExchangeClustering (default) type: mteb/stackexchange-clustering config: default split: test revision: 6cbc1f7b2bc0622f2e39d2c77fa502909748c259 metrics: - type: main_score value: 33.04088699016667 - type: v_measure value: 33.04088699016667 - type: v_measure_std value: 4.201419342997424 - task: type: Clustering dataset: name: MTEB StackExchangeClusteringP2P (default) type: mteb/stackexchange-clustering-p2p config: default split: test revision: 815ca46b2622cec33ccafc3735d572c266efdb44 metrics: - type: main_score value: 27.79227103935552 - type: v_measure value: 27.79227103935552 - type: v_measure_std value: 1.6306895991356034 - task: type: Reranking dataset: name: MTEB StackOverflowDupQuestions (default) type: mteb/stackoverflowdupquestions-reranking config: default split: test revision: e185fbe320c72810689fc5848eb6114e1ef5ec69 metrics: - type: main_score value: 43.37562407771596 - type: map value: 43.37562407771596 - type: mrr value: 43.95843943638062 - type: nAUC_map_diff1 value: 35.17057785776578 - type: nAUC_map_max value: 16.895292109117968 - type: nAUC_map_std value: 7.566837158800999 - type: nAUC_mrr_diff1 value: 34.529930093774155 - type: nAUC_mrr_max value: 17.875421743140148 - type: nAUC_mrr_std value: 8.16194884246291 - task: type: Summarization dataset: name: MTEB SummEval (default) type: mteb/summeval config: default split: test revision: 
cda12ad7615edc362dbf25a00fdd61d3b1eaf93c metrics: - type: cosine_pearson value: 29.667795250962197 - type: cosine_spearman value: 29.280803143378677 - type: dot_pearson value: 17.20848486618972 - type: dot_spearman value: 19.642791960809518 - type: main_score value: 29.280803143378677 - type: pearson value: 29.667795250962197 - type: spearman value: 29.280803143378677 - task: type: Retrieval dataset: name: MTEB TRECCOVID (default) type: mteb/trec-covid config: default split: test revision: bb9466bac8153a0349341eb1b22e06409e78ef4e metrics: - type: main_score value: 47.015 - type: map_at_1 value: 0.11299999999999999 - type: map_at_10 value: 0.924 - type: map_at_100 value: 4.172 - type: map_at_1000 value: 9.794 - type: map_at_20 value: 1.512 - type: map_at_3 value: 0.32299999999999995 - type: map_at_5 value: 0.5349999999999999 - type: mrr_at_1 value: 54.0 - type: mrr_at_10 value: 64.37222222222222 - type: mrr_at_100 value: 64.95440794499618 - type: mrr_at_1000 value: 64.95440794499618 - type: mrr_at_20 value: 64.79285714285714 - type: mrr_at_3 value: 61.0 - type: mrr_at_5 value: 62.9 - type: nauc_map_at_1000_diff1 value: 5.391181504174254 - type: nauc_map_at_1000_max value: 48.53906859573933 - type: nauc_map_at_1000_std value: 58.77913245945572 - type: nauc_map_at_100_diff1 value: 5.602676644566584 - type: nauc_map_at_100_max value: 30.35986103902266 - type: nauc_map_at_100_std value: 43.61342447615204 - type: nauc_map_at_10_diff1 value: 11.168677765044714 - type: nauc_map_at_10_max value: 12.615876642210566 - type: nauc_map_at_10_std value: 15.487673375733934 - type: nauc_map_at_1_diff1 value: 13.856607126355705 - type: nauc_map_at_1_max value: 2.1470727276166315 - type: nauc_map_at_1_std value: 13.755038114656543 - type: nauc_map_at_20_diff1 value: 9.278354233919723 - type: nauc_map_at_20_max value: 14.549895562986578 - type: nauc_map_at_20_std value: 21.58014466138326 - type: nauc_map_at_3_diff1 value: 17.476371244979568 - type: nauc_map_at_3_max value: 5.336749157036172 - type: nauc_map_at_3_std value: 13.60030032869252 - type: nauc_map_at_5_diff1 value: 18.159708091961715 - type: nauc_map_at_5_max value: 5.5023295542724195 - type: nauc_map_at_5_std value: 13.464524190505264 - type: nauc_mrr_at_1000_diff1 value: 24.183591049739295 - type: nauc_mrr_at_1000_max value: 23.244935337421687 - type: nauc_mrr_at_1000_std value: 36.76491491232038 - type: nauc_mrr_at_100_diff1 value: 24.183591049739295 - type: nauc_mrr_at_100_max value: 23.244935337421687 - type: nauc_mrr_at_100_std value: 36.76491491232038 - type: nauc_mrr_at_10_diff1 value: 25.116993699935996 - type: nauc_mrr_at_10_max value: 23.996446760940472 - type: nauc_mrr_at_10_std value: 36.661108373978486 - type: nauc_mrr_at_1_diff1 value: 22.46394932066349 - type: nauc_mrr_at_1_max value: 17.99338723569777 - type: nauc_mrr_at_1_std value: 31.805173515601105 - type: nauc_mrr_at_20_diff1 value: 24.29457665863037 - type: nauc_mrr_at_20_max value: 23.511208714905433 - type: nauc_mrr_at_20_std value: 37.03779743443747 - type: nauc_mrr_at_3_diff1 value: 21.325058136848703 - type: nauc_mrr_at_3_max value: 25.498590855189146 - type: nauc_mrr_at_3_std value: 35.28303533385696 - type: nauc_mrr_at_5_diff1 value: 23.91581725239823 - type: nauc_mrr_at_5_max value: 21.88399789010818 - type: nauc_mrr_at_5_std value: 37.46999023019008 - type: nauc_ndcg_at_1000_diff1 value: 3.7557778508958846 - type: nauc_ndcg_at_1000_max value: 40.346503557806564 - type: nauc_ndcg_at_1000_std value: 50.92180253083818 - type: nauc_ndcg_at_100_diff1 value: 
11.758581771303305 - type: nauc_ndcg_at_100_max value: 35.16894818233675 - type: nauc_ndcg_at_100_std value: 47.424485591389114 - type: nauc_ndcg_at_10_diff1 value: 12.849993798661563 - type: nauc_ndcg_at_10_max value: 30.851313506820976 - type: nauc_ndcg_at_10_std value: 36.943619057267505 - type: nauc_ndcg_at_1_diff1 value: 11.113346207488473 - type: nauc_ndcg_at_1_max value: 15.184797768479774 - type: nauc_ndcg_at_1_std value: 27.52387082931017 - type: nauc_ndcg_at_20_diff1 value: 12.331028684560186 - type: nauc_ndcg_at_20_max value: 28.893165127974708 - type: nauc_ndcg_at_20_std value: 39.097000545114646 - type: nauc_ndcg_at_3_diff1 value: 15.782271186947469 - type: nauc_ndcg_at_3_max value: 23.91790545249963 - type: nauc_ndcg_at_3_std value: 34.87568041720673 - type: nauc_ndcg_at_5_diff1 value: 14.306657014965335 - type: nauc_ndcg_at_5_max value: 24.92679497185896 - type: nauc_ndcg_at_5_std value: 35.14072395767764 - type: nauc_precision_at_1000_diff1 value: 9.698627632231533 - type: nauc_precision_at_1000_max value: 43.62044953565815 - type: nauc_precision_at_1000_std value: 54.089192302090495 - type: nauc_precision_at_100_diff1 value: 11.799461882261514 - type: nauc_precision_at_100_max value: 36.87868882997057 - type: nauc_precision_at_100_std value: 51.09246667126284 - type: nauc_precision_at_10_diff1 value: 13.170655404348533 - type: nauc_precision_at_10_max value: 38.227922901784936 - type: nauc_precision_at_10_std value: 40.51375636546919 - type: nauc_precision_at_1_diff1 value: 22.46394932066349 - type: nauc_precision_at_1_max value: 17.99338723569777 - type: nauc_precision_at_1_std value: 31.805173515601105 - type: nauc_precision_at_20_diff1 value: 13.020942321118012 - type: nauc_precision_at_20_max value: 32.76679746744021 - type: nauc_precision_at_20_std value: 43.375734018262754 - type: nauc_precision_at_3_diff1 value: 22.36277013079758 - type: nauc_precision_at_3_max value: 29.14917970240368 - type: nauc_precision_at_3_std value: 38.40675412594522 - type: nauc_precision_at_5_diff1 value: 20.38016205233649 - type: nauc_precision_at_5_max value: 28.40199750312108 - type: nauc_precision_at_5_std value: 37.658196861765916 - type: nauc_recall_at_1000_diff1 value: -1.8797682238301674 - type: nauc_recall_at_1000_max value: 40.00611463779723 - type: nauc_recall_at_1000_std value: 50.00277798847854 - type: nauc_recall_at_100_diff1 value: 5.570829659209835 - type: nauc_recall_at_100_max value: 21.511683158026184 - type: nauc_recall_at_100_std value: 37.17966017860592 - type: nauc_recall_at_10_diff1 value: 5.649731119631445 - type: nauc_recall_at_10_max value: 12.690473408729572 - type: nauc_recall_at_10_std value: 8.697137776280309 - type: nauc_recall_at_1_diff1 value: 13.856607126355705 - type: nauc_recall_at_1_max value: 2.1470727276166315 - type: nauc_recall_at_1_std value: 13.755038114656543 - type: nauc_recall_at_20_diff1 value: 8.149753992066595 - type: nauc_recall_at_20_max value: 8.365030917145909 - type: nauc_recall_at_20_std value: 15.05385058373975 - type: nauc_recall_at_3_diff1 value: 16.664831204533417 - type: nauc_recall_at_3_max value: 4.9075975386189015 - type: nauc_recall_at_3_std value: 11.436115039116913 - type: nauc_recall_at_5_diff1 value: 17.863326487393323 - type: nauc_recall_at_5_max value: 0.04244496355094046 - type: nauc_recall_at_5_std value: 8.039336595643896 - type: ndcg_at_1 value: 48.0 - type: ndcg_at_10 value: 47.015 - type: ndcg_at_100 value: 31.857999999999997 - type: ndcg_at_1000 value: 27.142 - type: ndcg_at_20 value: 43.162 - type: ndcg_at_3 
value: 49.123 - type: ndcg_at_5 value: 49.425999999999995 - type: precision_at_1 value: 54.0 - type: precision_at_10 value: 51.0 - type: precision_at_100 value: 32.56 - type: precision_at_1000 value: 13.072000000000001 - type: precision_at_20 value: 45.9 - type: precision_at_3 value: 54.0 - type: precision_at_5 value: 55.2 - type: recall_at_1 value: 0.11299999999999999 - type: recall_at_10 value: 1.162 - type: recall_at_100 value: 6.809 - type: recall_at_1000 value: 25.805 - type: recall_at_20 value: 2.051 - type: recall_at_3 value: 0.35200000000000004 - type: recall_at_5 value: 0.618 - task: type: Retrieval dataset: name: MTEB Touche2020 (default) type: mteb/touche2020 config: default split: test revision: a34f9a33db75fa0cbb21bb5cfc3dae8dc8bec93f metrics: - type: main_score value: 12.417 - type: map_at_1 value: 1.2 - type: map_at_10 value: 4.376 - type: map_at_100 value: 7.161 - type: map_at_1000 value: 8.405 - type: map_at_20 value: 5.578 - type: map_at_3 value: 2.396 - type: map_at_5 value: 3.044 - type: mrr_at_1 value: 16.3265306122449 - type: mrr_at_10 value: 30.004859086491738 - type: mrr_at_100 value: 31.506819710420675 - type: mrr_at_1000 value: 31.52488003189439 - type: mrr_at_20 value: 31.07992314474907 - type: mrr_at_3 value: 24.489795918367346 - type: mrr_at_5 value: 27.857142857142854 - type: nauc_map_at_1000_diff1 value: -15.240085041163246 - type: nauc_map_at_1000_max value: -34.07491781069546 - type: nauc_map_at_1000_std value: -39.33676134505847 - type: nauc_map_at_100_diff1 value: -17.475590176275173 - type: nauc_map_at_100_max value: -36.27378611366948 - type: nauc_map_at_100_std value: -42.367310265458066 - type: nauc_map_at_10_diff1 value: -17.79313659611791 - type: nauc_map_at_10_max value: -30.930524152161155 - type: nauc_map_at_10_std value: -37.96490423161143 - type: nauc_map_at_1_diff1 value: -20.304167493996196 - type: nauc_map_at_1_max value: -34.39784658467407 - type: nauc_map_at_1_std value: -34.8048180060142 - type: nauc_map_at_20_diff1 value: -19.601011957021058 - type: nauc_map_at_20_max value: -36.19251563365872 - type: nauc_map_at_20_std value: -41.872703350300306 - type: nauc_map_at_3_diff1 value: -18.604827557464603 - type: nauc_map_at_3_max value: -33.87036816368854 - type: nauc_map_at_3_std value: -37.87305582981634 - type: nauc_map_at_5_diff1 value: -19.000407560148222 - type: nauc_map_at_5_max value: -35.88105036080159 - type: nauc_map_at_5_std value: -39.89433800276062 - type: nauc_mrr_at_1000_diff1 value: -10.977908813445096 - type: nauc_mrr_at_1000_max value: -32.70254863800196 - type: nauc_mrr_at_1000_std value: -36.932750949391014 - type: nauc_mrr_at_100_diff1 value: -10.923380877501057 - type: nauc_mrr_at_100_max value: -32.61546764122419 - type: nauc_mrr_at_100_std value: -36.842894043351315 - type: nauc_mrr_at_10_diff1 value: -10.131576305498573 - type: nauc_mrr_at_10_max value: -31.890083580054764 - type: nauc_mrr_at_10_std value: -36.93266622814508 - type: nauc_mrr_at_1_diff1 value: -16.139790526714425 - type: nauc_mrr_at_1_max value: -29.900749975522345 - type: nauc_mrr_at_1_std value: -29.066801658151576 - type: nauc_mrr_at_20_diff1 value: -10.70805724526718 - type: nauc_mrr_at_20_max value: -32.340792705157114 - type: nauc_mrr_at_20_std value: -36.72547772593701 - type: nauc_mrr_at_3_diff1 value: -17.91765468161938 - type: nauc_mrr_at_3_max value: -32.241705526206275 - type: nauc_mrr_at_3_std value: -33.553729892050974 - type: nauc_mrr_at_5_diff1 value: -12.991140385709848 - type: nauc_mrr_at_5_max value: -33.87447283054401 - type: 
nauc_mrr_at_5_std value: -37.96193128324505 - type: nauc_ndcg_at_1000_diff1 value: 1.4521546341817582 - type: nauc_ndcg_at_1000_max value: -22.463819593958227 - type: nauc_ndcg_at_1000_std value: -27.617648672815875 - type: nauc_ndcg_at_100_diff1 value: -11.537693897677832 - type: nauc_ndcg_at_100_max value: -36.160393447246 - type: nauc_ndcg_at_100_std value: -44.05399962086289 - type: nauc_ndcg_at_10_diff1 value: -9.919400208671634 - type: nauc_ndcg_at_10_max value: -22.769115244797316 - type: nauc_ndcg_at_10_std value: -34.034353433778854 - type: nauc_ndcg_at_1_diff1 value: -17.822259770980857 - type: nauc_ndcg_at_1_max value: -26.332806784918134 - type: nauc_ndcg_at_1_std value: -26.435402666146484 - type: nauc_ndcg_at_20_diff1 value: -13.788195267001576 - type: nauc_ndcg_at_20_max value: -32.974957041119055 - type: nauc_ndcg_at_20_std value: -42.33157337528393 - type: nauc_ndcg_at_3_diff1 value: -16.223851866502706 - type: nauc_ndcg_at_3_max value: -26.2902601974522 - type: nauc_ndcg_at_3_std value: -32.304039646610335 - type: nauc_ndcg_at_5_diff1 value: -12.817036231720957 - type: nauc_ndcg_at_5_max value: -28.44642751642767 - type: nauc_ndcg_at_5_std value: -36.58899943553682 - type: nauc_precision_at_1000_diff1 value: 26.935463895508967 - type: nauc_precision_at_1000_max value: 46.72249889198106 - type: nauc_precision_at_1000_std value: 38.53058407998278 - type: nauc_precision_at_100_diff1 value: 4.163340339758862 - type: nauc_precision_at_100_max value: -10.581299020111306 - type: nauc_precision_at_100_std value: -29.038739456237955 - type: nauc_precision_at_10_diff1 value: 0.5857232239199855 - type: nauc_precision_at_10_max value: -12.365623679544461 - type: nauc_precision_at_10_std value: -29.949307140170728 - type: nauc_precision_at_1_diff1 value: -16.139790526714425 - type: nauc_precision_at_1_max value: -29.900749975522345 - type: nauc_precision_at_1_std value: -29.066801658151576 - type: nauc_precision_at_20_diff1 value: -7.74805679959642 - type: nauc_precision_at_20_max value: -25.268356658986903 - type: nauc_precision_at_20_std value: -37.758242471707966 - type: nauc_precision_at_3_diff1 value: -15.634998600034066 - type: nauc_precision_at_3_max value: -28.48849869574053 - type: nauc_precision_at_3_std value: -34.907495608911546 - type: nauc_precision_at_5_diff1 value: -8.48679992836417 - type: nauc_precision_at_5_max value: -29.707555980272975 - type: nauc_precision_at_5_std value: -40.733334704807156 - type: nauc_recall_at_1000_diff1 value: 8.826494916857577 - type: nauc_recall_at_1000_max value: -16.922331971426086 - type: nauc_recall_at_1000_std value: 1.4850859633484936 - type: nauc_recall_at_100_diff1 value: -12.650176624230422 - type: nauc_recall_at_100_max value: -40.574740215148125 - type: nauc_recall_at_100_std value: -40.52283965149714 - type: nauc_recall_at_10_diff1 value: -13.43480673345223 - type: nauc_recall_at_10_max value: -28.6156485981151 - type: nauc_recall_at_10_std value: -35.45555317207978 - type: nauc_recall_at_1_diff1 value: -20.304167493996196 - type: nauc_recall_at_1_max value: -34.39784658467407 - type: nauc_recall_at_1_std value: -34.8048180060142 - type: nauc_recall_at_20_diff1 value: -19.74246524681499 - type: nauc_recall_at_20_max value: -41.057831832815154 - type: nauc_recall_at_20_std value: -43.831099576419234 - type: nauc_recall_at_3_diff1 value: -22.564348397487556 - type: nauc_recall_at_3_max value: -35.421451948002236 - type: nauc_recall_at_3_std value: -36.72882367879091 - type: nauc_recall_at_5_diff1 value: -18.948821357059504 - 
type: nauc_recall_at_5_max value: -39.22248196683214 - type: nauc_recall_at_5_std value: -39.964758319612635 - type: ndcg_at_1 value: 14.285999999999998 - type: ndcg_at_10 value: 12.417 - type: ndcg_at_100 value: 21.564 - type: ndcg_at_1000 value: 34.264 - type: ndcg_at_20 value: 13.932 - type: ndcg_at_3 value: 13.997000000000002 - type: ndcg_at_5 value: 13.161999999999999 - type: precision_at_1 value: 16.326999999999998 - type: precision_at_10 value: 12.245000000000001 - type: precision_at_100 value: 5.163 - type: precision_at_1000 value: 1.304 - type: precision_at_20 value: 10.918 - type: precision_at_3 value: 16.326999999999998 - type: precision_at_5 value: 14.285999999999998 - type: recall_at_1 value: 1.2 - type: recall_at_10 value: 8.763 - type: recall_at_100 value: 31.584 - type: recall_at_1000 value: 70.519 - type: recall_at_20 value: 14.379 - type: recall_at_3 value: 3.229 - type: recall_at_5 value: 5.079000000000001 - task: type: Classification dataset: name: MTEB ToxicConversationsClassification (default) type: mteb/toxic_conversations_50k config: default split: test revision: edfaf9da55d3dd50d43143d90c1ac476895ae6de metrics: - type: accuracy value: 63.04199218749999 - type: ap value: 10.379917199607485 - type: ap_weighted value: 10.379917199607485 - type: f1 value: 47.876568123841864 - type: f1_weighted value: 71.2370937104015 - type: main_score value: 63.04199218749999 - task: type: Classification dataset: name: MTEB TweetSentimentExtractionClassification (default) type: mteb/tweet_sentiment_extraction config: default split: test revision: d604517c81ca91fe16a244d1248fc021f9ecee7a metrics: - type: accuracy value: 49.442558007923026 - type: f1 value: 49.60441043943531 - type: f1_weighted value: 48.96898929345838 - type: main_score value: 49.442558007923026 - task: type: Clustering dataset: name: MTEB TwentyNewsgroupsClustering (default) type: mteb/twentynewsgroups-clustering config: default split: test revision: 6125ec4e24fa026cec8a478383ee943acfbd5449 metrics: - type: main_score value: 21.127920450161458 - type: v_measure value: 21.127920450161458 - type: v_measure_std value: 1.5027840050520012 - task: type: PairClassification dataset: name: MTEB TwitterSemEval2015 (default) type: mteb/twittersemeval2015-pairclassification config: default split: test revision: 70970daeab8776df92f5ea462b6173c0b46fd2d1 metrics: - type: cosine_accuracy value: 82.18394230196103 - type: cosine_accuracy_threshold value: 70.92341184616089 - type: cosine_ap value: 59.78262740579837 - type: cosine_f1 value: 56.536101934874935 - type: cosine_f1_threshold value: 63.08426856994629 - type: cosine_precision value: 51.13102859581733 - type: cosine_recall value: 63.21899736147757 - type: dot_accuracy value: 78.2559456398641 - type: dot_accuracy_threshold value: 75122.66235351562 - type: dot_ap value: 42.7554645305854 - type: dot_f1 value: 46.84298752095361 - type: dot_f1_threshold value: 47930.230712890625 - type: dot_precision value: 36.19746689694876 - type: dot_recall value: 66.35883905013192 - type: euclidean_accuracy value: 80.41962210168684 - type: euclidean_accuracy_threshold value: 2041.592025756836 - type: euclidean_ap value: 53.9382918676684 - type: euclidean_f1 value: 53.007111003977336 - type: euclidean_f1_threshold value: 2444.729995727539 - type: euclidean_precision value: 48.79076991346794 - type: euclidean_recall value: 58.02110817941952 - type: main_score value: 59.78262740579837 - type: manhattan_accuracy value: 80.65208320915539 - type: manhattan_accuracy_threshold value: 26017.153930664062 - 
type: manhattan_ap value: 54.628314460914396 - type: manhattan_f1 value: 53.78151260504202 - type: manhattan_f1_threshold value: 30961.737060546875 - type: manhattan_precision value: 47.208931419457734 - type: manhattan_recall value: 62.48021108179419 - type: max_accuracy value: 82.18394230196103 - type: max_ap value: 59.78262740579837 - type: max_f1 value: 56.536101934874935 - type: max_precision value: 51.13102859581733 - type: max_recall value: 66.35883905013192 - type: similarity_accuracy value: 82.18394230196103 - type: similarity_accuracy_threshold value: 70.92341184616089 - type: similarity_ap value: 59.78262740579837 - type: similarity_f1 value: 56.536101934874935 - type: similarity_f1_threshold value: 63.08426856994629 - type: similarity_precision value: 51.13102859581733 - type: similarity_recall value: 63.21899736147757 - task: type: PairClassification dataset: name: MTEB TwitterURLCorpus (default) type: mteb/twitterurlcorpus-pairclassification config: default split: test revision: 8b6510b0b1fa4e4c4f879467980e9be563ec1cdf metrics: - type: cosine_accuracy value: 86.35269918888501 - type: cosine_accuracy_threshold value: 65.62063097953796 - type: cosine_ap value: 79.86337146522463 - type: cosine_f1 value: 72.03383314109958 - type: cosine_f1_threshold value: 62.217533588409424 - type: cosine_precision value: 71.93979419444018 - type: cosine_recall value: 72.12811826301201 - type: dot_accuracy value: 82.84045484534482 - type: dot_accuracy_threshold value: 35566.62902832031 - type: dot_ap value: 69.69127356271262 - type: dot_f1 value: 64.93162154619034 - type: dot_f1_threshold value: 28885.244750976562 - type: dot_precision value: 59.36463383516203 - type: dot_recall value: 71.65075454265477 - type: euclidean_accuracy value: 83.63022470601933 - type: euclidean_accuracy_threshold value: 1693.5848236083984 - type: euclidean_ap value: 71.73555972139718 - type: euclidean_f1 value: 63.8556476722812 - type: euclidean_f1_threshold value: 1923.9103317260742 - type: euclidean_precision value: 62.26497914990124 - type: euclidean_recall value: 65.52971974129966 - type: main_score value: 79.86337146522463 - type: manhattan_accuracy value: 83.70978383203322 - type: manhattan_accuracy_threshold value: 21348.568725585938 - type: manhattan_ap value: 72.01847359087003 - type: manhattan_f1 value: 64.34136401773942 - type: manhattan_f1_threshold value: 23113.516235351562 - type: manhattan_precision value: 66.8715222988124 - type: manhattan_recall value: 61.99568832768709 - type: max_accuracy value: 86.35269918888501 - type: max_ap value: 79.86337146522463 - type: max_f1 value: 72.03383314109958 - type: max_precision value: 71.93979419444018 - type: max_recall value: 72.12811826301201 - type: similarity_accuracy value: 86.35269918888501 - type: similarity_accuracy_threshold value: 65.62063097953796 - type: similarity_ap value: 79.86337146522463 - type: similarity_f1 value: 72.03383314109958 - type: similarity_f1_threshold value: 62.217533588409424 - type: similarity_precision value: 71.93979419444018 - type: similarity_recall value: 72.12811826301201 --- # minishlab/M2V_base_output Model Card This [Model2Vec](https://github.com/MinishLab/model2vec) model is a distilled version of the [baai/bge-base-en-v1.5](https://huggingface.co/baai/bge-base-en-v1.5) Sentence Transformer. It uses static embeddings, allowing text embeddings to be computed orders of magnitude faster on both GPU and CPU. It is designed for applications where computational resources are limited or where real-time performance is critical. 
## Installation Install model2vec using pip: ``` pip install model2vec ``` ## Usage Load this model using the `from_pretrained` method: ```python from model2vec import StaticModel # Load a pretrained Model2Vec model model = StaticModel.from_pretrained("minishlab/M2V_base_output") # Compute text embeddings embeddings = model.encode(["Example sentence"]) ``` Alternatively, you can distill your own model using the `distill` method: ```python from model2vec.distill import distill # Choose a Sentence Transformer model model_name = "BAAI/bge-base-en-v1.5" # Distill the model m2v_model = distill(model_name=model_name, pca_dims=256) # Save the model m2v_model.save_pretrained("m2v_model") ``` ## How it works Model2vec creates a small, fast, and powerful model that outperforms other static embedding models by a large margin on all tasks we could find, while being much faster to create than traditional static embedding models such as GloVe. Best of all, you don't need any data to distill a model using Model2Vec. It works by passing a vocabulary through a sentence transformer model, then reducing the dimensionality of the resulting embeddings using PCA, and finally weighting the embeddings using zipf weighting. During inference, we simply take the mean of all token embeddings occurring in a sentence. ## Additional Resources - [All Model2Vec models on the hub](https://huggingface.co/models?library=model2vec) - [Model2Vec Repo](https://github.com/MinishLab/model2vec) - [Model2Vec Results](https://github.com/MinishLab/model2vec?tab=readme-ov-file#results) - [Model2Vec Tutorials](https://github.com/MinishLab/model2vec/tree/main/tutorials) ## Library Authors Model2Vec was developed by the [Minish Lab](https://github.com/MinishLab) team consisting of [Stephan Tulkens](https://github.com/stephantul) and [Thomas van Dongen](https://github.com/Pringled). ## Citation Please cite the [Model2Vec repository](https://github.com/MinishLab/model2vec) if you use this model in your work. ``` @software{minishlab2024model2vec, authors = {Stephan Tulkens, Thomas van Dongen}, title = {Model2Vec: Turn any Sentence Transformer into a Small Fast Model}, year = {2024}, url = {https://github.com/MinishLab/model2vec}, } ```
[ "BIOSSES", "SCIFACT" ]
openbmb/MiniCPM-V-2_6
openbmb
image-text-to-text
[ "transformers", "safetensors", "minicpmv", "feature-extraction", "minicpm-v", "vision", "ocr", "multi-image", "video", "custom_code", "image-text-to-text", "conversational", "multilingual", "dataset:openbmb/RLAIF-V-Dataset", "arxiv:2408.01800", "region:us" ]
"2024-08-04T06:24:29Z"
2025-01-15T10:21:13+00:00
73,195
956
--- datasets: - openbmb/RLAIF-V-Dataset language: - multilingual library_name: transformers pipeline_tag: image-text-to-text tags: - minicpm-v - vision - ocr - multi-image - video - custom_code --- <h1>A GPT-4V Level MLLM for Single Image, Multi Image and Video on Your Phone</h1> [GitHub](https://github.com/OpenBMB/MiniCPM-V) | [Demo](http://120.92.209.146:8887/)</a> ## News <!-- omit in toc --> * [2025.01.14] 🔥🔥 We open source [**MiniCPM-o 2.6**](https://huggingface.co/openbmb/MiniCPM-o-2_6), with significant performance improvement over **MiniCPM-V 2.6**, and support real-time speech-to-speech conversation and multimodal live streaming. Try it now. ## MiniCPM-V 2.6 **MiniCPM-V 2.6** is the latest and most capable model in the MiniCPM-V series. The model is built on SigLip-400M and Qwen2-7B with a total of 8B parameters. It exhibits a significant performance improvement over MiniCPM-Llama3-V 2.5, and introduces new features for multi-image and video understanding. Notable features of MiniCPM-V 2.6 include: - 🔥 **Leading Performance.** MiniCPM-V 2.6 achieves an average score of 65.2 on the latest version of OpenCompass, a comprehensive evaluation over 8 popular benchmarks. **With only 8B parameters, it surpasses widely used proprietary models like GPT-4o mini, GPT-4V, Gemini 1.5 Pro, and Claude 3.5 Sonnet** for single image understanding. - 🖼️ **Multi Image Understanding and In-context Learning.** MiniCPM-V 2.6 can also perform **conversation and reasoning over multiple images**. It achieves **state-of-the-art performance** on popular multi-image benchmarks such as Mantis-Eval, BLINK, Mathverse mv and Sciverse mv, and also shows promising in-context learning capability. - 🎬 **Video Understanding.** MiniCPM-V 2.6 can also **accept video inputs**, performing conversation and providing dense captions for spatial-temporal information. It outperforms **GPT-4V, Claude 3.5 Sonnet and LLaVA-NeXT-Video-34B** on Video-MME with/without subtitles. - 💪 **Strong OCR Capability and Others.** MiniCPM-V 2.6 can process images with any aspect ratio and up to 1.8 million pixels (e.g., 1344x1344). It achieves **state-of-the-art performance on OCRBench, surpassing proprietary models such as GPT-4o, GPT-4V, and Gemini 1.5 Pro**. Based on the the latest [RLAIF-V](https://github.com/RLHF-V/RLAIF-V/) and [VisCPM](https://github.com/OpenBMB/VisCPM) techniques, it features **trustworthy behaviors**, with significantly lower hallucination rates than GPT-4o and GPT-4V on Object HalBench, and supports **multilingual capabilities** on English, Chinese, German, French, Italian, Korean, etc. - 🚀 **Superior Efficiency.** In addition to its friendly size, MiniCPM-V 2.6 also shows **state-of-the-art token density** (i.e., number of pixels encoded into each visual token). **It produces only 640 tokens when processing a 1.8M pixel image, which is 75% fewer than most models**. This directly improves the inference speed, first-token latency, memory usage, and power consumption. As a result, MiniCPM-V 2.6 can efficiently support **real-time video understanding** on end-side devices such as iPad. 
- 💫 **Easy Usage.** MiniCPM-V 2.6 can be easily used in various ways: (1) [llama.cpp](https://github.com/OpenBMB/llama.cpp/blob/minicpmv-main/examples/llava/README-minicpmv2.6.md) and [ollama](https://github.com/OpenBMB/ollama/tree/minicpm-v2.6) support for efficient CPU inference on local devices, (2) [int4](https://huggingface.co/openbmb/MiniCPM-V-2_6-int4) and [GGUF](https://huggingface.co/openbmb/MiniCPM-V-2_6-gguf) format quantized models in 16 sizes, (3) [vLLM](https://github.com/OpenBMB/MiniCPM-V/tree/main?tab=readme-ov-file#inference-with-vllm) support for high-throughput and memory-efficient inference, (4) fine-tuning on new domains and tasks, (5) quick local WebUI demo setup with [Gradio](https://github.com/OpenBMB/MiniCPM-V/tree/main?tab=readme-ov-file#chat-with-our-demo-on-gradio) and (6) online web [demo](http://120.92.209.146:8887). ### Evaluation <!-- omit in toc --> <div align="center"> <img src="https://github.com/OpenBMB/MiniCPM-V/raw/main/assets/radar_final.png" width=66% /> </div> #### Single image results on OpenCompass, MME, MMVet, OCRBench, MMMU, MathVista, MMB, AI2D, TextVQA, DocVQA, HallusionBench, Object HalBench: <div align="center"> ![image/png](https://cdn-uploads.huggingface.co/production/uploads/64abc4aa6cadc7aca585dddf/QVl0iPtT5aUhlvViyEpgs.png) </div> <sup>*</sup> We evaluate this benchmark using chain-of-thought prompting. <sup>+</sup> Token Density: number of pixels encoded into each visual token at maximum resolution, i.e., # pixels at maximum resolution / # visual tokens. Note: For proprietary models, we calculate token density based on the image encoding charging strategy defined in the official API documentation, which provides an upper-bound estimation. #### Multi-image results on Mantis Eval, BLINK Val, Mathverse mv, Sciverse mv, MIRB: <div align="center"> ![image/png](https://cdn-uploads.huggingface.co/production/uploads/64abc4aa6cadc7aca585dddf/o6FGHytRhzeatmhxq0Dbi.png) </div> <sup>*</sup> We evaluate the officially released checkpoint by ourselves. #### Video results on Video-MME and Video-ChatGPT: <div align="center"> <!-- ![image/png](https://cdn-uploads.huggingface.co/production/uploads/64abc4aa6cadc7aca585dddf/_T1mw5yhqNCqVdYRTQOGu.png) --> ![image/png](https://cdn-uploads.huggingface.co/production/uploads/64abc4aa6cadc7aca585dddf/jmrjoRr8SFLkrstjDmpaV.png) </div> <details> <summary>Click to view few-shot results on TextVQA, VizWiz, VQAv2, OK-VQA.</summary> <div align="center"> ![image/png](https://cdn-uploads.huggingface.co/production/uploads/64abc4aa6cadc7aca585dddf/zXIuiCTTe-POqKGHszdn0.png) </div> * denotes zero image shot and two additional text shots following Flamingo. <sup>+</sup> We evaluate the pretraining ckpt without SFT. 
</details> ### Examples <!-- omit in toc --> <div style="display: flex; flex-direction: column; align-items: center;"> <img src="https://github.com/OpenBMB/MiniCPM-V/raw/main/assets/minicpmv2_6/multi_img-bike.png" alt="Bike" style="margin-bottom: -20px;"> <img src="https://github.com/OpenBMB/MiniCPM-V/raw/main/assets/minicpmv2_6/multi_img-menu.png" alt="Menu" style="margin-bottom: -20px;"> <img src="https://github.com/OpenBMB/MiniCPM-V/raw/main/assets/minicpmv2_6/multi_img-code.png" alt="Code" style="margin-bottom: -20px;"> <img src="https://github.com/OpenBMB/MiniCPM-V/raw/main/assets/minicpmv2_6/ICL-Mem.png" alt="Mem" style="margin-bottom: -20px;"> <img src="https://github.com/OpenBMB/MiniCPM-V/raw/main/assets/minicpmv2_6/multiling-medal.png" alt="medal" style="margin-bottom: 10px;"> </div> <details> <summary>Click to view more cases.</summary> <div style="display: flex; flex-direction: column; align-items: center;"> <img src="https://github.com/OpenBMB/MiniCPM-V/raw/main/assets/minicpmv2_6/ICL-elec.png" alt="elec" style="margin-bottom: -20px;"> <img src="https://github.com/OpenBMB/MiniCPM-V/raw/main/assets/minicpmv2_6/multiling-olympic.png" alt="Menu" style="margin-bottom: 10px;"> </div> </details> We deploy MiniCPM-V 2.6 on end devices. The demo video is the raw screen recording on a iPad Pro without edition. <div style="display: flex; justify-content: center;"> <img src="https://github.com/OpenBMB/MiniCPM-V/raw/main/assets/gif_cases/ai.gif" width="48%" style="margin: 0 10px;"/> <img src="https://github.com/OpenBMB/MiniCPM-V/raw/main/assets/gif_cases/beer.gif" width="48%" style="margin: 0 10px;"/> </div> <div style="display: flex; justify-content: center; margin-top: 20px;"> <img src="https://github.com/OpenBMB/MiniCPM-V/raw/main/assets/gif_cases/ticket.gif" width="48%" style="margin: 0 10px;"/> <img src="https://github.com/OpenBMB/MiniCPM-V/raw/main/assets/gif_cases/wfh.gif" width="48%" style="margin: 0 10px;"/> </div> <div style="text-align: center;"> <video controls autoplay src="https://cdn-uploads.huggingface.co/production/uploads/64abc4aa6cadc7aca585dddf/mXAEFQFqNd4nnvPk7r5eX.mp4"></video> <!-- <video controls autoplay src="https://cdn-uploads.huggingface.co/production/uploads/64abc4aa6cadc7aca585dddf/fEWzfHUdKnpkM7sdmnBQa.mp4"></video> --> </div> ## Demo Click here to try the Demo of [MiniCPM-V 2.6](http://120.92.209.146:8887/). ## Usage Inference using Huggingface transformers on NVIDIA GPUs. Requirements tested on python 3.10: ``` Pillow==10.1.0 torch==2.1.2 torchvision==0.16.2 transformers==4.40.0 sentencepiece==0.1.99 decord ``` ```python # test.py import torch from PIL import Image from transformers import AutoModel, AutoTokenizer model = AutoModel.from_pretrained('openbmb/MiniCPM-V-2_6', trust_remote_code=True, attn_implementation='sdpa', torch_dtype=torch.bfloat16) # sdpa or flash_attention_2, no eager model = model.eval().cuda() tokenizer = AutoTokenizer.from_pretrained('openbmb/MiniCPM-V-2_6', trust_remote_code=True) image = Image.open('xx.jpg').convert('RGB') question = 'What is in the image?' 
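# The `msgs` structure below is the chat format used throughout this card: each turn is a dict with a 'role' ('user' or 'assistant') and a 'content' list that may mix PIL images and plain strings.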
msgs = [{'role': 'user', 'content': [image, question]}] res = model.chat( image=None, msgs=msgs, tokenizer=tokenizer ) print(res) ## if you want to use streaming, please make sure sampling=True and stream=True ## the model.chat will return a generator res = model.chat( image=None, msgs=msgs, tokenizer=tokenizer, sampling=True, stream=True ) generated_text = "" for new_text in res: generated_text += new_text print(new_text, flush=True, end='') ``` ### Chat with multiple images <details> <summary> Click to show Python code running MiniCPM-V 2.6 with multiple images input. </summary> ```python import torch from PIL import Image from transformers import AutoModel, AutoTokenizer model = AutoModel.from_pretrained('openbmb/MiniCPM-V-2_6', trust_remote_code=True, attn_implementation='sdpa', torch_dtype=torch.bfloat16) # sdpa or flash_attention_2, no eager model = model.eval().cuda() tokenizer = AutoTokenizer.from_pretrained('openbmb/MiniCPM-V-2_6', trust_remote_code=True) image1 = Image.open('image1.jpg').convert('RGB') image2 = Image.open('image2.jpg').convert('RGB') question = 'Compare image 1 and image 2, tell me about the differences between image 1 and image 2.' msgs = [{'role': 'user', 'content': [image1, image2, question]}] answer = model.chat( image=None, msgs=msgs, tokenizer=tokenizer ) print(answer) ``` </details> ### In-context few-shot learning <details> <summary> Click to view Python code running MiniCPM-V 2.6 with few-shot input. </summary> ```python import torch from PIL import Image from transformers import AutoModel, AutoTokenizer model = AutoModel.from_pretrained('openbmb/MiniCPM-V-2_6', trust_remote_code=True, attn_implementation='sdpa', torch_dtype=torch.bfloat16) # sdpa or flash_attention_2, no eager model = model.eval().cuda() tokenizer = AutoTokenizer.from_pretrained('openbmb/MiniCPM-V-2_6', trust_remote_code=True) question = "production date" image1 = Image.open('example1.jpg').convert('RGB') answer1 = "2023.08.04" image2 = Image.open('example2.jpg').convert('RGB') answer2 = "2007.04.24" image_test = Image.open('test.jpg').convert('RGB') msgs = [ {'role': 'user', 'content': [image1, question]}, {'role': 'assistant', 'content': [answer1]}, {'role': 'user', 'content': [image2, question]}, {'role': 'assistant', 'content': [answer2]}, {'role': 'user', 'content': [image_test, question]} ] answer = model.chat( image=None, msgs=msgs, tokenizer=tokenizer ) print(answer) ``` </details> ### Chat with video <details> <summary> Click to view Python code running MiniCPM-V 2.6 with video input. 
</summary> ```python import torch from PIL import Image from transformers import AutoModel, AutoTokenizer from decord import VideoReader, cpu # pip install decord model = AutoModel.from_pretrained('openbmb/MiniCPM-V-2_6', trust_remote_code=True, attn_implementation='sdpa', torch_dtype=torch.bfloat16) # sdpa or flash_attention_2, no eager model = model.eval().cuda() tokenizer = AutoTokenizer.from_pretrained('openbmb/MiniCPM-V-2_6', trust_remote_code=True) MAX_NUM_FRAMES=64 # if cuda OOM set a smaller number def encode_video(video_path): def uniform_sample(l, n): gap = len(l) / n idxs = [int(i * gap + gap / 2) for i in range(n)] return [l[i] for i in idxs] vr = VideoReader(video_path, ctx=cpu(0)) sample_fps = round(vr.get_avg_fps() / 1) # FPS frame_idx = [i for i in range(0, len(vr), sample_fps)] if len(frame_idx) > MAX_NUM_FRAMES: frame_idx = uniform_sample(frame_idx, MAX_NUM_FRAMES) frames = vr.get_batch(frame_idx).asnumpy() frames = [Image.fromarray(v.astype('uint8')) for v in frames] print('num frames:', len(frames)) return frames video_path = "video_test.mp4" frames = encode_video(video_path) question = "Describe the video" msgs = [ {'role': 'user', 'content': frames + [question]}, ] # Set decode params for video params={} params["use_image_id"] = False params["max_slice_nums"] = 2 # use 1 if cuda OOM and video resolution > 448*448 answer = model.chat( image=None, msgs=msgs, tokenizer=tokenizer, **params ) print(answer) ``` </details> Please see [GitHub](https://github.com/OpenBMB/MiniCPM-V) for more details about usage. ## Inference with llama.cpp<a id="llamacpp"></a> MiniCPM-V 2.6 can run with llama.cpp. See our fork of [llama.cpp](https://github.com/OpenBMB/llama.cpp/tree/minicpm-v2.5/examples/minicpmv) for more details. ## Int4 quantized version Download the int4 quantized version for lower GPU memory (7GB) usage: [MiniCPM-V-2_6-int4](https://huggingface.co/openbmb/MiniCPM-V-2_6-int4). ## License #### Model License * The code in this repo is released under the [Apache-2.0](https://github.com/OpenBMB/MiniCPM/blob/main/LICENSE) License. * The usage of MiniCPM-V series model weights must strictly follow [MiniCPM Model License.md](https://github.com/OpenBMB/MiniCPM/blob/main/MiniCPM%20Model%20License.md). * The models and weights of MiniCPM are completely free for academic research. After filling out a ["questionnaire"](https://modelbest.feishu.cn/share/base/form/shrcnpV5ZT9EJ6xYjh3Kx0J6v8g) for registration, MiniCPM-V 2.6 weights are also available for free commercial use. #### Statement * As an LMM, MiniCPM-V 2.6 generates content by learning from a large amount of multimodal corpora, but it cannot comprehend, express personal opinions, or make value judgements. Anything generated by MiniCPM-V 2.6 does not represent the views and positions of the model developers. * We will not be liable for any problems arising from the use of the MiniCPM-V models, including but not limited to data security issues, public opinion risks, or any risks and problems arising from the misdirection, misuse, or dissemination of the model. ## Key Techniques and Other Multimodal Projects 👏 Welcome to explore key techniques of MiniCPM-V 2.6 and other multimodal projects of our team: [VisCPM](https://github.com/OpenBMB/VisCPM/tree/main) | [RLHF-V](https://github.com/RLHF-V/RLHF-V) | [LLaVA-UHD](https://github.com/thunlp/LLaVA-UHD) | [RLAIF-V](https://github.com/RLHF-V/RLAIF-V) ## Citation If you find our work helpful, please consider citing our papers 📝 and liking this project ❤️!
```bib @article{yao2024minicpm, title={MiniCPM-V: A GPT-4V Level MLLM on Your Phone}, author={Yao, Yuan and Yu, Tianyu and Zhang, Ao and Wang, Chongyi and Cui, Junbo and Zhu, Hongji and Cai, Tianchi and Li, Haoyu and Zhao, Weilin and He, Zhihui and others}, journal={arXiv preprint arXiv:2408.01800}, year={2024} } ```
[ "MEDAL" ]
BAAI/bge-small-zh-v1.5
BAAI
feature-extraction
[ "transformers", "pytorch", "safetensors", "bert", "feature-extraction", "zh", "arxiv:2310.07554", "arxiv:2309.07597", "license:mit", "text-embeddings-inference", "endpoints_compatible", "region:us" ]
"2023-09-12T05:22:29Z"
2023-10-12T03:35:59+00:00
72,966
53
--- language: - zh license: mit --- <h1 align="center">FlagEmbedding</h1> <h4 align="center"> <p> <a href=#model-list>Model List</a> | <a href=#frequently-asked-questions>FAQ</a> | <a href=#usage>Usage</a> | <a href="#evaluation">Evaluation</a> | <a href="#train">Train</a> | <a href="#contact">Contact</a> | <a href="#citation">Citation</a> | <a href="#license">License</a> <p> </h4> For more details, please refer to our GitHub: [FlagEmbedding](https://github.com/FlagOpen/FlagEmbedding). [English](README.md) | [中文](https://github.com/FlagOpen/FlagEmbedding/blob/master/README_zh.md) FlagEmbedding can map any text to a low-dimensional dense vector which can be used for tasks like retrieval, classification, clustering, or semantic search. It can also be used in vector databases for LLMs. ************* 🌟**Updates**🌟 ************* - 10/12/2023: Release [LLM-Embedder](./FlagEmbedding/llm_embedder/README.md), a unified embedding model to support diverse retrieval augmentation needs for LLMs. [Paper](https://arxiv.org/pdf/2310.07554.pdf) :fire: - 09/15/2023: The [technical report](https://arxiv.org/pdf/2309.07597.pdf) of BGE has been released - 09/15/2023: The [massive training data](https://data.baai.ac.cn/details/BAAI-MTP) of BGE has been released - 09/12/2023: New models: - **New reranker model**: release cross-encoder models `BAAI/bge-reranker-base` and `BAAI/bge-reranker-large`, which are more powerful than the embedding models. We recommend using/fine-tuning them to re-rank top-k documents returned by embedding models. - **Updated embedding model**: release the `bge-*-v1.5` embedding models to alleviate the issue of the similarity distribution, and enhance their retrieval ability without instruction. <details> <summary>More</summary> <!-- ### More --> - 09/07/2023: Update [fine-tune code](https://github.com/FlagOpen/FlagEmbedding/blob/master/FlagEmbedding/baai_general_embedding/README.md): add a script to mine hard negatives and support adding an instruction during fine-tuning. - 08/09/2023: BGE models are integrated into **Langchain**; you can use them like [this](#using-langchain); the C-MTEB **leaderboard** is [available](https://huggingface.co/spaces/mteb/leaderboard). - 08/05/2023: Release base-scale and small-scale models, **best performance among the models of the same size 🤗** - 08/02/2023: Release `bge-large-*`(short for BAAI General Embedding) Models, **rank 1st on MTEB and C-MTEB benchmark!** :tada: :tada: - 08/01/2023: We release the [Chinese Massive Text Embedding Benchmark](https://github.com/FlagOpen/FlagEmbedding/blob/master/C_MTEB) (**C-MTEB**), consisting of 31 test datasets. </details> ## Model List `bge` is short for `BAAI general embedding`.
| Model | Language | | Description | query instruction for retrieval [1] | |:-------------------------------|:--------:| :--------:| :--------:|:--------:| | [BAAI/llm-embedder](https://huggingface.co/BAAI/llm-embedder) | English | [Inference](./FlagEmbedding/llm_embedder/README.md) [Fine-tune](./FlagEmbedding/llm_embedder/README.md) | a unified embedding model to support diverse retrieval augmentation needs for LLMs | See [README](./FlagEmbedding/llm_embedder/README.md) | | [BAAI/bge-reranker-large](https://huggingface.co/BAAI/bge-reranker-large) | Chinese and English | [Inference](#usage-for-reranker) [Fine-tune](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/reranker) | a cross-encoder model which is more accurate but less efficient [2] | | | [BAAI/bge-reranker-base](https://huggingface.co/BAAI/bge-reranker-base) | Chinese and English | [Inference](#usage-for-reranker) [Fine-tune](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/reranker) | a cross-encoder model which is more accurate but less efficient [2] | | | [BAAI/bge-large-en-v1.5](https://huggingface.co/BAAI/bge-large-en-v1.5) | English | [Inference](#usage-for-embedding-model) [Fine-tune](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/finetune) | version 1.5 with more reasonable similarity distribution | `Represent this sentence for searching relevant passages: ` | | [BAAI/bge-base-en-v1.5](https://huggingface.co/BAAI/bge-base-en-v1.5) | English | [Inference](#usage-for-embedding-model) [Fine-tune](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/finetune) | version 1.5 with more reasonable similarity distribution | `Represent this sentence for searching relevant passages: ` | | [BAAI/bge-small-en-v1.5](https://huggingface.co/BAAI/bge-small-en-v1.5) | English | [Inference](#usage-for-embedding-model) [Fine-tune](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/finetune) | version 1.5 with more reasonable similarity distribution | `Represent this sentence for searching relevant passages: ` | | [BAAI/bge-large-zh-v1.5](https://huggingface.co/BAAI/bge-large-zh-v1.5) | Chinese | [Inference](#usage-for-embedding-model) [Fine-tune](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/finetune) | version 1.5 with more reasonable similarity distribution | `为这个句子生成表示以用于检索相关文章:` | | [BAAI/bge-base-zh-v1.5](https://huggingface.co/BAAI/bge-base-zh-v1.5) | Chinese | [Inference](#usage-for-embedding-model) [Fine-tune](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/finetune) | version 1.5 with more reasonable similarity distribution | `为这个句子生成表示以用于检索相关文章:` | | [BAAI/bge-small-zh-v1.5](https://huggingface.co/BAAI/bge-small-zh-v1.5) | Chinese | [Inference](#usage-for-embedding-model) [Fine-tune](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/finetune) | version 1.5 with more reasonable similarity distribution | `为这个句子生成表示以用于检索相关文章:` | | [BAAI/bge-large-en](https://huggingface.co/BAAI/bge-large-en) | English | [Inference](#usage-for-embedding-model) [Fine-tune](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/finetune) | :trophy: rank **1st** in [MTEB](https://huggingface.co/spaces/mteb/leaderboard) leaderboard | `Represent this sentence for searching relevant passages: ` | | [BAAI/bge-base-en](https://huggingface.co/BAAI/bge-base-en) | English | [Inference](#usage-for-embedding-model) [Fine-tune](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/finetune) | a base-scale model but with similar ability to 
`bge-large-en` | `Represent this sentence for searching relevant passages: ` | | [BAAI/bge-small-en](https://huggingface.co/BAAI/bge-small-en) | English | [Inference](#usage-for-embedding-model) [Fine-tune](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/finetune) |a small-scale model but with competitive performance | `Represent this sentence for searching relevant passages: ` | | [BAAI/bge-large-zh](https://huggingface.co/BAAI/bge-large-zh) | Chinese | [Inference](#usage-for-embedding-model) [Fine-tune](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/finetune) | :trophy: rank **1st** in [C-MTEB](https://github.com/FlagOpen/FlagEmbedding/tree/master/C_MTEB) benchmark | `为这个句子生成表示以用于检索相关文章:` | | [BAAI/bge-base-zh](https://huggingface.co/BAAI/bge-base-zh) | Chinese | [Inference](#usage-for-embedding-model) [Fine-tune](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/finetune) | a base-scale model but with similar ability to `bge-large-zh` | `为这个句子生成表示以用于检索相关文章:` | | [BAAI/bge-small-zh](https://huggingface.co/BAAI/bge-small-zh) | Chinese | [Inference](#usage-for-embedding-model) [Fine-tune](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/finetune) | a small-scale model but with competitive performance | `为这个句子生成表示以用于检索相关文章:` | [1\]: If you need to search the relevant passages to a query, we suggest to add the instruction to the query; in other cases, no instruction is needed, just use the original query directly. In all cases, **no instruction** needs to be added to passages. [2\]: Different from embedding model, reranker uses question and document as input and directly output similarity instead of embedding. To balance the accuracy and time cost, cross-encoder is widely used to re-rank top-k documents retrieved by other simple models. For examples, use bge embedding model to retrieve top 100 relevant documents, and then use bge reranker to re-rank the top 100 document to get the final top-3 results. All models have been uploaded to Huggingface Hub, and you can see them at https://huggingface.co/BAAI. If you cannot open the Huggingface Hub, you also can download the models at https://model.baai.ac.cn/models . ## Frequently asked questions <details> <summary>1. How to fine-tune bge embedding model?</summary> <!-- ### How to fine-tune bge embedding model? --> Following this [example](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/finetune) to prepare data and fine-tune your model. Some suggestions: - Mine hard negatives following this [example](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/finetune#hard-negatives), which can improve the retrieval performance. - If you pre-train bge on your data, the pre-trained model cannot be directly used to calculate similarity, and it must be fine-tuned with contrastive learning before computing similarity. - If the accuracy of the fine-tuned model is still not high, it is recommended to use/fine-tune the cross-encoder model (bge-reranker) to re-rank top-k results. Hard negatives also are needed to fine-tune reranker. </details> <details> <summary>2. 
The similarity score between two dissimilar sentences is higher than 0.5</summary> <!-- ### The similarity score between two dissimilar sentences is higher than 0.5 --> **Suggest to use bge v1.5, which alleviates the issue of the similarity distribution.** Since we finetune the models by contrastive learning with a temperature of 0.01, the similarity distribution of the current BGE model is about in the interval \[0.6, 1\]. So a similarity score greater than 0.5 does not indicate that the two sentences are similar. For downstream tasks, such as passage retrieval or semantic similarity, **what matters is the relative order of the scores, not the absolute value.** If you need to filter similar sentences based on a similarity threshold, please select an appropriate similarity threshold based on the similarity distribution on your data (such as 0.8, 0.85, or even 0.9). </details> <details> <summary>3. When does the query instruction need to be used</summary> <!-- ### When does the query instruction need to be used --> For the `bge-*-v1.5`, we improve its retrieval ability when not using instruction. No instruction only has a slight degradation in retrieval performance compared with using instruction. So you can generate embedding without instruction in all cases for convenience. For a retrieval task that uses short queries to find long related documents, it is recommended to add instructions for these short queries. **The best method to decide whether to add instructions for queries is choosing the setting that achieves better performance on your task.** In all cases, the documents/passages do not need to add the instruction. </details> ## Usage ### Usage for Embedding Model Here are some examples for using `bge` models with [FlagEmbedding](#using-flagembedding), [Sentence-Transformers](#using-sentence-transformers), [Langchain](#using-langchain), or [Huggingface Transformers](#using-huggingface-transformers). #### Using FlagEmbedding ``` pip install -U FlagEmbedding ``` If it doesn't work for you, you can see [FlagEmbedding](https://github.com/FlagOpen/FlagEmbedding/blob/master/FlagEmbedding/baai_general_embedding/README.md) for more methods to install FlagEmbedding. ```python from FlagEmbedding import FlagModel sentences_1 = ["样例数据-1", "样例数据-2"] sentences_2 = ["样例数据-3", "样例数据-4"] model = FlagModel('BAAI/bge-large-zh-v1.5', query_instruction_for_retrieval="为这个句子生成表示以用于检索相关文章:", use_fp16=True) # Setting use_fp16 to True speeds up computation with a slight performance degradation embeddings_1 = model.encode(sentences_1) embeddings_2 = model.encode(sentences_2) similarity = embeddings_1 @ embeddings_2.T print(similarity) # for s2p(short query to long passage) retrieval task, suggest to use encode_queries() which will automatically add the instruction to each query # corpus in retrieval task can still use encode() or encode_corpus(), since they don't need instruction queries = ['query_1', 'query_2'] passages = ["样例文档-1", "样例文档-2"] q_embeddings = model.encode_queries(queries) p_embeddings = model.encode(passages) scores = q_embeddings @ p_embeddings.T ``` For the value of the argument `query_instruction_for_retrieval`, see [Model List](https://github.com/FlagOpen/FlagEmbedding/tree/master#model-list). By default, FlagModel will use all available GPUs when encoding. Please set `os.environ["CUDA_VISIBLE_DEVICES"]` to select specific GPUs. You also can set `os.environ["CUDA_VISIBLE_DEVICES"]=""` to make all GPUs unavailable. 
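For example, to pin encoding to a single GPU you can set the environment variable before the model is constructed. The snippet below is a minimal sketch that reuses the `FlagModel` call shown above; the device id `0` is only an illustration.

```python
import os

# CUDA_VISIBLE_DEVICES must be set before any CUDA context is created,
# i.e. before the model is loaded.
os.environ["CUDA_VISIBLE_DEVICES"] = "0"

from FlagEmbedding import FlagModel

model = FlagModel('BAAI/bge-small-zh-v1.5',
                  query_instruction_for_retrieval="为这个句子生成表示以用于检索相关文章:",
                  use_fp16=True)
embeddings = model.encode(["样例数据-1", "样例数据-2"])
```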
#### Using Sentence-Transformers You can also use the `bge` models with [sentence-transformers](https://www.SBERT.net): ``` pip install -U sentence-transformers ``` ```python from sentence_transformers import SentenceTransformer sentences_1 = ["样例数据-1", "样例数据-2"] sentences_2 = ["样例数据-3", "样例数据-4"] model = SentenceTransformer('BAAI/bge-large-zh-v1.5') embeddings_1 = model.encode(sentences_1, normalize_embeddings=True) embeddings_2 = model.encode(sentences_2, normalize_embeddings=True) similarity = embeddings_1 @ embeddings_2.T print(similarity) ``` For s2p(short query to long passage) retrieval task, each short query should start with an instruction (instructions see [Model List](https://github.com/FlagOpen/FlagEmbedding/tree/master#model-list)). But the instruction is not needed for passages. ```python from sentence_transformers import SentenceTransformer queries = ['query_1', 'query_2'] passages = ["样例文档-1", "样例文档-2"] instruction = "为这个句子生成表示以用于检索相关文章:" model = SentenceTransformer('BAAI/bge-large-zh-v1.5') q_embeddings = model.encode([instruction+q for q in queries], normalize_embeddings=True) p_embeddings = model.encode(passages, normalize_embeddings=True) scores = q_embeddings @ p_embeddings.T ``` #### Using Langchain You can use `bge` in langchain like this: ```python from langchain.embeddings import HuggingFaceBgeEmbeddings model_name = "BAAI/bge-large-en-v1.5" model_kwargs = {'device': 'cuda'} encode_kwargs = {'normalize_embeddings': True} # set True to compute cosine similarity model = HuggingFaceBgeEmbeddings( model_name=model_name, model_kwargs=model_kwargs, encode_kwargs=encode_kwargs, query_instruction="为这个句子生成表示以用于检索相关文章:" ) model.query_instruction = "为这个句子生成表示以用于检索相关文章:" ``` #### Using HuggingFace Transformers With the transformers package, you can use the model like this: First, you pass your input through the transformer model, then you select the last hidden state of the first token (i.e., [CLS]) as the sentence embedding. ```python from transformers import AutoTokenizer, AutoModel import torch # Sentences we want sentence embeddings for sentences = ["样例数据-1", "样例数据-2"] # Load model from HuggingFace Hub tokenizer = AutoTokenizer.from_pretrained('BAAI/bge-large-zh-v1.5') model = AutoModel.from_pretrained('BAAI/bge-large-zh-v1.5') model.eval() # Tokenize sentences encoded_input = tokenizer(sentences, padding=True, truncation=True, return_tensors='pt') # for s2p(short query to long passage) retrieval task, add an instruction to query (not add instruction for passages) # encoded_input = tokenizer([instruction + q for q in queries], padding=True, truncation=True, return_tensors='pt') # Compute token embeddings with torch.no_grad(): model_output = model(**encoded_input) # Perform pooling. In this case, cls pooling. sentence_embeddings = model_output[0][:, 0] # normalize embeddings sentence_embeddings = torch.nn.functional.normalize(sentence_embeddings, p=2, dim=1) print("Sentence embeddings:", sentence_embeddings) ``` ### Usage for Reranker Different from embedding model, reranker uses question and document as input and directly output similarity instead of embedding. You can get a relevance score by inputting query and passage to the reranker. The reranker is optimized based cross-entropy loss, so the relevance score is not bounded to a specific range. 
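If a bounded score is more convenient for downstream use, one option (not part of the library's API, just a monotonic rescaling shown here on made-up values) is to pass the raw logits through a sigmoid:

```python
import torch

# Raw reranker outputs are unbounded logits; these values are only illustrative.
raw_scores = torch.tensor([-2.3, 0.1, 5.7])

# A sigmoid maps them into (0, 1) without changing their relative order,
# so the ranking itself is unaffected.
bounded_scores = torch.sigmoid(raw_scores)
print(bounded_scores)  # approximately tensor([0.0911, 0.5250, 0.9967])
```

Because the transformation is monotonic, any threshold chosen on the bounded scale corresponds one-to-one to a threshold on the raw logits.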
#### Using FlagEmbedding ``` pip install -U FlagEmbedding ``` Get relevance scores (higher scores indicate more relevance): ```python from FlagEmbedding import FlagReranker reranker = FlagReranker('BAAI/bge-reranker-large', use_fp16=True) # Setting use_fp16 to True speeds up computation with a slight performance degradation score = reranker.compute_score(['query', 'passage']) print(score) scores = reranker.compute_score([['what is panda?', 'hi'], ['what is panda?', 'The giant panda (Ailuropoda melanoleuca), sometimes called a panda bear or simply panda, is a bear species endemic to China.']]) print(scores) ``` #### Using Huggingface transformers ```python import torch from transformers import AutoModelForSequenceClassification, AutoTokenizer tokenizer = AutoTokenizer.from_pretrained('BAAI/bge-reranker-large') model = AutoModelForSequenceClassification.from_pretrained('BAAI/bge-reranker-large') model.eval() pairs = [['what is panda?', 'hi'], ['what is panda?', 'The giant panda (Ailuropoda melanoleuca), sometimes called a panda bear or simply panda, is a bear species endemic to China.']] with torch.no_grad(): inputs = tokenizer(pairs, padding=True, truncation=True, return_tensors='pt', max_length=512) scores = model(**inputs, return_dict=True).logits.view(-1, ).float() print(scores) ``` ## Evaluation `baai-general-embedding` models achieve **state-of-the-art performance on both MTEB and C-MTEB leaderboard!** For more details and evaluation tools see our [scripts](https://github.com/FlagOpen/FlagEmbedding/blob/master/C_MTEB/README.md). - **MTEB**: | Model Name | Dimension | Sequence Length | Average (56) | Retrieval (15) |Clustering (11) | Pair Classification (3) | Reranking (4) | STS (10) | Summarization (1) | Classification (12) | |:----:|:---:|:---:|:---:|:---:|:---:|:---:|:---:|:---:|:---:|:---:| | [BAAI/bge-large-en-v1.5](https://huggingface.co/BAAI/bge-large-en-v1.5) | 1024 | 512 | **64.23** | **54.29** | 46.08 | 87.12 | 60.03 | 83.11 | 31.61 | 75.97 | | [BAAI/bge-base-en-v1.5](https://huggingface.co/BAAI/bge-base-en-v1.5) | 768 | 512 | 63.55 | 53.25 | 45.77 | 86.55 | 58.86 | 82.4 | 31.07 | 75.53 | | [BAAI/bge-small-en-v1.5](https://huggingface.co/BAAI/bge-small-en-v1.5) | 384 | 512 | 62.17 |51.68 | 43.82 | 84.92 | 58.36 | 81.59 | 30.12 | 74.14 | | [bge-large-en](https://huggingface.co/BAAI/bge-large-en) | 1024 | 512 | 63.98 | 53.9 | 46.98 | 85.8 | 59.48 | 81.56 | 32.06 | 76.21 | | [bge-base-en](https://huggingface.co/BAAI/bge-base-en) | 768 | 512 | 63.36 | 53.0 | 46.32 | 85.86 | 58.7 | 81.84 | 29.27 | 75.27 | | [gte-large](https://huggingface.co/thenlper/gte-large) | 1024 | 512 | 63.13 | 52.22 | 46.84 | 85.00 | 59.13 | 83.35 | 31.66 | 73.33 | | [gte-base](https://huggingface.co/thenlper/gte-base) | 768 | 512 | 62.39 | 51.14 | 46.2 | 84.57 | 58.61 | 82.3 | 31.17 | 73.01 | | [e5-large-v2](https://huggingface.co/intfloat/e5-large-v2) | 1024| 512 | 62.25 | 50.56 | 44.49 | 86.03 | 56.61 | 82.05 | 30.19 | 75.24 | | [bge-small-en](https://huggingface.co/BAAI/bge-small-en) | 384 | 512 | 62.11 | 51.82 | 44.31 | 83.78 | 57.97 | 80.72 | 30.53 | 74.37 | | [instructor-xl](https://huggingface.co/hkunlp/instructor-xl) | 768 | 512 | 61.79 | 49.26 | 44.74 | 86.62 | 57.29 | 83.06 | 32.32 | 61.79 | | [e5-base-v2](https://huggingface.co/intfloat/e5-base-v2) | 768 | 512 | 61.5 | 50.29 | 43.80 | 85.73 | 55.91 | 81.05 | 30.28 | 73.84 | | [gte-small](https://huggingface.co/thenlper/gte-small) | 384 | 512 | 61.36 | 49.46 | 44.89 | 83.54 | 57.7 | 82.07 | 30.42 | 72.31 | | 
[text-embedding-ada-002](https://platform.openai.com/docs/guides/embeddings) | 1536 | 8192 | 60.99 | 49.25 | 45.9 | 84.89 | 56.32 | 80.97 | 30.8 | 70.93 | | [e5-small-v2](https://huggingface.co/intfloat/e5-base-v2) | 384 | 512 | 59.93 | 49.04 | 39.92 | 84.67 | 54.32 | 80.39 | 31.16 | 72.94 | | [sentence-t5-xxl](https://huggingface.co/sentence-transformers/sentence-t5-xxl) | 768 | 512 | 59.51 | 42.24 | 43.72 | 85.06 | 56.42 | 82.63 | 30.08 | 73.42 | | [all-mpnet-base-v2](https://huggingface.co/sentence-transformers/all-mpnet-base-v2) | 768 | 514 | 57.78 | 43.81 | 43.69 | 83.04 | 59.36 | 80.28 | 27.49 | 65.07 | | [sgpt-bloom-7b1-msmarco](https://huggingface.co/bigscience/sgpt-bloom-7b1-msmarco) | 4096 | 2048 | 57.59 | 48.22 | 38.93 | 81.9 | 55.65 | 77.74 | 33.6 | 66.19 | - **C-MTEB**: We create the benchmark C-MTEB for Chinese text embedding which consists of 31 datasets from 6 tasks. Please refer to [C_MTEB](https://github.com/FlagOpen/FlagEmbedding/blob/master/C_MTEB/README.md) for a detailed introduction. | Model | Embedding dimension | Avg | Retrieval | STS | PairClassification | Classification | Reranking | Clustering | |:-------------------------------|:--------:|:--------:|:--------:|:--------:|:--------:|:--------:|:--------:|:--------:| | [**BAAI/bge-large-zh-v1.5**](https://huggingface.co/BAAI/bge-large-zh-v1.5) | 1024 | **64.53** | 70.46 | 56.25 | 81.6 | 69.13 | 65.84 | 48.99 | | [BAAI/bge-base-zh-v1.5](https://huggingface.co/BAAI/bge-base-zh-v1.5) | 768 | 63.13 | 69.49 | 53.72 | 79.75 | 68.07 | 65.39 | 47.53 | | [BAAI/bge-small-zh-v1.5](https://huggingface.co/BAAI/bge-small-zh-v1.5) | 512 | 57.82 | 61.77 | 49.11 | 70.41 | 63.96 | 60.92 | 44.18 | | [BAAI/bge-large-zh](https://huggingface.co/BAAI/bge-large-zh) | 1024 | 64.20 | 71.53 | 54.98 | 78.94 | 68.32 | 65.11 | 48.39 | | [bge-large-zh-noinstruct](https://huggingface.co/BAAI/bge-large-zh-noinstruct) | 1024 | 63.53 | 70.55 | 53 | 76.77 | 68.58 | 64.91 | 50.01 | | [BAAI/bge-base-zh](https://huggingface.co/BAAI/bge-base-zh) | 768 | 62.96 | 69.53 | 54.12 | 77.5 | 67.07 | 64.91 | 47.63 | | [multilingual-e5-large](https://huggingface.co/intfloat/multilingual-e5-large) | 1024 | 58.79 | 63.66 | 48.44 | 69.89 | 67.34 | 56.00 | 48.23 | | [BAAI/bge-small-zh](https://huggingface.co/BAAI/bge-small-zh) | 512 | 58.27 | 63.07 | 49.45 | 70.35 | 63.64 | 61.48 | 45.09 | | [m3e-base](https://huggingface.co/moka-ai/m3e-base) | 768 | 57.10 | 56.91 | 50.47 | 63.99 | 67.52 | 59.34 | 47.68 | | [m3e-large](https://huggingface.co/moka-ai/m3e-large) | 1024 | 57.05 | 54.75 | 50.42 | 64.3 | 68.2 | 59.66 | 48.88 | | [multilingual-e5-base](https://huggingface.co/intfloat/multilingual-e5-base) | 768 | 55.48 | 61.63 | 46.49 | 67.07 | 65.35 | 54.35 | 40.68 | | [multilingual-e5-small](https://huggingface.co/intfloat/multilingual-e5-small) | 384 | 55.38 | 59.95 | 45.27 | 66.45 | 65.85 | 53.86 | 45.26 | | [text-embedding-ada-002(OpenAI)](https://platform.openai.com/docs/guides/embeddings/what-are-embeddings) | 1536 | 53.02 | 52.0 | 43.35 | 69.56 | 64.31 | 54.28 | 45.68 | | [luotuo](https://huggingface.co/silk-road/luotuo-bert-medium) | 1024 | 49.37 | 44.4 | 42.78 | 66.62 | 61 | 49.25 | 44.39 | | [text2vec-base](https://huggingface.co/shibing624/text2vec-base-chinese) | 768 | 47.63 | 38.79 | 43.41 | 67.41 | 62.19 | 49.45 | 37.66 | | [text2vec-large](https://huggingface.co/GanymedeNil/text2vec-large-chinese) | 1024 | 47.36 | 41.94 | 44.97 | 70.86 | 60.66 | 49.16 | 30.02 | - **Reranking**: See [C_MTEB](https://github.com/FlagOpen/FlagEmbedding/blob/master/C_MTEB/) for 
evaluation script. | Model | T2Reranking | T2RerankingZh2En\* | T2RerankingEn2Zh\* | MMarcoReranking | CMedQAv1 | CMedQAv2 | Avg | |:-------------------------------|:--------:|:--------:|:--------:|:--------:|:--------:|:--------:|:--------:| | text2vec-base-multilingual | 64.66 | 62.94 | 62.51 | 14.37 | 48.46 | 48.6 | 50.26 | | multilingual-e5-small | 65.62 | 60.94 | 56.41 | 29.91 | 67.26 | 66.54 | 57.78 | | multilingual-e5-large | 64.55 | 61.61 | 54.28 | 28.6 | 67.42 | 67.92 | 57.4 | | multilingual-e5-base | 64.21 | 62.13 | 54.68 | 29.5 | 66.23 | 66.98 | 57.29 | | m3e-base | 66.03 | 62.74 | 56.07 | 17.51 | 77.05 | 76.76 | 59.36 | | m3e-large | 66.13 | 62.72 | 56.1 | 16.46 | 77.76 | 78.27 | 59.57 | | bge-base-zh-v1.5 | 66.49 | 63.25 | 57.02 | 29.74 | 80.47 | 84.88 | 63.64 | | bge-large-zh-v1.5 | 65.74 | 63.39 | 57.03 | 28.74 | 83.45 | 85.44 | 63.97 | | [BAAI/bge-reranker-base](https://huggingface.co/BAAI/bge-reranker-base) | 67.28 | 63.95 | 60.45 | 35.46 | 81.26 | 84.1 | 65.42 | | [BAAI/bge-reranker-large](https://huggingface.co/BAAI/bge-reranker-large) | 67.6 | 64.03 | 61.44 | 37.16 | 82.15 | 84.18 | 66.09 | \* : T2RerankingZh2En and T2RerankingEn2Zh are cross-language retrieval tasks ## Train ### BAAI Embedding We pre-train the models using [retromae](https://github.com/staoxiao/RetroMAE) and train them on large-scale pairs data using contrastive learning. **You can fine-tune the embedding model on your data following our [examples](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/finetune).** We also provide a [pre-train example](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/pretrain). Note that the goal of pre-training is to reconstruct the text, and the pre-trained model cannot be used for similarity calculation directly, it needs to be fine-tuned. More training details for bge see [baai_general_embedding](https://github.com/FlagOpen/FlagEmbedding/blob/master/FlagEmbedding/baai_general_embedding/README.md). ### BGE Reranker Cross-encoder will perform full-attention over the input pair, which is more accurate than embedding model (i.e., bi-encoder) but more time-consuming than embedding model. Therefore, it can be used to re-rank the top-k documents returned by embedding model. We train the cross-encoder on a multilingual pair data, The data format is the same as embedding model, so you can fine-tune it easily following our [example](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/reranker). More details please refer to [./FlagEmbedding/reranker/README.md](https://github.com/FlagOpen/FlagEmbedding/tree/master/FlagEmbedding/reranker) ## Contact If you have any question or suggestion related to this project, feel free to open an issue or pull request. You also can email Shitao Xiao([email protected]) and Zheng Liu([email protected]). ## Citation If you find this repository useful, please consider giving a star :star: and citation ``` @misc{bge_embedding, title={C-Pack: Packaged Resources To Advance General Chinese Embedding}, author={Shitao Xiao and Zheng Liu and Peitian Zhang and Niklas Muennighoff}, year={2023}, eprint={2309.07597}, archivePrefix={arXiv}, primaryClass={cs.CL} } ``` ## License FlagEmbedding is licensed under the [MIT License](https://github.com/FlagOpen/FlagEmbedding/blob/master/LICENSE). The released models can be used for commercial purposes free of charge.
[ "BEAR" ]
deepvk/USER-bge-m3
deepvk
sentence-similarity
[ "sentence-transformers", "safetensors", "xlm-roberta", "sentence-similarity", "feature-extraction", "ru", "dataset:deepvk/ru-HNP", "dataset:deepvk/ru-WANLI", "dataset:Shitao/bge-m3-data", "dataset:RussianNLP/russian_super_glue", "dataset:reciTAL/mlsum", "dataset:Milana/russian_keywords", "dataset:IlyaGusev/gazeta", "dataset:d0rj/gsm8k-ru", "dataset:bragovo/dsum_ru", "dataset:CarlBrendt/Summ_Dialog_News", "arxiv:2311.13534", "arxiv:2309.12871", "license:apache-2.0", "autotrain_compatible", "text-embeddings-inference", "endpoints_compatible", "region:us" ]
"2024-07-05T16:48:46Z"
2024-07-18T12:12:51+00:00
72,709
55
---
datasets:
- deepvk/ru-HNP
- deepvk/ru-WANLI
- Shitao/bge-m3-data
- RussianNLP/russian_super_glue
- reciTAL/mlsum
- Milana/russian_keywords
- IlyaGusev/gazeta
- d0rj/gsm8k-ru
- bragovo/dsum_ru
- CarlBrendt/Summ_Dialog_News
language:
- ru
library_name: sentence-transformers
license: apache-2.0
pipeline_tag: sentence-similarity
tags:
- sentence-transformers
- sentence-similarity
- feature-extraction
widget: []
---

# USER-bge-m3

**U**niversal **S**entence **E**ncoder for **R**ussian (USER) is a [sentence-transformer](https://www.SBERT.net) model for extracting embeddings exclusively for the Russian language. It maps sentences & paragraphs to a 1024-dimensional dense vector space and can be used for tasks like clustering or semantic search.

This model is initialized from [`TatonkaHF/bge-m3_en_ru`](https://huggingface.co/TatonkaHF/bge-m3_en_ru), a shrunken version of the [`baai/bge-m3`](https://huggingface.co/BAAI/bge-m3) model, and is trained to work mainly with the Russian language. Its quality on other languages was not evaluated.

## Usage

Using this model becomes easy when you have [`sentence-transformers`](https://www.SBERT.net) installed:

```
pip install -U sentence-transformers
```

Then you can use the model like this:

```python
from sentence_transformers import SentenceTransformer

input_texts = [
    "Когда был спущен на воду первый миноносец «Спокойный»?",
    "Есть ли нефть в Удмуртии?",
    "Спокойный (эсминец)\nЗачислен в списки ВМФ СССР 19 августа 1952 года.",
    "Нефтепоисковые работы в Удмуртии были начаты сразу после Второй мировой войны в 1945 году и продолжаются по сей день. Добыча нефти началась в 1967 году."
]

model = SentenceTransformer("deepvk/USER-bge-m3")
embeddings = model.encode(input_texts, normalize_embeddings=True)
```

However, you can also use the model directly with [`transformers`](https://huggingface.co/docs/transformers/en/index):

```python
import torch
import torch.nn.functional as F
from transformers import AutoTokenizer, AutoModel

input_texts = [
    "Когда был спущен на воду первый миноносец «Спокойный»?",
    "Есть ли нефть в Удмуртии?",
    "Спокойный (эсминец)\nЗачислен в списки ВМФ СССР 19 августа 1952 года.",
    "Нефтепоисковые работы в Удмуртии были начаты сразу после Второй мировой войны в 1945 году и продолжаются по сей день. Добыча нефти началась в 1967 году."
]

tokenizer = AutoTokenizer.from_pretrained("deepvk/USER-bge-m3")
model = AutoModel.from_pretrained("deepvk/USER-bge-m3")
model.eval()

encoded_input = tokenizer(input_texts, padding=True, truncation=True, return_tensors='pt')

with torch.no_grad():
    model_output = model(**encoded_input)
    # Perform pooling. In this case, cls pooling.
    sentence_embeddings = model_output[0][:, 0]

# normalize embeddings
sentence_embeddings = F.normalize(sentence_embeddings, p=2, dim=1)

# Query-to-passage similarity scores, e.g.:
# [[0.5567, 0.3014],
#  [0.1701, 0.7122]]
scores = (sentence_embeddings[:2] @ sentence_embeddings[2:].T)
```

You can also use the native [FlagEmbedding](https://github.com/FlagOpen/FlagEmbedding) library for evaluation. Usage is described in the [`bge-m3` model card](https://huggingface.co/BAAI/bge-m3).

# Training Details

We follow the [`USER-base`](https://huggingface.co/deepvk/USER-base) model training algorithm, with several changes, since we use a different backbone.

**Initialization:** [`TatonkaHF/bge-m3_en_ru`](https://huggingface.co/TatonkaHF/bge-m3_en_ru) – a shrunken version of [`baai/bge-m3`](https://huggingface.co/BAAI/bge-m3) that supports only Russian and English tokens.
**Fine-tuning:** Supervised fine-tuning of two different models, split by data symmetry, and then merging them via [`LM-Cocktail`](https://arxiv.org/abs/2311.13534):

1. Since we split the data, we could additionally apply the [AnglE loss](https://arxiv.org/abs/2309.12871) to the symmetric model, which enhances performance on symmetric tasks.
2. Finally, we added the original `bge-m3` model to the two obtained models to prevent catastrophic forgetting, tuning the weights for the merger using `LM-Cocktail` to produce the final model, **USER-bge-m3**.

### Dataset

During model development, we additionally collected two datasets: [`deepvk/ru-HNP`](https://huggingface.co/datasets/deepvk/ru-HNP) and [`deepvk/ru-WANLI`](https://huggingface.co/datasets/deepvk/ru-WANLI).

| Symmetric Dataset | Size | Asymmetric Dataset | Size |
|-------------------|-------|--------------------|------|
| **AllNLI** | 282 644 | [**MIRACL**](https://huggingface.co/datasets/Shitao/bge-m3-data/tree/main) | 10 000 |
| [MedNLI](https://github.com/jgc128/mednli) | 3 699 | [MLDR](https://huggingface.co/datasets/Shitao/bge-m3-data/tree/main) | 1 864 |
| [RCB](https://huggingface.co/datasets/RussianNLP/russian_super_glue) | 392 | [Lenta](https://github.com/yutkin/Lenta.Ru-News-Dataset) | 185 972 |
| [Terra](https://huggingface.co/datasets/RussianNLP/russian_super_glue) | 1 359 | [Mlsum](https://huggingface.co/datasets/reciTAL/mlsum) | 51 112 |
| [Tapaco](https://huggingface.co/datasets/tapaco) | 91 240 | [Mr-TyDi](https://huggingface.co/datasets/Shitao/bge-m3-data/tree/main) | 536 600 |
| [**deepvk/ru-WANLI**](https://huggingface.co/datasets/deepvk/ru-WANLI) | 35 455 | [Panorama](https://huggingface.co/datasets/its5Q/panorama) | 11 024 |
| [**deepvk/ru-HNP**](https://huggingface.co/datasets/deepvk/ru-HNP) | 500 000 | [PravoIsrael](https://huggingface.co/datasets/TarasHu/pravoIsrael) | 26 364 |
| | | [Xlsum](https://huggingface.co/datasets/csebuetnlp/xlsum) | 124 486 |
| | | [Fialka-v1](https://huggingface.co/datasets/0x7o/fialka-v1) | 130 000 |
| | | [RussianKeywords](https://huggingface.co/datasets/Milana/russian_keywords) | 16 461 |
| | | [Gazeta](https://huggingface.co/datasets/IlyaGusev/gazeta) | 121 928 |
| | | [Gsm8k-ru](https://huggingface.co/datasets/d0rj/gsm8k-ru) | 7 470 |
| | | [DSumRu](https://huggingface.co/datasets/bragovo/dsum_ru) | 27 191 |
| | | [SummDialogNews](https://huggingface.co/datasets/CarlBrendt/Summ_Dialog_News) | 75 700 |

**Total positive pairs:** 2,240,961
**Total negative pairs:** 792,644 (negative pairs from AllNLI, MIRACL, deepvk/ru-WANLI, deepvk/ru-HNP)

For all labeled datasets, we use only their training sets for fine-tuning.
For the Gazeta, Mlsum, and Xlsum datasets, (title, text) and (title, summary) pairs are combined and used as asymmetric data.

`AllNLI` is a Russian translation of the combination of SNLI, MNLI, and ANLI.

## Experiments

We compare our model with the base [`baai/bge-m3`](https://huggingface.co/BAAI/bge-m3) on the [`encodechka`](https://github.com/avidale/encodechka) benchmark. In addition, we evaluate the model on the Russian subset of [`MTEB`](https://github.com/embeddings-benchmark/mteb) on Classification, Reranking, Multilabel Classification, STS, Retrieval, and PairClassification tasks. We use the validation scripts from the official repositories for each of the tasks.
Results on encodechka:

| Model | Mean S | Mean S+W | STS | PI | NLI | SA | TI | IA | IC | ICX | NE1 | NE2 |
|-------------|--------|----------|------|------|------|------|------|------|------|------|------|------|
| [`baai/bge-m3`](https://huggingface.co/BAAI/bge-m3) | 0.787 | 0.696 | 0.86 | 0.75 | 0.51 | 0.82 | 0.97 | 0.79 | 0.81 | 0.78 | 0.24 | 0.42 |
| `USER-bge-m3` | **0.799** | **0.709** | **0.87** | **0.76** | **0.58** | 0.82 | 0.97 | 0.79 | 0.81 | 0.78 | **0.28** | **0.43** |

Results on MTEB:

| Type | [`baai/bge-m3`](https://huggingface.co/BAAI/bge-m3) | `USER-bge-m3` |
|---------------------------|--------|-------------|
| Average (30 datasets) | 0.689 | **0.706** |
| Classification Average (12 datasets) | 0.571 | **0.594** |
| Reranking Average (2 datasets) | **0.698** | 0.688 |
| MultilabelClassification (2 datasets) | 0.343 | **0.359** |
| STS Average (4 datasets) | 0.735 | **0.753** |
| Retrieval Average (6 datasets) | **0.945** | 0.934 |
| PairClassification Average (4 datasets) | 0.784 | **0.833** |

## Limitations

We did not thoroughly evaluate the model's ability for sparse and multi-vector encoding.

## Citations

```
@misc{deepvk2024user,
    title={USER: Universal Sentence Encoder for Russian},
    author={Malashenko, Boris and Zemerov, Anton and Spirin, Egor},
    url={https://huggingface.co/datasets/deepvk/USER-base},
    publisher={Hugging Face},
    year={2024},
}
```
[ "MEDNLI" ]
sdadas/mmlw-roberta-base
sdadas
sentence-similarity
[ "sentence-transformers", "pytorch", "safetensors", "roberta", "feature-extraction", "sentence-similarity", "transformers", "mteb", "pl", "arxiv:2402.13350", "license:apache-2.0", "model-index", "autotrain_compatible", "text-embeddings-inference", "endpoints_compatible", "region:us" ]
"2023-11-17T19:04:53Z"
2024-10-24T13:05:51+00:00
72,641
3
--- language: pl license: apache-2.0 pipeline_tag: sentence-similarity tags: - sentence-transformers - feature-extraction - sentence-similarity - transformers - mteb widget: - source_sentence: 'zapytanie: Jak dożyć 100 lat?' sentences: - Trzeba zdrowo się odżywiać i uprawiać sport. - Trzeba pić alkohol, imprezować i jeździć szybkimi autami. - Gdy trwała kampania politycy zapewniali, że rozprawią się z zakazem niedzielnego handlu. model-index: - name: mmlw-roberta-base results: - task: type: Clustering dataset: name: MTEB 8TagsClustering type: PL-MTEB/8tags-clustering config: default split: test revision: None metrics: - type: v_measure value: 33.08463724780795 - task: type: Classification dataset: name: MTEB AllegroReviews type: PL-MTEB/allegro-reviews config: default split: test revision: None metrics: - type: accuracy value: 40.25844930417495 - type: f1 value: 35.59685265418916 - task: type: Retrieval dataset: name: MTEB ArguAna-PL type: arguana-pl config: default split: test revision: None metrics: - type: map_at_1 value: 33.073 - type: map_at_10 value: 50.223 - type: map_at_100 value: 50.942 - type: map_at_1000 value: 50.94499999999999 - type: map_at_3 value: 45.721000000000004 - type: map_at_5 value: 48.413000000000004 - type: mrr_at_1 value: 34.424 - type: mrr_at_10 value: 50.68899999999999 - type: mrr_at_100 value: 51.437999999999995 - type: mrr_at_1000 value: 51.441 - type: mrr_at_3 value: 46.219 - type: mrr_at_5 value: 48.921 - type: ndcg_at_1 value: 33.073 - type: ndcg_at_10 value: 59.021 - type: ndcg_at_100 value: 61.902 - type: ndcg_at_1000 value: 61.983999999999995 - type: ndcg_at_3 value: 49.818 - type: ndcg_at_5 value: 54.644999999999996 - type: precision_at_1 value: 33.073 - type: precision_at_10 value: 8.684 - type: precision_at_100 value: 0.9900000000000001 - type: precision_at_1000 value: 0.1 - type: precision_at_3 value: 20.555 - type: precision_at_5 value: 14.666 - type: recall_at_1 value: 33.073 - type: recall_at_10 value: 86.842 - type: recall_at_100 value: 99.004 - type: recall_at_1000 value: 99.644 - type: recall_at_3 value: 61.663999999999994 - type: recall_at_5 value: 73.329 - task: type: Classification dataset: name: MTEB CBD type: PL-MTEB/cbd config: default split: test revision: None metrics: - type: accuracy value: 68.11 - type: ap value: 20.916633959031266 - type: f1 value: 56.85804802205465 - task: type: PairClassification dataset: name: MTEB CDSC-E type: PL-MTEB/cdsce-pairclassification config: default split: test revision: None metrics: - type: cos_sim_accuracy value: 89.2 - type: cos_sim_ap value: 79.1041156765933 - type: cos_sim_f1 value: 70.0 - type: cos_sim_precision value: 74.11764705882354 - type: cos_sim_recall value: 66.3157894736842 - type: dot_accuracy value: 88.2 - type: dot_ap value: 72.57183688228149 - type: dot_f1 value: 67.16417910447761 - type: dot_precision value: 63.67924528301887 - type: dot_recall value: 71.05263157894737 - type: euclidean_accuracy value: 89.3 - type: euclidean_ap value: 79.01345533432428 - type: euclidean_f1 value: 70.19498607242339 - type: euclidean_precision value: 74.55621301775149 - type: euclidean_recall value: 66.3157894736842 - type: manhattan_accuracy value: 89.3 - type: manhattan_ap value: 79.01671381791259 - type: manhattan_f1 value: 70.0280112044818 - type: manhattan_precision value: 74.8502994011976 - type: manhattan_recall value: 65.78947368421053 - type: max_accuracy value: 89.3 - type: max_ap value: 79.1041156765933 - type: max_f1 value: 70.19498607242339 - task: type: STS dataset: name: MTEB CDSC-R 
type: PL-MTEB/cdscr-sts config: default split: test revision: None metrics: - type: cos_sim_pearson value: 91.79559442663039 - type: cos_sim_spearman value: 92.5438168962641 - type: euclidean_pearson value: 92.02981265332856 - type: euclidean_spearman value: 92.5548245733484 - type: manhattan_pearson value: 91.95296287979178 - type: manhattan_spearman value: 92.50279516120241 - task: type: Retrieval dataset: name: MTEB DBPedia-PL type: dbpedia-pl config: default split: test revision: None metrics: - type: map_at_1 value: 7.829999999999999 - type: map_at_10 value: 16.616 - type: map_at_100 value: 23.629 - type: map_at_1000 value: 25.235999999999997 - type: map_at_3 value: 12.485 - type: map_at_5 value: 14.077 - type: mrr_at_1 value: 61.75000000000001 - type: mrr_at_10 value: 69.852 - type: mrr_at_100 value: 70.279 - type: mrr_at_1000 value: 70.294 - type: mrr_at_3 value: 68.375 - type: mrr_at_5 value: 69.187 - type: ndcg_at_1 value: 49.75 - type: ndcg_at_10 value: 36.217 - type: ndcg_at_100 value: 41.235 - type: ndcg_at_1000 value: 48.952 - type: ndcg_at_3 value: 41.669 - type: ndcg_at_5 value: 38.285000000000004 - type: precision_at_1 value: 61.5 - type: precision_at_10 value: 28.499999999999996 - type: precision_at_100 value: 9.572 - type: precision_at_1000 value: 2.025 - type: precision_at_3 value: 44.083 - type: precision_at_5 value: 36.3 - type: recall_at_1 value: 7.829999999999999 - type: recall_at_10 value: 21.462999999999997 - type: recall_at_100 value: 47.095 - type: recall_at_1000 value: 71.883 - type: recall_at_3 value: 13.891 - type: recall_at_5 value: 16.326999999999998 - task: type: Retrieval dataset: name: MTEB FiQA-PL type: fiqa-pl config: default split: test revision: None metrics: - type: map_at_1 value: 16.950000000000003 - type: map_at_10 value: 27.422 - type: map_at_100 value: 29.146 - type: map_at_1000 value: 29.328 - type: map_at_3 value: 23.735999999999997 - type: map_at_5 value: 25.671 - type: mrr_at_1 value: 33.796 - type: mrr_at_10 value: 42.689 - type: mrr_at_100 value: 43.522 - type: mrr_at_1000 value: 43.563 - type: mrr_at_3 value: 40.226 - type: mrr_at_5 value: 41.685 - type: ndcg_at_1 value: 33.642 - type: ndcg_at_10 value: 35.008 - type: ndcg_at_100 value: 41.839 - type: ndcg_at_1000 value: 45.035 - type: ndcg_at_3 value: 31.358999999999998 - type: ndcg_at_5 value: 32.377 - type: precision_at_1 value: 33.642 - type: precision_at_10 value: 9.937999999999999 - type: precision_at_100 value: 1.685 - type: precision_at_1000 value: 0.22699999999999998 - type: precision_at_3 value: 21.142 - type: precision_at_5 value: 15.586 - type: recall_at_1 value: 16.950000000000003 - type: recall_at_10 value: 42.286 - type: recall_at_100 value: 68.51899999999999 - type: recall_at_1000 value: 87.471 - type: recall_at_3 value: 28.834 - type: recall_at_5 value: 34.274 - task: type: Retrieval dataset: name: MTEB HotpotQA-PL type: hotpotqa-pl config: default split: test revision: None metrics: - type: map_at_1 value: 37.711 - type: map_at_10 value: 57.867999999999995 - type: map_at_100 value: 58.77 - type: map_at_1000 value: 58.836999999999996 - type: map_at_3 value: 54.400999999999996 - type: map_at_5 value: 56.564 - type: mrr_at_1 value: 75.449 - type: mrr_at_10 value: 81.575 - type: mrr_at_100 value: 81.783 - type: mrr_at_1000 value: 81.792 - type: mrr_at_3 value: 80.50399999999999 - type: mrr_at_5 value: 81.172 - type: ndcg_at_1 value: 75.422 - type: ndcg_at_10 value: 66.635 - type: ndcg_at_100 value: 69.85 - type: ndcg_at_1000 value: 71.179 - type: ndcg_at_3 value: 61.648 - 
type: ndcg_at_5 value: 64.412 - type: precision_at_1 value: 75.422 - type: precision_at_10 value: 13.962 - type: precision_at_100 value: 1.649 - type: precision_at_1000 value: 0.183 - type: precision_at_3 value: 39.172000000000004 - type: precision_at_5 value: 25.691000000000003 - type: recall_at_1 value: 37.711 - type: recall_at_10 value: 69.811 - type: recall_at_100 value: 82.471 - type: recall_at_1000 value: 91.29 - type: recall_at_3 value: 58.757999999999996 - type: recall_at_5 value: 64.227 - task: type: Retrieval dataset: name: MTEB MSMARCO-PL type: msmarco-pl config: default split: validation revision: None metrics: - type: map_at_1 value: 17.033 - type: map_at_10 value: 27.242 - type: map_at_100 value: 28.451999999999998 - type: map_at_1000 value: 28.515 - type: map_at_3 value: 24.046 - type: map_at_5 value: 25.840999999999998 - type: mrr_at_1 value: 17.493 - type: mrr_at_10 value: 27.67 - type: mrr_at_100 value: 28.823999999999998 - type: mrr_at_1000 value: 28.881 - type: mrr_at_3 value: 24.529999999999998 - type: mrr_at_5 value: 26.27 - type: ndcg_at_1 value: 17.479 - type: ndcg_at_10 value: 33.048 - type: ndcg_at_100 value: 39.071 - type: ndcg_at_1000 value: 40.739999999999995 - type: ndcg_at_3 value: 26.493 - type: ndcg_at_5 value: 29.701 - type: precision_at_1 value: 17.479 - type: precision_at_10 value: 5.324 - type: precision_at_100 value: 0.8380000000000001 - type: precision_at_1000 value: 0.098 - type: precision_at_3 value: 11.408999999999999 - type: precision_at_5 value: 8.469999999999999 - type: recall_at_1 value: 17.033 - type: recall_at_10 value: 50.929 - type: recall_at_100 value: 79.262 - type: recall_at_1000 value: 92.239 - type: recall_at_3 value: 33.06 - type: recall_at_5 value: 40.747 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (pl) type: mteb/amazon_massive_intent config: pl split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 72.31002017484867 - type: f1 value: 69.61603671063031 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (pl) type: mteb/amazon_massive_scenario config: pl split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 75.52790854068594 - type: f1 value: 75.4053872472259 - task: type: Retrieval dataset: name: MTEB NFCorpus-PL type: nfcorpus-pl config: default split: test revision: None metrics: - type: map_at_1 value: 5.877000000000001 - type: map_at_10 value: 12.817 - type: map_at_100 value: 16.247 - type: map_at_1000 value: 17.683 - type: map_at_3 value: 9.334000000000001 - type: map_at_5 value: 10.886999999999999 - type: mrr_at_1 value: 45.201 - type: mrr_at_10 value: 52.7 - type: mrr_at_100 value: 53.425999999999995 - type: mrr_at_1000 value: 53.461000000000006 - type: mrr_at_3 value: 50.464 - type: mrr_at_5 value: 51.827 - type: ndcg_at_1 value: 41.949999999999996 - type: ndcg_at_10 value: 34.144999999999996 - type: ndcg_at_100 value: 31.556 - type: ndcg_at_1000 value: 40.265 - type: ndcg_at_3 value: 38.07 - type: ndcg_at_5 value: 36.571 - type: precision_at_1 value: 44.272 - type: precision_at_10 value: 25.697 - type: precision_at_100 value: 8.077 - type: precision_at_1000 value: 2.084 - type: precision_at_3 value: 36.016999999999996 - type: precision_at_5 value: 31.703 - type: recall_at_1 value: 5.877000000000001 - type: recall_at_10 value: 16.986 - type: recall_at_100 value: 32.719 - type: recall_at_1000 value: 63.763000000000005 - type: recall_at_3 value: 10.292 - type: recall_at_5 value: 
12.886000000000001 - task: type: Retrieval dataset: name: MTEB NQ-PL type: nq-pl config: default split: test revision: None metrics: - type: map_at_1 value: 25.476 - type: map_at_10 value: 38.67 - type: map_at_100 value: 39.784000000000006 - type: map_at_1000 value: 39.831 - type: map_at_3 value: 34.829 - type: map_at_5 value: 37.025000000000006 - type: mrr_at_1 value: 28.621000000000002 - type: mrr_at_10 value: 41.13 - type: mrr_at_100 value: 42.028 - type: mrr_at_1000 value: 42.059999999999995 - type: mrr_at_3 value: 37.877 - type: mrr_at_5 value: 39.763999999999996 - type: ndcg_at_1 value: 28.563 - type: ndcg_at_10 value: 45.654 - type: ndcg_at_100 value: 50.695 - type: ndcg_at_1000 value: 51.873999999999995 - type: ndcg_at_3 value: 38.359 - type: ndcg_at_5 value: 42.045 - type: precision_at_1 value: 28.563 - type: precision_at_10 value: 7.6450000000000005 - type: precision_at_100 value: 1.052 - type: precision_at_1000 value: 0.117 - type: precision_at_3 value: 17.458000000000002 - type: precision_at_5 value: 12.613 - type: recall_at_1 value: 25.476 - type: recall_at_10 value: 64.484 - type: recall_at_100 value: 86.96199999999999 - type: recall_at_1000 value: 95.872 - type: recall_at_3 value: 45.527 - type: recall_at_5 value: 54.029 - task: type: Classification dataset: name: MTEB PAC type: laugustyniak/abusive-clauses-pl config: default split: test revision: None metrics: - type: accuracy value: 65.87315377932232 - type: ap value: 76.41966964416534 - type: f1 value: 63.64417488639012 - task: type: PairClassification dataset: name: MTEB PPC type: PL-MTEB/ppc-pairclassification config: default split: test revision: None metrics: - type: cos_sim_accuracy value: 87.7 - type: cos_sim_ap value: 92.81319372631636 - type: cos_sim_f1 value: 90.04048582995952 - type: cos_sim_precision value: 88.11410459587957 - type: cos_sim_recall value: 92.05298013245033 - type: dot_accuracy value: 75.0 - type: dot_ap value: 83.63089957943261 - type: dot_f1 value: 80.76923076923077 - type: dot_precision value: 75.43103448275862 - type: dot_recall value: 86.9205298013245 - type: euclidean_accuracy value: 87.7 - type: euclidean_ap value: 92.94772245932825 - type: euclidean_f1 value: 90.10458567980692 - type: euclidean_precision value: 87.63693270735524 - type: euclidean_recall value: 92.71523178807946 - type: manhattan_accuracy value: 87.8 - type: manhattan_ap value: 92.95330512127123 - type: manhattan_f1 value: 90.08130081300813 - type: manhattan_precision value: 88.49840255591054 - type: manhattan_recall value: 91.72185430463577 - type: max_accuracy value: 87.8 - type: max_ap value: 92.95330512127123 - type: max_f1 value: 90.10458567980692 - task: type: PairClassification dataset: name: MTEB PSC type: PL-MTEB/psc-pairclassification config: default split: test revision: None metrics: - type: cos_sim_accuracy value: 96.19666048237477 - type: cos_sim_ap value: 98.61237969571302 - type: cos_sim_f1 value: 93.77845220030349 - type: cos_sim_precision value: 93.35347432024169 - type: cos_sim_recall value: 94.20731707317073 - type: dot_accuracy value: 94.89795918367348 - type: dot_ap value: 97.02853491357943 - type: dot_f1 value: 91.85185185185186 - type: dot_precision value: 89.33717579250721 - type: dot_recall value: 94.51219512195121 - type: euclidean_accuracy value: 96.38218923933209 - type: euclidean_ap value: 98.58145584134218 - type: euclidean_f1 value: 94.04580152671755 - type: euclidean_precision value: 94.18960244648318 - type: euclidean_recall value: 93.90243902439023 - type: manhattan_accuracy value: 
96.47495361781077 - type: manhattan_ap value: 98.6108221024781 - type: manhattan_f1 value: 94.18960244648318 - type: manhattan_precision value: 94.47852760736197 - type: manhattan_recall value: 93.90243902439023 - type: max_accuracy value: 96.47495361781077 - type: max_ap value: 98.61237969571302 - type: max_f1 value: 94.18960244648318 - task: type: Classification dataset: name: MTEB PolEmo2.0-IN type: PL-MTEB/polemo2_in config: default split: test revision: None metrics: - type: accuracy value: 71.73130193905818 - type: f1 value: 71.17731918813324 - task: type: Classification dataset: name: MTEB PolEmo2.0-OUT type: PL-MTEB/polemo2_out config: default split: test revision: None metrics: - type: accuracy value: 46.59919028340081 - type: f1 value: 37.216392949948954 - task: type: Retrieval dataset: name: MTEB Quora-PL type: quora-pl config: default split: test revision: None metrics: - type: map_at_1 value: 66.134 - type: map_at_10 value: 80.19 - type: map_at_100 value: 80.937 - type: map_at_1000 value: 80.95599999999999 - type: map_at_3 value: 77.074 - type: map_at_5 value: 79.054 - type: mrr_at_1 value: 75.88000000000001 - type: mrr_at_10 value: 83.226 - type: mrr_at_100 value: 83.403 - type: mrr_at_1000 value: 83.406 - type: mrr_at_3 value: 82.03200000000001 - type: mrr_at_5 value: 82.843 - type: ndcg_at_1 value: 75.94 - type: ndcg_at_10 value: 84.437 - type: ndcg_at_100 value: 86.13 - type: ndcg_at_1000 value: 86.29299999999999 - type: ndcg_at_3 value: 81.07799999999999 - type: ndcg_at_5 value: 83.0 - type: precision_at_1 value: 75.94 - type: precision_at_10 value: 12.953999999999999 - type: precision_at_100 value: 1.514 - type: precision_at_1000 value: 0.156 - type: precision_at_3 value: 35.61 - type: precision_at_5 value: 23.652 - type: recall_at_1 value: 66.134 - type: recall_at_10 value: 92.991 - type: recall_at_100 value: 99.003 - type: recall_at_1000 value: 99.86 - type: recall_at_3 value: 83.643 - type: recall_at_5 value: 88.81099999999999 - task: type: Retrieval dataset: name: MTEB SCIDOCS-PL type: scidocs-pl config: default split: test revision: None metrics: - type: map_at_1 value: 4.183 - type: map_at_10 value: 10.626 - type: map_at_100 value: 12.485 - type: map_at_1000 value: 12.793 - type: map_at_3 value: 7.531000000000001 - type: map_at_5 value: 9.037 - type: mrr_at_1 value: 20.5 - type: mrr_at_10 value: 30.175 - type: mrr_at_100 value: 31.356 - type: mrr_at_1000 value: 31.421 - type: mrr_at_3 value: 26.900000000000002 - type: mrr_at_5 value: 28.689999999999998 - type: ndcg_at_1 value: 20.599999999999998 - type: ndcg_at_10 value: 17.84 - type: ndcg_at_100 value: 25.518 - type: ndcg_at_1000 value: 31.137999999999998 - type: ndcg_at_3 value: 16.677 - type: ndcg_at_5 value: 14.641000000000002 - type: precision_at_1 value: 20.599999999999998 - type: precision_at_10 value: 9.3 - type: precision_at_100 value: 2.048 - type: precision_at_1000 value: 0.33999999999999997 - type: precision_at_3 value: 15.533 - type: precision_at_5 value: 12.839999999999998 - type: recall_at_1 value: 4.183 - type: recall_at_10 value: 18.862000000000002 - type: recall_at_100 value: 41.592 - type: recall_at_1000 value: 69.037 - type: recall_at_3 value: 9.443 - type: recall_at_5 value: 13.028 - task: type: PairClassification dataset: name: MTEB SICK-E-PL type: PL-MTEB/sicke-pl-pairclassification config: default split: test revision: None metrics: - type: cos_sim_accuracy value: 86.32286995515696 - type: cos_sim_ap value: 82.04302619416443 - type: cos_sim_f1 value: 74.95572086432874 - type: 
cos_sim_precision value: 74.55954897815363 - type: cos_sim_recall value: 75.35612535612536 - type: dot_accuracy value: 83.9176518548716 - type: dot_ap value: 76.8608733580272 - type: dot_f1 value: 72.31936654569449 - type: dot_precision value: 67.36324523663184 - type: dot_recall value: 78.06267806267806 - type: euclidean_accuracy value: 86.32286995515696 - type: euclidean_ap value: 81.9648986659308 - type: euclidean_f1 value: 74.93796526054591 - type: euclidean_precision value: 74.59421312632321 - type: euclidean_recall value: 75.28490028490027 - type: manhattan_accuracy value: 86.30248675091724 - type: manhattan_ap value: 81.92853980116878 - type: manhattan_f1 value: 74.80968858131489 - type: manhattan_precision value: 72.74562584118439 - type: manhattan_recall value: 76.99430199430199 - type: max_accuracy value: 86.32286995515696 - type: max_ap value: 82.04302619416443 - type: max_f1 value: 74.95572086432874 - task: type: STS dataset: name: MTEB SICK-R-PL type: PL-MTEB/sickr-pl-sts config: default split: test revision: None metrics: - type: cos_sim_pearson value: 83.07566183637853 - type: cos_sim_spearman value: 79.20198022242548 - type: euclidean_pearson value: 81.27875473517936 - type: euclidean_spearman value: 79.21560102311153 - type: manhattan_pearson value: 81.21559474880459 - type: manhattan_spearman value: 79.1537846814979 - task: type: STS dataset: name: MTEB STS22 (pl) type: mteb/sts22-crosslingual-sts config: pl split: test revision: 6d1ba47164174a496b7fa5d3569dae26a6813b80 metrics: - type: cos_sim_pearson value: 36.39657573900194 - type: cos_sim_spearman value: 40.36403461037013 - type: euclidean_pearson value: 29.143416004776316 - type: euclidean_spearman value: 40.43197841306375 - type: manhattan_pearson value: 29.18632337290767 - type: manhattan_spearman value: 40.50563343395481 - task: type: Retrieval dataset: name: MTEB SciFact-PL type: scifact-pl config: default split: test revision: None metrics: - type: map_at_1 value: 49.428 - type: map_at_10 value: 60.423 - type: map_at_100 value: 61.037 - type: map_at_1000 value: 61.065999999999995 - type: map_at_3 value: 56.989000000000004 - type: map_at_5 value: 59.041999999999994 - type: mrr_at_1 value: 52.666999999999994 - type: mrr_at_10 value: 61.746 - type: mrr_at_100 value: 62.273 - type: mrr_at_1000 value: 62.300999999999995 - type: mrr_at_3 value: 59.278 - type: mrr_at_5 value: 60.611000000000004 - type: ndcg_at_1 value: 52.333 - type: ndcg_at_10 value: 65.75 - type: ndcg_at_100 value: 68.566 - type: ndcg_at_1000 value: 69.314 - type: ndcg_at_3 value: 59.768 - type: ndcg_at_5 value: 62.808 - type: precision_at_1 value: 52.333 - type: precision_at_10 value: 9.167 - type: precision_at_100 value: 1.0630000000000002 - type: precision_at_1000 value: 0.11299999999999999 - type: precision_at_3 value: 23.778 - type: precision_at_5 value: 16.2 - type: recall_at_1 value: 49.428 - type: recall_at_10 value: 81.07799999999999 - type: recall_at_100 value: 93.93299999999999 - type: recall_at_1000 value: 99.667 - type: recall_at_3 value: 65.061 - type: recall_at_5 value: 72.667 - task: type: Retrieval dataset: name: MTEB TRECCOVID-PL type: trec-covid-pl config: default split: test revision: None metrics: - type: map_at_1 value: 0.22100000000000003 - type: map_at_10 value: 1.788 - type: map_at_100 value: 9.937 - type: map_at_1000 value: 24.762999999999998 - type: map_at_3 value: 0.579 - type: map_at_5 value: 0.947 - type: mrr_at_1 value: 78.0 - type: mrr_at_10 value: 88.067 - type: mrr_at_100 value: 88.067 - type: mrr_at_1000 value: 
88.067 - type: mrr_at_3 value: 87.667 - type: mrr_at_5 value: 88.067 - type: ndcg_at_1 value: 76.0 - type: ndcg_at_10 value: 71.332 - type: ndcg_at_100 value: 54.80500000000001 - type: ndcg_at_1000 value: 49.504999999999995 - type: ndcg_at_3 value: 73.693 - type: ndcg_at_5 value: 73.733 - type: precision_at_1 value: 82.0 - type: precision_at_10 value: 76.8 - type: precision_at_100 value: 56.68 - type: precision_at_1000 value: 22.236 - type: precision_at_3 value: 78.667 - type: precision_at_5 value: 79.2 - type: recall_at_1 value: 0.22100000000000003 - type: recall_at_10 value: 2.033 - type: recall_at_100 value: 13.431999999999999 - type: recall_at_1000 value: 46.913 - type: recall_at_3 value: 0.625 - type: recall_at_5 value: 1.052 --- <h1 align="center">MMLW-roberta-base</h1> MMLW (muszę mieć lepszą wiadomość) are neural text encoders for Polish. This is a distilled model that can be used to generate embeddings applicable to many tasks such as semantic similarity, clustering, information retrieval. The model can also serve as a base for further fine-tuning. It transforms texts to 768 dimensional vectors. The model was initialized with Polish RoBERTa checkpoint, and then trained with [multilingual knowledge distillation method](https://aclanthology.org/2020.emnlp-main.365/) on a diverse corpus of 60 million Polish-English text pairs. We utilised [English FlagEmbeddings (BGE)](https://huggingface.co/BAAI/bge-base-en) as teacher models for distillation. ## Usage (Sentence-Transformers) ⚠️ Our embedding models require the use of specific prefixes and suffixes when encoding texts. For this model, each query should be preceded by the prefix **"zapytanie: "** ⚠️ You can use the model like this with [sentence-transformers](https://www.SBERT.net): ```python from sentence_transformers import SentenceTransformer from sentence_transformers.util import cos_sim query_prefix = "zapytanie: " answer_prefix = "" queries = [query_prefix + "Jak dożyć 100 lat?"] answers = [ answer_prefix + "Trzeba zdrowo się odżywiać i uprawiać sport.", answer_prefix + "Trzeba pić alkohol, imprezować i jeździć szybkimi autami.", answer_prefix + "Gdy trwała kampania politycy zapewniali, że rozprawią się z zakazem niedzielnego handlu." ] model = SentenceTransformer("sdadas/mmlw-roberta-base") queries_emb = model.encode(queries, convert_to_tensor=True, show_progress_bar=False) answers_emb = model.encode(answers, convert_to_tensor=True, show_progress_bar=False) best_answer = cos_sim(queries_emb, answers_emb).argmax().item() print(answers[best_answer]) # Trzeba zdrowo się odżywiać i uprawiać sport. ``` ## Evaluation Results - The model achieves an **Average Score** of **61.05** on the Polish Massive Text Embedding Benchmark (MTEB). See [MTEB Leaderboard](https://huggingface.co/spaces/mteb/leaderboard) for detailed results. - The model achieves **NDCG@10** of **53.60** on the Polish Information Retrieval Benchmark. See [PIRB Leaderboard](https://huggingface.co/spaces/sdadas/pirb) for detailed results. ## Acknowledgements This model was trained with the A100 GPU cluster support delivered by the Gdansk University of Technology within the TASK center initiative. ## Citation ```bibtex @article{dadas2024pirb, title={{PIRB}: A Comprehensive Benchmark of Polish Dense and Hybrid Text Retrieval Methods}, author={Sławomir Dadas and Michał Perełkiewicz and Rafał Poświata}, year={2024}, eprint={2402.13350}, archivePrefix={arXiv}, primaryClass={cs.CL} } ```
[ "SCIFACT" ]
BAAI/llm-embedder
BAAI
feature-extraction
[ "transformers", "pytorch", "safetensors", "bert", "feature-extraction", "arxiv:2310.07554", "arxiv:2309.07597", "license:mit", "text-embeddings-inference", "endpoints_compatible", "region:us" ]
"2023-10-09T09:46:10Z"
2023-11-14T10:11:55+00:00
68,786
118
--- license: mit --- <h1 align="center">FlagEmbedding</h1> <h4 align="center"> <p> <a href=#model-list>Model List</a> | <a href=#frequently-asked-questions>FAQ</a> | <a href=#usage>Usage</a> | <a href="#evaluation">Evaluation</a> | <a href="#train">Train</a> | <a href="#contact">Contact</a> | <a href="#citation">Citation</a> | <a href="#license">License</a> <p> </h4> More details please refer to our Github: [FlagEmbedding](https://github.com/FlagOpen/FlagEmbedding). [English](README.md) | [中文](https://github.com/FlagOpen/FlagEmbedding/blob/master/README_zh.md) <span style="#FF69B4;"> **Hiring:** We're seeking experienced NLP researchers and intern students focusing on dense retrieval and retrieval-augmented LLMs. If you're interested, please feel free to reach out to us via email at [email protected].</span> FlagEmbedding can map any text to a low-dimensional dense vector, which can be used for tasks like retrieval, classification, clustering, and semantic search. And it can also be used in vector databases for LLMs. ************* 🌟**Updates**🌟 ************* - 10/12/2023: Release [LLM-Embedder](https://github.com/FlagOpen/FlagEmbedding/tree/master/FlagEmbedding/llm_embedder), a unified embedding model to support diverse retrieval augmentation needs for LLMs. [Paper](https://arxiv.org/pdf/2310.07554.pdf) :fire: - 09/15/2023: The [technical report](https://arxiv.org/pdf/2309.07597.pdf) of BGE has been released - 09/15/2023: The [massive training data](https://data.baai.ac.cn/details/BAAI-MTP) of BGE has been released - 09/12/2023: New models: - **New reranker model**: release cross-encoder models `BAAI/bge-reranker-base` and `BAAI/bge-reranker-large`, which are more powerful than embedding model. We recommend to use/fine-tune them to re-rank top-k documents returned by embedding models. - **update embedding model**: release `bge-*-v1.5` embedding model to alleviate the issue of the similarity distribution, and enhance its retrieval ability without instruction. <details> <summary>More</summary> <!-- ### More --> - 09/07/2023: Update [fine-tune code](https://github.com/FlagOpen/FlagEmbedding/blob/master/FlagEmbedding/baai_general_embedding/README.md): Add script to mine hard negatives and support adding instruction during fine-tuning. - 08/09/2023: BGE Models are integrated into **Langchain**, you can use it like [this](#using-langchain); C-MTEB **leaderboard** is [available](https://huggingface.co/spaces/mteb/leaderboard). - 08/05/2023: Release base-scale and small-scale models, **best performance among the models of the same size 🤗** - 08/02/2023: Release `bge-large-*`(short for BAAI General Embedding) Models, **rank 1st on MTEB and C-MTEB benchmark!** :tada: :tada: - 08/01/2023: We release the [Chinese Massive Text Embedding Benchmark](https://github.com/FlagOpen/FlagEmbedding/blob/master/C_MTEB) (**C-MTEB**), consisting of 31 test dataset. </details> ## Model List `bge` is short for `BAAI general embedding`. 
| Model | Language | | Description | query instruction for retrieval [1] | |:-------------------------------|:--------:| :--------:| :--------:|:--------:| | [BAAI/llm-embedder](https://huggingface.co/BAAI/llm-embedder) | English | [Inference](https://github.com/FlagOpen/FlagEmbedding/tree/master/FlagEmbedding/llm_embedder) [Fine-tune](https://github.com/FlagOpen/FlagEmbedding/tree/master/FlagEmbedding/llm_embedder) | a unified embedding model to support diverse retrieval augmentation needs for LLMs | See [README](https://github.com/FlagOpen/FlagEmbedding/tree/master/FlagEmbedding/llm_embedder) | | [BAAI/bge-reranker-large](https://huggingface.co/BAAI/bge-reranker-large) | Chinese and English | [Inference](#usage-for-reranker) [Fine-tune](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/reranker) | a cross-encoder model which is more accurate but less efficient [2] | | | [BAAI/bge-reranker-base](https://huggingface.co/BAAI/bge-reranker-base) | Chinese and English | [Inference](#usage-for-reranker) [Fine-tune](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/reranker) | a cross-encoder model which is more accurate but less efficient [2] | | | [BAAI/bge-large-en-v1.5](https://huggingface.co/BAAI/bge-large-en-v1.5) | English | [Inference](#usage-for-embedding-model) [Fine-tune](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/finetune) | version 1.5 with more reasonable similarity distribution | `Represent this sentence for searching relevant passages: ` | | [BAAI/bge-base-en-v1.5](https://huggingface.co/BAAI/bge-base-en-v1.5) | English | [Inference](#usage-for-embedding-model) [Fine-tune](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/finetune) | version 1.5 with more reasonable similarity distribution | `Represent this sentence for searching relevant passages: ` | | [BAAI/bge-small-en-v1.5](https://huggingface.co/BAAI/bge-small-en-v1.5) | English | [Inference](#usage-for-embedding-model) [Fine-tune](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/finetune) | version 1.5 with more reasonable similarity distribution | `Represent this sentence for searching relevant passages: ` | | [BAAI/bge-large-zh-v1.5](https://huggingface.co/BAAI/bge-large-zh-v1.5) | Chinese | [Inference](#usage-for-embedding-model) [Fine-tune](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/finetune) | version 1.5 with more reasonable similarity distribution | `为这个句子生成表示以用于检索相关文章:` | | [BAAI/bge-base-zh-v1.5](https://huggingface.co/BAAI/bge-base-zh-v1.5) | Chinese | [Inference](#usage-for-embedding-model) [Fine-tune](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/finetune) | version 1.5 with more reasonable similarity distribution | `为这个句子生成表示以用于检索相关文章:` | | [BAAI/bge-small-zh-v1.5](https://huggingface.co/BAAI/bge-small-zh-v1.5) | Chinese | [Inference](#usage-for-embedding-model) [Fine-tune](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/finetune) | version 1.5 with more reasonable similarity distribution | `为这个句子生成表示以用于检索相关文章:` | | [BAAI/bge-large-en](https://huggingface.co/BAAI/bge-large-en) | English | [Inference](#usage-for-embedding-model) [Fine-tune](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/finetune) | :trophy: rank **1st** in [MTEB](https://huggingface.co/spaces/mteb/leaderboard) leaderboard | `Represent this sentence for searching relevant passages: ` | | [BAAI/bge-base-en](https://huggingface.co/BAAI/bge-base-en) | English | [Inference](#usage-for-embedding-model) 
[Fine-tune](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/finetune) | a base-scale model but with similar ability to `bge-large-en` | `Represent this sentence for searching relevant passages: ` | | [BAAI/bge-small-en](https://huggingface.co/BAAI/bge-small-en) | English | [Inference](#usage-for-embedding-model) [Fine-tune](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/finetune) |a small-scale model but with competitive performance | `Represent this sentence for searching relevant passages: ` | | [BAAI/bge-large-zh](https://huggingface.co/BAAI/bge-large-zh) | Chinese | [Inference](#usage-for-embedding-model) [Fine-tune](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/finetune) | :trophy: rank **1st** in [C-MTEB](https://github.com/FlagOpen/FlagEmbedding/tree/master/C_MTEB) benchmark | `为这个句子生成表示以用于检索相关文章:` | | [BAAI/bge-base-zh](https://huggingface.co/BAAI/bge-base-zh) | Chinese | [Inference](#usage-for-embedding-model) [Fine-tune](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/finetune) | a base-scale model but with similar ability to `bge-large-zh` | `为这个句子生成表示以用于检索相关文章:` | | [BAAI/bge-small-zh](https://huggingface.co/BAAI/bge-small-zh) | Chinese | [Inference](#usage-for-embedding-model) [Fine-tune](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/finetune) | a small-scale model but with competitive performance | `为这个句子生成表示以用于检索相关文章:` | [1\]: If you need to search the relevant passages in a query, we suggest to add the instruction to the query; in other cases, no instruction is needed, just use the original query directly. In all cases, **no instruction** needs to be added to passages. [2\]: Different from the embedding model, reranker uses question and document as input and directly output similarity instead of embedding. To balance the accuracy and time cost, cross-encoder is widely used to re-rank top-k documents retrieved by other simple models. For example, use bge embedding model to retrieve top 100 relevant documents, and then use bge reranker to re-rank the top 100 documents to get the final top-3 results. All models have been uploaded to Huggingface Hub, and you can see them at https://huggingface.co/BAAI. If you cannot open the Huggingface Hub, you can also download the models at https://model.baai.ac.cn/models . ## Frequently asked questions **1. How to fine-tune bge embedding model?** Following this [example](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/finetune) to prepare data and fine-tune your model. Some suggestions: - Mine hard negatives following this [example](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/finetune#hard-negatives), which can improve the retrieval performance. - In general, larger hyper-parameter `per_device_train_batch_size` brings better performance. You can expand it by enabling `--fp16`, `--deepspeed df_config.json` (df_config.json can refer to [ds_config.json](https://github.com/FlagOpen/FlagEmbedding/blob/master/examples/finetune/ds_config.json), `--gradient_checkpointing`, etc. - If you pre-train bge on your data, the pre-trained model cannot be directly used to calculate similarity, and it must be fine-tuned with contrastive learning before computing similarity. - If the accuracy of the fine-tuned model is still not high, it is recommended to use/fine-tune the cross-encoder model (bge-reranker) to re-rank top-k results. Hard negatives also are needed to fine-tune reranker. <details> <summary>2. 
The similarity score between two dissimilar sentences is higher than 0.5</summary> <!-- ### The similarity score between two dissimilar sentences is higher than 0.5 --> **We suggest using bge v1.5, which alleviates the issue of the similarity distribution.** Since we fine-tune the models with contrastive learning at a temperature of 0.01, the similarity distribution of the current BGE models lies roughly in the interval \[0.6, 1\]. So a similarity score greater than 0.5 does not indicate that the two sentences are similar. For downstream tasks, such as passage retrieval or semantic similarity, **what matters is the relative order of the scores, not the absolute value.** If you need to filter similar sentences based on a similarity threshold, please select an appropriate threshold based on the similarity distribution on your data (such as 0.8, 0.85, or even 0.9). </details> <details> <summary>3. When does the query instruction need to be used</summary> <!-- ### When does the query instruction need to be used --> For the `bge-*-v1.5` models, we improved their retrieval ability when no instruction is used. Omitting the instruction causes only a slight degradation in retrieval performance compared with using it, so you can generate embeddings without the instruction in all cases for convenience. For a retrieval task that uses short queries to find long related documents, it is recommended to add instructions to these short queries. **The best way to decide whether to add instructions to queries is to choose the setting that achieves better performance on your task.** In all cases, the documents/passages do not need the instruction. </details> ## Usage ### Usage for Embedding Model Here are some examples of using `bge` models with [FlagEmbedding](#using-flagembedding), [Sentence-Transformers](#using-sentence-transformers), [LangChain](#using-langchain), or [HuggingFace Transformers](#using-huggingface-transformers). #### Using FlagEmbedding ``` pip install -U FlagEmbedding ``` If this doesn't work for you, see [FlagEmbedding](https://github.com/FlagOpen/FlagEmbedding/blob/master/FlagEmbedding/baai_general_embedding/README.md) for other ways to install FlagEmbedding. ```python from FlagEmbedding import FlagModel sentences_1 = ["样例数据-1", "样例数据-2"] sentences_2 = ["样例数据-3", "样例数据-4"] model = FlagModel('BAAI/bge-large-zh-v1.5', query_instruction_for_retrieval="为这个句子生成表示以用于检索相关文章:", use_fp16=True) # Setting use_fp16 to True speeds up computation with a slight performance degradation embeddings_1 = model.encode(sentences_1) embeddings_2 = model.encode(sentences_2) similarity = embeddings_1 @ embeddings_2.T print(similarity) # for an s2p (short query to long passage) retrieval task, use encode_queries(), which automatically adds the instruction to each query # the corpus in a retrieval task can still use encode() or encode_corpus(), since passages don't need the instruction queries = ['query_1', 'query_2'] passages = ["样例文档-1", "样例文档-2"] q_embeddings = model.encode_queries(queries) p_embeddings = model.encode(passages) scores = q_embeddings @ p_embeddings.T ``` For the value of the argument `query_instruction_for_retrieval`, see [Model List](https://github.com/FlagOpen/FlagEmbedding/tree/master#model-list). By default, FlagModel will use all available GPUs when encoding. Set `os.environ["CUDA_VISIBLE_DEVICES"]` to select specific GPUs. You can also set `os.environ["CUDA_VISIBLE_DEVICES"]=""` to make all GPUs unavailable.
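As a minimal sketch of the GPU note above (not an official recipe): the snippet below restricts encoding to a single GPU by setting `CUDA_VISIBLE_DEVICES` before the model is created; the model name and instruction are simply the ones from the example above.

```python
import os

# Make only GPU 0 visible; set to "" to force CPU-only encoding.
# This should be done before FlagModel (and CUDA) initializes any devices.
os.environ["CUDA_VISIBLE_DEVICES"] = "0"

from FlagEmbedding import FlagModel

model = FlagModel(
    'BAAI/bge-large-zh-v1.5',
    query_instruction_for_retrieval="为这个句子生成表示以用于检索相关文章:",
    use_fp16=True,  # faster encoding with a slight accuracy trade-off
)

# encode() returns a numpy array of shape (num_sentences, embedding_dim)
embeddings = model.encode(["样例数据-1", "样例数据-2"])
print(embeddings.shape)
```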
#### Using Sentence-Transformers You can also use the `bge` models with [sentence-transformers](https://www.SBERT.net): ``` pip install -U sentence-transformers ``` ```python from sentence_transformers import SentenceTransformer sentences_1 = ["样例数据-1", "样例数据-2"] sentences_2 = ["样例数据-3", "样例数据-4"] model = SentenceTransformer('BAAI/bge-large-zh-v1.5') embeddings_1 = model.encode(sentences_1, normalize_embeddings=True) embeddings_2 = model.encode(sentences_2, normalize_embeddings=True) similarity = embeddings_1 @ embeddings_2.T print(similarity) ``` For an s2p (short query to long passage) retrieval task, each short query should start with an instruction (see [Model List](https://github.com/FlagOpen/FlagEmbedding/tree/master#model-list) for the instructions), but the instruction is not needed for passages. ```python from sentence_transformers import SentenceTransformer queries = ['query_1', 'query_2'] passages = ["样例文档-1", "样例文档-2"] instruction = "为这个句子生成表示以用于检索相关文章:" model = SentenceTransformer('BAAI/bge-large-zh-v1.5') q_embeddings = model.encode([instruction+q for q in queries], normalize_embeddings=True) p_embeddings = model.encode(passages, normalize_embeddings=True) scores = q_embeddings @ p_embeddings.T ``` #### Using LangChain You can use `bge` in LangChain like this: ```python from langchain.embeddings import HuggingFaceBgeEmbeddings model_name = "BAAI/bge-large-en-v1.5" model_kwargs = {'device': 'cuda'} encode_kwargs = {'normalize_embeddings': True} # set True to compute cosine similarity model = HuggingFaceBgeEmbeddings( model_name=model_name, model_kwargs=model_kwargs, encode_kwargs=encode_kwargs, query_instruction="为这个句子生成表示以用于检索相关文章:" ) model.query_instruction = "为这个句子生成表示以用于检索相关文章:" ``` #### Using HuggingFace Transformers With the transformers package, you can use the model like this: first, pass your input through the transformer model, then take the last hidden state of the first token (i.e., [CLS]) as the sentence embedding. ```python from transformers import AutoTokenizer, AutoModel import torch # Sentences we want sentence embeddings for sentences = ["样例数据-1", "样例数据-2"] # Load model from HuggingFace Hub tokenizer = AutoTokenizer.from_pretrained('BAAI/bge-large-zh-v1.5') model = AutoModel.from_pretrained('BAAI/bge-large-zh-v1.5') model.eval() # Tokenize sentences encoded_input = tokenizer(sentences, padding=True, truncation=True, return_tensors='pt') # for an s2p (short query to long passage) retrieval task, add an instruction to each query (do not add an instruction to passages) # encoded_input = tokenizer([instruction + q for q in queries], padding=True, truncation=True, return_tensors='pt') # Compute token embeddings with torch.no_grad(): model_output = model(**encoded_input) # Perform pooling. In this case, cls pooling. sentence_embeddings = model_output[0][:, 0] # normalize embeddings sentence_embeddings = torch.nn.functional.normalize(sentence_embeddings, p=2, dim=1) print("Sentence embeddings:", sentence_embeddings) ``` ### Usage for Reranker Different from the embedding model, the reranker takes a question and a document as input and directly outputs a similarity score instead of an embedding. You can get a relevance score by feeding a query and a passage to the reranker. The reranker is optimized based on cross-entropy loss, so the relevance score is not bounded to a specific range.
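Because the raw relevance score is unbounded, if you want a value in (0, 1), e.g., to compare against a fixed threshold, you can pass the score through a sigmoid; the sigmoid is monotonic, so the ranking order is unchanged. A minimal sketch, assuming `raw_scores` holds scores already produced by one of the methods below (the values here are just placeholders):

```python
import math

def to_unit_interval(score: float) -> float:
    # Sigmoid: maps an unbounded relevance score to (0, 1) without changing the ranking.
    return 1.0 / (1.0 + math.exp(-score))

# Placeholder scores standing in for reranker output from the examples below.
raw_scores = [-5.6, 5.3]
print([round(to_unit_interval(s), 4) for s in raw_scores])
```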
#### Using FlagEmbedding ``` pip install -U FlagEmbedding ``` Get relevance scores (higher scores indicate more relevance): ```python from FlagEmbedding import FlagReranker reranker = FlagReranker('BAAI/bge-reranker-large', use_fp16=True) # Setting use_fp16 to True speeds up computation with a slight performance degradation score = reranker.compute_score(['query', 'passage']) print(score) scores = reranker.compute_score([['what is panda?', 'hi'], ['what is panda?', 'The giant panda (Ailuropoda melanoleuca), sometimes called a panda bear or simply panda, is a bear species endemic to China.']]) print(scores) ``` #### Using Huggingface transformers ```python import torch from transformers import AutoModelForSequenceClassification, AutoTokenizer tokenizer = AutoTokenizer.from_pretrained('BAAI/bge-reranker-large') model = AutoModelForSequenceClassification.from_pretrained('BAAI/bge-reranker-large') model.eval() pairs = [['what is panda?', 'hi'], ['what is panda?', 'The giant panda (Ailuropoda melanoleuca), sometimes called a panda bear or simply panda, is a bear species endemic to China.']] with torch.no_grad(): inputs = tokenizer(pairs, padding=True, truncation=True, return_tensors='pt', max_length=512) scores = model(**inputs, return_dict=True).logits.view(-1, ).float() print(scores) ``` ## Evaluation `baai-general-embedding` models achieve **state-of-the-art performance on both MTEB and C-MTEB leaderboard!** For more details and evaluation tools see our [scripts](https://github.com/FlagOpen/FlagEmbedding/blob/master/C_MTEB/README.md). - **MTEB**: | Model Name | Dimension | Sequence Length | Average (56) | Retrieval (15) |Clustering (11) | Pair Classification (3) | Reranking (4) | STS (10) | Summarization (1) | Classification (12) | |:----:|:---:|:---:|:---:|:---:|:---:|:---:|:---:|:---:|:---:|:---:| | [BAAI/bge-large-en-v1.5](https://huggingface.co/BAAI/bge-large-en-v1.5) | 1024 | 512 | **64.23** | **54.29** | 46.08 | 87.12 | 60.03 | 83.11 | 31.61 | 75.97 | | [BAAI/bge-base-en-v1.5](https://huggingface.co/BAAI/bge-base-en-v1.5) | 768 | 512 | 63.55 | 53.25 | 45.77 | 86.55 | 58.86 | 82.4 | 31.07 | 75.53 | | [BAAI/bge-small-en-v1.5](https://huggingface.co/BAAI/bge-small-en-v1.5) | 384 | 512 | 62.17 |51.68 | 43.82 | 84.92 | 58.36 | 81.59 | 30.12 | 74.14 | | [bge-large-en](https://huggingface.co/BAAI/bge-large-en) | 1024 | 512 | 63.98 | 53.9 | 46.98 | 85.8 | 59.48 | 81.56 | 32.06 | 76.21 | | [bge-base-en](https://huggingface.co/BAAI/bge-base-en) | 768 | 512 | 63.36 | 53.0 | 46.32 | 85.86 | 58.7 | 81.84 | 29.27 | 75.27 | | [gte-large](https://huggingface.co/thenlper/gte-large) | 1024 | 512 | 63.13 | 52.22 | 46.84 | 85.00 | 59.13 | 83.35 | 31.66 | 73.33 | | [gte-base](https://huggingface.co/thenlper/gte-base) | 768 | 512 | 62.39 | 51.14 | 46.2 | 84.57 | 58.61 | 82.3 | 31.17 | 73.01 | | [e5-large-v2](https://huggingface.co/intfloat/e5-large-v2) | 1024| 512 | 62.25 | 50.56 | 44.49 | 86.03 | 56.61 | 82.05 | 30.19 | 75.24 | | [bge-small-en](https://huggingface.co/BAAI/bge-small-en) | 384 | 512 | 62.11 | 51.82 | 44.31 | 83.78 | 57.97 | 80.72 | 30.53 | 74.37 | | [instructor-xl](https://huggingface.co/hkunlp/instructor-xl) | 768 | 512 | 61.79 | 49.26 | 44.74 | 86.62 | 57.29 | 83.06 | 32.32 | 61.79 | | [e5-base-v2](https://huggingface.co/intfloat/e5-base-v2) | 768 | 512 | 61.5 | 50.29 | 43.80 | 85.73 | 55.91 | 81.05 | 30.28 | 73.84 | | [gte-small](https://huggingface.co/thenlper/gte-small) | 384 | 512 | 61.36 | 49.46 | 44.89 | 83.54 | 57.7 | 82.07 | 30.42 | 72.31 | | 
[text-embedding-ada-002](https://platform.openai.com/docs/guides/embeddings) | 1536 | 8192 | 60.99 | 49.25 | 45.9 | 84.89 | 56.32 | 80.97 | 30.8 | 70.93 | | [e5-small-v2](https://huggingface.co/intfloat/e5-base-v2) | 384 | 512 | 59.93 | 49.04 | 39.92 | 84.67 | 54.32 | 80.39 | 31.16 | 72.94 | | [sentence-t5-xxl](https://huggingface.co/sentence-transformers/sentence-t5-xxl) | 768 | 512 | 59.51 | 42.24 | 43.72 | 85.06 | 56.42 | 82.63 | 30.08 | 73.42 | | [all-mpnet-base-v2](https://huggingface.co/sentence-transformers/all-mpnet-base-v2) | 768 | 514 | 57.78 | 43.81 | 43.69 | 83.04 | 59.36 | 80.28 | 27.49 | 65.07 | | [sgpt-bloom-7b1-msmarco](https://huggingface.co/bigscience/sgpt-bloom-7b1-msmarco) | 4096 | 2048 | 57.59 | 48.22 | 38.93 | 81.9 | 55.65 | 77.74 | 33.6 | 66.19 | - **C-MTEB**: We create the benchmark C-MTEB for Chinese text embedding which consists of 31 datasets from 6 tasks. Please refer to [C_MTEB](https://github.com/FlagOpen/FlagEmbedding/blob/master/C_MTEB/README.md) for a detailed introduction. | Model | Embedding dimension | Avg | Retrieval | STS | PairClassification | Classification | Reranking | Clustering | |:-------------------------------|:--------:|:--------:|:--------:|:--------:|:--------:|:--------:|:--------:|:--------:| | [**BAAI/bge-large-zh-v1.5**](https://huggingface.co/BAAI/bge-large-zh-v1.5) | 1024 | **64.53** | 70.46 | 56.25 | 81.6 | 69.13 | 65.84 | 48.99 | | [BAAI/bge-base-zh-v1.5](https://huggingface.co/BAAI/bge-base-zh-v1.5) | 768 | 63.13 | 69.49 | 53.72 | 79.75 | 68.07 | 65.39 | 47.53 | | [BAAI/bge-small-zh-v1.5](https://huggingface.co/BAAI/bge-small-zh-v1.5) | 512 | 57.82 | 61.77 | 49.11 | 70.41 | 63.96 | 60.92 | 44.18 | | [BAAI/bge-large-zh](https://huggingface.co/BAAI/bge-large-zh) | 1024 | 64.20 | 71.53 | 54.98 | 78.94 | 68.32 | 65.11 | 48.39 | | [bge-large-zh-noinstruct](https://huggingface.co/BAAI/bge-large-zh-noinstruct) | 1024 | 63.53 | 70.55 | 53 | 76.77 | 68.58 | 64.91 | 50.01 | | [BAAI/bge-base-zh](https://huggingface.co/BAAI/bge-base-zh) | 768 | 62.96 | 69.53 | 54.12 | 77.5 | 67.07 | 64.91 | 47.63 | | [multilingual-e5-large](https://huggingface.co/intfloat/multilingual-e5-large) | 1024 | 58.79 | 63.66 | 48.44 | 69.89 | 67.34 | 56.00 | 48.23 | | [BAAI/bge-small-zh](https://huggingface.co/BAAI/bge-small-zh) | 512 | 58.27 | 63.07 | 49.45 | 70.35 | 63.64 | 61.48 | 45.09 | | [m3e-base](https://huggingface.co/moka-ai/m3e-base) | 768 | 57.10 | 56.91 | 50.47 | 63.99 | 67.52 | 59.34 | 47.68 | | [m3e-large](https://huggingface.co/moka-ai/m3e-large) | 1024 | 57.05 | 54.75 | 50.42 | 64.3 | 68.2 | 59.66 | 48.88 | | [multilingual-e5-base](https://huggingface.co/intfloat/multilingual-e5-base) | 768 | 55.48 | 61.63 | 46.49 | 67.07 | 65.35 | 54.35 | 40.68 | | [multilingual-e5-small](https://huggingface.co/intfloat/multilingual-e5-small) | 384 | 55.38 | 59.95 | 45.27 | 66.45 | 65.85 | 53.86 | 45.26 | | [text-embedding-ada-002(OpenAI)](https://platform.openai.com/docs/guides/embeddings/what-are-embeddings) | 1536 | 53.02 | 52.0 | 43.35 | 69.56 | 64.31 | 54.28 | 45.68 | | [luotuo](https://huggingface.co/silk-road/luotuo-bert-medium) | 1024 | 49.37 | 44.4 | 42.78 | 66.62 | 61 | 49.25 | 44.39 | | [text2vec-base](https://huggingface.co/shibing624/text2vec-base-chinese) | 768 | 47.63 | 38.79 | 43.41 | 67.41 | 62.19 | 49.45 | 37.66 | | [text2vec-large](https://huggingface.co/GanymedeNil/text2vec-large-chinese) | 1024 | 47.36 | 41.94 | 44.97 | 70.86 | 60.66 | 49.16 | 30.02 | - **Reranking**: See [C_MTEB](https://github.com/FlagOpen/FlagEmbedding/blob/master/C_MTEB/) for 
the evaluation script. | Model | T2Reranking | T2RerankingZh2En\* | T2RerankingEn2Zh\* | MMarcoReranking | CMedQAv1 | CMedQAv2 | Avg | |:-------------------------------|:--------:|:--------:|:--------:|:--------:|:--------:|:--------:|:--------:| | text2vec-base-multilingual | 64.66 | 62.94 | 62.51 | 14.37 | 48.46 | 48.6 | 50.26 | | multilingual-e5-small | 65.62 | 60.94 | 56.41 | 29.91 | 67.26 | 66.54 | 57.78 | | multilingual-e5-large | 64.55 | 61.61 | 54.28 | 28.6 | 67.42 | 67.92 | 57.4 | | multilingual-e5-base | 64.21 | 62.13 | 54.68 | 29.5 | 66.23 | 66.98 | 57.29 | | m3e-base | 66.03 | 62.74 | 56.07 | 17.51 | 77.05 | 76.76 | 59.36 | | m3e-large | 66.13 | 62.72 | 56.1 | 16.46 | 77.76 | 78.27 | 59.57 | | bge-base-zh-v1.5 | 66.49 | 63.25 | 57.02 | 29.74 | 80.47 | 84.88 | 63.64 | | bge-large-zh-v1.5 | 65.74 | 63.39 | 57.03 | 28.74 | 83.45 | 85.44 | 63.97 | | [BAAI/bge-reranker-base](https://huggingface.co/BAAI/bge-reranker-base) | 67.28 | 63.95 | 60.45 | 35.46 | 81.26 | 84.1 | 65.42 | | [BAAI/bge-reranker-large](https://huggingface.co/BAAI/bge-reranker-large) | 67.6 | 64.03 | 61.44 | 37.16 | 82.15 | 84.18 | 66.09 | \* : T2RerankingZh2En and T2RerankingEn2Zh are cross-language retrieval tasks. ## Train ### BAAI Embedding We pre-train the models using [RetroMAE](https://github.com/staoxiao/RetroMAE) and train them on large-scale pair data using contrastive learning. **You can fine-tune the embedding model on your data following our [examples](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/finetune).** We also provide a [pre-train example](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/pretrain). Note that the goal of pre-training is to reconstruct the text; the pre-trained model cannot be used for similarity calculation directly and needs to be fine-tuned first. For more training details for bge, see [baai_general_embedding](https://github.com/FlagOpen/FlagEmbedding/blob/master/FlagEmbedding/baai_general_embedding/README.md). ### BGE Reranker The cross-encoder performs full attention over the input pair, which is more accurate than the embedding model (i.e., bi-encoder) but more time-consuming. Therefore, it can be used to re-rank the top-k documents returned by the embedding model. We train the cross-encoder on multilingual pair data; the data format is the same as for the embedding model, so you can fine-tune it easily following our [example](https://github.com/FlagOpen/FlagEmbedding/tree/master/examples/reranker). For more details, please refer to [./FlagEmbedding/reranker/README.md](https://github.com/FlagOpen/FlagEmbedding/tree/master/FlagEmbedding/reranker). ### Our Contributors: <a href="https://github.com/FlagOpen/FlagEmbedding/graphs/contributors"> <img src="https://contrib.rocks/image?repo=FlagOpen/FlagEmbedding" /> </a> ## Contact If you have any questions or suggestions related to this project, feel free to open an issue or pull request. You can also email Shitao Xiao([email protected]) and Zheng Liu([email protected]).
## Citation If you find this repository useful, please consider giving it a star :star: and a citation: ``` @misc{bge_embedding, title={C-Pack: Packaged Resources To Advance General Chinese Embedding}, author={Shitao Xiao and Zheng Liu and Peitian Zhang and Niklas Muennighoff}, year={2023}, eprint={2309.07597}, archivePrefix={arXiv}, primaryClass={cs.CL} } @misc{llm_embedder, title={Retrieve Anything To Augment Large Language Models}, author={Peitian Zhang and Shitao Xiao and Zheng Liu and Zhicheng Dou and Jian-Yun Nie}, year={2023}, eprint={2310.07554}, archivePrefix={arXiv}, primaryClass={cs.IR} } ``` ## License FlagEmbedding is licensed under the [MIT License](https://github.com/FlagOpen/FlagEmbedding/blob/master/LICENSE). The released models can be used for commercial purposes free of charge.
[ "BEAR" ]
beademiguelperez/sentence-transformers-multilingual-e5-small
beademiguelperez
sentence-similarity
[ "sentence-transformers", "safetensors", "bert", "mteb", "Sentence Transformers", "sentence-similarity", "multilingual", "af", "am", "ar", "as", "az", "be", "bg", "bn", "br", "bs", "ca", "cs", "cy", "da", "de", "el", "en", "eo", "es", "et", "eu", "fa", "fi", "fr", "fy", "ga", "gd", "gl", "gu", "ha", "he", "hi", "hr", "hu", "hy", "id", "is", "it", "ja", "jv", "ka", "kk", "km", "kn", "ko", "ku", "ky", "la", "lo", "lt", "lv", "mg", "mk", "ml", "mn", "mr", "ms", "my", "ne", "nl", "no", "om", "or", "pa", "pl", "ps", "pt", "ro", "ru", "sa", "sd", "si", "sk", "sl", "so", "sq", "sr", "su", "sv", "sw", "ta", "te", "th", "tl", "tr", "ug", "uk", "ur", "uz", "vi", "xh", "yi", "zh", "arxiv:2402.05672", "arxiv:2108.08787", "arxiv:2104.08663", "arxiv:2210.07316", "license:mit", "model-index", "autotrain_compatible", "text-embeddings-inference", "endpoints_compatible", "region:us" ]
"2024-03-25T14:48:23Z"
2024-03-25T14:56:59+00:00
68,126
0
--- language: - multilingual - af - am - ar - as - az - be - bg - bn - br - bs - ca - cs - cy - da - de - el - en - eo - es - et - eu - fa - fi - fr - fy - ga - gd - gl - gu - ha - he - hi - hr - hu - hy - id - is - it - ja - jv - ka - kk - km - kn - ko - ku - ky - la - lo - lt - lv - mg - mk - ml - mn - mr - ms - my - ne - nl - 'no' - om - or - pa - pl - ps - pt - ro - ru - sa - sd - si - sk - sl - so - sq - sr - su - sv - sw - ta - te - th - tl - tr - ug - uk - ur - uz - vi - xh - yi - zh license: mit tags: - mteb - Sentence Transformers - sentence-similarity - sentence-transformers model-index: - name: multilingual-e5-small results: - task: type: Classification dataset: name: MTEB AmazonCounterfactualClassification (en) type: mteb/amazon_counterfactual config: en split: test revision: e8379541af4e31359cca9fbcf4b00f2671dba205 metrics: - type: accuracy value: 73.79104477611939 - type: ap value: 36.9996434842022 - type: f1 value: 67.95453679103099 - task: type: Classification dataset: name: MTEB AmazonCounterfactualClassification (de) type: mteb/amazon_counterfactual config: de split: test revision: e8379541af4e31359cca9fbcf4b00f2671dba205 metrics: - type: accuracy value: 71.64882226980728 - type: ap value: 82.11942130026586 - type: f1 value: 69.87963421606715 - task: type: Classification dataset: name: MTEB AmazonCounterfactualClassification (en-ext) type: mteb/amazon_counterfactual config: en-ext split: test revision: e8379541af4e31359cca9fbcf4b00f2671dba205 metrics: - type: accuracy value: 75.8095952023988 - type: ap value: 24.46869495579561 - type: f1 value: 63.00108480037597 - task: type: Classification dataset: name: MTEB AmazonCounterfactualClassification (ja) type: mteb/amazon_counterfactual config: ja split: test revision: e8379541af4e31359cca9fbcf4b00f2671dba205 metrics: - type: accuracy value: 64.186295503212 - type: ap value: 15.496804690197042 - type: f1 value: 52.07153895475031 - task: type: Classification dataset: name: MTEB AmazonPolarityClassification type: mteb/amazon_polarity config: default split: test revision: e2d317d38cd51312af73b3d32a06d1a08b442046 metrics: - type: accuracy value: 88.699325 - type: ap value: 85.27039559917269 - type: f1 value: 88.65556295032513 - task: type: Classification dataset: name: MTEB AmazonReviewsClassification (en) type: mteb/amazon_reviews_multi config: en split: test revision: 1399c76144fd37290681b995c656ef9b2e06e26d metrics: - type: accuracy value: 44.69799999999999 - type: f1 value: 43.73187348654165 - task: type: Classification dataset: name: MTEB AmazonReviewsClassification (de) type: mteb/amazon_reviews_multi config: de split: test revision: 1399c76144fd37290681b995c656ef9b2e06e26d metrics: - type: accuracy value: 40.245999999999995 - type: f1 value: 39.3863530637684 - task: type: Classification dataset: name: MTEB AmazonReviewsClassification (es) type: mteb/amazon_reviews_multi config: es split: test revision: 1399c76144fd37290681b995c656ef9b2e06e26d metrics: - type: accuracy value: 40.394 - type: f1 value: 39.301223469483446 - task: type: Classification dataset: name: MTEB AmazonReviewsClassification (fr) type: mteb/amazon_reviews_multi config: fr split: test revision: 1399c76144fd37290681b995c656ef9b2e06e26d metrics: - type: accuracy value: 38.864 - type: f1 value: 37.97974261868003 - task: type: Classification dataset: name: MTEB AmazonReviewsClassification (ja) type: mteb/amazon_reviews_multi config: ja split: test revision: 1399c76144fd37290681b995c656ef9b2e06e26d metrics: - type: accuracy value: 37.682 - type: f1 value: 
37.07399369768313 - task: type: Classification dataset: name: MTEB AmazonReviewsClassification (zh) type: mteb/amazon_reviews_multi config: zh split: test revision: 1399c76144fd37290681b995c656ef9b2e06e26d metrics: - type: accuracy value: 37.504 - type: f1 value: 36.62317273874278 - task: type: Retrieval dataset: name: MTEB ArguAna type: arguana config: default split: test revision: None metrics: - type: map_at_1 value: 19.061 - type: map_at_10 value: 31.703 - type: map_at_100 value: 32.967 - type: map_at_1000 value: 33.001000000000005 - type: map_at_3 value: 27.466 - type: map_at_5 value: 29.564 - type: mrr_at_1 value: 19.559 - type: mrr_at_10 value: 31.874999999999996 - type: mrr_at_100 value: 33.146 - type: mrr_at_1000 value: 33.18 - type: mrr_at_3 value: 27.667 - type: mrr_at_5 value: 29.74 - type: ndcg_at_1 value: 19.061 - type: ndcg_at_10 value: 39.062999999999995 - type: ndcg_at_100 value: 45.184000000000005 - type: ndcg_at_1000 value: 46.115 - type: ndcg_at_3 value: 30.203000000000003 - type: ndcg_at_5 value: 33.953 - type: precision_at_1 value: 19.061 - type: precision_at_10 value: 6.279999999999999 - type: precision_at_100 value: 0.9129999999999999 - type: precision_at_1000 value: 0.099 - type: precision_at_3 value: 12.706999999999999 - type: precision_at_5 value: 9.431000000000001 - type: recall_at_1 value: 19.061 - type: recall_at_10 value: 62.802 - type: recall_at_100 value: 91.323 - type: recall_at_1000 value: 98.72 - type: recall_at_3 value: 38.122 - type: recall_at_5 value: 47.155 - task: type: Clustering dataset: name: MTEB ArxivClusteringP2P type: mteb/arxiv-clustering-p2p config: default split: test revision: a122ad7f3f0291bf49cc6f4d32aa80929df69d5d metrics: - type: v_measure value: 39.22266660528253 - task: type: Clustering dataset: name: MTEB ArxivClusteringS2S type: mteb/arxiv-clustering-s2s config: default split: test revision: f910caf1a6075f7329cdf8c1a6135696f37dbd53 metrics: - type: v_measure value: 30.79980849482483 - task: type: Reranking dataset: name: MTEB AskUbuntuDupQuestions type: mteb/askubuntudupquestions-reranking config: default split: test revision: 2000358ca161889fa9c082cb41daa8dcfb161a54 metrics: - type: map value: 57.8790068352054 - type: mrr value: 71.78791276436706 - task: type: STS dataset: name: MTEB BIOSSES type: mteb/biosses-sts config: default split: test revision: d3fb88f8f02e40887cd149695127462bbcf29b4a metrics: - type: cos_sim_pearson value: 82.36328364043163 - type: cos_sim_spearman value: 82.26211536195868 - type: euclidean_pearson value: 80.3183865039173 - type: euclidean_spearman value: 79.88495276296132 - type: manhattan_pearson value: 80.14484480692127 - type: manhattan_spearman value: 80.39279565980743 - task: type: BitextMining dataset: name: MTEB BUCC (de-en) type: mteb/bucc-bitext-mining config: de-en split: test revision: d51519689f32196a32af33b075a01d0e7c51e252 metrics: - type: accuracy value: 98.0375782881002 - type: f1 value: 97.86012526096033 - type: precision value: 97.77139874739039 - type: recall value: 98.0375782881002 - task: type: BitextMining dataset: name: MTEB BUCC (fr-en) type: mteb/bucc-bitext-mining config: fr-en split: test revision: d51519689f32196a32af33b075a01d0e7c51e252 metrics: - type: accuracy value: 93.35241030156286 - type: f1 value: 92.66050333846944 - type: precision value: 92.3306919069631 - type: recall value: 93.35241030156286 - task: type: BitextMining dataset: name: MTEB BUCC (ru-en) type: mteb/bucc-bitext-mining config: ru-en split: test revision: d51519689f32196a32af33b075a01d0e7c51e252 metrics: 
- type: accuracy value: 94.0699688257707 - type: f1 value: 93.50236693222492 - type: precision value: 93.22791825424315 - type: recall value: 94.0699688257707 - task: type: BitextMining dataset: name: MTEB BUCC (zh-en) type: mteb/bucc-bitext-mining config: zh-en split: test revision: d51519689f32196a32af33b075a01d0e7c51e252 metrics: - type: accuracy value: 89.25750394944708 - type: f1 value: 88.79234684921889 - type: precision value: 88.57293312269616 - type: recall value: 89.25750394944708 - task: type: Classification dataset: name: MTEB Banking77Classification type: mteb/banking77 config: default split: test revision: 0fd18e25b25c072e09e0d92ab615fda904d66300 metrics: - type: accuracy value: 79.41558441558442 - type: f1 value: 79.25886487487219 - task: type: Clustering dataset: name: MTEB BiorxivClusteringP2P type: mteb/biorxiv-clustering-p2p config: default split: test revision: 65b79d1d13f80053f67aca9498d9402c2d9f1f40 metrics: - type: v_measure value: 35.747820820329736 - task: type: Clustering dataset: name: MTEB BiorxivClusteringS2S type: mteb/biorxiv-clustering-s2s config: default split: test revision: 258694dd0231531bc1fd9de6ceb52a0853c6d908 metrics: - type: v_measure value: 27.045143830596146 - task: type: Retrieval dataset: name: MTEB CQADupstackRetrieval type: BeIR/cqadupstack config: default split: test revision: None metrics: - type: map_at_1 value: 24.252999999999997 - type: map_at_10 value: 31.655916666666666 - type: map_at_100 value: 32.680749999999996 - type: map_at_1000 value: 32.79483333333334 - type: map_at_3 value: 29.43691666666666 - type: map_at_5 value: 30.717416666666665 - type: mrr_at_1 value: 28.602750000000004 - type: mrr_at_10 value: 35.56875 - type: mrr_at_100 value: 36.3595 - type: mrr_at_1000 value: 36.427749999999996 - type: mrr_at_3 value: 33.586166666666664 - type: mrr_at_5 value: 34.73641666666666 - type: ndcg_at_1 value: 28.602750000000004 - type: ndcg_at_10 value: 36.06933333333334 - type: ndcg_at_100 value: 40.70141666666667 - type: ndcg_at_1000 value: 43.24341666666667 - type: ndcg_at_3 value: 32.307916666666664 - type: ndcg_at_5 value: 34.129999999999995 - type: precision_at_1 value: 28.602750000000004 - type: precision_at_10 value: 6.097666666666667 - type: precision_at_100 value: 0.9809166666666668 - type: precision_at_1000 value: 0.13766666666666663 - type: precision_at_3 value: 14.628166666666667 - type: precision_at_5 value: 10.266916666666667 - type: recall_at_1 value: 24.252999999999997 - type: recall_at_10 value: 45.31916666666667 - type: recall_at_100 value: 66.03575000000001 - type: recall_at_1000 value: 83.94708333333334 - type: recall_at_3 value: 34.71941666666666 - type: recall_at_5 value: 39.46358333333333 - task: type: Retrieval dataset: name: MTEB ClimateFEVER type: climate-fever config: default split: test revision: None metrics: - type: map_at_1 value: 9.024000000000001 - type: map_at_10 value: 15.644 - type: map_at_100 value: 17.154 - type: map_at_1000 value: 17.345 - type: map_at_3 value: 13.028 - type: map_at_5 value: 14.251 - type: mrr_at_1 value: 19.674 - type: mrr_at_10 value: 29.826999999999998 - type: mrr_at_100 value: 30.935000000000002 - type: mrr_at_1000 value: 30.987 - type: mrr_at_3 value: 26.645000000000003 - type: mrr_at_5 value: 28.29 - type: ndcg_at_1 value: 19.674 - type: ndcg_at_10 value: 22.545 - type: ndcg_at_100 value: 29.207 - type: ndcg_at_1000 value: 32.912 - type: ndcg_at_3 value: 17.952 - type: ndcg_at_5 value: 19.363 - type: precision_at_1 value: 19.674 - type: precision_at_10 value: 7.212000000000001 - 
type: precision_at_100 value: 1.435 - type: precision_at_1000 value: 0.212 - type: precision_at_3 value: 13.507 - type: precision_at_5 value: 10.397 - type: recall_at_1 value: 9.024000000000001 - type: recall_at_10 value: 28.077999999999996 - type: recall_at_100 value: 51.403 - type: recall_at_1000 value: 72.406 - type: recall_at_3 value: 16.768 - type: recall_at_5 value: 20.737 - task: type: Retrieval dataset: name: MTEB DBPedia type: dbpedia-entity config: default split: test revision: None metrics: - type: map_at_1 value: 8.012 - type: map_at_10 value: 17.138 - type: map_at_100 value: 24.146 - type: map_at_1000 value: 25.622 - type: map_at_3 value: 12.552 - type: map_at_5 value: 14.435 - type: mrr_at_1 value: 62.25000000000001 - type: mrr_at_10 value: 71.186 - type: mrr_at_100 value: 71.504 - type: mrr_at_1000 value: 71.514 - type: mrr_at_3 value: 69.333 - type: mrr_at_5 value: 70.408 - type: ndcg_at_1 value: 49.75 - type: ndcg_at_10 value: 37.76 - type: ndcg_at_100 value: 42.071 - type: ndcg_at_1000 value: 49.309 - type: ndcg_at_3 value: 41.644 - type: ndcg_at_5 value: 39.812999999999995 - type: precision_at_1 value: 62.25000000000001 - type: precision_at_10 value: 30.15 - type: precision_at_100 value: 9.753 - type: precision_at_1000 value: 1.9189999999999998 - type: precision_at_3 value: 45.667 - type: precision_at_5 value: 39.15 - type: recall_at_1 value: 8.012 - type: recall_at_10 value: 22.599 - type: recall_at_100 value: 48.068 - type: recall_at_1000 value: 71.328 - type: recall_at_3 value: 14.043 - type: recall_at_5 value: 17.124 - task: type: Classification dataset: name: MTEB EmotionClassification type: mteb/emotion config: default split: test revision: 4f58c6b202a23cf9a4da393831edf4f9183cad37 metrics: - type: accuracy value: 42.455 - type: f1 value: 37.59462649781862 - task: type: Retrieval dataset: name: MTEB FEVER type: fever config: default split: test revision: None metrics: - type: map_at_1 value: 58.092 - type: map_at_10 value: 69.586 - type: map_at_100 value: 69.968 - type: map_at_1000 value: 69.982 - type: map_at_3 value: 67.48100000000001 - type: map_at_5 value: 68.915 - type: mrr_at_1 value: 62.166 - type: mrr_at_10 value: 73.588 - type: mrr_at_100 value: 73.86399999999999 - type: mrr_at_1000 value: 73.868 - type: mrr_at_3 value: 71.6 - type: mrr_at_5 value: 72.99 - type: ndcg_at_1 value: 62.166 - type: ndcg_at_10 value: 75.27199999999999 - type: ndcg_at_100 value: 76.816 - type: ndcg_at_1000 value: 77.09700000000001 - type: ndcg_at_3 value: 71.36 - type: ndcg_at_5 value: 73.785 - type: precision_at_1 value: 62.166 - type: precision_at_10 value: 9.716 - type: precision_at_100 value: 1.065 - type: precision_at_1000 value: 0.11 - type: precision_at_3 value: 28.278 - type: precision_at_5 value: 18.343999999999998 - type: recall_at_1 value: 58.092 - type: recall_at_10 value: 88.73400000000001 - type: recall_at_100 value: 95.195 - type: recall_at_1000 value: 97.04599999999999 - type: recall_at_3 value: 78.45 - type: recall_at_5 value: 84.316 - task: type: Retrieval dataset: name: MTEB FiQA2018 type: fiqa config: default split: test revision: None metrics: - type: map_at_1 value: 16.649 - type: map_at_10 value: 26.457000000000004 - type: map_at_100 value: 28.169 - type: map_at_1000 value: 28.352 - type: map_at_3 value: 23.305 - type: map_at_5 value: 25.169000000000004 - type: mrr_at_1 value: 32.407000000000004 - type: mrr_at_10 value: 40.922 - type: mrr_at_100 value: 41.931000000000004 - type: mrr_at_1000 value: 41.983 - type: mrr_at_3 value: 38.786 - type: mrr_at_5 value: 
40.205999999999996 - type: ndcg_at_1 value: 32.407000000000004 - type: ndcg_at_10 value: 33.314 - type: ndcg_at_100 value: 40.312 - type: ndcg_at_1000 value: 43.685 - type: ndcg_at_3 value: 30.391000000000002 - type: ndcg_at_5 value: 31.525 - type: precision_at_1 value: 32.407000000000004 - type: precision_at_10 value: 8.966000000000001 - type: precision_at_100 value: 1.6019999999999999 - type: precision_at_1000 value: 0.22200000000000003 - type: precision_at_3 value: 20.165 - type: precision_at_5 value: 14.722 - type: recall_at_1 value: 16.649 - type: recall_at_10 value: 39.117000000000004 - type: recall_at_100 value: 65.726 - type: recall_at_1000 value: 85.784 - type: recall_at_3 value: 27.914 - type: recall_at_5 value: 33.289 - task: type: Retrieval dataset: name: MTEB HotpotQA type: hotpotqa config: default split: test revision: None metrics: - type: map_at_1 value: 36.253 - type: map_at_10 value: 56.16799999999999 - type: map_at_100 value: 57.06099999999999 - type: map_at_1000 value: 57.126 - type: map_at_3 value: 52.644999999999996 - type: map_at_5 value: 54.909 - type: mrr_at_1 value: 72.505 - type: mrr_at_10 value: 79.66 - type: mrr_at_100 value: 79.869 - type: mrr_at_1000 value: 79.88 - type: mrr_at_3 value: 78.411 - type: mrr_at_5 value: 79.19800000000001 - type: ndcg_at_1 value: 72.505 - type: ndcg_at_10 value: 65.094 - type: ndcg_at_100 value: 68.219 - type: ndcg_at_1000 value: 69.515 - type: ndcg_at_3 value: 59.99 - type: ndcg_at_5 value: 62.909000000000006 - type: precision_at_1 value: 72.505 - type: precision_at_10 value: 13.749 - type: precision_at_100 value: 1.619 - type: precision_at_1000 value: 0.179 - type: precision_at_3 value: 38.357 - type: precision_at_5 value: 25.313000000000002 - type: recall_at_1 value: 36.253 - type: recall_at_10 value: 68.744 - type: recall_at_100 value: 80.925 - type: recall_at_1000 value: 89.534 - type: recall_at_3 value: 57.535000000000004 - type: recall_at_5 value: 63.282000000000004 - task: type: Classification dataset: name: MTEB ImdbClassification type: mteb/imdb config: default split: test revision: 3d86128a09e091d6018b6d26cad27f2739fc2db7 metrics: - type: accuracy value: 80.82239999999999 - type: ap value: 75.65895781725314 - type: f1 value: 80.75880969095746 - task: type: Retrieval dataset: name: MTEB MSMARCO type: msmarco config: default split: dev revision: None metrics: - type: map_at_1 value: 21.624 - type: map_at_10 value: 34.075 - type: map_at_100 value: 35.229 - type: map_at_1000 value: 35.276999999999994 - type: map_at_3 value: 30.245 - type: map_at_5 value: 32.42 - type: mrr_at_1 value: 22.264 - type: mrr_at_10 value: 34.638000000000005 - type: mrr_at_100 value: 35.744 - type: mrr_at_1000 value: 35.787 - type: mrr_at_3 value: 30.891000000000002 - type: mrr_at_5 value: 33.042 - type: ndcg_at_1 value: 22.264 - type: ndcg_at_10 value: 40.991 - type: ndcg_at_100 value: 46.563 - type: ndcg_at_1000 value: 47.743 - type: ndcg_at_3 value: 33.198 - type: ndcg_at_5 value: 37.069 - type: precision_at_1 value: 22.264 - type: precision_at_10 value: 6.5089999999999995 - type: precision_at_100 value: 0.9299999999999999 - type: precision_at_1000 value: 0.10300000000000001 - type: precision_at_3 value: 14.216999999999999 - type: precision_at_5 value: 10.487 - type: recall_at_1 value: 21.624 - type: recall_at_10 value: 62.303 - type: recall_at_100 value: 88.124 - type: recall_at_1000 value: 97.08 - type: recall_at_3 value: 41.099999999999994 - type: recall_at_5 value: 50.381 - task: type: Classification dataset: name: MTEB 
MTOPDomainClassification (en) type: mteb/mtop_domain config: en split: test revision: d80d48c1eb48d3562165c59d59d0034df9fff0bf metrics: - type: accuracy value: 91.06703146374831 - type: f1 value: 90.86867815863172 - task: type: Classification dataset: name: MTEB MTOPDomainClassification (de) type: mteb/mtop_domain config: de split: test revision: d80d48c1eb48d3562165c59d59d0034df9fff0bf metrics: - type: accuracy value: 87.46970977740209 - type: f1 value: 86.36832872036588 - task: type: Classification dataset: name: MTEB MTOPDomainClassification (es) type: mteb/mtop_domain config: es split: test revision: d80d48c1eb48d3562165c59d59d0034df9fff0bf metrics: - type: accuracy value: 89.26951300867245 - type: f1 value: 88.93561193959502 - task: type: Classification dataset: name: MTEB MTOPDomainClassification (fr) type: mteb/mtop_domain config: fr split: test revision: d80d48c1eb48d3562165c59d59d0034df9fff0bf metrics: - type: accuracy value: 84.22799874725963 - type: f1 value: 84.30490069236556 - task: type: Classification dataset: name: MTEB MTOPDomainClassification (hi) type: mteb/mtop_domain config: hi split: test revision: d80d48c1eb48d3562165c59d59d0034df9fff0bf metrics: - type: accuracy value: 86.02007888131948 - type: f1 value: 85.39376041027991 - task: type: Classification dataset: name: MTEB MTOPDomainClassification (th) type: mteb/mtop_domain config: th split: test revision: d80d48c1eb48d3562165c59d59d0034df9fff0bf metrics: - type: accuracy value: 85.34900542495481 - type: f1 value: 85.39859673336713 - task: type: Classification dataset: name: MTEB MTOPIntentClassification (en) type: mteb/mtop_intent config: en split: test revision: ae001d0e6b1228650b7bd1c2c65fb50ad11a8aba metrics: - type: accuracy value: 71.078431372549 - type: f1 value: 53.45071102002276 - task: type: Classification dataset: name: MTEB MTOPIntentClassification (de) type: mteb/mtop_intent config: de split: test revision: ae001d0e6b1228650b7bd1c2c65fb50ad11a8aba metrics: - type: accuracy value: 65.85798816568047 - type: f1 value: 46.53112748993529 - task: type: Classification dataset: name: MTEB MTOPIntentClassification (es) type: mteb/mtop_intent config: es split: test revision: ae001d0e6b1228650b7bd1c2c65fb50ad11a8aba metrics: - type: accuracy value: 67.96864576384256 - type: f1 value: 45.966703022829506 - task: type: Classification dataset: name: MTEB MTOPIntentClassification (fr) type: mteb/mtop_intent config: fr split: test revision: ae001d0e6b1228650b7bd1c2c65fb50ad11a8aba metrics: - type: accuracy value: 61.31537738803633 - type: f1 value: 45.52601712835461 - task: type: Classification dataset: name: MTEB MTOPIntentClassification (hi) type: mteb/mtop_intent config: hi split: test revision: ae001d0e6b1228650b7bd1c2c65fb50ad11a8aba metrics: - type: accuracy value: 66.29616349946218 - type: f1 value: 47.24166485726613 - task: type: Classification dataset: name: MTEB MTOPIntentClassification (th) type: mteb/mtop_intent config: th split: test revision: ae001d0e6b1228650b7bd1c2c65fb50ad11a8aba metrics: - type: accuracy value: 67.51537070524412 - type: f1 value: 49.463476319014276 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (af) type: mteb/amazon_massive_intent config: af split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 57.06792199058508 - type: f1 value: 54.094921857502285 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (am) type: mteb/amazon_massive_intent config: am split: test revision: 
31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 51.960322797579025 - type: f1 value: 48.547371223370945 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (ar) type: mteb/amazon_massive_intent config: ar split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 54.425016812373904 - type: f1 value: 50.47069202054312 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (az) type: mteb/amazon_massive_intent config: az split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 59.798251513113655 - type: f1 value: 57.05013069086648 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (bn) type: mteb/amazon_massive_intent config: bn split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 59.37794216543376 - type: f1 value: 56.3607992649805 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (cy) type: mteb/amazon_massive_intent config: cy split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 46.56018829858777 - type: f1 value: 43.87319715715134 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (da) type: mteb/amazon_massive_intent config: da split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 62.9724277067922 - type: f1 value: 59.36480066245562 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (de) type: mteb/amazon_massive_intent config: de split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 62.72696704774715 - type: f1 value: 59.143595966615855 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (el) type: mteb/amazon_massive_intent config: el split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 61.5971755211836 - type: f1 value: 59.169445724946726 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (en) type: mteb/amazon_massive_intent config: en split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 70.29589778076665 - type: f1 value: 67.7577001808977 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (es) type: mteb/amazon_massive_intent config: es split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 66.31136516476126 - type: f1 value: 64.52032955983242 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (fa) type: mteb/amazon_massive_intent config: fa split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 65.54472091459314 - type: f1 value: 61.47903120066317 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (fi) type: mteb/amazon_massive_intent config: fi split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 61.45595158036314 - type: f1 value: 58.0891846024637 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (fr) type: mteb/amazon_massive_intent config: fr split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 65.47074646940149 - type: f1 value: 62.84830858877575 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (he) type: 
mteb/amazon_massive_intent config: he split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 58.046402151983855 - type: f1 value: 55.269074430533195 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (hi) type: mteb/amazon_massive_intent config: hi split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 64.06523201075991 - type: f1 value: 61.35339643021369 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (hu) type: mteb/amazon_massive_intent config: hu split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 60.954942837928726 - type: f1 value: 57.07035922704846 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (hy) type: mteb/amazon_massive_intent config: hy split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 57.404169468728995 - type: f1 value: 53.94259011839138 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (id) type: mteb/amazon_massive_intent config: id split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 64.16610625420309 - type: f1 value: 61.337103431499365 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (is) type: mteb/amazon_massive_intent config: is split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 52.262945527908535 - type: f1 value: 49.7610691598921 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (it) type: mteb/amazon_massive_intent config: it split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 65.54472091459314 - type: f1 value: 63.469099018440154 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (ja) type: mteb/amazon_massive_intent config: ja split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 68.22797579018157 - type: f1 value: 64.89098471083001 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (jv) type: mteb/amazon_massive_intent config: jv split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 50.847343644922674 - type: f1 value: 47.8536963168393 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (ka) type: mteb/amazon_massive_intent config: ka split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 48.45326160053799 - type: f1 value: 46.370078045805556 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (km) type: mteb/amazon_massive_intent config: km split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 42.83120376597175 - type: f1 value: 39.68948521599982 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (kn) type: mteb/amazon_massive_intent config: kn split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 57.5084061869536 - type: f1 value: 53.961876160401545 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (ko) type: mteb/amazon_massive_intent config: ko split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 63.7895090786819 - type: f1 value: 61.134223684676 - task: type: Classification dataset: name: MTEB 
MassiveIntentClassification (lv) type: mteb/amazon_massive_intent config: lv split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 54.98991257565569 - type: f1 value: 52.579862862826296 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (ml) type: mteb/amazon_massive_intent config: ml split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 61.90316072629456 - type: f1 value: 58.203024538290336 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (mn) type: mteb/amazon_massive_intent config: mn split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 57.09818426361802 - type: f1 value: 54.22718458445455 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (ms) type: mteb/amazon_massive_intent config: ms split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 58.991257565568255 - type: f1 value: 55.84892781767421 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (my) type: mteb/amazon_massive_intent config: my split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 55.901143241425686 - type: f1 value: 52.25264332199797 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (nb) type: mteb/amazon_massive_intent config: nb split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 61.96368527236047 - type: f1 value: 58.927243876153454 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (nl) type: mteb/amazon_massive_intent config: nl split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 65.64223268325489 - type: f1 value: 62.340453718379706 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (pl) type: mteb/amazon_massive_intent config: pl split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 64.52589105581708 - type: f1 value: 61.661113187022174 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (pt) type: mteb/amazon_massive_intent config: pt split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 66.84599865501009 - type: f1 value: 64.59342572873005 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (ro) type: mteb/amazon_massive_intent config: ro split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 60.81035642232684 - type: f1 value: 57.5169089806797 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (ru) type: mteb/amazon_massive_intent config: ru split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 65.75991930060525 - type: f1 value: 62.89531115787938 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (sl) type: mteb/amazon_massive_intent config: sl split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 56.51647612642906 - type: f1 value: 54.33154780100043 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (sq) type: mteb/amazon_massive_intent config: sq split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 57.985877605917956 - type: f1 value: 54.46187524463802 - task: 
type: Classification dataset: name: MTEB MassiveIntentClassification (sv) type: mteb/amazon_massive_intent config: sv split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 65.03026227303296 - type: f1 value: 62.34377392877748 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (sw) type: mteb/amazon_massive_intent config: sw split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 53.567585743106925 - type: f1 value: 50.73770655983206 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (ta) type: mteb/amazon_massive_intent config: ta split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 57.2595830531271 - type: f1 value: 53.657327291708626 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (te) type: mteb/amazon_massive_intent config: te split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 57.82784129119032 - type: f1 value: 54.82518072665301 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (th) type: mteb/amazon_massive_intent config: th split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 64.06859448554137 - type: f1 value: 63.00185280500495 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (tl) type: mteb/amazon_massive_intent config: tl split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 58.91055817081371 - type: f1 value: 55.54116301224262 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (tr) type: mteb/amazon_massive_intent config: tr split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 63.54404841963686 - type: f1 value: 59.57650946030184 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (ur) type: mteb/amazon_massive_intent config: ur split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 59.27706792199059 - type: f1 value: 56.50010066083435 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (vi) type: mteb/amazon_massive_intent config: vi split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 64.0719569603228 - type: f1 value: 61.817075925647956 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (zh-CN) type: mteb/amazon_massive_intent config: zh-CN split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 68.23806321452591 - type: f1 value: 65.24917026029749 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (zh-TW) type: mteb/amazon_massive_intent config: zh-TW split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 62.53530598520511 - type: f1 value: 61.71131132295768 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (af) type: mteb/amazon_massive_scenario config: af split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 63.04303967720243 - type: f1 value: 60.3950085685985 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (am) type: mteb/amazon_massive_scenario config: am split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 
56.83591123066578 - type: f1 value: 54.95059828830849 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (ar) type: mteb/amazon_massive_scenario config: ar split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 59.62340282447881 - type: f1 value: 59.525159996498225 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (az) type: mteb/amazon_massive_scenario config: az split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 60.85406859448555 - type: f1 value: 59.129299095681276 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (bn) type: mteb/amazon_massive_scenario config: bn split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 62.76731674512441 - type: f1 value: 61.159560612627715 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (cy) type: mteb/amazon_massive_scenario config: cy split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 50.181573638197705 - type: f1 value: 46.98422176289957 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (da) type: mteb/amazon_massive_scenario config: da split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 68.92737054472092 - type: f1 value: 67.69135611952979 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (de) type: mteb/amazon_massive_scenario config: de split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 69.18964357767318 - type: f1 value: 68.46106138186214 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (el) type: mteb/amazon_massive_scenario config: el split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 67.0712844653665 - type: f1 value: 66.75545422473901 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (en) type: mteb/amazon_massive_scenario config: en split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 74.4754539340955 - type: f1 value: 74.38427146553252 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (es) type: mteb/amazon_massive_scenario config: es split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 69.82515131136518 - type: f1 value: 69.63516462173847 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (fa) type: mteb/amazon_massive_scenario config: fa split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 68.70880968392737 - type: f1 value: 67.45420662567926 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (fi) type: mteb/amazon_massive_scenario config: fi split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 65.95494283792871 - type: f1 value: 65.06191009049222 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (fr) type: mteb/amazon_massive_scenario config: fr split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 68.75924680564896 - type: f1 value: 68.30833379585945 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (he) type: mteb/amazon_massive_scenario config: he split: 
test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 63.806321452589096 - type: f1 value: 63.273048243765054 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (hi) type: mteb/amazon_massive_scenario config: hi split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 67.68997982515133 - type: f1 value: 66.54703855381324 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (hu) type: mteb/amazon_massive_scenario config: hu split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 66.46940147948891 - type: f1 value: 65.91017343463396 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (hy) type: mteb/amazon_massive_scenario config: hy split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 59.49899125756556 - type: f1 value: 57.90333469917769 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (id) type: mteb/amazon_massive_scenario config: id split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 67.9219905850706 - type: f1 value: 67.23169403762938 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (is) type: mteb/amazon_massive_scenario config: is split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 56.486213853396094 - type: f1 value: 54.85282355583758 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (it) type: mteb/amazon_massive_scenario config: it split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 69.04169468728985 - type: f1 value: 68.83833333320462 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (ja) type: mteb/amazon_massive_scenario config: ja split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 73.88702084734365 - type: f1 value: 74.04474735232299 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (jv) type: mteb/amazon_massive_scenario config: jv split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 56.63416274377943 - type: f1 value: 55.11332211687954 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (ka) type: mteb/amazon_massive_scenario config: ka split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 52.23604572965702 - type: f1 value: 50.86529813991055 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (km) type: mteb/amazon_massive_scenario config: km split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 46.62407531943511 - type: f1 value: 43.63485467164535 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (kn) type: mteb/amazon_massive_scenario config: kn split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 59.15601882985878 - type: f1 value: 57.522837510959924 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (ko) type: mteb/amazon_massive_scenario config: ko split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 69.84532616005382 - type: f1 value: 69.60021127179697 - task: type: Classification dataset: name: MTEB 
MassiveScenarioClassification (lv) type: mteb/amazon_massive_scenario config: lv split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 56.65770006724949 - type: f1 value: 55.84219135523227 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (ml) type: mteb/amazon_massive_scenario config: ml split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 66.53665097511768 - type: f1 value: 65.09087787792639 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (mn) type: mteb/amazon_massive_scenario config: mn split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 59.31405514458642 - type: f1 value: 58.06135303831491 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (ms) type: mteb/amazon_massive_scenario config: ms split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 64.88231338264964 - type: f1 value: 62.751099407787926 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (my) type: mteb/amazon_massive_scenario config: my split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 58.86012104909213 - type: f1 value: 56.29118323058282 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (nb) type: mteb/amazon_massive_scenario config: nb split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 67.37390719569602 - type: f1 value: 66.27922244885102 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (nl) type: mteb/amazon_massive_scenario config: nl split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 70.8675184936113 - type: f1 value: 70.22146529932019 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (pl) type: mteb/amazon_massive_scenario config: pl split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 68.2212508406187 - type: f1 value: 67.77454802056282 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (pt) type: mteb/amazon_massive_scenario config: pt split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 68.18090114324143 - type: f1 value: 68.03737625431621 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (ro) type: mteb/amazon_massive_scenario config: ro split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 64.65030262273034 - type: f1 value: 63.792945486912856 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (ru) type: mteb/amazon_massive_scenario config: ru split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 69.48217888365838 - type: f1 value: 69.96028997292197 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (sl) type: mteb/amazon_massive_scenario config: sl split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 60.17821116341627 - type: f1 value: 59.3935969827171 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (sq) type: mteb/amazon_massive_scenario config: sq split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 62.86146603900471 
- type: f1 value: 60.133692735032376 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (sv) type: mteb/amazon_massive_scenario config: sv split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 70.89441829186282 - type: f1 value: 70.03064076194089 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (sw) type: mteb/amazon_massive_scenario config: sw split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 58.15063887020847 - type: f1 value: 56.23326278499678 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (ta) type: mteb/amazon_massive_scenario config: ta split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 59.43846671149966 - type: f1 value: 57.70440450281974 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (te) type: mteb/amazon_massive_scenario config: te split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 60.8507061197041 - type: f1 value: 59.22916396061171 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (th) type: mteb/amazon_massive_scenario config: th split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 70.65568258238063 - type: f1 value: 69.90736239440633 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (tl) type: mteb/amazon_massive_scenario config: tl split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 60.8843308675185 - type: f1 value: 59.30332663713599 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (tr) type: mteb/amazon_massive_scenario config: tr split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 68.05312710154674 - type: f1 value: 67.44024062594775 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (ur) type: mteb/amazon_massive_scenario config: ur split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 62.111634162743776 - type: f1 value: 60.89083013084519 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (vi) type: mteb/amazon_massive_scenario config: vi split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 67.44115669132482 - type: f1 value: 67.92227541674552 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (zh-CN) type: mteb/amazon_massive_scenario config: zh-CN split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 74.4687289845326 - type: f1 value: 74.16376793486025 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (zh-TW) type: mteb/amazon_massive_scenario config: zh-TW split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 68.31876260928043 - type: f1 value: 68.5246745215607 - task: type: Clustering dataset: name: MTEB MedrxivClusteringP2P type: mteb/medrxiv-clustering-p2p config: default split: test revision: e7a26af6f3ae46b30dde8737f02c07b1505bcc73 metrics: - type: v_measure value: 30.90431696479766 - task: type: Clustering dataset: name: MTEB MedrxivClusteringS2S type: mteb/medrxiv-clustering-s2s config: default split: test revision: 35191c8c0dca72d8ff3efcd72aa802307d469663 metrics: - type: 
v_measure value: 27.259158476693774 - task: type: Reranking dataset: name: MTEB MindSmallReranking type: mteb/mind_small config: default split: test revision: 3bdac13927fdc888b903db93b2ffdbd90b295a69 metrics: - type: map value: 30.28445330838555 - type: mrr value: 31.15758529581164 - task: type: Retrieval dataset: name: MTEB NFCorpus type: nfcorpus config: default split: test revision: None metrics: - type: map_at_1 value: 5.353 - type: map_at_10 value: 11.565 - type: map_at_100 value: 14.097000000000001 - type: map_at_1000 value: 15.354999999999999 - type: map_at_3 value: 8.749 - type: map_at_5 value: 9.974 - type: mrr_at_1 value: 42.105 - type: mrr_at_10 value: 50.589 - type: mrr_at_100 value: 51.187000000000005 - type: mrr_at_1000 value: 51.233 - type: mrr_at_3 value: 48.246 - type: mrr_at_5 value: 49.546 - type: ndcg_at_1 value: 40.402 - type: ndcg_at_10 value: 31.009999999999998 - type: ndcg_at_100 value: 28.026 - type: ndcg_at_1000 value: 36.905 - type: ndcg_at_3 value: 35.983 - type: ndcg_at_5 value: 33.764 - type: precision_at_1 value: 42.105 - type: precision_at_10 value: 22.786 - type: precision_at_100 value: 6.916 - type: precision_at_1000 value: 1.981 - type: precision_at_3 value: 33.333 - type: precision_at_5 value: 28.731 - type: recall_at_1 value: 5.353 - type: recall_at_10 value: 15.039 - type: recall_at_100 value: 27.348 - type: recall_at_1000 value: 59.453 - type: recall_at_3 value: 9.792 - type: recall_at_5 value: 11.882 - task: type: Retrieval dataset: name: MTEB NQ type: nq config: default split: test revision: None metrics: - type: map_at_1 value: 33.852 - type: map_at_10 value: 48.924 - type: map_at_100 value: 49.854 - type: map_at_1000 value: 49.886 - type: map_at_3 value: 44.9 - type: map_at_5 value: 47.387 - type: mrr_at_1 value: 38.035999999999994 - type: mrr_at_10 value: 51.644 - type: mrr_at_100 value: 52.339 - type: mrr_at_1000 value: 52.35999999999999 - type: mrr_at_3 value: 48.421 - type: mrr_at_5 value: 50.468999999999994 - type: ndcg_at_1 value: 38.007000000000005 - type: ndcg_at_10 value: 56.293000000000006 - type: ndcg_at_100 value: 60.167 - type: ndcg_at_1000 value: 60.916000000000004 - type: ndcg_at_3 value: 48.903999999999996 - type: ndcg_at_5 value: 52.978 - type: precision_at_1 value: 38.007000000000005 - type: precision_at_10 value: 9.041 - type: precision_at_100 value: 1.1199999999999999 - type: precision_at_1000 value: 0.11900000000000001 - type: precision_at_3 value: 22.084 - type: precision_at_5 value: 15.608 - type: recall_at_1 value: 33.852 - type: recall_at_10 value: 75.893 - type: recall_at_100 value: 92.589 - type: recall_at_1000 value: 98.153 - type: recall_at_3 value: 56.969 - type: recall_at_5 value: 66.283 - task: type: Retrieval dataset: name: MTEB QuoraRetrieval type: quora config: default split: test revision: None metrics: - type: map_at_1 value: 69.174 - type: map_at_10 value: 82.891 - type: map_at_100 value: 83.545 - type: map_at_1000 value: 83.56700000000001 - type: map_at_3 value: 79.944 - type: map_at_5 value: 81.812 - type: mrr_at_1 value: 79.67999999999999 - type: mrr_at_10 value: 86.279 - type: mrr_at_100 value: 86.39 - type: mrr_at_1000 value: 86.392 - type: mrr_at_3 value: 85.21 - type: mrr_at_5 value: 85.92999999999999 - type: ndcg_at_1 value: 79.69000000000001 - type: ndcg_at_10 value: 86.929 - type: ndcg_at_100 value: 88.266 - type: ndcg_at_1000 value: 88.428 - type: ndcg_at_3 value: 83.899 - type: ndcg_at_5 value: 85.56700000000001 - type: precision_at_1 value: 79.69000000000001 - type: precision_at_10 value: 
13.161000000000001 - type: precision_at_100 value: 1.513 - type: precision_at_1000 value: 0.156 - type: precision_at_3 value: 36.603 - type: precision_at_5 value: 24.138 - type: recall_at_1 value: 69.174 - type: recall_at_10 value: 94.529 - type: recall_at_100 value: 99.15 - type: recall_at_1000 value: 99.925 - type: recall_at_3 value: 85.86200000000001 - type: recall_at_5 value: 90.501 - task: type: Clustering dataset: name: MTEB RedditClustering type: mteb/reddit-clustering config: default split: test revision: 24640382cdbf8abc73003fb0fa6d111a705499eb metrics: - type: v_measure value: 39.13064340585255 - task: type: Clustering dataset: name: MTEB RedditClusteringP2P type: mteb/reddit-clustering-p2p config: default split: test revision: 282350215ef01743dc01b456c7f5241fa8937f16 metrics: - type: v_measure value: 58.97884249325877 - task: type: Retrieval dataset: name: MTEB SCIDOCS type: scidocs config: default split: test revision: None metrics: - type: map_at_1 value: 3.4680000000000004 - type: map_at_10 value: 7.865 - type: map_at_100 value: 9.332 - type: map_at_1000 value: 9.587 - type: map_at_3 value: 5.800000000000001 - type: map_at_5 value: 6.8790000000000004 - type: mrr_at_1 value: 17.0 - type: mrr_at_10 value: 25.629 - type: mrr_at_100 value: 26.806 - type: mrr_at_1000 value: 26.889000000000003 - type: mrr_at_3 value: 22.8 - type: mrr_at_5 value: 24.26 - type: ndcg_at_1 value: 17.0 - type: ndcg_at_10 value: 13.895 - type: ndcg_at_100 value: 20.491999999999997 - type: ndcg_at_1000 value: 25.759999999999998 - type: ndcg_at_3 value: 13.347999999999999 - type: ndcg_at_5 value: 11.61 - type: precision_at_1 value: 17.0 - type: precision_at_10 value: 7.090000000000001 - type: precision_at_100 value: 1.669 - type: precision_at_1000 value: 0.294 - type: precision_at_3 value: 12.3 - type: precision_at_5 value: 10.02 - type: recall_at_1 value: 3.4680000000000004 - type: recall_at_10 value: 14.363000000000001 - type: recall_at_100 value: 33.875 - type: recall_at_1000 value: 59.711999999999996 - type: recall_at_3 value: 7.483 - type: recall_at_5 value: 10.173 - task: type: STS dataset: name: MTEB SICK-R type: mteb/sickr-sts config: default split: test revision: a6ea5a8cab320b040a23452cc28066d9beae2cee metrics: - type: cos_sim_pearson value: 83.04084311714061 - type: cos_sim_spearman value: 77.51342467443078 - type: euclidean_pearson value: 80.0321166028479 - type: euclidean_spearman value: 77.29249114733226 - type: manhattan_pearson value: 80.03105964262431 - type: manhattan_spearman value: 77.22373689514794 - task: type: STS dataset: name: MTEB STS12 type: mteb/sts12-sts config: default split: test revision: a0d554a64d88156834ff5ae9920b964011b16384 metrics: - type: cos_sim_pearson value: 84.1680158034387 - type: cos_sim_spearman value: 76.55983344071117 - type: euclidean_pearson value: 79.75266678300143 - type: euclidean_spearman value: 75.34516823467025 - type: manhattan_pearson value: 79.75959151517357 - type: manhattan_spearman value: 75.42330344141912 - task: type: STS dataset: name: MTEB STS13 type: mteb/sts13-sts config: default split: test revision: 7e90230a92c190f1bf69ae9002b8cea547a64cca metrics: - type: cos_sim_pearson value: 76.48898993209346 - type: cos_sim_spearman value: 76.96954120323366 - type: euclidean_pearson value: 76.94139109279668 - type: euclidean_spearman value: 76.85860283201711 - type: manhattan_pearson value: 76.6944095091912 - type: manhattan_spearman value: 76.61096912972553 - task: type: STS dataset: name: MTEB STS14 type: mteb/sts14-sts config: default split: test 
revision: 6031580fec1f6af667f0bd2da0a551cf4f0b2375 metrics: - type: cos_sim_pearson value: 77.85082366246944 - type: cos_sim_spearman value: 75.52053350101731 - type: euclidean_pearson value: 77.1165845070926 - type: euclidean_spearman value: 75.31216065884388 - type: manhattan_pearson value: 77.06193941833494 - type: manhattan_spearman value: 75.31003701700112 - task: type: STS dataset: name: MTEB STS15 type: mteb/sts15-sts config: default split: test revision: ae752c7c21bf194d8b67fd573edf7ae58183cbe3 metrics: - type: cos_sim_pearson value: 86.36305246526497 - type: cos_sim_spearman value: 87.11704613927415 - type: euclidean_pearson value: 86.04199125810939 - type: euclidean_spearman value: 86.51117572414263 - type: manhattan_pearson value: 86.0805106816633 - type: manhattan_spearman value: 86.52798366512229 - task: type: STS dataset: name: MTEB STS16 type: mteb/sts16-sts config: default split: test revision: 4d8694f8f0e0100860b497b999b3dbed754a0513 metrics: - type: cos_sim_pearson value: 82.18536255599724 - type: cos_sim_spearman value: 83.63377151025418 - type: euclidean_pearson value: 83.24657467993141 - type: euclidean_spearman value: 84.02751481993825 - type: manhattan_pearson value: 83.11941806582371 - type: manhattan_spearman value: 83.84251281019304 - task: type: STS dataset: name: MTEB STS17 (ko-ko) type: mteb/sts17-crosslingual-sts config: ko-ko split: test revision: af5e6fb845001ecf41f4c1e033ce921939a2a68d metrics: - type: cos_sim_pearson value: 78.95816528475514 - type: cos_sim_spearman value: 78.86607380120462 - type: euclidean_pearson value: 78.51268699230545 - type: euclidean_spearman value: 79.11649316502229 - type: manhattan_pearson value: 78.32367302808157 - type: manhattan_spearman value: 78.90277699624637 - task: type: STS dataset: name: MTEB STS17 (ar-ar) type: mteb/sts17-crosslingual-sts config: ar-ar split: test revision: af5e6fb845001ecf41f4c1e033ce921939a2a68d metrics: - type: cos_sim_pearson value: 72.89126914997624 - type: cos_sim_spearman value: 73.0296921832678 - type: euclidean_pearson value: 71.50385903677738 - type: euclidean_spearman value: 73.13368899716289 - type: manhattan_pearson value: 71.47421463379519 - type: manhattan_spearman value: 73.03383242946575 - task: type: STS dataset: name: MTEB STS17 (en-ar) type: mteb/sts17-crosslingual-sts config: en-ar split: test revision: af5e6fb845001ecf41f4c1e033ce921939a2a68d metrics: - type: cos_sim_pearson value: 59.22923684492637 - type: cos_sim_spearman value: 57.41013211368396 - type: euclidean_pearson value: 61.21107388080905 - type: euclidean_spearman value: 60.07620768697254 - type: manhattan_pearson value: 59.60157142786555 - type: manhattan_spearman value: 59.14069604103739 - task: type: STS dataset: name: MTEB STS17 (en-de) type: mteb/sts17-crosslingual-sts config: en-de split: test revision: af5e6fb845001ecf41f4c1e033ce921939a2a68d metrics: - type: cos_sim_pearson value: 76.24345978774299 - type: cos_sim_spearman value: 77.24225743830719 - type: euclidean_pearson value: 76.66226095469165 - type: euclidean_spearman value: 77.60708820493146 - type: manhattan_pearson value: 76.05303324760429 - type: manhattan_spearman value: 76.96353149912348 - task: type: STS dataset: name: MTEB STS17 (en-en) type: mteb/sts17-crosslingual-sts config: en-en split: test revision: af5e6fb845001ecf41f4c1e033ce921939a2a68d metrics: - type: cos_sim_pearson value: 85.50879160160852 - type: cos_sim_spearman value: 86.43594662965224 - type: euclidean_pearson value: 86.06846012826577 - type: euclidean_spearman value: 
86.02041395794136 - type: manhattan_pearson value: 86.10916255616904 - type: manhattan_spearman value: 86.07346068198953 - task: type: STS dataset: name: MTEB STS17 (en-tr) type: mteb/sts17-crosslingual-sts config: en-tr split: test revision: af5e6fb845001ecf41f4c1e033ce921939a2a68d metrics: - type: cos_sim_pearson value: 58.39803698977196 - type: cos_sim_spearman value: 55.96910950423142 - type: euclidean_pearson value: 58.17941175613059 - type: euclidean_spearman value: 55.03019330522745 - type: manhattan_pearson value: 57.333358138183286 - type: manhattan_spearman value: 54.04614023149965 - task: type: STS dataset: name: MTEB STS17 (es-en) type: mteb/sts17-crosslingual-sts config: es-en split: test revision: af5e6fb845001ecf41f4c1e033ce921939a2a68d metrics: - type: cos_sim_pearson value: 70.98304089637197 - type: cos_sim_spearman value: 72.44071656215888 - type: euclidean_pearson value: 72.19224359033983 - type: euclidean_spearman value: 73.89871188913025 - type: manhattan_pearson value: 71.21098311547406 - type: manhattan_spearman value: 72.93405764824821 - task: type: STS dataset: name: MTEB STS17 (es-es) type: mteb/sts17-crosslingual-sts config: es-es split: test revision: af5e6fb845001ecf41f4c1e033ce921939a2a68d metrics: - type: cos_sim_pearson value: 85.99792397466308 - type: cos_sim_spearman value: 84.83824377879495 - type: euclidean_pearson value: 85.70043288694438 - type: euclidean_spearman value: 84.70627558703686 - type: manhattan_pearson value: 85.89570850150801 - type: manhattan_spearman value: 84.95806105313007 - task: type: STS dataset: name: MTEB STS17 (fr-en) type: mteb/sts17-crosslingual-sts config: fr-en split: test revision: af5e6fb845001ecf41f4c1e033ce921939a2a68d metrics: - type: cos_sim_pearson value: 72.21850322994712 - type: cos_sim_spearman value: 72.28669398117248 - type: euclidean_pearson value: 73.40082510412948 - type: euclidean_spearman value: 73.0326539281865 - type: manhattan_pearson value: 71.8659633964841 - type: manhattan_spearman value: 71.57817425823303 - task: type: STS dataset: name: MTEB STS17 (it-en) type: mteb/sts17-crosslingual-sts config: it-en split: test revision: af5e6fb845001ecf41f4c1e033ce921939a2a68d metrics: - type: cos_sim_pearson value: 75.80921368595645 - type: cos_sim_spearman value: 77.33209091229315 - type: euclidean_pearson value: 76.53159540154829 - type: euclidean_spearman value: 78.17960842810093 - type: manhattan_pearson value: 76.13530186637601 - type: manhattan_spearman value: 78.00701437666875 - task: type: STS dataset: name: MTEB STS17 (nl-en) type: mteb/sts17-crosslingual-sts config: nl-en split: test revision: af5e6fb845001ecf41f4c1e033ce921939a2a68d metrics: - type: cos_sim_pearson value: 74.74980608267349 - type: cos_sim_spearman value: 75.37597374318821 - type: euclidean_pearson value: 74.90506081911661 - type: euclidean_spearman value: 75.30151613124521 - type: manhattan_pearson value: 74.62642745918002 - type: manhattan_spearman value: 75.18619716592303 - task: type: STS dataset: name: MTEB STS22 (en) type: mteb/sts22-crosslingual-sts config: en split: test revision: 6d1ba47164174a496b7fa5d3569dae26a6813b80 metrics: - type: cos_sim_pearson value: 59.632662289205584 - type: cos_sim_spearman value: 60.938543391610914 - type: euclidean_pearson value: 62.113200529767056 - type: euclidean_spearman value: 61.410312633261164 - type: manhattan_pearson value: 61.75494698945686 - type: manhattan_spearman value: 60.92726195322362 - task: type: STS dataset: name: MTEB STS22 (de) type: mteb/sts22-crosslingual-sts config: de 
split: test revision: 6d1ba47164174a496b7fa5d3569dae26a6813b80 metrics: - type: cos_sim_pearson value: 45.283470551557244 - type: cos_sim_spearman value: 53.44833015864201 - type: euclidean_pearson value: 41.17892011120893 - type: euclidean_spearman value: 53.81441383126767 - type: manhattan_pearson value: 41.17482200420659 - type: manhattan_spearman value: 53.82180269276363 - task: type: STS dataset: name: MTEB STS22 (es) type: mteb/sts22-crosslingual-sts config: es split: test revision: 6d1ba47164174a496b7fa5d3569dae26a6813b80 metrics: - type: cos_sim_pearson value: 60.5069165306236 - type: cos_sim_spearman value: 66.87803259033826 - type: euclidean_pearson value: 63.5428979418236 - type: euclidean_spearman value: 66.9293576586897 - type: manhattan_pearson value: 63.59789526178922 - type: manhattan_spearman value: 66.86555009875066 - task: type: STS dataset: name: MTEB STS22 (pl) type: mteb/sts22-crosslingual-sts config: pl split: test revision: 6d1ba47164174a496b7fa5d3569dae26a6813b80 metrics: - type: cos_sim_pearson value: 28.23026196280264 - type: cos_sim_spearman value: 35.79397812652861 - type: euclidean_pearson value: 17.828102102767353 - type: euclidean_spearman value: 35.721501145568894 - type: manhattan_pearson value: 17.77134274219677 - type: manhattan_spearman value: 35.98107902846267 - task: type: STS dataset: name: MTEB STS22 (tr) type: mteb/sts22-crosslingual-sts config: tr split: test revision: 6d1ba47164174a496b7fa5d3569dae26a6813b80 metrics: - type: cos_sim_pearson value: 56.51946541393812 - type: cos_sim_spearman value: 63.714686006214485 - type: euclidean_pearson value: 58.32104651305898 - type: euclidean_spearman value: 62.237110895702216 - type: manhattan_pearson value: 58.579416468759185 - type: manhattan_spearman value: 62.459738981727 - task: type: STS dataset: name: MTEB STS22 (ar) type: mteb/sts22-crosslingual-sts config: ar split: test revision: 6d1ba47164174a496b7fa5d3569dae26a6813b80 metrics: - type: cos_sim_pearson value: 48.76009839569795 - type: cos_sim_spearman value: 56.65188431953149 - type: euclidean_pearson value: 50.997682160915595 - type: euclidean_spearman value: 55.99910008818135 - type: manhattan_pearson value: 50.76220659606342 - type: manhattan_spearman value: 55.517347595391456 - task: type: STS dataset: name: MTEB STS22 (ru) type: mteb/sts22-crosslingual-sts config: ru split: test revision: 6d1ba47164174a496b7fa5d3569dae26a6813b80 metrics: - type: cos_sim_pearson value: 51.232731157702425 - type: cos_sim_spearman value: 59.89531877658345 - type: euclidean_pearson value: 49.937914570348376 - type: euclidean_spearman value: 60.220905659334036 - type: manhattan_pearson value: 50.00987996844193 - type: manhattan_spearman value: 60.081341480977926 - task: type: STS dataset: name: MTEB STS22 (zh) type: mteb/sts22-crosslingual-sts config: zh split: test revision: 6d1ba47164174a496b7fa5d3569dae26a6813b80 metrics: - type: cos_sim_pearson value: 54.717524559088005 - type: cos_sim_spearman value: 66.83570886252286 - type: euclidean_pearson value: 58.41338625505467 - type: euclidean_spearman value: 66.68991427704938 - type: manhattan_pearson value: 58.78638572916807 - type: manhattan_spearman value: 66.58684161046335 - task: type: STS dataset: name: MTEB STS22 (fr) type: mteb/sts22-crosslingual-sts config: fr split: test revision: 6d1ba47164174a496b7fa5d3569dae26a6813b80 metrics: - type: cos_sim_pearson value: 73.2962042954962 - type: cos_sim_spearman value: 76.58255504852025 - type: euclidean_pearson value: 75.70983192778257 - type: euclidean_spearman 
value: 77.4547684870542 - type: manhattan_pearson value: 75.75565853870485 - type: manhattan_spearman value: 76.90208974949428 - task: type: STS dataset: name: MTEB STS22 (de-en) type: mteb/sts22-crosslingual-sts config: de-en split: test revision: 6d1ba47164174a496b7fa5d3569dae26a6813b80 metrics: - type: cos_sim_pearson value: 54.47396266924846 - type: cos_sim_spearman value: 56.492267162048606 - type: euclidean_pearson value: 55.998505203070195 - type: euclidean_spearman value: 56.46447012960222 - type: manhattan_pearson value: 54.873172394430995 - type: manhattan_spearman value: 56.58111534551218 - task: type: STS dataset: name: MTEB STS22 (es-en) type: mteb/sts22-crosslingual-sts config: es-en split: test revision: 6d1ba47164174a496b7fa5d3569dae26a6813b80 metrics: - type: cos_sim_pearson value: 69.87177267688686 - type: cos_sim_spearman value: 74.57160943395763 - type: euclidean_pearson value: 70.88330406826788 - type: euclidean_spearman value: 74.29767636038422 - type: manhattan_pearson value: 71.38245248369536 - type: manhattan_spearman value: 74.53102232732175 - task: type: STS dataset: name: MTEB STS22 (it) type: mteb/sts22-crosslingual-sts config: it split: test revision: 6d1ba47164174a496b7fa5d3569dae26a6813b80 metrics: - type: cos_sim_pearson value: 72.80225656959544 - type: cos_sim_spearman value: 76.52646173725735 - type: euclidean_pearson value: 73.95710720200799 - type: euclidean_spearman value: 76.54040031984111 - type: manhattan_pearson value: 73.89679971946774 - type: manhattan_spearman value: 76.60886958161574 - task: type: STS dataset: name: MTEB STS22 (pl-en) type: mteb/sts22-crosslingual-sts config: pl-en split: test revision: 6d1ba47164174a496b7fa5d3569dae26a6813b80 metrics: - type: cos_sim_pearson value: 70.70844249898789 - type: cos_sim_spearman value: 72.68571783670241 - type: euclidean_pearson value: 72.38800772441031 - type: euclidean_spearman value: 72.86804422703312 - type: manhattan_pearson value: 71.29840508203515 - type: manhattan_spearman value: 71.86264441749513 - task: type: STS dataset: name: MTEB STS22 (zh-en) type: mteb/sts22-crosslingual-sts config: zh-en split: test revision: 6d1ba47164174a496b7fa5d3569dae26a6813b80 metrics: - type: cos_sim_pearson value: 58.647478923935694 - type: cos_sim_spearman value: 63.74453623540931 - type: euclidean_pearson value: 59.60138032437505 - type: euclidean_spearman value: 63.947930832166065 - type: manhattan_pearson value: 58.59735509491861 - type: manhattan_spearman value: 62.082503844627404 - task: type: STS dataset: name: MTEB STS22 (es-it) type: mteb/sts22-crosslingual-sts config: es-it split: test revision: 6d1ba47164174a496b7fa5d3569dae26a6813b80 metrics: - type: cos_sim_pearson value: 65.8722516867162 - type: cos_sim_spearman value: 71.81208592523012 - type: euclidean_pearson value: 67.95315252165956 - type: euclidean_spearman value: 73.00749822046009 - type: manhattan_pearson value: 68.07884688638924 - type: manhattan_spearman value: 72.34210325803069 - task: type: STS dataset: name: MTEB STS22 (de-fr) type: mteb/sts22-crosslingual-sts config: de-fr split: test revision: 6d1ba47164174a496b7fa5d3569dae26a6813b80 metrics: - type: cos_sim_pearson value: 54.5405814240949 - type: cos_sim_spearman value: 60.56838649023775 - type: euclidean_pearson value: 53.011731611314104 - type: euclidean_spearman value: 58.533194841668426 - type: manhattan_pearson value: 53.623067729338494 - type: manhattan_spearman value: 58.018756154446926 - task: type: STS dataset: name: MTEB STS22 (de-pl) type: mteb/sts22-crosslingual-sts 
config: de-pl split: test revision: 6d1ba47164174a496b7fa5d3569dae26a6813b80 metrics: - type: cos_sim_pearson value: 13.611046866216112 - type: cos_sim_spearman value: 28.238192909158492 - type: euclidean_pearson value: 22.16189199885129 - type: euclidean_spearman value: 35.012895679076564 - type: manhattan_pearson value: 21.969771178698387 - type: manhattan_spearman value: 32.456985088607475 - task: type: STS dataset: name: MTEB STS22 (fr-pl) type: mteb/sts22-crosslingual-sts config: fr-pl split: test revision: 6d1ba47164174a496b7fa5d3569dae26a6813b80 metrics: - type: cos_sim_pearson value: 74.58077407011655 - type: cos_sim_spearman value: 84.51542547285167 - type: euclidean_pearson value: 74.64613843596234 - type: euclidean_spearman value: 84.51542547285167 - type: manhattan_pearson value: 75.15335973101396 - type: manhattan_spearman value: 84.51542547285167 - task: type: STS dataset: name: MTEB STSBenchmark type: mteb/stsbenchmark-sts config: default split: test revision: b0fddb56ed78048fa8b90373c8a3cfc37b684831 metrics: - type: cos_sim_pearson value: 82.0739825531578 - type: cos_sim_spearman value: 84.01057479311115 - type: euclidean_pearson value: 83.85453227433344 - type: euclidean_spearman value: 84.01630226898655 - type: manhattan_pearson value: 83.75323603028978 - type: manhattan_spearman value: 83.89677983727685 - task: type: Reranking dataset: name: MTEB SciDocsRR type: mteb/scidocs-reranking config: default split: test revision: d3c5e1fc0b855ab6097bf1cda04dd73947d7caab metrics: - type: map value: 78.12945623123957 - type: mrr value: 93.87738713719106 - task: type: Retrieval dataset: name: MTEB SciFact type: scifact config: default split: test revision: None metrics: - type: map_at_1 value: 52.983000000000004 - type: map_at_10 value: 62.946000000000005 - type: map_at_100 value: 63.514 - type: map_at_1000 value: 63.554 - type: map_at_3 value: 60.183 - type: map_at_5 value: 61.672000000000004 - type: mrr_at_1 value: 55.667 - type: mrr_at_10 value: 64.522 - type: mrr_at_100 value: 64.957 - type: mrr_at_1000 value: 64.995 - type: mrr_at_3 value: 62.388999999999996 - type: mrr_at_5 value: 63.639 - type: ndcg_at_1 value: 55.667 - type: ndcg_at_10 value: 67.704 - type: ndcg_at_100 value: 70.299 - type: ndcg_at_1000 value: 71.241 - type: ndcg_at_3 value: 62.866 - type: ndcg_at_5 value: 65.16999999999999 - type: precision_at_1 value: 55.667 - type: precision_at_10 value: 9.033 - type: precision_at_100 value: 1.053 - type: precision_at_1000 value: 0.11299999999999999 - type: precision_at_3 value: 24.444 - type: precision_at_5 value: 16.133 - type: recall_at_1 value: 52.983000000000004 - type: recall_at_10 value: 80.656 - type: recall_at_100 value: 92.5 - type: recall_at_1000 value: 99.667 - type: recall_at_3 value: 67.744 - type: recall_at_5 value: 73.433 - task: type: PairClassification dataset: name: MTEB SprintDuplicateQuestions type: mteb/sprintduplicatequestions-pairclassification config: default split: test revision: d66bd1f72af766a5cc4b0ca5e00c162f89e8cc46 metrics: - type: cos_sim_accuracy value: 99.72772277227723 - type: cos_sim_ap value: 92.17845897992215 - type: cos_sim_f1 value: 85.9746835443038 - type: cos_sim_precision value: 87.07692307692308 - type: cos_sim_recall value: 84.89999999999999 - type: dot_accuracy value: 99.3039603960396 - type: dot_ap value: 60.70244020124878 - type: dot_f1 value: 59.92742353551063 - type: dot_precision value: 62.21743810548978 - type: dot_recall value: 57.8 - type: euclidean_accuracy value: 99.71683168316832 - type: euclidean_ap value: 
91.53997039964659 - type: euclidean_f1 value: 84.88372093023257 - type: euclidean_precision value: 90.02242152466367 - type: euclidean_recall value: 80.30000000000001 - type: manhattan_accuracy value: 99.72376237623763 - type: manhattan_ap value: 91.80756777790289 - type: manhattan_f1 value: 85.48468106479157 - type: manhattan_precision value: 85.8728557013118 - type: manhattan_recall value: 85.1 - type: max_accuracy value: 99.72772277227723 - type: max_ap value: 92.17845897992215 - type: max_f1 value: 85.9746835443038 - task: type: Clustering dataset: name: MTEB StackExchangeClustering type: mteb/stackexchange-clustering config: default split: test revision: 6cbc1f7b2bc0622f2e39d2c77fa502909748c259 metrics: - type: v_measure value: 53.52464042600003 - task: type: Clustering dataset: name: MTEB StackExchangeClusteringP2P type: mteb/stackexchange-clustering-p2p config: default split: test revision: 815ca46b2622cec33ccafc3735d572c266efdb44 metrics: - type: v_measure value: 32.071631948736 - task: type: Reranking dataset: name: MTEB StackOverflowDupQuestions type: mteb/stackoverflowdupquestions-reranking config: default split: test revision: e185fbe320c72810689fc5848eb6114e1ef5ec69 metrics: - type: map value: 49.19552407604654 - type: mrr value: 49.95269130379425 - task: type: Summarization dataset: name: MTEB SummEval type: mteb/summeval config: default split: test revision: cda12ad7615edc362dbf25a00fdd61d3b1eaf93c metrics: - type: cos_sim_pearson value: 29.345293033095427 - type: cos_sim_spearman value: 29.976931423258403 - type: dot_pearson value: 27.047078008958408 - type: dot_spearman value: 27.75894368380218 - task: type: Retrieval dataset: name: MTEB TRECCOVID type: trec-covid config: default split: test revision: None metrics: - type: map_at_1 value: 0.22 - type: map_at_10 value: 1.706 - type: map_at_100 value: 9.634 - type: map_at_1000 value: 23.665 - type: map_at_3 value: 0.5950000000000001 - type: map_at_5 value: 0.95 - type: mrr_at_1 value: 86.0 - type: mrr_at_10 value: 91.8 - type: mrr_at_100 value: 91.8 - type: mrr_at_1000 value: 91.8 - type: mrr_at_3 value: 91.0 - type: mrr_at_5 value: 91.8 - type: ndcg_at_1 value: 80.0 - type: ndcg_at_10 value: 72.573 - type: ndcg_at_100 value: 53.954 - type: ndcg_at_1000 value: 47.760999999999996 - type: ndcg_at_3 value: 76.173 - type: ndcg_at_5 value: 75.264 - type: precision_at_1 value: 86.0 - type: precision_at_10 value: 76.4 - type: precision_at_100 value: 55.50000000000001 - type: precision_at_1000 value: 21.802 - type: precision_at_3 value: 81.333 - type: precision_at_5 value: 80.4 - type: recall_at_1 value: 0.22 - type: recall_at_10 value: 1.925 - type: recall_at_100 value: 12.762 - type: recall_at_1000 value: 44.946000000000005 - type: recall_at_3 value: 0.634 - type: recall_at_5 value: 1.051 - task: type: BitextMining dataset: name: MTEB Tatoeba (sqi-eng) type: mteb/tatoeba-bitext-mining config: sqi-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 91.0 - type: f1 value: 88.55666666666666 - type: precision value: 87.46166666666667 - type: recall value: 91.0 - task: type: BitextMining dataset: name: MTEB Tatoeba (fry-eng) type: mteb/tatoeba-bitext-mining config: fry-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 57.22543352601156 - type: f1 value: 51.03220478943021 - type: precision value: 48.8150289017341 - type: recall value: 57.22543352601156 - task: type: BitextMining dataset: name: MTEB Tatoeba (kur-eng) type: 
mteb/tatoeba-bitext-mining config: kur-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 46.58536585365854 - type: f1 value: 39.66870798578116 - type: precision value: 37.416085946573745 - type: recall value: 46.58536585365854 - task: type: BitextMining dataset: name: MTEB Tatoeba (tur-eng) type: mteb/tatoeba-bitext-mining config: tur-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 89.7 - type: f1 value: 86.77999999999999 - type: precision value: 85.45333333333332 - type: recall value: 89.7 - task: type: BitextMining dataset: name: MTEB Tatoeba (deu-eng) type: mteb/tatoeba-bitext-mining config: deu-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 97.39999999999999 - type: f1 value: 96.58333333333331 - type: precision value: 96.2 - type: recall value: 97.39999999999999 - task: type: BitextMining dataset: name: MTEB Tatoeba (nld-eng) type: mteb/tatoeba-bitext-mining config: nld-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 92.4 - type: f1 value: 90.3 - type: precision value: 89.31666666666668 - type: recall value: 92.4 - task: type: BitextMining dataset: name: MTEB Tatoeba (ron-eng) type: mteb/tatoeba-bitext-mining config: ron-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 86.9 - type: f1 value: 83.67190476190476 - type: precision value: 82.23333333333332 - type: recall value: 86.9 - task: type: BitextMining dataset: name: MTEB Tatoeba (ang-eng) type: mteb/tatoeba-bitext-mining config: ang-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 50.0 - type: f1 value: 42.23229092632078 - type: precision value: 39.851634683724235 - type: recall value: 50.0 - task: type: BitextMining dataset: name: MTEB Tatoeba (ido-eng) type: mteb/tatoeba-bitext-mining config: ido-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 76.3 - type: f1 value: 70.86190476190477 - type: precision value: 68.68777777777777 - type: recall value: 76.3 - task: type: BitextMining dataset: name: MTEB Tatoeba (jav-eng) type: mteb/tatoeba-bitext-mining config: jav-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 57.073170731707314 - type: f1 value: 50.658958927251604 - type: precision value: 48.26480836236933 - type: recall value: 57.073170731707314 - task: type: BitextMining dataset: name: MTEB Tatoeba (isl-eng) type: mteb/tatoeba-bitext-mining config: isl-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 68.2 - type: f1 value: 62.156507936507936 - type: precision value: 59.84964285714286 - type: recall value: 68.2 - task: type: BitextMining dataset: name: MTEB Tatoeba (slv-eng) type: mteb/tatoeba-bitext-mining config: slv-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 77.52126366950182 - type: f1 value: 72.8496210148701 - type: precision value: 70.92171498003819 - type: recall value: 77.52126366950182 - task: type: BitextMining dataset: name: MTEB Tatoeba (cym-eng) type: mteb/tatoeba-bitext-mining config: cym-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 70.78260869565217 - type: f1 value: 65.32422360248447 - type: precision value: 63.063067367415194 - type: recall value: 
70.78260869565217 - task: type: BitextMining dataset: name: MTEB Tatoeba (kaz-eng) type: mteb/tatoeba-bitext-mining config: kaz-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 78.43478260869566 - type: f1 value: 73.02608695652172 - type: precision value: 70.63768115942028 - type: recall value: 78.43478260869566 - task: type: BitextMining dataset: name: MTEB Tatoeba (est-eng) type: mteb/tatoeba-bitext-mining config: est-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 60.9 - type: f1 value: 55.309753694581275 - type: precision value: 53.130476190476195 - type: recall value: 60.9 - task: type: BitextMining dataset: name: MTEB Tatoeba (heb-eng) type: mteb/tatoeba-bitext-mining config: heb-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 72.89999999999999 - type: f1 value: 67.92023809523809 - type: precision value: 65.82595238095237 - type: recall value: 72.89999999999999 - task: type: BitextMining dataset: name: MTEB Tatoeba (gla-eng) type: mteb/tatoeba-bitext-mining config: gla-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 46.80337756332931 - type: f1 value: 39.42174900558496 - type: precision value: 36.97101116280851 - type: recall value: 46.80337756332931 - task: type: BitextMining dataset: name: MTEB Tatoeba (mar-eng) type: mteb/tatoeba-bitext-mining config: mar-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 89.8 - type: f1 value: 86.79 - type: precision value: 85.375 - type: recall value: 89.8 - task: type: BitextMining dataset: name: MTEB Tatoeba (lat-eng) type: mteb/tatoeba-bitext-mining config: lat-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 47.199999999999996 - type: f1 value: 39.95484348984349 - type: precision value: 37.561071428571424 - type: recall value: 47.199999999999996 - task: type: BitextMining dataset: name: MTEB Tatoeba (bel-eng) type: mteb/tatoeba-bitext-mining config: bel-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 87.8 - type: f1 value: 84.68190476190475 - type: precision value: 83.275 - type: recall value: 87.8 - task: type: BitextMining dataset: name: MTEB Tatoeba (pms-eng) type: mteb/tatoeba-bitext-mining config: pms-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 48.76190476190476 - type: f1 value: 42.14965986394558 - type: precision value: 39.96743626743626 - type: recall value: 48.76190476190476 - task: type: BitextMining dataset: name: MTEB Tatoeba (gle-eng) type: mteb/tatoeba-bitext-mining config: gle-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 66.10000000000001 - type: f1 value: 59.58580086580086 - type: precision value: 57.150238095238095 - type: recall value: 66.10000000000001 - task: type: BitextMining dataset: name: MTEB Tatoeba (pes-eng) type: mteb/tatoeba-bitext-mining config: pes-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 87.3 - type: f1 value: 84.0 - type: precision value: 82.48666666666666 - type: recall value: 87.3 - task: type: BitextMining dataset: name: MTEB Tatoeba (nob-eng) type: mteb/tatoeba-bitext-mining config: nob-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 
90.4 - type: f1 value: 87.79523809523809 - type: precision value: 86.6 - type: recall value: 90.4 - task: type: BitextMining dataset: name: MTEB Tatoeba (bul-eng) type: mteb/tatoeba-bitext-mining config: bul-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 87.0 - type: f1 value: 83.81 - type: precision value: 82.36666666666666 - type: recall value: 87.0 - task: type: BitextMining dataset: name: MTEB Tatoeba (cbk-eng) type: mteb/tatoeba-bitext-mining config: cbk-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 63.9 - type: f1 value: 57.76533189033189 - type: precision value: 55.50595238095239 - type: recall value: 63.9 - task: type: BitextMining dataset: name: MTEB Tatoeba (hun-eng) type: mteb/tatoeba-bitext-mining config: hun-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 76.1 - type: f1 value: 71.83690476190478 - type: precision value: 70.04928571428573 - type: recall value: 76.1 - task: type: BitextMining dataset: name: MTEB Tatoeba (uig-eng) type: mteb/tatoeba-bitext-mining config: uig-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 66.3 - type: f1 value: 59.32626984126984 - type: precision value: 56.62535714285713 - type: recall value: 66.3 - task: type: BitextMining dataset: name: MTEB Tatoeba (rus-eng) type: mteb/tatoeba-bitext-mining config: rus-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 90.60000000000001 - type: f1 value: 87.96333333333334 - type: precision value: 86.73333333333333 - type: recall value: 90.60000000000001 - task: type: BitextMining dataset: name: MTEB Tatoeba (spa-eng) type: mteb/tatoeba-bitext-mining config: spa-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 93.10000000000001 - type: f1 value: 91.10000000000001 - type: precision value: 90.16666666666666 - type: recall value: 93.10000000000001 - task: type: BitextMining dataset: name: MTEB Tatoeba (hye-eng) type: mteb/tatoeba-bitext-mining config: hye-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 85.71428571428571 - type: f1 value: 82.29142600436403 - type: precision value: 80.8076626877166 - type: recall value: 85.71428571428571 - task: type: BitextMining dataset: name: MTEB Tatoeba (tel-eng) type: mteb/tatoeba-bitext-mining config: tel-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 88.88888888888889 - type: f1 value: 85.7834757834758 - type: precision value: 84.43732193732193 - type: recall value: 88.88888888888889 - task: type: BitextMining dataset: name: MTEB Tatoeba (afr-eng) type: mteb/tatoeba-bitext-mining config: afr-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 88.5 - type: f1 value: 85.67190476190476 - type: precision value: 84.43333333333332 - type: recall value: 88.5 - task: type: BitextMining dataset: name: MTEB Tatoeba (mon-eng) type: mteb/tatoeba-bitext-mining config: mon-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 82.72727272727273 - type: f1 value: 78.21969696969695 - type: precision value: 76.18181818181819 - type: recall value: 82.72727272727273 - task: type: BitextMining dataset: name: MTEB Tatoeba (arz-eng) type: mteb/tatoeba-bitext-mining config: arz-eng split: test 
revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 61.0062893081761 - type: f1 value: 55.13976240391334 - type: precision value: 52.92112499659669 - type: recall value: 61.0062893081761 - task: type: BitextMining dataset: name: MTEB Tatoeba (hrv-eng) type: mteb/tatoeba-bitext-mining config: hrv-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 89.5 - type: f1 value: 86.86666666666666 - type: precision value: 85.69166666666668 - type: recall value: 89.5 - task: type: BitextMining dataset: name: MTEB Tatoeba (nov-eng) type: mteb/tatoeba-bitext-mining config: nov-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 73.54085603112841 - type: f1 value: 68.56031128404669 - type: precision value: 66.53047989623866 - type: recall value: 73.54085603112841 - task: type: BitextMining dataset: name: MTEB Tatoeba (gsw-eng) type: mteb/tatoeba-bitext-mining config: gsw-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 43.58974358974359 - type: f1 value: 36.45299145299145 - type: precision value: 33.81155881155882 - type: recall value: 43.58974358974359 - task: type: BitextMining dataset: name: MTEB Tatoeba (nds-eng) type: mteb/tatoeba-bitext-mining config: nds-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 59.599999999999994 - type: f1 value: 53.264689754689755 - type: precision value: 50.869166666666665 - type: recall value: 59.599999999999994 - task: type: BitextMining dataset: name: MTEB Tatoeba (ukr-eng) type: mteb/tatoeba-bitext-mining config: ukr-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 85.2 - type: f1 value: 81.61666666666665 - type: precision value: 80.02833333333335 - type: recall value: 85.2 - task: type: BitextMining dataset: name: MTEB Tatoeba (uzb-eng) type: mteb/tatoeba-bitext-mining config: uzb-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 63.78504672897196 - type: f1 value: 58.00029669188548 - type: precision value: 55.815809968847354 - type: recall value: 63.78504672897196 - task: type: BitextMining dataset: name: MTEB Tatoeba (lit-eng) type: mteb/tatoeba-bitext-mining config: lit-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 66.5 - type: f1 value: 61.518333333333345 - type: precision value: 59.622363699102834 - type: recall value: 66.5 - task: type: BitextMining dataset: name: MTEB Tatoeba (ina-eng) type: mteb/tatoeba-bitext-mining config: ina-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 88.6 - type: f1 value: 85.60222222222221 - type: precision value: 84.27916666666665 - type: recall value: 88.6 - task: type: BitextMining dataset: name: MTEB Tatoeba (lfn-eng) type: mteb/tatoeba-bitext-mining config: lfn-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 58.699999999999996 - type: f1 value: 52.732375957375965 - type: precision value: 50.63214035964035 - type: recall value: 58.699999999999996 - task: type: BitextMining dataset: name: MTEB Tatoeba (zsm-eng) type: mteb/tatoeba-bitext-mining config: zsm-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 92.10000000000001 - type: f1 value: 89.99666666666667 - type: precision value: 89.03333333333333 
- type: recall value: 92.10000000000001 - task: type: BitextMining dataset: name: MTEB Tatoeba (ita-eng) type: mteb/tatoeba-bitext-mining config: ita-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 90.10000000000001 - type: f1 value: 87.55666666666667 - type: precision value: 86.36166666666668 - type: recall value: 90.10000000000001 - task: type: BitextMining dataset: name: MTEB Tatoeba (cmn-eng) type: mteb/tatoeba-bitext-mining config: cmn-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 91.4 - type: f1 value: 88.89000000000001 - type: precision value: 87.71166666666666 - type: recall value: 91.4 - task: type: BitextMining dataset: name: MTEB Tatoeba (lvs-eng) type: mteb/tatoeba-bitext-mining config: lvs-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 65.7 - type: f1 value: 60.67427750410509 - type: precision value: 58.71785714285714 - type: recall value: 65.7 - task: type: BitextMining dataset: name: MTEB Tatoeba (glg-eng) type: mteb/tatoeba-bitext-mining config: glg-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 85.39999999999999 - type: f1 value: 81.93190476190475 - type: precision value: 80.37833333333333 - type: recall value: 85.39999999999999 - task: type: BitextMining dataset: name: MTEB Tatoeba (ceb-eng) type: mteb/tatoeba-bitext-mining config: ceb-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 47.833333333333336 - type: f1 value: 42.006625781625786 - type: precision value: 40.077380952380956 - type: recall value: 47.833333333333336 - task: type: BitextMining dataset: name: MTEB Tatoeba (bre-eng) type: mteb/tatoeba-bitext-mining config: bre-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 10.4 - type: f1 value: 8.24465007215007 - type: precision value: 7.664597069597071 - type: recall value: 10.4 - task: type: BitextMining dataset: name: MTEB Tatoeba (ben-eng) type: mteb/tatoeba-bitext-mining config: ben-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 82.6 - type: f1 value: 77.76333333333334 - type: precision value: 75.57833333333332 - type: recall value: 82.6 - task: type: BitextMining dataset: name: MTEB Tatoeba (swg-eng) type: mteb/tatoeba-bitext-mining config: swg-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 52.67857142857143 - type: f1 value: 44.302721088435376 - type: precision value: 41.49801587301587 - type: recall value: 52.67857142857143 - task: type: BitextMining dataset: name: MTEB Tatoeba (arq-eng) type: mteb/tatoeba-bitext-mining config: arq-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 28.3205268935236 - type: f1 value: 22.426666605171157 - type: precision value: 20.685900116470915 - type: recall value: 28.3205268935236 - task: type: BitextMining dataset: name: MTEB Tatoeba (kab-eng) type: mteb/tatoeba-bitext-mining config: kab-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 22.7 - type: f1 value: 17.833970473970474 - type: precision value: 16.407335164835164 - type: recall value: 22.7 - task: type: BitextMining dataset: name: MTEB Tatoeba (fra-eng) type: mteb/tatoeba-bitext-mining config: fra-eng split: test revision: 
9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 92.2 - type: f1 value: 89.92999999999999 - type: precision value: 88.87 - type: recall value: 92.2 - task: type: BitextMining dataset: name: MTEB Tatoeba (por-eng) type: mteb/tatoeba-bitext-mining config: por-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 91.4 - type: f1 value: 89.25 - type: precision value: 88.21666666666667 - type: recall value: 91.4 - task: type: BitextMining dataset: name: MTEB Tatoeba (tat-eng) type: mteb/tatoeba-bitext-mining config: tat-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 69.19999999999999 - type: f1 value: 63.38269841269841 - type: precision value: 61.14773809523809 - type: recall value: 69.19999999999999 - task: type: BitextMining dataset: name: MTEB Tatoeba (oci-eng) type: mteb/tatoeba-bitext-mining config: oci-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 48.8 - type: f1 value: 42.839915639915645 - type: precision value: 40.770287114845935 - type: recall value: 48.8 - task: type: BitextMining dataset: name: MTEB Tatoeba (pol-eng) type: mteb/tatoeba-bitext-mining config: pol-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 88.8 - type: f1 value: 85.90666666666668 - type: precision value: 84.54166666666666 - type: recall value: 88.8 - task: type: BitextMining dataset: name: MTEB Tatoeba (war-eng) type: mteb/tatoeba-bitext-mining config: war-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 46.6 - type: f1 value: 40.85892920804686 - type: precision value: 38.838223114604695 - type: recall value: 46.6 - task: type: BitextMining dataset: name: MTEB Tatoeba (aze-eng) type: mteb/tatoeba-bitext-mining config: aze-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 84.0 - type: f1 value: 80.14190476190475 - type: precision value: 78.45333333333333 - type: recall value: 84.0 - task: type: BitextMining dataset: name: MTEB Tatoeba (vie-eng) type: mteb/tatoeba-bitext-mining config: vie-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 90.5 - type: f1 value: 87.78333333333333 - type: precision value: 86.5 - type: recall value: 90.5 - task: type: BitextMining dataset: name: MTEB Tatoeba (nno-eng) type: mteb/tatoeba-bitext-mining config: nno-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 74.5 - type: f1 value: 69.48397546897547 - type: precision value: 67.51869047619049 - type: recall value: 74.5 - task: type: BitextMining dataset: name: MTEB Tatoeba (cha-eng) type: mteb/tatoeba-bitext-mining config: cha-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 32.846715328467155 - type: f1 value: 27.828177499710343 - type: precision value: 26.63451511991658 - type: recall value: 32.846715328467155 - task: type: BitextMining dataset: name: MTEB Tatoeba (mhr-eng) type: mteb/tatoeba-bitext-mining config: mhr-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 8.0 - type: f1 value: 6.07664116764988 - type: precision value: 5.544177607179943 - type: recall value: 8.0 - task: type: BitextMining dataset: name: MTEB Tatoeba (dan-eng) type: mteb/tatoeba-bitext-mining config: dan-eng split: test revision: 
9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 87.6 - type: f1 value: 84.38555555555554 - type: precision value: 82.91583333333334 - type: recall value: 87.6 - task: type: BitextMining dataset: name: MTEB Tatoeba (ell-eng) type: mteb/tatoeba-bitext-mining config: ell-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 87.5 - type: f1 value: 84.08333333333331 - type: precision value: 82.47333333333333 - type: recall value: 87.5 - task: type: BitextMining dataset: name: MTEB Tatoeba (amh-eng) type: mteb/tatoeba-bitext-mining config: amh-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 80.95238095238095 - type: f1 value: 76.13095238095238 - type: precision value: 74.05753968253967 - type: recall value: 80.95238095238095 - task: type: BitextMining dataset: name: MTEB Tatoeba (pam-eng) type: mteb/tatoeba-bitext-mining config: pam-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 8.799999999999999 - type: f1 value: 6.971422975172975 - type: precision value: 6.557814916172301 - type: recall value: 8.799999999999999 - task: type: BitextMining dataset: name: MTEB Tatoeba (hsb-eng) type: mteb/tatoeba-bitext-mining config: hsb-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 44.099378881987576 - type: f1 value: 37.01649742022413 - type: precision value: 34.69420618488942 - type: recall value: 44.099378881987576 - task: type: BitextMining dataset: name: MTEB Tatoeba (srp-eng) type: mteb/tatoeba-bitext-mining config: srp-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 84.3 - type: f1 value: 80.32666666666667 - type: precision value: 78.60666666666665 - type: recall value: 84.3 - task: type: BitextMining dataset: name: MTEB Tatoeba (epo-eng) type: mteb/tatoeba-bitext-mining config: epo-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 92.5 - type: f1 value: 90.49666666666666 - type: precision value: 89.56666666666668 - type: recall value: 92.5 - task: type: BitextMining dataset: name: MTEB Tatoeba (kzj-eng) type: mteb/tatoeba-bitext-mining config: kzj-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 10.0 - type: f1 value: 8.268423529875141 - type: precision value: 7.878118605532398 - type: recall value: 10.0 - task: type: BitextMining dataset: name: MTEB Tatoeba (awa-eng) type: mteb/tatoeba-bitext-mining config: awa-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 79.22077922077922 - type: f1 value: 74.27128427128426 - type: precision value: 72.28715728715729 - type: recall value: 79.22077922077922 - task: type: BitextMining dataset: name: MTEB Tatoeba (fao-eng) type: mteb/tatoeba-bitext-mining config: fao-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 65.64885496183206 - type: f1 value: 58.87495456197747 - type: precision value: 55.992366412213734 - type: recall value: 65.64885496183206 - task: type: BitextMining dataset: name: MTEB Tatoeba (mal-eng) type: mteb/tatoeba-bitext-mining config: mal-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 96.06986899563319 - type: f1 value: 94.78408539543909 - type: precision value: 94.15332362930616 - type: recall value: 96.06986899563319 
- task: type: BitextMining dataset: name: MTEB Tatoeba (ile-eng) type: mteb/tatoeba-bitext-mining config: ile-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 77.2 - type: f1 value: 71.72571428571428 - type: precision value: 69.41000000000001 - type: recall value: 77.2 - task: type: BitextMining dataset: name: MTEB Tatoeba (bos-eng) type: mteb/tatoeba-bitext-mining config: bos-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 86.4406779661017 - type: f1 value: 83.2391713747646 - type: precision value: 81.74199623352166 - type: recall value: 86.4406779661017 - task: type: BitextMining dataset: name: MTEB Tatoeba (cor-eng) type: mteb/tatoeba-bitext-mining config: cor-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 8.4 - type: f1 value: 6.017828743398003 - type: precision value: 5.4829865484756795 - type: recall value: 8.4 - task: type: BitextMining dataset: name: MTEB Tatoeba (cat-eng) type: mteb/tatoeba-bitext-mining config: cat-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 83.5 - type: f1 value: 79.74833333333333 - type: precision value: 78.04837662337664 - type: recall value: 83.5 - task: type: BitextMining dataset: name: MTEB Tatoeba (eus-eng) type: mteb/tatoeba-bitext-mining config: eus-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 60.4 - type: f1 value: 54.467301587301584 - type: precision value: 52.23242424242424 - type: recall value: 60.4 - task: type: BitextMining dataset: name: MTEB Tatoeba (yue-eng) type: mteb/tatoeba-bitext-mining config: yue-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 74.9 - type: f1 value: 69.68699134199134 - type: precision value: 67.59873015873016 - type: recall value: 74.9 - task: type: BitextMining dataset: name: MTEB Tatoeba (swe-eng) type: mteb/tatoeba-bitext-mining config: swe-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 88.0 - type: f1 value: 84.9652380952381 - type: precision value: 83.66166666666666 - type: recall value: 88.0 - task: type: BitextMining dataset: name: MTEB Tatoeba (dtp-eng) type: mteb/tatoeba-bitext-mining config: dtp-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 9.1 - type: f1 value: 7.681244588744588 - type: precision value: 7.370043290043291 - type: recall value: 9.1 - task: type: BitextMining dataset: name: MTEB Tatoeba (kat-eng) type: mteb/tatoeba-bitext-mining config: kat-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 80.9651474530831 - type: f1 value: 76.84220605132133 - type: precision value: 75.19606398962966 - type: recall value: 80.9651474530831 - task: type: BitextMining dataset: name: MTEB Tatoeba (jpn-eng) type: mteb/tatoeba-bitext-mining config: jpn-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 86.9 - type: f1 value: 83.705 - type: precision value: 82.3120634920635 - type: recall value: 86.9 - task: type: BitextMining dataset: name: MTEB Tatoeba (csb-eng) type: mteb/tatoeba-bitext-mining config: csb-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 29.64426877470356 - type: f1 value: 23.98763072676116 - type: precision value: 22.506399397703746 - 
type: recall value: 29.64426877470356 - task: type: BitextMining dataset: name: MTEB Tatoeba (xho-eng) type: mteb/tatoeba-bitext-mining config: xho-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 70.4225352112676 - type: f1 value: 62.84037558685445 - type: precision value: 59.56572769953053 - type: recall value: 70.4225352112676 - task: type: BitextMining dataset: name: MTEB Tatoeba (orv-eng) type: mteb/tatoeba-bitext-mining config: orv-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 19.64071856287425 - type: f1 value: 15.125271011207756 - type: precision value: 13.865019261197494 - type: recall value: 19.64071856287425 - task: type: BitextMining dataset: name: MTEB Tatoeba (ind-eng) type: mteb/tatoeba-bitext-mining config: ind-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 90.2 - type: f1 value: 87.80666666666666 - type: precision value: 86.70833333333331 - type: recall value: 90.2 - task: type: BitextMining dataset: name: MTEB Tatoeba (tuk-eng) type: mteb/tatoeba-bitext-mining config: tuk-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 23.15270935960591 - type: f1 value: 18.407224958949097 - type: precision value: 16.982385430661292 - type: recall value: 23.15270935960591 - task: type: BitextMining dataset: name: MTEB Tatoeba (max-eng) type: mteb/tatoeba-bitext-mining config: max-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 55.98591549295775 - type: f1 value: 49.94718309859154 - type: precision value: 47.77864154624717 - type: recall value: 55.98591549295775 - task: type: BitextMining dataset: name: MTEB Tatoeba (swh-eng) type: mteb/tatoeba-bitext-mining config: swh-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 73.07692307692307 - type: f1 value: 66.74358974358974 - type: precision value: 64.06837606837607 - type: recall value: 73.07692307692307 - task: type: BitextMining dataset: name: MTEB Tatoeba (hin-eng) type: mteb/tatoeba-bitext-mining config: hin-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 94.89999999999999 - type: f1 value: 93.25 - type: precision value: 92.43333333333332 - type: recall value: 94.89999999999999 - task: type: BitextMining dataset: name: MTEB Tatoeba (dsb-eng) type: mteb/tatoeba-bitext-mining config: dsb-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 37.78705636743215 - type: f1 value: 31.63899658680452 - type: precision value: 29.72264397629742 - type: recall value: 37.78705636743215 - task: type: BitextMining dataset: name: MTEB Tatoeba (ber-eng) type: mteb/tatoeba-bitext-mining config: ber-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 21.6 - type: f1 value: 16.91697302697303 - type: precision value: 15.71225147075147 - type: recall value: 21.6 - task: type: BitextMining dataset: name: MTEB Tatoeba (tam-eng) type: mteb/tatoeba-bitext-mining config: tam-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 85.01628664495115 - type: f1 value: 81.38514037536838 - type: precision value: 79.83170466883823 - type: recall value: 85.01628664495115 - task: type: BitextMining dataset: name: MTEB Tatoeba (slk-eng) type: mteb/tatoeba-bitext-mining config: 
slk-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 83.39999999999999 - type: f1 value: 79.96380952380952 - type: precision value: 78.48333333333333 - type: recall value: 83.39999999999999 - task: type: BitextMining dataset: name: MTEB Tatoeba (tgl-eng) type: mteb/tatoeba-bitext-mining config: tgl-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 83.2 - type: f1 value: 79.26190476190476 - type: precision value: 77.58833333333334 - type: recall value: 83.2 - task: type: BitextMining dataset: name: MTEB Tatoeba (ast-eng) type: mteb/tatoeba-bitext-mining config: ast-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 75.59055118110236 - type: f1 value: 71.66854143232096 - type: precision value: 70.30183727034121 - type: recall value: 75.59055118110236 - task: type: BitextMining dataset: name: MTEB Tatoeba (mkd-eng) type: mteb/tatoeba-bitext-mining config: mkd-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 65.5 - type: f1 value: 59.26095238095238 - type: precision value: 56.81909090909092 - type: recall value: 65.5 - task: type: BitextMining dataset: name: MTEB Tatoeba (khm-eng) type: mteb/tatoeba-bitext-mining config: khm-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 55.26315789473685 - type: f1 value: 47.986523325858506 - type: precision value: 45.33950006595436 - type: recall value: 55.26315789473685 - task: type: BitextMining dataset: name: MTEB Tatoeba (ces-eng) type: mteb/tatoeba-bitext-mining config: ces-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 82.89999999999999 - type: f1 value: 78.835 - type: precision value: 77.04761904761905 - type: recall value: 82.89999999999999 - task: type: BitextMining dataset: name: MTEB Tatoeba (tzl-eng) type: mteb/tatoeba-bitext-mining config: tzl-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 43.269230769230774 - type: f1 value: 36.20421245421245 - type: precision value: 33.57371794871795 - type: recall value: 43.269230769230774 - task: type: BitextMining dataset: name: MTEB Tatoeba (urd-eng) type: mteb/tatoeba-bitext-mining config: urd-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 88.0 - type: f1 value: 84.70666666666666 - type: precision value: 83.23166666666665 - type: recall value: 88.0 - task: type: BitextMining dataset: name: MTEB Tatoeba (ara-eng) type: mteb/tatoeba-bitext-mining config: ara-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 77.4 - type: f1 value: 72.54666666666667 - type: precision value: 70.54318181818181 - type: recall value: 77.4 - task: type: BitextMining dataset: name: MTEB Tatoeba (kor-eng) type: mteb/tatoeba-bitext-mining config: kor-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 78.60000000000001 - type: f1 value: 74.1588888888889 - type: precision value: 72.30250000000001 - type: recall value: 78.60000000000001 - task: type: BitextMining dataset: name: MTEB Tatoeba (yid-eng) type: mteb/tatoeba-bitext-mining config: yid-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 72.40566037735849 - type: f1 value: 66.82587328813744 - type: precision value: 
64.75039308176099 - type: recall value: 72.40566037735849 - task: type: BitextMining dataset: name: MTEB Tatoeba (fin-eng) type: mteb/tatoeba-bitext-mining config: fin-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 73.8 - type: f1 value: 68.56357142857144 - type: precision value: 66.3178822055138 - type: recall value: 73.8 - task: type: BitextMining dataset: name: MTEB Tatoeba (tha-eng) type: mteb/tatoeba-bitext-mining config: tha-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 91.78832116788321 - type: f1 value: 89.3552311435523 - type: precision value: 88.20559610705597 - type: recall value: 91.78832116788321 - task: type: BitextMining dataset: name: MTEB Tatoeba (wuu-eng) type: mteb/tatoeba-bitext-mining config: wuu-eng split: test revision: 9080400076fbadbb4c4dcb136ff4eddc40b42553 metrics: - type: accuracy value: 74.3 - type: f1 value: 69.05085581085581 - type: precision value: 66.955 - type: recall value: 74.3 - task: type: Retrieval dataset: name: MTEB Touche2020 type: webis-touche2020 config: default split: test revision: None metrics: - type: map_at_1 value: 2.896 - type: map_at_10 value: 8.993 - type: map_at_100 value: 14.133999999999999 - type: map_at_1000 value: 15.668000000000001 - type: map_at_3 value: 5.862 - type: map_at_5 value: 7.17 - type: mrr_at_1 value: 34.694 - type: mrr_at_10 value: 42.931000000000004 - type: mrr_at_100 value: 44.81 - type: mrr_at_1000 value: 44.81 - type: mrr_at_3 value: 38.435 - type: mrr_at_5 value: 41.701 - type: ndcg_at_1 value: 31.633 - type: ndcg_at_10 value: 21.163 - type: ndcg_at_100 value: 33.306000000000004 - type: ndcg_at_1000 value: 45.275999999999996 - type: ndcg_at_3 value: 25.685999999999996 - type: ndcg_at_5 value: 23.732 - type: precision_at_1 value: 34.694 - type: precision_at_10 value: 17.755000000000003 - type: precision_at_100 value: 6.938999999999999 - type: precision_at_1000 value: 1.48 - type: precision_at_3 value: 25.85 - type: precision_at_5 value: 23.265 - type: recall_at_1 value: 2.896 - type: recall_at_10 value: 13.333999999999998 - type: recall_at_100 value: 43.517 - type: recall_at_1000 value: 79.836 - type: recall_at_3 value: 6.306000000000001 - type: recall_at_5 value: 8.825 - task: type: Classification dataset: name: MTEB ToxicConversationsClassification type: mteb/toxic_conversations_50k config: default split: test revision: d7c0de2777da35d6aae2200a62c6e0e5af397c4c metrics: - type: accuracy value: 69.3874 - type: ap value: 13.829909072469423 - type: f1 value: 53.54534203543492 - task: type: Classification dataset: name: MTEB TweetSentimentExtractionClassification type: mteb/tweet_sentiment_extraction config: default split: test revision: d604517c81ca91fe16a244d1248fc021f9ecee7a metrics: - type: accuracy value: 62.62026032823995 - type: f1 value: 62.85251350485221 - task: type: Clustering dataset: name: MTEB TwentyNewsgroupsClustering type: mteb/twentynewsgroups-clustering config: default split: test revision: 6125ec4e24fa026cec8a478383ee943acfbd5449 metrics: - type: v_measure value: 33.21527881409797 - task: type: PairClassification dataset: name: MTEB TwitterSemEval2015 type: mteb/twittersemeval2015-pairclassification config: default split: test revision: 70970daeab8776df92f5ea462b6173c0b46fd2d1 metrics: - type: cos_sim_accuracy value: 84.97943613280086 - type: cos_sim_ap value: 70.75454316885921 - type: cos_sim_f1 value: 65.38274012676743 - type: cos_sim_precision value: 60.761214318078835 - type: 
cos_sim_recall value: 70.76517150395777 - type: dot_accuracy value: 79.0546581629612 - type: dot_ap value: 47.3197121792147 - type: dot_f1 value: 49.20106524633821 - type: dot_precision value: 42.45499808502489 - type: dot_recall value: 58.49604221635884 - type: euclidean_accuracy value: 85.08076533349228 - type: euclidean_ap value: 70.95016106374474 - type: euclidean_f1 value: 65.43987900176455 - type: euclidean_precision value: 62.64478764478765 - type: euclidean_recall value: 68.49604221635884 - type: manhattan_accuracy value: 84.93771234428085 - type: manhattan_ap value: 70.63668388755362 - type: manhattan_f1 value: 65.23895401262398 - type: manhattan_precision value: 56.946084218811485 - type: manhattan_recall value: 76.35883905013192 - type: max_accuracy value: 85.08076533349228 - type: max_ap value: 70.95016106374474 - type: max_f1 value: 65.43987900176455 - task: type: PairClassification dataset: name: MTEB TwitterURLCorpus type: mteb/twitterurlcorpus-pairclassification config: default split: test revision: 8b6510b0b1fa4e4c4f879467980e9be563ec1cdf metrics: - type: cos_sim_accuracy value: 88.69096130709822 - type: cos_sim_ap value: 84.82526278228542 - type: cos_sim_f1 value: 77.65485060585536 - type: cos_sim_precision value: 75.94582658619167 - type: cos_sim_recall value: 79.44256236526024 - type: dot_accuracy value: 80.97954748321496 - type: dot_ap value: 64.81642914145866 - type: dot_f1 value: 60.631996987229975 - type: dot_precision value: 54.5897293631712 - type: dot_recall value: 68.17831844779796 - type: euclidean_accuracy value: 88.6987231730508 - type: euclidean_ap value: 84.80003825477253 - type: euclidean_f1 value: 77.67194179854496 - type: euclidean_precision value: 75.7128235122094 - type: euclidean_recall value: 79.73514012935017 - type: manhattan_accuracy value: 88.62692591298949 - type: manhattan_ap value: 84.80451408255276 - type: manhattan_f1 value: 77.69888949572183 - type: manhattan_precision value: 73.70311528631622 - type: manhattan_recall value: 82.15275639051433 - type: max_accuracy value: 88.6987231730508 - type: max_ap value: 84.82526278228542 - type: max_f1 value: 77.69888949572183 --- ## Multilingual-E5-small [Multilingual E5 Text Embeddings: A Technical Report](https://arxiv.org/pdf/2402.05672). Liang Wang, Nan Yang, Xiaolong Huang, Linjun Yang, Rangan Majumder, Furu Wei, arXiv 2024 This model has 12 layers and the embedding size is 384. ## Usage Below is an example to encode queries and passages from the MS-MARCO passage ranking dataset. ```python import torch.nn.functional as F from torch import Tensor from transformers import AutoTokenizer, AutoModel def average_pool(last_hidden_states: Tensor, attention_mask: Tensor) -> Tensor: last_hidden = last_hidden_states.masked_fill(~attention_mask[..., None].bool(), 0.0) return last_hidden.sum(dim=1) / attention_mask.sum(dim=1)[..., None] # Each input text should start with "query: " or "passage: ", even for non-English texts. # For tasks other than retrieval, you can simply use the "query: " prefix. input_texts = ['query: how much protein should a female eat', 'query: 南瓜的家常做法', "passage: As a general guideline, the CDC's average requirement of protein for women ages 19 to 70 is 46 grams per day. But, as you can see from this chart, you'll need to increase that if you're expecting or training for a marathon. 
Check out the chart below to see how much protein you should be eating each day.", "passage: 1.清炒南瓜丝 原料:嫩南瓜半个 调料:葱、盐、白糖、鸡精 做法: 1、南瓜用刀薄薄的削去表面一层皮,用勺子刮去瓤 2、擦成细丝(没有擦菜板就用刀慢慢切成细丝) 3、锅烧热放油,入葱花煸出香味 4、入南瓜丝快速翻炒一分钟左右,放盐、一点白糖和鸡精调味出锅 2.香葱炒南瓜 原料:南瓜1只 调料:香葱、蒜末、橄榄油、盐 做法: 1、将南瓜去皮,切成片 2、油锅8成热后,将蒜末放入爆香 3、爆香后,将南瓜片放入,翻炒 4、在翻炒的同时,可以不时地往锅里加水,但不要太多 5、放入盐,炒匀 6、南瓜差不多软和绵了之后,就可以关火 7、撒入香葱,即可出锅"] tokenizer = AutoTokenizer.from_pretrained('intfloat/multilingual-e5-small') model = AutoModel.from_pretrained('intfloat/multilingual-e5-small') # Tokenize the input texts batch_dict = tokenizer(input_texts, max_length=512, padding=True, truncation=True, return_tensors='pt') outputs = model(**batch_dict) embeddings = average_pool(outputs.last_hidden_state, batch_dict['attention_mask']) # normalize embeddings embeddings = F.normalize(embeddings, p=2, dim=1) scores = (embeddings[:2] @ embeddings[2:].T) * 100 print(scores.tolist()) ```

## Supported Languages

This model is initialized from [microsoft/Multilingual-MiniLM-L12-H384](https://huggingface.co/microsoft/Multilingual-MiniLM-L12-H384) and continually trained on a mixture of multilingual datasets. It supports 100 languages from xlm-roberta, but low-resource languages may see performance degradation.

## Training Details

**Initialization**: [microsoft/Multilingual-MiniLM-L12-H384](https://huggingface.co/microsoft/Multilingual-MiniLM-L12-H384)

**First stage**: contrastive pre-training with weak supervision

| Dataset | Weak supervision | # of text pairs |
|---|---|---|
| Filtered [mC4](https://huggingface.co/datasets/mc4) | (title, page content) | 1B |
| [CC News](https://huggingface.co/datasets/intfloat/multilingual_cc_news) | (title, news content) | 400M |
| [NLLB](https://huggingface.co/datasets/allenai/nllb) | translation pairs | 2.4B |
| [Wikipedia](https://huggingface.co/datasets/intfloat/wikipedia) | (hierarchical section title, passage) | 150M |
| Filtered [Reddit](https://www.reddit.com/) | (comment, response) | 800M |
| [S2ORC](https://github.com/allenai/s2orc) | (title, abstract) and citation pairs | 100M |
| [Stackexchange](https://stackexchange.com/) | (question, answer) | 50M |
| [xP3](https://huggingface.co/datasets/bigscience/xP3) | (input prompt, response) | 80M |
| [Miscellaneous unsupervised SBERT data](https://huggingface.co/sentence-transformers/all-MiniLM-L6-v2) | - | 10M |

**Second stage**: supervised fine-tuning

| Dataset | Language | # of text pairs |
|---|---|---|
| [MS MARCO](https://microsoft.github.io/msmarco/) | English | 500k |
| [NQ](https://github.com/facebookresearch/DPR) | English | 70k |
| [Trivia QA](https://github.com/facebookresearch/DPR) | English | 60k |
| [NLI from SimCSE](https://github.com/princeton-nlp/SimCSE) | English | <300k |
| [ELI5](https://huggingface.co/datasets/eli5) | English | 500k |
| [DuReader Retrieval](https://github.com/baidu/DuReader/tree/master/DuReader-Retrieval) | Chinese | 86k |
| [KILT Fever](https://huggingface.co/datasets/kilt_tasks) | English | 70k |
| [KILT HotpotQA](https://huggingface.co/datasets/kilt_tasks) | English | 70k |
| [SQuAD](https://huggingface.co/datasets/squad) | English | 87k |
| [Quora](https://huggingface.co/datasets/quora) | English | 150k |
| [Mr. TyDi](https://huggingface.co/datasets/castorini/mr-tydi) | 11 languages | 50k |
| [MIRACL](https://huggingface.co/datasets/miracl/miracl) | 16 languages | 40k |

For all labeled datasets, we only use their training sets for fine-tuning. For other training details, please refer to our paper at [https://arxiv.org/pdf/2402.05672](https://arxiv.org/pdf/2402.05672).

## Benchmark Results on [Mr. TyDi](https://arxiv.org/abs/2108.08787)

| Model | Avg MRR@10 | | ar | bn | en | fi | id | ja | ko | ru | sw | te | th |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
| BM25 | 33.3 | | 36.7 | 41.3 | 15.1 | 28.8 | 38.2 | 21.7 | 28.1 | 32.9 | 39.6 | 42.4 | 41.7 |
| mDPR | 16.7 | | 26.0 | 25.8 | 16.2 | 11.3 | 14.6 | 18.1 | 21.9 | 18.5 | 7.3 | 10.6 | 13.5 |
| BM25 + mDPR | 41.7 | | 49.1 | 53.5 | 28.4 | 36.5 | 45.5 | 35.5 | 36.2 | 42.7 | 40.5 | 42.0 | 49.2 |
| multilingual-e5-small | 64.4 | | 71.5 | 66.3 | 54.5 | 57.7 | 63.2 | 55.4 | 54.3 | 60.8 | 65.4 | 89.1 | 70.1 |
| multilingual-e5-base | 65.9 | | 72.3 | 65.0 | 58.5 | 60.8 | 64.9 | 56.6 | 55.8 | 62.7 | 69.0 | 86.6 | 72.7 |
| multilingual-e5-large | **70.5** | | 77.5 | 73.2 | 60.8 | 66.8 | 68.5 | 62.5 | 61.6 | 65.8 | 72.7 | 90.2 | 76.2 |

## MTEB Benchmark Evaluation

Check out [unilm/e5](https://github.com/microsoft/unilm/tree/master/e5) to reproduce evaluation results on the [BEIR](https://arxiv.org/abs/2104.08663) and [MTEB benchmark](https://arxiv.org/abs/2210.07316).
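As an alternative to the official scripts, the open-source `mteb` package can run individual MTEB tasks against a Sentence Transformers checkpoint. The sketch below is only an illustration (the task name and output folder are arbitrary choices, and the `mteb` API has changed across versions); note that a plain `SentenceTransformer` wrapper does not add the "query: "/"passage: " prefixes that this model expects, which the official unilm/e5 scripts handle.

```python
from mteb import MTEB
from sentence_transformers import SentenceTransformer

# Minimal, illustrative MTEB run. Unlike the official unilm/e5 scripts,
# this sketch does not prepend the "query: " / "passage: " prefixes.
model = SentenceTransformer("intfloat/multilingual-e5-small")
evaluation = MTEB(tasks=["Banking77Classification"])  # any MTEB task name
evaluation.run(model, output_folder="results/multilingual-e5-small")
```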
## Support for Sentence Transformers

Below is an example for usage with sentence_transformers.

```python
from sentence_transformers import SentenceTransformer

model = SentenceTransformer('intfloat/multilingual-e5-small')
input_texts = [
    'query: how much protein should a female eat',
    'query: 南瓜的家常做法',
    "passage: As a general guideline, the CDC's average requirement of protein for women ages 19 to 70 is 46 grams per day. But, as you can see from this chart, you'll need to increase that if you're expecting or training for a marathon. Check out the chart below to see how much protein you should be eating each day.",
    "passage: 1.清炒南瓜丝 原料:嫩南瓜半个 调料:葱、盐、白糖、鸡精 做法: 1、南瓜用刀薄薄的削去表面一层皮,用勺子刮去瓤 2、擦成细丝(没有擦菜板就用刀慢慢切成细丝) 3、锅烧热放油,入葱花煸出香味 4、入南瓜丝快速翻炒一分钟左右,放盐、一点白糖和鸡精调味出锅 2.香葱炒南瓜 原料:南瓜1只 调料:香葱、蒜末、橄榄油、盐 做法: 1、将南瓜去皮,切成片 2、油锅8成热后,将蒜末放入爆香 3、爆香后,将南瓜片放入,翻炒 4、在翻炒的同时,可以不时地往锅里加水,但不要太多 5、放入盐,炒匀 6、南瓜差不多软和绵了之后,就可以关火 7、撒入香葱,即可出锅"
]
embeddings = model.encode(input_texts, normalize_embeddings=True)
```

Package requirements: `pip install sentence_transformers~=2.2.2`

Contributors: [michaelfeil](https://huggingface.co/michaelfeil)

## FAQ

**1. Do I need to add the prefix "query: " and "passage: " to input texts?**

Yes, this is how the model is trained; otherwise you will see a performance degradation. Here are some rules of thumb:

- Use "query: " and "passage: " correspondingly for asymmetric tasks such as passage retrieval in open QA and ad-hoc information retrieval.
- Use the "query: " prefix for symmetric tasks such as semantic similarity, bitext mining, and paraphrase retrieval.
- Use the "query: " prefix if you want to use embeddings as features, such as linear probing classification or clustering.
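As a quick illustration of the prefix rules above, here is a minimal sketch for a symmetric task: both sides of a semantic-similarity pair receive the "query: " prefix (the sentence pair itself is just a made-up example).

```python
from sentence_transformers import SentenceTransformer

model = SentenceTransformer('intfloat/multilingual-e5-small')

# Symmetric task: both sentences use the "query: " prefix.
sentences = [
    'query: A man is eating food.',
    'query: A man is having a meal.',
]
embeddings = model.encode(sentences, normalize_embeddings=True)

# With normalized embeddings, the dot product equals the cosine similarity.
print(float(embeddings[0] @ embeddings[1]))
```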
**2. Why are my reproduced results slightly different from those reported in the model card?**

Different versions of `transformers` and `pytorch` could cause negligible but non-zero performance differences.

**3. Why do the cosine similarity scores distribute around 0.7 to 1.0?**

This is a known and expected behavior, as we use a low temperature of 0.01 for the InfoNCE contrastive loss. For text embedding tasks like text retrieval or semantic similarity, what matters is the relative order of the scores rather than the absolute values, so this should not be an issue.

## Citation

If you find our paper or models helpful, please consider citing as follows:

```
@article{wang2024multilingual,
  title={Multilingual E5 Text Embeddings: A Technical Report},
  author={Wang, Liang and Yang, Nan and Huang, Xiaolong and Yang, Linjun and Majumder, Rangan and Wei, Furu},
  journal={arXiv preprint arXiv:2402.05672},
  year={2024}
}
```

## Limitations

Long texts will be truncated to at most 512 tokens.
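As a rough, purely illustrative check of this limit (the input text and repetition count below are made up), you can inspect how the tokenizer call from the usage example truncates over-long inputs:

```python
from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained('intfloat/multilingual-e5-small')

# Illustrative over-long input: everything beyond the 512-token limit is discarded.
long_text = 'passage: ' + 'protein ' * 1000
input_ids = tokenizer(long_text, max_length=512, truncation=True)['input_ids']
print(len(input_ids))  # capped at 512
```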
[ "BIOSSES", "SCIFACT" ]
Alibaba-NLP/gme-Qwen2-VL-2B-Instruct
Alibaba-NLP
sentence-similarity
[ "sentence-transformers", "safetensors", "qwen2_vl", "image-text-to-text", "mteb", "transformers", "Qwen2-VL", "sentence-similarity", "vidore", "en", "zh", "arxiv:2412.16855", "base_model:Qwen/Qwen2-VL-2B-Instruct", "base_model:finetune:Qwen/Qwen2-VL-2B-Instruct", "license:apache-2.0", "model-index", "autotrain_compatible", "text-generation-inference", "endpoints_compatible", "region:us" ]
"2024-12-21T03:45:36Z"
2025-01-21T11:54:11+00:00
65,883
47
--- base_model: - Qwen/Qwen2-VL-2B-Instruct language: - en - zh license: apache-2.0 tags: - mteb - sentence-transformers - transformers - Qwen2-VL - sentence-similarity - vidore model-index: - name: external results: - task: type: STS dataset: name: MTEB AFQMC type: C-MTEB/AFQMC config: default split: validation revision: b44c3b011063adb25877c13823db83bb193913c4 metrics: - type: cos_sim_pearson value: 61.03190209456061 - type: cos_sim_spearman value: 67.54853383020948 - type: euclidean_pearson value: 65.38958681599493 - type: euclidean_spearman value: 67.54853383020948 - type: manhattan_pearson value: 65.25341659273157 - type: manhattan_spearman value: 67.34190190683134 - task: type: STS dataset: name: MTEB ATEC type: C-MTEB/ATEC config: default split: test revision: 0f319b1142f28d00e055a6770f3f726ae9b7d865 metrics: - type: cos_sim_pearson value: 50.83794357648487 - type: cos_sim_spearman value: 54.03230997664373 - type: euclidean_pearson value: 55.2072028123375 - type: euclidean_spearman value: 54.032311102613264 - type: manhattan_pearson value: 55.05163232251946 - type: manhattan_spearman value: 53.81272176804127 - task: type: Classification dataset: name: MTEB AmazonCounterfactualClassification (en) type: mteb/amazon_counterfactual config: en split: test revision: e8379541af4e31359cca9fbcf4b00f2671dba205 metrics: - type: accuracy value: 72.55223880597015 - type: ap value: 35.01515316721116 - type: f1 value: 66.44086070814382 - task: type: Classification dataset: name: MTEB AmazonPolarityClassification type: mteb/amazon_polarity config: default split: test revision: e2d317d38cd51312af73b3d32a06d1a08b442046 metrics: - type: accuracy value: 96.75819999999999 - type: ap value: 95.51009242092881 - type: f1 value: 96.75713119357414 - task: type: Classification dataset: name: MTEB AmazonReviewsClassification (en) type: mteb/amazon_reviews_multi config: en split: test revision: 1399c76144fd37290681b995c656ef9b2e06e26d metrics: - type: accuracy value: 61.971999999999994 - type: f1 value: 60.50745575187704 - task: type: Classification dataset: name: MTEB AmazonReviewsClassification (zh) type: mteb/amazon_reviews_multi config: zh split: test revision: 1399c76144fd37290681b995c656ef9b2e06e26d metrics: - type: accuracy value: 53.49 - type: f1 value: 51.576550662258434 - task: type: Retrieval dataset: name: MTEB ArguAna type: mteb/arguana config: default split: test revision: c22ab2a51041ffd869aaddef7af8d8215647e41a metrics: - type: map_at_1 value: 36.272999999999996 - type: map_at_10 value: 52.782 - type: map_at_100 value: 53.339999999999996 - type: map_at_1000 value: 53.342999999999996 - type: map_at_3 value: 48.4 - type: map_at_5 value: 50.882000000000005 - type: mrr_at_1 value: 36.984 - type: mrr_at_10 value: 53.052 - type: mrr_at_100 value: 53.604 - type: mrr_at_1000 value: 53.607000000000006 - type: mrr_at_3 value: 48.613 - type: mrr_at_5 value: 51.159 - type: ndcg_at_1 value: 36.272999999999996 - type: ndcg_at_10 value: 61.524 - type: ndcg_at_100 value: 63.796 - type: ndcg_at_1000 value: 63.869 - type: ndcg_at_3 value: 52.456 - type: ndcg_at_5 value: 56.964000000000006 - type: precision_at_1 value: 36.272999999999996 - type: precision_at_10 value: 8.926 - type: precision_at_100 value: 0.989 - type: precision_at_1000 value: 0.1 - type: precision_at_3 value: 21.407999999999998 - type: precision_at_5 value: 15.049999999999999 - type: recall_at_1 value: 36.272999999999996 - type: recall_at_10 value: 89.25999999999999 - type: recall_at_100 value: 98.933 - type: recall_at_1000 value: 99.502 - type: 
recall_at_3 value: 64.225 - type: recall_at_5 value: 75.249 - task: type: Clustering dataset: name: MTEB ArxivClusteringP2P type: mteb/arxiv-clustering-p2p config: default split: test revision: a122ad7f3f0291bf49cc6f4d32aa80929df69d5d metrics: - type: v_measure value: 52.45236368396085 - task: type: Clustering dataset: name: MTEB ArxivClusteringS2S type: mteb/arxiv-clustering-s2s config: default split: test revision: f910caf1a6075f7329cdf8c1a6135696f37dbd53 metrics: - type: v_measure value: 46.83781937870832 - task: type: Reranking dataset: name: MTEB AskUbuntuDupQuestions type: mteb/askubuntudupquestions-reranking config: default split: test revision: 2000358ca161889fa9c082cb41daa8dcfb161a54 metrics: - type: map value: 60.653430349851746 - type: mrr value: 74.28736314470387 - task: type: STS dataset: name: MTEB BIOSSES type: mteb/biosses-sts config: default split: test revision: d3fb88f8f02e40887cd149695127462bbcf29b4a metrics: - type: cos_sim_pearson value: 89.18568151905953 - type: cos_sim_spearman value: 86.47666922475281 - type: euclidean_pearson value: 87.25416218056225 - type: euclidean_spearman value: 86.47666922475281 - type: manhattan_pearson value: 87.04960508086356 - type: manhattan_spearman value: 86.73992823533615 - task: type: STS dataset: name: MTEB BQ type: C-MTEB/BQ config: default split: test revision: e3dda5e115e487b39ec7e618c0c6a29137052a55 metrics: - type: cos_sim_pearson value: 75.7464284612374 - type: cos_sim_spearman value: 77.71894224189296 - type: euclidean_pearson value: 77.63454068918787 - type: euclidean_spearman value: 77.71894224189296 - type: manhattan_pearson value: 77.58744810404339 - type: manhattan_spearman value: 77.63293552726073 - task: type: Classification dataset: name: MTEB Banking77Classification type: mteb/banking77 config: default split: test revision: 0fd18e25b25c072e09e0d92ab615fda904d66300 metrics: - type: accuracy value: 80.2435064935065 - type: f1 value: 79.44078343737895 - task: type: Clustering dataset: name: MTEB BiorxivClusteringP2P type: mteb/biorxiv-clustering-p2p config: default split: test revision: 65b79d1d13f80053f67aca9498d9402c2d9f1f40 metrics: - type: v_measure value: 44.68220155432257 - task: type: Clustering dataset: name: MTEB BiorxivClusteringS2S type: mteb/biorxiv-clustering-s2s config: default split: test revision: 258694dd0231531bc1fd9de6ceb52a0853c6d908 metrics: - type: v_measure value: 40.666150477589284 - task: type: Clustering dataset: name: MTEB CLSClusteringP2P type: C-MTEB/CLSClusteringP2P config: default split: test revision: 4b6227591c6c1a73bc76b1055f3b7f3588e72476 metrics: - type: v_measure value: 44.23533333311907 - task: type: Clustering dataset: name: MTEB CLSClusteringS2S type: C-MTEB/CLSClusteringS2S config: default split: test revision: e458b3f5414b62b7f9f83499ac1f5497ae2e869f metrics: - type: v_measure value: 43.01114481307774 - task: type: Reranking dataset: name: MTEB CMedQAv1 type: C-MTEB/CMedQAv1-reranking config: default split: test revision: 8d7f1e942507dac42dc58017c1a001c3717da7df metrics: - type: map value: 86.4349853821696 - type: mrr value: 88.80150793650795 - task: type: Reranking dataset: name: MTEB CMedQAv2 type: C-MTEB/CMedQAv2-reranking config: default split: test revision: 23d186750531a14a0357ca22cd92d712fd512ea0 metrics: - type: map value: 87.56417400982208 - type: mrr value: 89.85813492063491 - task: type: Retrieval dataset: name: MTEB CQADupstackAndroidRetrieval type: BeIR/cqadupstack config: default split: test revision: f46a197baaae43b4f621051089b82a364682dfeb metrics: - type: 
map_at_1 value: 30.623 - type: map_at_10 value: 40.482 - type: map_at_100 value: 41.997 - type: map_at_1000 value: 42.135 - type: map_at_3 value: 37.754 - type: map_at_5 value: 39.031 - type: mrr_at_1 value: 37.482 - type: mrr_at_10 value: 46.311 - type: mrr_at_100 value: 47.211999999999996 - type: mrr_at_1000 value: 47.27 - type: mrr_at_3 value: 44.157999999999994 - type: mrr_at_5 value: 45.145 - type: ndcg_at_1 value: 37.482 - type: ndcg_at_10 value: 46.142 - type: ndcg_at_100 value: 51.834 - type: ndcg_at_1000 value: 54.164 - type: ndcg_at_3 value: 42.309000000000005 - type: ndcg_at_5 value: 43.485 - type: precision_at_1 value: 37.482 - type: precision_at_10 value: 8.455 - type: precision_at_100 value: 1.3780000000000001 - type: precision_at_1000 value: 0.188 - type: precision_at_3 value: 20.172 - type: precision_at_5 value: 13.705 - type: recall_at_1 value: 30.623 - type: recall_at_10 value: 56.77100000000001 - type: recall_at_100 value: 80.034 - type: recall_at_1000 value: 94.62899999999999 - type: recall_at_3 value: 44.663000000000004 - type: recall_at_5 value: 48.692 - task: type: Retrieval dataset: name: MTEB CQADupstackEnglishRetrieval type: BeIR/cqadupstack config: default split: test revision: ad9991cb51e31e31e430383c75ffb2885547b5f0 metrics: - type: map_at_1 value: 27.941 - type: map_at_10 value: 38.437 - type: map_at_100 value: 39.625 - type: map_at_1000 value: 39.753 - type: map_at_3 value: 35.388999999999996 - type: map_at_5 value: 37.113 - type: mrr_at_1 value: 34.522000000000006 - type: mrr_at_10 value: 43.864999999999995 - type: mrr_at_100 value: 44.533 - type: mrr_at_1000 value: 44.580999999999996 - type: mrr_at_3 value: 41.55 - type: mrr_at_5 value: 42.942 - type: ndcg_at_1 value: 34.522000000000006 - type: ndcg_at_10 value: 44.330000000000005 - type: ndcg_at_100 value: 48.61 - type: ndcg_at_1000 value: 50.712999999999994 - type: ndcg_at_3 value: 39.834 - type: ndcg_at_5 value: 42.016 - type: precision_at_1 value: 34.522000000000006 - type: precision_at_10 value: 8.471 - type: precision_at_100 value: 1.3379999999999999 - type: precision_at_1000 value: 0.182 - type: precision_at_3 value: 19.363 - type: precision_at_5 value: 13.898 - type: recall_at_1 value: 27.941 - type: recall_at_10 value: 55.336 - type: recall_at_100 value: 73.51100000000001 - type: recall_at_1000 value: 86.636 - type: recall_at_3 value: 42.54 - type: recall_at_5 value: 48.392 - task: type: Retrieval dataset: name: MTEB CQADupstackGamingRetrieval type: BeIR/cqadupstack config: default split: test revision: 4885aa143210c98657558c04aaf3dc47cfb54340 metrics: - type: map_at_1 value: 32.681 - type: map_at_10 value: 45.48 - type: map_at_100 value: 46.542 - type: map_at_1000 value: 46.604 - type: map_at_3 value: 42.076 - type: map_at_5 value: 44.076 - type: mrr_at_1 value: 37.492 - type: mrr_at_10 value: 48.746 - type: mrr_at_100 value: 49.485 - type: mrr_at_1000 value: 49.517 - type: mrr_at_3 value: 45.998 - type: mrr_at_5 value: 47.681000000000004 - type: ndcg_at_1 value: 37.492 - type: ndcg_at_10 value: 51.778999999999996 - type: ndcg_at_100 value: 56.294 - type: ndcg_at_1000 value: 57.58 - type: ndcg_at_3 value: 45.856 - type: ndcg_at_5 value: 48.968 - type: precision_at_1 value: 37.492 - type: precision_at_10 value: 8.620999999999999 - type: precision_at_100 value: 1.189 - type: precision_at_1000 value: 0.135 - type: precision_at_3 value: 20.773 - type: precision_at_5 value: 14.596 - type: recall_at_1 value: 32.681 - type: recall_at_10 value: 67.196 - type: recall_at_100 value: 87.027 - type: 
recall_at_1000 value: 96.146 - type: recall_at_3 value: 51.565000000000005 - type: recall_at_5 value: 59.123999999999995 - task: type: Retrieval dataset: name: MTEB CQADupstackGisRetrieval type: BeIR/cqadupstack config: default split: test revision: 5003b3064772da1887988e05400cf3806fe491f2 metrics: - type: map_at_1 value: 22.421 - type: map_at_10 value: 30.127 - type: map_at_100 value: 31.253999999999998 - type: map_at_1000 value: 31.344 - type: map_at_3 value: 27.673 - type: map_at_5 value: 29.182000000000002 - type: mrr_at_1 value: 24.068 - type: mrr_at_10 value: 31.857000000000003 - type: mrr_at_100 value: 32.808 - type: mrr_at_1000 value: 32.881 - type: mrr_at_3 value: 29.397000000000002 - type: mrr_at_5 value: 30.883 - type: ndcg_at_1 value: 24.068 - type: ndcg_at_10 value: 34.642 - type: ndcg_at_100 value: 40.327 - type: ndcg_at_1000 value: 42.55 - type: ndcg_at_3 value: 29.868 - type: ndcg_at_5 value: 32.461 - type: precision_at_1 value: 24.068 - type: precision_at_10 value: 5.390000000000001 - type: precision_at_100 value: 0.873 - type: precision_at_1000 value: 0.109 - type: precision_at_3 value: 12.692999999999998 - type: precision_at_5 value: 9.107 - type: recall_at_1 value: 22.421 - type: recall_at_10 value: 46.846 - type: recall_at_100 value: 73.409 - type: recall_at_1000 value: 90.06 - type: recall_at_3 value: 34.198 - type: recall_at_5 value: 40.437 - task: type: Retrieval dataset: name: MTEB CQADupstackMathematicaRetrieval type: BeIR/cqadupstack config: default split: test revision: 90fceea13679c63fe563ded68f3b6f06e50061de metrics: - type: map_at_1 value: 16.494 - type: map_at_10 value: 24.4 - type: map_at_100 value: 25.718999999999998 - type: map_at_1000 value: 25.840000000000003 - type: map_at_3 value: 21.731 - type: map_at_5 value: 23.247999999999998 - type: mrr_at_1 value: 20.274 - type: mrr_at_10 value: 28.866000000000003 - type: mrr_at_100 value: 29.889 - type: mrr_at_1000 value: 29.957 - type: mrr_at_3 value: 26.284999999999997 - type: mrr_at_5 value: 27.79 - type: ndcg_at_1 value: 20.274 - type: ndcg_at_10 value: 29.666999999999998 - type: ndcg_at_100 value: 36.095 - type: ndcg_at_1000 value: 38.87 - type: ndcg_at_3 value: 24.672 - type: ndcg_at_5 value: 27.106 - type: precision_at_1 value: 20.274 - type: precision_at_10 value: 5.5969999999999995 - type: precision_at_100 value: 1.04 - type: precision_at_1000 value: 0.14100000000000001 - type: precision_at_3 value: 12.023 - type: precision_at_5 value: 8.98 - type: recall_at_1 value: 16.494 - type: recall_at_10 value: 41.400999999999996 - type: recall_at_100 value: 69.811 - type: recall_at_1000 value: 89.422 - type: recall_at_3 value: 27.834999999999997 - type: recall_at_5 value: 33.774 - task: type: Retrieval dataset: name: MTEB CQADupstackPhysicsRetrieval type: BeIR/cqadupstack config: default split: test revision: 79531abbd1fb92d06c6d6315a0cbbbf5bb247ea4 metrics: - type: map_at_1 value: 26.150000000000002 - type: map_at_10 value: 36.012 - type: map_at_100 value: 37.377 - type: map_at_1000 value: 37.497 - type: map_at_3 value: 32.712 - type: map_at_5 value: 34.475 - type: mrr_at_1 value: 32.05 - type: mrr_at_10 value: 41.556 - type: mrr_at_100 value: 42.451 - type: mrr_at_1000 value: 42.498000000000005 - type: mrr_at_3 value: 38.659 - type: mrr_at_5 value: 40.314 - type: ndcg_at_1 value: 32.05 - type: ndcg_at_10 value: 42.132 - type: ndcg_at_100 value: 48.028999999999996 - type: ndcg_at_1000 value: 50.229 - type: ndcg_at_3 value: 36.622 - type: ndcg_at_5 value: 39.062000000000005 - type: precision_at_1 value: 32.05 - 
type: precision_at_10 value: 7.767 - type: precision_at_100 value: 1.269 - type: precision_at_1000 value: 0.164 - type: precision_at_3 value: 17.355999999999998 - type: precision_at_5 value: 12.474 - type: recall_at_1 value: 26.150000000000002 - type: recall_at_10 value: 55.205000000000005 - type: recall_at_100 value: 80.2 - type: recall_at_1000 value: 94.524 - type: recall_at_3 value: 39.322 - type: recall_at_5 value: 45.761 - task: type: Retrieval dataset: name: MTEB CQADupstackProgrammersRetrieval type: BeIR/cqadupstack config: default split: test revision: 6184bc1440d2dbc7612be22b50686b8826d22b32 metrics: - type: map_at_1 value: 23.741 - type: map_at_10 value: 33.51 - type: map_at_100 value: 34.882999999999996 - type: map_at_1000 value: 34.995 - type: map_at_3 value: 30.514000000000003 - type: map_at_5 value: 32.085 - type: mrr_at_1 value: 28.653000000000002 - type: mrr_at_10 value: 38.059 - type: mrr_at_100 value: 39.050000000000004 - type: mrr_at_1000 value: 39.107 - type: mrr_at_3 value: 35.445 - type: mrr_at_5 value: 36.849 - type: ndcg_at_1 value: 28.653000000000002 - type: ndcg_at_10 value: 39.186 - type: ndcg_at_100 value: 45.301 - type: ndcg_at_1000 value: 47.547 - type: ndcg_at_3 value: 34.103 - type: ndcg_at_5 value: 36.239 - type: precision_at_1 value: 28.653000000000002 - type: precision_at_10 value: 7.295 - type: precision_at_100 value: 1.2189999999999999 - type: precision_at_1000 value: 0.159 - type: precision_at_3 value: 16.438 - type: precision_at_5 value: 11.804 - type: recall_at_1 value: 23.741 - type: recall_at_10 value: 51.675000000000004 - type: recall_at_100 value: 78.13799999999999 - type: recall_at_1000 value: 93.12700000000001 - type: recall_at_3 value: 37.033 - type: recall_at_5 value: 42.793 - task: type: Retrieval dataset: name: MTEB CQADupstackRetrieval type: BeIR/cqadupstack config: default split: test revision: 4ffe81d471b1924886b33c7567bfb200e9eec5c4 metrics: - type: map_at_1 value: 25.281666666666663 - type: map_at_10 value: 34.080666666666666 - type: map_at_100 value: 35.278749999999995 - type: map_at_1000 value: 35.40183333333333 - type: map_at_3 value: 31.45316666666667 - type: map_at_5 value: 32.92716666666667 - type: mrr_at_1 value: 29.78783333333333 - type: mrr_at_10 value: 38.077333333333335 - type: mrr_at_100 value: 38.936499999999995 - type: mrr_at_1000 value: 39.000249999999994 - type: mrr_at_3 value: 35.7735 - type: mrr_at_5 value: 37.07683333333334 - type: ndcg_at_1 value: 29.78783333333333 - type: ndcg_at_10 value: 39.18300000000001 - type: ndcg_at_100 value: 44.444750000000006 - type: ndcg_at_1000 value: 46.90316666666667 - type: ndcg_at_3 value: 34.69308333333333 - type: ndcg_at_5 value: 36.80316666666666 - type: precision_at_1 value: 29.78783333333333 - type: precision_at_10 value: 6.820749999999999 - type: precision_at_100 value: 1.1224166666666666 - type: precision_at_1000 value: 0.1525 - type: precision_at_3 value: 15.936333333333335 - type: precision_at_5 value: 11.282333333333334 - type: recall_at_1 value: 25.281666666666663 - type: recall_at_10 value: 50.282 - type: recall_at_100 value: 73.54558333333334 - type: recall_at_1000 value: 90.64241666666666 - type: recall_at_3 value: 37.800999999999995 - type: recall_at_5 value: 43.223000000000006 - type: map_at_1 value: 19.07 - type: map_at_10 value: 26.608999999999998 - type: map_at_100 value: 27.625 - type: map_at_1000 value: 27.743000000000002 - type: map_at_3 value: 24.532999999999998 - type: map_at_5 value: 25.671 - type: mrr_at_1 value: 20.518 - type: mrr_at_10 value: 28.541 - 
type: mrr_at_100 value: 29.453000000000003 - type: mrr_at_1000 value: 29.536 - type: mrr_at_3 value: 26.71 - type: mrr_at_5 value: 27.708 - type: ndcg_at_1 value: 20.518 - type: ndcg_at_10 value: 30.855 - type: ndcg_at_100 value: 35.973 - type: ndcg_at_1000 value: 38.827 - type: ndcg_at_3 value: 26.868 - type: ndcg_at_5 value: 28.74 - type: precision_at_1 value: 20.518 - type: precision_at_10 value: 4.843 - type: precision_at_100 value: 0.799 - type: precision_at_1000 value: 0.116 - type: precision_at_3 value: 11.645 - type: precision_at_5 value: 8.133 - type: recall_at_1 value: 19.07 - type: recall_at_10 value: 41.925000000000004 - type: recall_at_100 value: 65.68 - type: recall_at_1000 value: 86.713 - type: recall_at_3 value: 31.251 - type: recall_at_5 value: 35.653 - task: type: Retrieval dataset: name: MTEB CQADupstackStatsRetrieval type: BeIR/cqadupstack config: default split: test revision: 65ac3a16b8e91f9cee4c9828cc7c335575432a2a metrics: - type: map_at_1 value: 23.452 - type: map_at_10 value: 30.231 - type: map_at_100 value: 31.227 - type: map_at_1000 value: 31.338 - type: map_at_3 value: 28.083000000000002 - type: map_at_5 value: 29.125 - type: mrr_at_1 value: 25.613000000000003 - type: mrr_at_10 value: 32.62 - type: mrr_at_100 value: 33.469 - type: mrr_at_1000 value: 33.554 - type: mrr_at_3 value: 30.368000000000002 - type: mrr_at_5 value: 31.502999999999997 - type: ndcg_at_1 value: 25.613000000000003 - type: ndcg_at_10 value: 34.441 - type: ndcg_at_100 value: 39.253 - type: ndcg_at_1000 value: 42.105 - type: ndcg_at_3 value: 30.183 - type: ndcg_at_5 value: 31.917 - type: precision_at_1 value: 25.613000000000003 - type: precision_at_10 value: 5.367999999999999 - type: precision_at_100 value: 0.848 - type: precision_at_1000 value: 0.117 - type: precision_at_3 value: 12.73 - type: precision_at_5 value: 8.773 - type: recall_at_1 value: 23.452 - type: recall_at_10 value: 45.021 - type: recall_at_100 value: 66.563 - type: recall_at_1000 value: 87.713 - type: recall_at_3 value: 33.433 - type: recall_at_5 value: 37.637 - task: type: Retrieval dataset: name: MTEB CQADupstackTexRetrieval type: BeIR/cqadupstack config: default split: test revision: 46989137a86843e03a6195de44b09deda022eec7 metrics: - type: map_at_1 value: 16.11 - type: map_at_10 value: 22.832 - type: map_at_100 value: 23.829 - type: map_at_1000 value: 23.959 - type: map_at_3 value: 20.66 - type: map_at_5 value: 21.851000000000003 - type: mrr_at_1 value: 19.408 - type: mrr_at_10 value: 26.354 - type: mrr_at_100 value: 27.237000000000002 - type: mrr_at_1000 value: 27.32 - type: mrr_at_3 value: 24.243000000000002 - type: mrr_at_5 value: 25.430000000000003 - type: ndcg_at_1 value: 19.408 - type: ndcg_at_10 value: 27.239 - type: ndcg_at_100 value: 32.286 - type: ndcg_at_1000 value: 35.498000000000005 - type: ndcg_at_3 value: 23.244 - type: ndcg_at_5 value: 25.080999999999996 - type: precision_at_1 value: 19.408 - type: precision_at_10 value: 4.917 - type: precision_at_100 value: 0.874 - type: precision_at_1000 value: 0.133 - type: precision_at_3 value: 10.863 - type: precision_at_5 value: 7.887 - type: recall_at_1 value: 16.11 - type: recall_at_10 value: 37.075 - type: recall_at_100 value: 60.251999999999995 - type: recall_at_1000 value: 83.38600000000001 - type: recall_at_3 value: 25.901999999999997 - type: recall_at_5 value: 30.612000000000002 - task: type: Retrieval dataset: name: MTEB CQADupstackUnixRetrieval type: BeIR/cqadupstack config: default split: test revision: 6c6430d3a6d36f8d2a829195bc5dc94d7e063e53 metrics: - 
type: map_at_1 value: 25.941 - type: map_at_10 value: 33.711999999999996 - type: map_at_100 value: 34.926 - type: map_at_1000 value: 35.05 - type: map_at_3 value: 31.075000000000003 - type: map_at_5 value: 32.611000000000004 - type: mrr_at_1 value: 30.784 - type: mrr_at_10 value: 38.079 - type: mrr_at_100 value: 39.018 - type: mrr_at_1000 value: 39.09 - type: mrr_at_3 value: 35.603 - type: mrr_at_5 value: 36.988 - type: ndcg_at_1 value: 30.784 - type: ndcg_at_10 value: 38.586 - type: ndcg_at_100 value: 44.205 - type: ndcg_at_1000 value: 46.916000000000004 - type: ndcg_at_3 value: 33.899 - type: ndcg_at_5 value: 36.11 - type: precision_at_1 value: 30.784 - type: precision_at_10 value: 6.409 - type: precision_at_100 value: 1.034 - type: precision_at_1000 value: 0.13799999999999998 - type: precision_at_3 value: 15.112 - type: precision_at_5 value: 10.728 - type: recall_at_1 value: 25.941 - type: recall_at_10 value: 49.242999999999995 - type: recall_at_100 value: 73.85000000000001 - type: recall_at_1000 value: 92.782 - type: recall_at_3 value: 36.204 - type: recall_at_5 value: 41.908 - task: type: Retrieval dataset: name: MTEB CQADupstackWebmastersRetrieval type: BeIR/cqadupstack config: default split: test revision: 160c094312a0e1facb97e55eeddb698c0abe3571 metrics: - type: map_at_1 value: 24.401999999999997 - type: map_at_10 value: 33.195 - type: map_at_100 value: 34.699999999999996 - type: map_at_1000 value: 34.946 - type: map_at_3 value: 30.570999999999998 - type: map_at_5 value: 32.0 - type: mrr_at_1 value: 28.656 - type: mrr_at_10 value: 37.039 - type: mrr_at_100 value: 38.049 - type: mrr_at_1000 value: 38.108 - type: mrr_at_3 value: 34.717 - type: mrr_at_5 value: 36.07 - type: ndcg_at_1 value: 28.656 - type: ndcg_at_10 value: 38.557 - type: ndcg_at_100 value: 44.511 - type: ndcg_at_1000 value: 47.346 - type: ndcg_at_3 value: 34.235 - type: ndcg_at_5 value: 36.260999999999996 - type: precision_at_1 value: 28.656 - type: precision_at_10 value: 7.312 - type: precision_at_100 value: 1.451 - type: precision_at_1000 value: 0.242 - type: precision_at_3 value: 15.942 - type: precision_at_5 value: 11.66 - type: recall_at_1 value: 24.401999999999997 - type: recall_at_10 value: 48.791000000000004 - type: recall_at_100 value: 76.211 - type: recall_at_1000 value: 93.92 - type: recall_at_3 value: 36.975 - type: recall_at_5 value: 42.01 - task: type: Retrieval dataset: name: MTEB ClimateFEVER type: mteb/climate-fever config: default split: test revision: 47f2ac6acb640fc46020b02a5b59fdda04d39380 metrics: - type: map_at_1 value: 18.762 - type: map_at_10 value: 32.412 - type: map_at_100 value: 34.506 - type: map_at_1000 value: 34.678 - type: map_at_3 value: 27.594 - type: map_at_5 value: 30.128 - type: mrr_at_1 value: 42.345 - type: mrr_at_10 value: 54.443 - type: mrr_at_100 value: 55.05799999999999 - type: mrr_at_1000 value: 55.076 - type: mrr_at_3 value: 51.553000000000004 - type: mrr_at_5 value: 53.269 - type: ndcg_at_1 value: 42.345 - type: ndcg_at_10 value: 42.304 - type: ndcg_at_100 value: 49.425000000000004 - type: ndcg_at_1000 value: 52.123 - type: ndcg_at_3 value: 36.271 - type: ndcg_at_5 value: 38.216 - type: precision_at_1 value: 42.345 - type: precision_at_10 value: 12.808 - type: precision_at_100 value: 2.062 - type: precision_at_1000 value: 0.258 - type: precision_at_3 value: 26.840000000000003 - type: precision_at_5 value: 20.052 - type: recall_at_1 value: 18.762 - type: recall_at_10 value: 47.976 - type: recall_at_100 value: 71.86 - type: recall_at_1000 value: 86.61999999999999 - type: 
recall_at_3 value: 32.708999999999996 - type: recall_at_5 value: 39.151 - task: type: Retrieval dataset: name: MTEB CmedqaRetrieval type: C-MTEB/CmedqaRetrieval config: default split: dev revision: cd540c506dae1cf9e9a59c3e06f42030d54e7301 metrics: - type: map_at_1 value: 24.871 - type: map_at_10 value: 37.208999999999996 - type: map_at_100 value: 38.993 - type: map_at_1000 value: 39.122 - type: map_at_3 value: 33.2 - type: map_at_5 value: 35.33 - type: mrr_at_1 value: 37.884 - type: mrr_at_10 value: 46.189 - type: mrr_at_100 value: 47.147 - type: mrr_at_1000 value: 47.195 - type: mrr_at_3 value: 43.728 - type: mrr_at_5 value: 44.994 - type: ndcg_at_1 value: 37.884 - type: ndcg_at_10 value: 43.878 - type: ndcg_at_100 value: 51.002 - type: ndcg_at_1000 value: 53.161 - type: ndcg_at_3 value: 38.729 - type: ndcg_at_5 value: 40.628 - type: precision_at_1 value: 37.884 - type: precision_at_10 value: 9.75 - type: precision_at_100 value: 1.558 - type: precision_at_1000 value: 0.183 - type: precision_at_3 value: 21.964 - type: precision_at_5 value: 15.719 - type: recall_at_1 value: 24.871 - type: recall_at_10 value: 54.615 - type: recall_at_100 value: 84.276 - type: recall_at_1000 value: 98.578 - type: recall_at_3 value: 38.936 - type: recall_at_5 value: 45.061 - task: type: PairClassification dataset: name: MTEB Cmnli type: C-MTEB/CMNLI config: default split: validation revision: 41bc36f332156f7adc9e38f53777c959b2ae9766 metrics: - type: cos_sim_accuracy value: 76.12748045700542 - type: cos_sim_ap value: 84.47948419710998 - type: cos_sim_f1 value: 77.88108108108108 - type: cos_sim_precision value: 72.43112809169516 - type: cos_sim_recall value: 84.21790974982464 - type: dot_accuracy value: 76.12748045700542 - type: dot_ap value: 84.4933237839786 - type: dot_f1 value: 77.88108108108108 - type: dot_precision value: 72.43112809169516 - type: dot_recall value: 84.21790974982464 - type: euclidean_accuracy value: 76.12748045700542 - type: euclidean_ap value: 84.47947997540409 - type: euclidean_f1 value: 77.88108108108108 - type: euclidean_precision value: 72.43112809169516 - type: euclidean_recall value: 84.21790974982464 - type: manhattan_accuracy value: 75.40589296452195 - type: manhattan_ap value: 83.74383956930585 - type: manhattan_f1 value: 77.0983342289092 - type: manhattan_precision value: 71.34049323786795 - type: manhattan_recall value: 83.86719663315408 - type: max_accuracy value: 76.12748045700542 - type: max_ap value: 84.4933237839786 - type: max_f1 value: 77.88108108108108 - task: type: Retrieval dataset: name: MTEB CovidRetrieval type: C-MTEB/CovidRetrieval config: default split: dev revision: 1271c7809071a13532e05f25fb53511ffce77117 metrics: - type: map_at_1 value: 66.781 - type: map_at_10 value: 74.539 - type: map_at_100 value: 74.914 - type: map_at_1000 value: 74.921 - type: map_at_3 value: 72.734 - type: map_at_5 value: 73.788 - type: mrr_at_1 value: 66.913 - type: mrr_at_10 value: 74.543 - type: mrr_at_100 value: 74.914 - type: mrr_at_1000 value: 74.921 - type: mrr_at_3 value: 72.831 - type: mrr_at_5 value: 73.76899999999999 - type: ndcg_at_1 value: 67.018 - type: ndcg_at_10 value: 78.34299999999999 - type: ndcg_at_100 value: 80.138 - type: ndcg_at_1000 value: 80.322 - type: ndcg_at_3 value: 74.667 - type: ndcg_at_5 value: 76.518 - type: precision_at_1 value: 67.018 - type: precision_at_10 value: 9.115 - type: precision_at_100 value: 0.996 - type: precision_at_1000 value: 0.101 - type: precision_at_3 value: 26.906000000000002 - type: precision_at_5 value: 17.092 - type: recall_at_1 
value: 66.781 - type: recall_at_10 value: 90.253 - type: recall_at_100 value: 98.52499999999999 - type: recall_at_1000 value: 100.0 - type: recall_at_3 value: 80.05799999999999 - type: recall_at_5 value: 84.615 - task: type: Retrieval dataset: name: MTEB DBPedia type: mteb/dbpedia config: default split: test revision: c0f706b76e590d620bd6618b3ca8efdd34e2d659 metrics: - type: map_at_1 value: 9.685 - type: map_at_10 value: 21.65 - type: map_at_100 value: 30.952 - type: map_at_1000 value: 33.049 - type: map_at_3 value: 14.953 - type: map_at_5 value: 17.592 - type: mrr_at_1 value: 72.0 - type: mrr_at_10 value: 78.054 - type: mrr_at_100 value: 78.41900000000001 - type: mrr_at_1000 value: 78.425 - type: mrr_at_3 value: 76.5 - type: mrr_at_5 value: 77.28699999999999 - type: ndcg_at_1 value: 61.25000000000001 - type: ndcg_at_10 value: 46.306000000000004 - type: ndcg_at_100 value: 50.867 - type: ndcg_at_1000 value: 58.533 - type: ndcg_at_3 value: 50.857 - type: ndcg_at_5 value: 48.283 - type: precision_at_1 value: 72.0 - type: precision_at_10 value: 37.3 - type: precision_at_100 value: 11.95 - type: precision_at_1000 value: 2.528 - type: precision_at_3 value: 53.583000000000006 - type: precision_at_5 value: 46.6 - type: recall_at_1 value: 9.685 - type: recall_at_10 value: 27.474999999999998 - type: recall_at_100 value: 56.825 - type: recall_at_1000 value: 81.792 - type: recall_at_3 value: 15.939 - type: recall_at_5 value: 19.853 - task: type: Retrieval dataset: name: MTEB DuRetrieval type: C-MTEB/DuRetrieval config: default split: dev revision: a1a333e290fe30b10f3f56498e3a0d911a693ced metrics: - type: map_at_1 value: 24.528 - type: map_at_10 value: 76.304 - type: map_at_100 value: 79.327 - type: map_at_1000 value: 79.373 - type: map_at_3 value: 52.035 - type: map_at_5 value: 66.074 - type: mrr_at_1 value: 86.05000000000001 - type: mrr_at_10 value: 90.74 - type: mrr_at_100 value: 90.809 - type: mrr_at_1000 value: 90.81099999999999 - type: mrr_at_3 value: 90.30799999999999 - type: mrr_at_5 value: 90.601 - type: ndcg_at_1 value: 86.05000000000001 - type: ndcg_at_10 value: 84.518 - type: ndcg_at_100 value: 87.779 - type: ndcg_at_1000 value: 88.184 - type: ndcg_at_3 value: 82.339 - type: ndcg_at_5 value: 81.613 - type: precision_at_1 value: 86.05000000000001 - type: precision_at_10 value: 40.945 - type: precision_at_100 value: 4.787 - type: precision_at_1000 value: 0.48900000000000005 - type: precision_at_3 value: 74.117 - type: precision_at_5 value: 62.86000000000001 - type: recall_at_1 value: 24.528 - type: recall_at_10 value: 86.78 - type: recall_at_100 value: 97.198 - type: recall_at_1000 value: 99.227 - type: recall_at_3 value: 54.94799999999999 - type: recall_at_5 value: 72.053 - task: type: Retrieval dataset: name: MTEB EcomRetrieval type: C-MTEB/EcomRetrieval config: default split: dev revision: 687de13dc7294d6fd9be10c6945f9e8fec8166b9 metrics: - type: map_at_1 value: 52.1 - type: map_at_10 value: 62.502 - type: map_at_100 value: 63.026 - type: map_at_1000 value: 63.04 - type: map_at_3 value: 59.782999999999994 - type: map_at_5 value: 61.443000000000005 - type: mrr_at_1 value: 52.1 - type: mrr_at_10 value: 62.502 - type: mrr_at_100 value: 63.026 - type: mrr_at_1000 value: 63.04 - type: mrr_at_3 value: 59.782999999999994 - type: mrr_at_5 value: 61.443000000000005 - type: ndcg_at_1 value: 52.1 - type: ndcg_at_10 value: 67.75999999999999 - type: ndcg_at_100 value: 70.072 - type: ndcg_at_1000 value: 70.441 - type: ndcg_at_3 value: 62.28 - type: ndcg_at_5 value: 65.25800000000001 - type: 
precision_at_1 value: 52.1 - type: precision_at_10 value: 8.43 - type: precision_at_100 value: 0.946 - type: precision_at_1000 value: 0.098 - type: precision_at_3 value: 23.166999999999998 - type: precision_at_5 value: 15.340000000000002 - type: recall_at_1 value: 52.1 - type: recall_at_10 value: 84.3 - type: recall_at_100 value: 94.6 - type: recall_at_1000 value: 97.5 - type: recall_at_3 value: 69.5 - type: recall_at_5 value: 76.7 - task: type: Classification dataset: name: MTEB EmotionClassification type: mteb/emotion config: default split: test revision: 4f58c6b202a23cf9a4da393831edf4f9183cad37 metrics: - type: accuracy value: 62.805000000000014 - type: f1 value: 56.401757250989384 - task: type: Retrieval dataset: name: MTEB FEVER type: mteb/fever config: default split: test revision: bea83ef9e8fb933d90a2f1d5515737465d613e12 metrics: - type: map_at_1 value: 83.734 - type: map_at_10 value: 90.089 - type: map_at_100 value: 90.274 - type: map_at_1000 value: 90.286 - type: map_at_3 value: 89.281 - type: map_at_5 value: 89.774 - type: mrr_at_1 value: 90.039 - type: mrr_at_10 value: 94.218 - type: mrr_at_100 value: 94.24 - type: mrr_at_1000 value: 94.24 - type: mrr_at_3 value: 93.979 - type: mrr_at_5 value: 94.137 - type: ndcg_at_1 value: 90.039 - type: ndcg_at_10 value: 92.597 - type: ndcg_at_100 value: 93.147 - type: ndcg_at_1000 value: 93.325 - type: ndcg_at_3 value: 91.64999999999999 - type: ndcg_at_5 value: 92.137 - type: precision_at_1 value: 90.039 - type: precision_at_10 value: 10.809000000000001 - type: precision_at_100 value: 1.133 - type: precision_at_1000 value: 0.116 - type: precision_at_3 value: 34.338 - type: precision_at_5 value: 21.089 - type: recall_at_1 value: 83.734 - type: recall_at_10 value: 96.161 - type: recall_at_100 value: 98.137 - type: recall_at_1000 value: 99.182 - type: recall_at_3 value: 93.551 - type: recall_at_5 value: 94.878 - task: type: Retrieval dataset: name: MTEB FiQA2018 type: mteb/fiqa config: default split: test revision: 27a168819829fe9bcd655c2df245fb19452e8e06 metrics: - type: map_at_1 value: 24.529999999999998 - type: map_at_10 value: 37.229 - type: map_at_100 value: 39.333 - type: map_at_1000 value: 39.491 - type: map_at_3 value: 32.177 - type: map_at_5 value: 35.077999999999996 - type: mrr_at_1 value: 45.678999999999995 - type: mrr_at_10 value: 53.952 - type: mrr_at_100 value: 54.727000000000004 - type: mrr_at_1000 value: 54.761 - type: mrr_at_3 value: 51.568999999999996 - type: mrr_at_5 value: 52.973000000000006 - type: ndcg_at_1 value: 45.678999999999995 - type: ndcg_at_10 value: 45.297 - type: ndcg_at_100 value: 52.516 - type: ndcg_at_1000 value: 55.16 - type: ndcg_at_3 value: 40.569 - type: ndcg_at_5 value: 42.49 - type: precision_at_1 value: 45.678999999999995 - type: precision_at_10 value: 12.269 - type: precision_at_100 value: 1.9709999999999999 - type: precision_at_1000 value: 0.244 - type: precision_at_3 value: 25.72 - type: precision_at_5 value: 19.66 - type: recall_at_1 value: 24.529999999999998 - type: recall_at_10 value: 51.983999999999995 - type: recall_at_100 value: 78.217 - type: recall_at_1000 value: 94.104 - type: recall_at_3 value: 36.449999999999996 - type: recall_at_5 value: 43.336999999999996 - task: type: Retrieval dataset: name: MTEB HotpotQA type: mteb/hotpotqa config: default split: test revision: ab518f4d6fcca38d87c25209f94beba119d02014 metrics: - type: map_at_1 value: 41.519 - type: map_at_10 value: 64.705 - type: map_at_100 value: 65.554 - type: map_at_1000 value: 65.613 - type: map_at_3 value: 61.478 - type: map_at_5 
value: 63.55800000000001 - type: mrr_at_1 value: 83.038 - type: mrr_at_10 value: 87.82900000000001 - type: mrr_at_100 value: 87.96000000000001 - type: mrr_at_1000 value: 87.96300000000001 - type: mrr_at_3 value: 87.047 - type: mrr_at_5 value: 87.546 - type: ndcg_at_1 value: 83.038 - type: ndcg_at_10 value: 72.928 - type: ndcg_at_100 value: 75.778 - type: ndcg_at_1000 value: 76.866 - type: ndcg_at_3 value: 68.46600000000001 - type: ndcg_at_5 value: 71.036 - type: precision_at_1 value: 83.038 - type: precision_at_10 value: 15.040999999999999 - type: precision_at_100 value: 1.7260000000000002 - type: precision_at_1000 value: 0.187 - type: precision_at_3 value: 43.597 - type: precision_at_5 value: 28.188999999999997 - type: recall_at_1 value: 41.519 - type: recall_at_10 value: 75.20599999999999 - type: recall_at_100 value: 86.3 - type: recall_at_1000 value: 93.437 - type: recall_at_3 value: 65.39500000000001 - type: recall_at_5 value: 70.473 - task: type: Classification dataset: name: MTEB IFlyTek type: C-MTEB/IFlyTek-classification config: default split: validation revision: 421605374b29664c5fc098418fe20ada9bd55f8a metrics: - type: accuracy value: 52.04309349749903 - type: f1 value: 39.91893257315586 - task: type: Classification dataset: name: MTEB ImdbClassification type: mteb/imdb config: default split: test revision: 3d86128a09e091d6018b6d26cad27f2739fc2db7 metrics: - type: accuracy value: 96.0428 - type: ap value: 94.48278082595033 - type: f1 value: 96.0409595432081 - task: type: Classification dataset: name: MTEB JDReview type: C-MTEB/JDReview-classification config: default split: test revision: b7c64bd89eb87f8ded463478346f76731f07bf8b metrics: - type: accuracy value: 85.60975609756099 - type: ap value: 54.30148799475452 - type: f1 value: 80.55899583002706 - task: type: STS dataset: name: MTEB LCQMC type: C-MTEB/LCQMC config: default split: test revision: 17f9b096f80380fce5ed12a9be8be7784b337daf metrics: - type: cos_sim_pearson value: 66.44418108776416 - type: cos_sim_spearman value: 72.79912770347306 - type: euclidean_pearson value: 71.11194894579198 - type: euclidean_spearman value: 72.79912104971427 - type: manhattan_pearson value: 70.96800061808604 - type: manhattan_spearman value: 72.63525186107175 - task: type: Reranking dataset: name: MTEB MMarcoReranking type: C-MTEB/Mmarco-reranking config: default split: dev revision: 8e0c766dbe9e16e1d221116a3f36795fbade07f6 metrics: - type: map value: 27.9616280919871 - type: mrr value: 26.544047619047618 - task: type: Retrieval dataset: name: MTEB MMarcoRetrieval type: C-MTEB/MMarcoRetrieval config: default split: dev revision: 539bbde593d947e2a124ba72651aafc09eb33fc2 metrics: - type: map_at_1 value: 68.32300000000001 - type: map_at_10 value: 77.187 - type: map_at_100 value: 77.496 - type: map_at_1000 value: 77.503 - type: map_at_3 value: 75.405 - type: map_at_5 value: 76.539 - type: mrr_at_1 value: 70.616 - type: mrr_at_10 value: 77.703 - type: mrr_at_100 value: 77.97699999999999 - type: mrr_at_1000 value: 77.984 - type: mrr_at_3 value: 76.139 - type: mrr_at_5 value: 77.125 - type: ndcg_at_1 value: 70.616 - type: ndcg_at_10 value: 80.741 - type: ndcg_at_100 value: 82.123 - type: ndcg_at_1000 value: 82.32300000000001 - type: ndcg_at_3 value: 77.35600000000001 - type: ndcg_at_5 value: 79.274 - type: precision_at_1 value: 70.616 - type: precision_at_10 value: 9.696 - type: precision_at_100 value: 1.038 - type: precision_at_1000 value: 0.106 - type: precision_at_3 value: 29.026000000000003 - type: precision_at_5 value: 18.433 - type: recall_at_1 
value: 68.32300000000001 - type: recall_at_10 value: 91.186 - type: recall_at_100 value: 97.439 - type: recall_at_1000 value: 99.004 - type: recall_at_3 value: 82.218 - type: recall_at_5 value: 86.797 - task: type: Retrieval dataset: name: MTEB MSMARCO type: mteb/msmarco config: default split: dev revision: c5a29a104738b98a9e76336939199e264163d4a0 metrics: - type: map_at_1 value: 21.496000000000002 - type: map_at_10 value: 33.82 - type: map_at_100 value: 35.013 - type: map_at_1000 value: 35.063 - type: map_at_3 value: 29.910999999999998 - type: map_at_5 value: 32.086 - type: mrr_at_1 value: 22.092 - type: mrr_at_10 value: 34.404 - type: mrr_at_100 value: 35.534 - type: mrr_at_1000 value: 35.577999999999996 - type: mrr_at_3 value: 30.544 - type: mrr_at_5 value: 32.711 - type: ndcg_at_1 value: 22.092 - type: ndcg_at_10 value: 40.877 - type: ndcg_at_100 value: 46.619 - type: ndcg_at_1000 value: 47.823 - type: ndcg_at_3 value: 32.861000000000004 - type: ndcg_at_5 value: 36.769 - type: precision_at_1 value: 22.092 - type: precision_at_10 value: 6.54 - type: precision_at_100 value: 0.943 - type: precision_at_1000 value: 0.105 - type: precision_at_3 value: 14.069 - type: precision_at_5 value: 10.424 - type: recall_at_1 value: 21.496000000000002 - type: recall_at_10 value: 62.67 - type: recall_at_100 value: 89.24499999999999 - type: recall_at_1000 value: 98.312 - type: recall_at_3 value: 40.796 - type: recall_at_5 value: 50.21600000000001 - task: type: Classification dataset: name: MTEB MTOPDomainClassification (en) type: mteb/mtop_domain config: en split: test revision: d80d48c1eb48d3562165c59d59d0034df9fff0bf metrics: - type: accuracy value: 95.74555403556772 - type: f1 value: 95.61381879323093 - task: type: Classification dataset: name: MTEB MTOPIntentClassification (en) type: mteb/mtop_intent config: en split: test revision: ae001d0e6b1228650b7bd1c2c65fb50ad11a8aba metrics: - type: accuracy value: 85.82763337893297 - type: f1 value: 63.17139719465236 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (en) type: mteb/amazon_massive_intent config: en split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 78.51714862138535 - type: f1 value: 76.3995118440293 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (zh-CN) type: mteb/amazon_massive_intent config: zh-CN split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 74.78143913920646 - type: f1 value: 72.6141122227626 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (en) type: mteb/amazon_massive_scenario config: en split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 80.03698722259583 - type: f1 value: 79.36511484240766 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (zh-CN) type: mteb/amazon_massive_scenario config: zh-CN split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 76.98722259583053 - type: f1 value: 76.5974920207624 - task: type: Retrieval dataset: name: MTEB MedicalRetrieval type: C-MTEB/MedicalRetrieval config: default split: dev revision: 2039188fb5800a9803ba5048df7b76e6fb151fc6 metrics: - type: map_at_1 value: 51.800000000000004 - type: map_at_10 value: 57.938 - type: map_at_100 value: 58.494 - type: map_at_1000 value: 58.541 - type: map_at_3 value: 56.617 - type: map_at_5 value: 57.302 - type: mrr_at_1 value: 51.800000000000004 - type: mrr_at_10 value: 
57.938 - type: mrr_at_100 value: 58.494 - type: mrr_at_1000 value: 58.541 - type: mrr_at_3 value: 56.617 - type: mrr_at_5 value: 57.302 - type: ndcg_at_1 value: 51.800000000000004 - type: ndcg_at_10 value: 60.891 - type: ndcg_at_100 value: 63.897000000000006 - type: ndcg_at_1000 value: 65.231 - type: ndcg_at_3 value: 58.108000000000004 - type: ndcg_at_5 value: 59.343 - type: precision_at_1 value: 51.800000000000004 - type: precision_at_10 value: 7.02 - type: precision_at_100 value: 0.8500000000000001 - type: precision_at_1000 value: 0.096 - type: precision_at_3 value: 20.8 - type: precision_at_5 value: 13.08 - type: recall_at_1 value: 51.800000000000004 - type: recall_at_10 value: 70.19999999999999 - type: recall_at_100 value: 85.0 - type: recall_at_1000 value: 95.7 - type: recall_at_3 value: 62.4 - type: recall_at_5 value: 65.4 - task: type: Clustering dataset: name: MTEB MedrxivClusteringP2P type: mteb/medrxiv-clustering-p2p config: default split: test revision: e7a26af6f3ae46b30dde8737f02c07b1505bcc73 metrics: - type: v_measure value: 38.68901889835701 - task: type: Clustering dataset: name: MTEB MedrxivClusteringS2S type: mteb/medrxiv-clustering-s2s config: default split: test revision: 35191c8c0dca72d8ff3efcd72aa802307d469663 metrics: - type: v_measure value: 38.0740589898848 - task: type: Reranking dataset: name: MTEB MindSmallReranking type: mteb/mind_small config: default split: test revision: 3bdac13927fdc888b903db93b2ffdbd90b295a69 metrics: - type: map value: 33.41312482460189 - type: mrr value: 34.713530863302495 - task: type: Classification dataset: name: MTEB MultilingualSentiment type: C-MTEB/MultilingualSentiment-classification config: default split: validation revision: 46958b007a63fdbf239b7672c25d0bea67b5ea1a metrics: - type: accuracy value: 80.39333333333335 - type: f1 value: 80.42683132366277 - task: type: Retrieval dataset: name: MTEB NFCorpus type: mteb/nfcorpus config: default split: test revision: ec0fa4fe99da2ff19ca1214b7966684033a58814 metrics: - type: map_at_1 value: 6.232 - type: map_at_10 value: 13.442000000000002 - type: map_at_100 value: 17.443 - type: map_at_1000 value: 19.1 - type: map_at_3 value: 9.794 - type: map_at_5 value: 11.375 - type: mrr_at_1 value: 50.15500000000001 - type: mrr_at_10 value: 58.628 - type: mrr_at_100 value: 59.077 - type: mrr_at_1000 value: 59.119 - type: mrr_at_3 value: 56.914 - type: mrr_at_5 value: 57.921 - type: ndcg_at_1 value: 48.762 - type: ndcg_at_10 value: 37.203 - type: ndcg_at_100 value: 34.556 - type: ndcg_at_1000 value: 43.601 - type: ndcg_at_3 value: 43.004 - type: ndcg_at_5 value: 40.181 - type: precision_at_1 value: 50.15500000000001 - type: precision_at_10 value: 27.276 - type: precision_at_100 value: 8.981 - type: precision_at_1000 value: 2.228 - type: precision_at_3 value: 39.628 - type: precision_at_5 value: 33.808 - type: recall_at_1 value: 6.232 - type: recall_at_10 value: 18.137 - type: recall_at_100 value: 36.101 - type: recall_at_1000 value: 68.733 - type: recall_at_3 value: 10.978 - type: recall_at_5 value: 13.718 - task: type: Retrieval dataset: name: MTEB NQ type: mteb/nq config: default split: test revision: b774495ed302d8c44a3a7ea25c90dbce03968f31 metrics: - type: map_at_1 value: 35.545 - type: map_at_10 value: 52.083 - type: map_at_100 value: 52.954 - type: map_at_1000 value: 52.96999999999999 - type: map_at_3 value: 47.508 - type: map_at_5 value: 50.265 - type: mrr_at_1 value: 40.122 - type: mrr_at_10 value: 54.567 - type: mrr_at_100 value: 55.19199999999999 - type: mrr_at_1000 value: 55.204 - type: 
mrr_at_3 value: 51.043000000000006 - type: mrr_at_5 value: 53.233 - type: ndcg_at_1 value: 40.122 - type: ndcg_at_10 value: 60.012 - type: ndcg_at_100 value: 63.562 - type: ndcg_at_1000 value: 63.94 - type: ndcg_at_3 value: 51.681 - type: ndcg_at_5 value: 56.154 - type: precision_at_1 value: 40.122 - type: precision_at_10 value: 9.774 - type: precision_at_100 value: 1.176 - type: precision_at_1000 value: 0.121 - type: precision_at_3 value: 23.426 - type: precision_at_5 value: 16.686 - type: recall_at_1 value: 35.545 - type: recall_at_10 value: 81.557 - type: recall_at_100 value: 96.729 - type: recall_at_1000 value: 99.541 - type: recall_at_3 value: 60.185 - type: recall_at_5 value: 70.411 - task: type: PairClassification dataset: name: MTEB Ocnli type: C-MTEB/OCNLI config: default split: validation revision: 66e76a618a34d6d565d5538088562851e6daa7ec metrics: - type: cos_sim_accuracy value: 70.7634001082837 - type: cos_sim_ap value: 74.97527385556558 - type: cos_sim_f1 value: 72.77277277277277 - type: cos_sim_precision value: 69.17221693625119 - type: cos_sim_recall value: 76.76874340021119 - type: dot_accuracy value: 70.7634001082837 - type: dot_ap value: 74.97527385556558 - type: dot_f1 value: 72.77277277277277 - type: dot_precision value: 69.17221693625119 - type: dot_recall value: 76.76874340021119 - type: euclidean_accuracy value: 70.7634001082837 - type: euclidean_ap value: 74.97527385556558 - type: euclidean_f1 value: 72.77277277277277 - type: euclidean_precision value: 69.17221693625119 - type: euclidean_recall value: 76.76874340021119 - type: manhattan_accuracy value: 69.89713048186248 - type: manhattan_ap value: 74.25943370061067 - type: manhattan_f1 value: 72.17268887846082 - type: manhattan_precision value: 64.94932432432432 - type: manhattan_recall value: 81.20380147835269 - type: max_accuracy value: 70.7634001082837 - type: max_ap value: 74.97527385556558 - type: max_f1 value: 72.77277277277277 - task: type: Classification dataset: name: MTEB OnlineShopping type: C-MTEB/OnlineShopping-classification config: default split: test revision: e610f2ebd179a8fda30ae534c3878750a96db120 metrics: - type: accuracy value: 92.92000000000002 - type: ap value: 91.98475625106201 - type: f1 value: 92.91841470541901 - task: type: STS dataset: name: MTEB PAWSX type: C-MTEB/PAWSX config: default split: test revision: 9c6a90e430ac22b5779fb019a23e820b11a8b5e1 metrics: - type: cos_sim_pearson value: 41.23764415526825 - type: cos_sim_spearman value: 46.872669471694664 - type: euclidean_pearson value: 46.434144530918566 - type: euclidean_spearman value: 46.872669471694664 - type: manhattan_pearson value: 46.39678126910133 - type: manhattan_spearman value: 46.55877754642116 - task: type: STS dataset: name: MTEB QBQTC type: C-MTEB/QBQTC config: default split: test revision: 790b0510dc52b1553e8c49f3d2afb48c0e5c48b7 metrics: - type: cos_sim_pearson value: 28.77503601696299 - type: cos_sim_spearman value: 31.818095557325606 - type: euclidean_pearson value: 29.811479220397125 - type: euclidean_spearman value: 31.817046821577673 - type: manhattan_pearson value: 29.901628633314214 - type: manhattan_spearman value: 31.991472038092084 - task: type: Retrieval dataset: name: MTEB QuoraRetrieval type: mteb/quora config: default split: test revision: None metrics: - type: map_at_1 value: 68.908 - type: map_at_10 value: 83.19 - type: map_at_100 value: 83.842 - type: map_at_1000 value: 83.858 - type: map_at_3 value: 80.167 - type: map_at_5 value: 82.053 - type: mrr_at_1 value: 79.46 - type: mrr_at_10 value: 86.256 - 
type: mrr_at_100 value: 86.37 - type: mrr_at_1000 value: 86.371 - type: mrr_at_3 value: 85.177 - type: mrr_at_5 value: 85.908 - type: ndcg_at_1 value: 79.5 - type: ndcg_at_10 value: 87.244 - type: ndcg_at_100 value: 88.532 - type: ndcg_at_1000 value: 88.626 - type: ndcg_at_3 value: 84.161 - type: ndcg_at_5 value: 85.835 - type: precision_at_1 value: 79.5 - type: precision_at_10 value: 13.339 - type: precision_at_100 value: 1.53 - type: precision_at_1000 value: 0.157 - type: precision_at_3 value: 36.97 - type: precision_at_5 value: 24.384 - type: recall_at_1 value: 68.908 - type: recall_at_10 value: 95.179 - type: recall_at_100 value: 99.579 - type: recall_at_1000 value: 99.964 - type: recall_at_3 value: 86.424 - type: recall_at_5 value: 91.065 - task: type: Clustering dataset: name: MTEB RedditClustering type: mteb/reddit-clustering config: default split: test revision: 24640382cdbf8abc73003fb0fa6d111a705499eb metrics: - type: v_measure value: 65.17897847862794 - task: type: Clustering dataset: name: MTEB RedditClusteringP2P type: mteb/reddit-clustering-p2p config: default split: test revision: 282350215ef01743dc01b456c7f5241fa8937f16 metrics: - type: v_measure value: 66.22194961632586 - task: type: Retrieval dataset: name: MTEB SCIDOCS type: mteb/scidocs config: default split: test revision: None metrics: - type: map_at_1 value: 5.668 - type: map_at_10 value: 13.921 - type: map_at_100 value: 16.391 - type: map_at_1000 value: 16.749 - type: map_at_3 value: 10.001999999999999 - type: map_at_5 value: 11.974 - type: mrr_at_1 value: 27.800000000000004 - type: mrr_at_10 value: 39.290000000000006 - type: mrr_at_100 value: 40.313 - type: mrr_at_1000 value: 40.355999999999995 - type: mrr_at_3 value: 35.667 - type: mrr_at_5 value: 37.742 - type: ndcg_at_1 value: 27.800000000000004 - type: ndcg_at_10 value: 23.172 - type: ndcg_at_100 value: 32.307 - type: ndcg_at_1000 value: 38.048 - type: ndcg_at_3 value: 22.043 - type: ndcg_at_5 value: 19.287000000000003 - type: precision_at_1 value: 27.800000000000004 - type: precision_at_10 value: 11.95 - type: precision_at_100 value: 2.5260000000000002 - type: precision_at_1000 value: 0.38999999999999996 - type: precision_at_3 value: 20.433 - type: precision_at_5 value: 16.84 - type: recall_at_1 value: 5.668 - type: recall_at_10 value: 24.22 - type: recall_at_100 value: 51.217 - type: recall_at_1000 value: 79.10000000000001 - type: recall_at_3 value: 12.443 - type: recall_at_5 value: 17.068 - task: type: STS dataset: name: MTEB SICK-R type: mteb/sickr-sts config: default split: test revision: a6ea5a8cab320b040a23452cc28066d9beae2cee metrics: - type: cos_sim_pearson value: 82.83535239748218 - type: cos_sim_spearman value: 73.98553311584509 - type: euclidean_pearson value: 79.57336200069007 - type: euclidean_spearman value: 73.98553926018461 - type: manhattan_pearson value: 79.02277757114132 - type: manhattan_spearman value: 73.52350678760683 - task: type: STS dataset: name: MTEB STS12 type: mteb/sts12-sts config: default split: test revision: a0d554a64d88156834ff5ae9920b964011b16384 metrics: - type: cos_sim_pearson value: 81.99055838690317 - type: cos_sim_spearman value: 72.05290668592296 - type: euclidean_pearson value: 81.7130610313565 - type: euclidean_spearman value: 72.0529066787229 - type: manhattan_pearson value: 82.09213883730894 - type: manhattan_spearman value: 72.5171577483134 - task: type: STS dataset: name: MTEB STS13 type: mteb/sts13-sts config: default split: test revision: 7e90230a92c190f1bf69ae9002b8cea547a64cca metrics: - type: cos_sim_pearson 
value: 84.4685161191763 - type: cos_sim_spearman value: 84.4847436140129 - type: euclidean_pearson value: 84.05016757016948 - type: euclidean_spearman value: 84.48474353891532 - type: manhattan_pearson value: 83.83064062713048 - type: manhattan_spearman value: 84.30431591842805 - task: type: STS dataset: name: MTEB STS14 type: mteb/sts14-sts config: default split: test revision: 6031580fec1f6af667f0bd2da0a551cf4f0b2375 metrics: - type: cos_sim_pearson value: 83.00171021092486 - type: cos_sim_spearman value: 77.91329577609622 - type: euclidean_pearson value: 81.49758593915315 - type: euclidean_spearman value: 77.91329577609622 - type: manhattan_pearson value: 81.23255996803785 - type: manhattan_spearman value: 77.80027024941825 - task: type: STS dataset: name: MTEB STS15 type: mteb/sts15-sts config: default split: test revision: ae752c7c21bf194d8b67fd573edf7ae58183cbe3 metrics: - type: cos_sim_pearson value: 86.62608607472492 - type: cos_sim_spearman value: 87.62293916855751 - type: euclidean_pearson value: 87.04313886714989 - type: euclidean_spearman value: 87.62293907119869 - type: manhattan_pearson value: 86.97266321040769 - type: manhattan_spearman value: 87.61807042381702 - task: type: STS dataset: name: MTEB STS16 type: mteb/sts16-sts config: default split: test revision: 4d8694f8f0e0100860b497b999b3dbed754a0513 metrics: - type: cos_sim_pearson value: 80.8012095789289 - type: cos_sim_spearman value: 81.91868918081325 - type: euclidean_pearson value: 81.2267973811213 - type: euclidean_spearman value: 81.91868918081325 - type: manhattan_pearson value: 81.0173457901168 - type: manhattan_spearman value: 81.79743115887055 - task: type: STS dataset: name: MTEB STS17 (en-en) type: mteb/sts17-crosslingual-sts config: en-en split: test revision: af5e6fb845001ecf41f4c1e033ce921939a2a68d metrics: - type: cos_sim_pearson value: 88.39698537303725 - type: cos_sim_spearman value: 88.78668529808967 - type: euclidean_pearson value: 88.78863351718252 - type: euclidean_spearman value: 88.78668529808967 - type: manhattan_pearson value: 88.41678215762478 - type: manhattan_spearman value: 88.3827998418763 - task: type: STS dataset: name: MTEB STS22 (en) type: mteb/sts22-crosslingual-sts config: en split: test revision: eea2b4fe26a775864c896887d910b76a8098ad3f metrics: - type: cos_sim_pearson value: 68.49024974161408 - type: cos_sim_spearman value: 69.19917146180619 - type: euclidean_pearson value: 70.48882819806336 - type: euclidean_spearman value: 69.19917146180619 - type: manhattan_pearson value: 70.86827961779932 - type: manhattan_spearman value: 69.38456983992613 - task: type: STS dataset: name: MTEB STS22 (zh) type: mteb/sts22-crosslingual-sts config: zh split: test revision: eea2b4fe26a775864c896887d910b76a8098ad3f metrics: - type: cos_sim_pearson value: 67.41628669863584 - type: cos_sim_spearman value: 67.87238206703478 - type: euclidean_pearson value: 67.67834985311778 - type: euclidean_spearman value: 67.87238206703478 - type: manhattan_pearson value: 68.23423896742973 - type: manhattan_spearman value: 68.27069260687092 - task: type: STS dataset: name: MTEB STSB type: C-MTEB/STSB config: default split: test revision: 0cde68302b3541bb8b3c340dc0644b0b745b3dc0 metrics: - type: cos_sim_pearson value: 77.31628954400037 - type: cos_sim_spearman value: 76.83296022489624 - type: euclidean_pearson value: 76.69680425261211 - type: euclidean_spearman value: 76.83287843321102 - type: manhattan_pearson value: 76.65603163327958 - type: manhattan_spearman value: 76.80803503360451 - task: type: STS dataset: name: 
MTEB STSBenchmark type: mteb/stsbenchmark-sts config: default split: test revision: b0fddb56ed78048fa8b90373c8a3cfc37b684831 metrics: - type: cos_sim_pearson value: 84.31376078795105 - type: cos_sim_spearman value: 83.3985199217591 - type: euclidean_pearson value: 84.06630133719332 - type: euclidean_spearman value: 83.3985199217591 - type: manhattan_pearson value: 83.7896654474364 - type: manhattan_spearman value: 83.1885039212299 - task: type: Reranking dataset: name: MTEB SciDocsRR type: mteb/scidocs-reranking config: default split: test revision: d3c5e1fc0b855ab6097bf1cda04dd73947d7caab metrics: - type: map value: 85.83161002188668 - type: mrr value: 96.19253114351153 - task: type: Retrieval dataset: name: MTEB SciFact type: mteb/scifact config: default split: test revision: 0228b52cf27578f30900b9e5271d331663a030d7 metrics: - type: map_at_1 value: 48.132999999999996 - type: map_at_10 value: 58.541 - type: map_at_100 value: 59.34 - type: map_at_1000 value: 59.367999999999995 - type: map_at_3 value: 55.191 - type: map_at_5 value: 57.084 - type: mrr_at_1 value: 51.0 - type: mrr_at_10 value: 59.858 - type: mrr_at_100 value: 60.474000000000004 - type: mrr_at_1000 value: 60.501000000000005 - type: mrr_at_3 value: 57.111000000000004 - type: mrr_at_5 value: 58.694 - type: ndcg_at_1 value: 51.0 - type: ndcg_at_10 value: 63.817 - type: ndcg_at_100 value: 67.229 - type: ndcg_at_1000 value: 67.94 - type: ndcg_at_3 value: 57.896 - type: ndcg_at_5 value: 60.785999999999994 - type: precision_at_1 value: 51.0 - type: precision_at_10 value: 8.933 - type: precision_at_100 value: 1.0699999999999998 - type: precision_at_1000 value: 0.11299999999999999 - type: precision_at_3 value: 23.111 - type: precision_at_5 value: 15.733 - type: recall_at_1 value: 48.132999999999996 - type: recall_at_10 value: 78.922 - type: recall_at_100 value: 94.167 - type: recall_at_1000 value: 99.667 - type: recall_at_3 value: 62.806 - type: recall_at_5 value: 70.078 - task: type: PairClassification dataset: name: MTEB SprintDuplicateQuestions type: mteb/sprintduplicatequestions-pairclassification config: default split: test revision: d66bd1f72af766a5cc4b0ca5e00c162f89e8cc46 metrics: - type: cos_sim_accuracy value: 99.88415841584158 - type: cos_sim_ap value: 97.72557886493401 - type: cos_sim_f1 value: 94.1294530858003 - type: cos_sim_precision value: 94.46122860020141 - type: cos_sim_recall value: 93.8 - type: dot_accuracy value: 99.88415841584158 - type: dot_ap value: 97.72557439066108 - type: dot_f1 value: 94.1294530858003 - type: dot_precision value: 94.46122860020141 - type: dot_recall value: 93.8 - type: euclidean_accuracy value: 99.88415841584158 - type: euclidean_ap value: 97.72557439066108 - type: euclidean_f1 value: 94.1294530858003 - type: euclidean_precision value: 94.46122860020141 - type: euclidean_recall value: 93.8 - type: manhattan_accuracy value: 99.88514851485148 - type: manhattan_ap value: 97.73324334051959 - type: manhattan_f1 value: 94.1825476429288 - type: manhattan_precision value: 94.46680080482898 - type: manhattan_recall value: 93.89999999999999 - type: max_accuracy value: 99.88514851485148 - type: max_ap value: 97.73324334051959 - type: max_f1 value: 94.1825476429288 - task: type: Clustering dataset: name: MTEB StackExchangeClustering type: mteb/stackexchange-clustering config: default split: test revision: 6cbc1f7b2bc0622f2e39d2c77fa502909748c259 metrics: - type: v_measure value: 72.8168026381278 - task: type: Clustering dataset: name: MTEB StackExchangeClusteringP2P type: 
mteb/stackexchange-clustering-p2p config: default split: test revision: 815ca46b2622cec33ccafc3735d572c266efdb44 metrics: - type: v_measure value: 44.30948635130784 - task: type: Reranking dataset: name: MTEB StackOverflowDupQuestions type: mteb/stackoverflowdupquestions-reranking config: default split: test revision: e185fbe320c72810689fc5848eb6114e1ef5ec69 metrics: - type: map value: 54.11268548719803 - type: mrr value: 55.08079747050335 - task: type: Summarization dataset: name: MTEB SummEval type: mteb/summeval config: default split: test revision: cda12ad7615edc362dbf25a00fdd61d3b1eaf93c metrics: - type: cos_sim_pearson value: 30.82885852096243 - type: cos_sim_spearman value: 30.800770979226076 - type: dot_pearson value: 30.82885608827704 - type: dot_spearman value: 30.800770979226076 - task: type: Reranking dataset: name: MTEB T2Reranking type: C-MTEB/T2Reranking config: default split: dev revision: 76631901a18387f85eaa53e5450019b87ad58ef9 metrics: - type: map value: 66.73038448968596 - type: mrr value: 77.26510193334836 - task: type: Retrieval dataset: name: MTEB T2Retrieval type: C-MTEB/T2Retrieval config: default split: dev revision: 8731a845f1bf500a4f111cf1070785c793d10e64 metrics: - type: map_at_1 value: 28.157 - type: map_at_10 value: 79.00399999999999 - type: map_at_100 value: 82.51899999999999 - type: map_at_1000 value: 82.577 - type: map_at_3 value: 55.614 - type: map_at_5 value: 68.292 - type: mrr_at_1 value: 91.167 - type: mrr_at_10 value: 93.391 - type: mrr_at_100 value: 93.467 - type: mrr_at_1000 value: 93.47 - type: mrr_at_3 value: 93.001 - type: mrr_at_5 value: 93.254 - type: ndcg_at_1 value: 91.167 - type: ndcg_at_10 value: 86.155 - type: ndcg_at_100 value: 89.425 - type: ndcg_at_1000 value: 89.983 - type: ndcg_at_3 value: 87.516 - type: ndcg_at_5 value: 86.148 - type: precision_at_1 value: 91.167 - type: precision_at_10 value: 42.697 - type: precision_at_100 value: 5.032 - type: precision_at_1000 value: 0.516 - type: precision_at_3 value: 76.45100000000001 - type: precision_at_5 value: 64.051 - type: recall_at_1 value: 28.157 - type: recall_at_10 value: 84.974 - type: recall_at_100 value: 95.759 - type: recall_at_1000 value: 98.583 - type: recall_at_3 value: 57.102 - type: recall_at_5 value: 71.383 - task: type: Classification dataset: name: MTEB TNews type: C-MTEB/TNews-classification config: default split: validation revision: 317f262bf1e6126357bbe89e875451e4b0938fe4 metrics: - type: accuracy value: 55.031 - type: f1 value: 53.07992810732314 - task: type: Retrieval dataset: name: MTEB TRECCOVID type: mteb/trec-covid config: default split: test revision: None metrics: - type: map_at_1 value: 0.20400000000000001 - type: map_at_10 value: 1.27 - type: map_at_100 value: 7.993 - type: map_at_1000 value: 20.934 - type: map_at_3 value: 0.469 - type: map_at_5 value: 0.716 - type: mrr_at_1 value: 76.0 - type: mrr_at_10 value: 84.967 - type: mrr_at_100 value: 84.967 - type: mrr_at_1000 value: 84.967 - type: mrr_at_3 value: 83.667 - type: mrr_at_5 value: 84.967 - type: ndcg_at_1 value: 69.0 - type: ndcg_at_10 value: 59.243 - type: ndcg_at_100 value: 48.784 - type: ndcg_at_1000 value: 46.966 - type: ndcg_at_3 value: 64.14 - type: ndcg_at_5 value: 61.60600000000001 - type: precision_at_1 value: 76.0 - type: precision_at_10 value: 62.6 - type: precision_at_100 value: 50.18 - type: precision_at_1000 value: 21.026 - type: precision_at_3 value: 68.667 - type: precision_at_5 value: 66.0 - type: recall_at_1 value: 0.20400000000000001 - type: recall_at_10 value: 1.582 - type: 
recall_at_100 value: 11.988 - type: recall_at_1000 value: 44.994 - type: recall_at_3 value: 0.515 - type: recall_at_5 value: 0.844 - task: type: Clustering dataset: name: MTEB ThuNewsClusteringP2P type: C-MTEB/ThuNewsClusteringP2P config: default split: test revision: 5798586b105c0434e4f0fe5e767abe619442cf93 metrics: - type: v_measure value: 72.80915114296552 - task: type: Clustering dataset: name: MTEB ThuNewsClusteringS2S type: C-MTEB/ThuNewsClusteringS2S config: default split: test revision: 8a8b2caeda43f39e13c4bc5bea0f8a667896e10d metrics: - type: v_measure value: 70.86374654127641 - task: type: Retrieval dataset: name: MTEB Touche2020 type: mteb/touche2020 config: default split: test revision: a34f9a33db75fa0cbb21bb5cfc3dae8dc8bec93f metrics: - type: map_at_1 value: 3.3009999999999997 - type: map_at_10 value: 11.566 - type: map_at_100 value: 17.645 - type: map_at_1000 value: 19.206 - type: map_at_3 value: 6.986000000000001 - type: map_at_5 value: 8.716 - type: mrr_at_1 value: 42.857 - type: mrr_at_10 value: 58.287 - type: mrr_at_100 value: 59.111000000000004 - type: mrr_at_1000 value: 59.111000000000004 - type: mrr_at_3 value: 55.102 - type: mrr_at_5 value: 57.449 - type: ndcg_at_1 value: 39.796 - type: ndcg_at_10 value: 29.059 - type: ndcg_at_100 value: 40.629 - type: ndcg_at_1000 value: 51.446000000000005 - type: ndcg_at_3 value: 36.254999999999995 - type: ndcg_at_5 value: 32.216 - type: precision_at_1 value: 42.857 - type: precision_at_10 value: 23.469 - type: precision_at_100 value: 8.041 - type: precision_at_1000 value: 1.551 - type: precision_at_3 value: 36.735 - type: precision_at_5 value: 30.203999999999997 - type: recall_at_1 value: 3.3009999999999997 - type: recall_at_10 value: 17.267 - type: recall_at_100 value: 49.36 - type: recall_at_1000 value: 83.673 - type: recall_at_3 value: 8.049000000000001 - type: recall_at_5 value: 11.379999999999999 - task: type: Classification dataset: name: MTEB ToxicConversationsClassification type: mteb/toxic_conversations_50k config: default split: test revision: d7c0de2777da35d6aae2200a62c6e0e5af397c4c metrics: - type: accuracy value: 88.7576 - type: ap value: 35.52110634325751 - type: f1 value: 74.14476947482417 - task: type: Classification dataset: name: MTEB TweetSentimentExtractionClassification type: mteb/tweet_sentiment_extraction config: default split: test revision: d604517c81ca91fe16a244d1248fc021f9ecee7a metrics: - type: accuracy value: 73.52009054895304 - type: f1 value: 73.81407409876577 - task: type: Clustering dataset: name: MTEB TwentyNewsgroupsClustering type: mteb/twentynewsgroups-clustering config: default split: test revision: 6125ec4e24fa026cec8a478383ee943acfbd5449 metrics: - type: v_measure value: 54.35358706465052 - task: type: PairClassification dataset: name: MTEB TwitterSemEval2015 type: mteb/twittersemeval2015-pairclassification config: default split: test revision: 70970daeab8776df92f5ea462b6173c0b46fd2d1 metrics: - type: cos_sim_accuracy value: 83.65619598259522 - type: cos_sim_ap value: 65.824087818991 - type: cos_sim_f1 value: 61.952620244077536 - type: cos_sim_precision value: 56.676882661996494 - type: cos_sim_recall value: 68.311345646438 - type: dot_accuracy value: 83.65619598259522 - type: dot_ap value: 65.82406256999921 - type: dot_f1 value: 61.952620244077536 - type: dot_precision value: 56.676882661996494 - type: dot_recall value: 68.311345646438 - type: euclidean_accuracy value: 83.65619598259522 - type: euclidean_ap value: 65.82409143427542 - type: euclidean_f1 value: 61.952620244077536 - type: 
euclidean_precision value: 56.676882661996494 - type: euclidean_recall value: 68.311345646438 - type: manhattan_accuracy value: 83.4296954163438 - type: manhattan_ap value: 65.20662449614932 - type: manhattan_f1 value: 61.352885525070946 - type: manhattan_precision value: 55.59365623660523 - type: manhattan_recall value: 68.44327176781002 - type: max_accuracy value: 83.65619598259522 - type: max_ap value: 65.82409143427542 - type: max_f1 value: 61.952620244077536 - task: type: PairClassification dataset: name: MTEB TwitterURLCorpus type: mteb/twitterurlcorpus-pairclassification config: default split: test revision: 8b6510b0b1fa4e4c4f879467980e9be563ec1cdf metrics: - type: cos_sim_accuracy value: 87.90119144642372 - type: cos_sim_ap value: 84.04753852793387 - type: cos_sim_f1 value: 76.27737226277372 - type: cos_sim_precision value: 73.86757068667052 - type: cos_sim_recall value: 78.84970742223591 - type: dot_accuracy value: 87.90119144642372 - type: dot_ap value: 84.04753668117337 - type: dot_f1 value: 76.27737226277372 - type: dot_precision value: 73.86757068667052 - type: dot_recall value: 78.84970742223591 - type: euclidean_accuracy value: 87.90119144642372 - type: euclidean_ap value: 84.04754553468206 - type: euclidean_f1 value: 76.27737226277372 - type: euclidean_precision value: 73.86757068667052 - type: euclidean_recall value: 78.84970742223591 - type: manhattan_accuracy value: 87.87014398261343 - type: manhattan_ap value: 84.05164646221583 - type: manhattan_f1 value: 76.31392706820128 - type: manhattan_precision value: 73.91586694566708 - type: manhattan_recall value: 78.87280566676932 - type: max_accuracy value: 87.90119144642372 - type: max_ap value: 84.05164646221583 - type: max_f1 value: 76.31392706820128 - task: type: Retrieval dataset: name: MTEB VideoRetrieval type: C-MTEB/VideoRetrieval config: default split: dev revision: 58c2597a5943a2ba48f4668c3b90d796283c5639 metrics: - type: map_at_1 value: 63.6 - type: map_at_10 value: 72.673 - type: map_at_100 value: 73.05199999999999 - type: map_at_1000 value: 73.057 - type: map_at_3 value: 70.833 - type: map_at_5 value: 72.05799999999999 - type: mrr_at_1 value: 63.6 - type: mrr_at_10 value: 72.673 - type: mrr_at_100 value: 73.05199999999999 - type: mrr_at_1000 value: 73.057 - type: mrr_at_3 value: 70.833 - type: mrr_at_5 value: 72.05799999999999 - type: ndcg_at_1 value: 63.6 - type: ndcg_at_10 value: 76.776 - type: ndcg_at_100 value: 78.52900000000001 - type: ndcg_at_1000 value: 78.696 - type: ndcg_at_3 value: 73.093 - type: ndcg_at_5 value: 75.288 - type: precision_at_1 value: 63.6 - type: precision_at_10 value: 8.95 - type: precision_at_100 value: 0.975 - type: precision_at_1000 value: 0.099 - type: precision_at_3 value: 26.533 - type: precision_at_5 value: 16.98 - type: recall_at_1 value: 63.6 - type: recall_at_10 value: 89.5 - type: recall_at_100 value: 97.5 - type: recall_at_1000 value: 98.9 - type: recall_at_3 value: 79.60000000000001 - type: recall_at_5 value: 84.89999999999999 - task: type: Classification dataset: name: MTEB Waimai type: C-MTEB/waimai-classification config: default split: test revision: 339287def212450dcaa9df8c22bf93e9980c7023 metrics: - type: accuracy value: 89.39999999999999 - type: ap value: 75.52087544076016 - type: f1 value: 87.7629629899278 --- <p align="center"> <img src="images/gme_logo.png" alt="GME Logo" style="width: 100%; max-width: 450px;"> </p> <p align="center"><b>GME: General Multimodal Embedding</b></p> ## GME-Qwen2-VL-2B We are excited to present `GME-Qwen2VL` series of unified 
**multimodal embedding models**, which are based on the advanced [Qwen2-VL](https://huggingface.co/collections/Qwen/qwen2-vl-66cee7455501d7126940800d) multimodal large language models (MLLMs). The `GME` models support three types of input: **text**, **image**, and **image-text pair**, all of which can be encoded into universal vector representations with powerful retrieval performance. **Key Enhancements of GME Models**: - **Unified Multimodal Representation**: GME models can process both single-modal and combined-modal inputs, resulting in a unified vector representation. This enables versatile retrieval scenarios (Any2Any Search), supporting tasks such as text retrieval, image retrieval from text, and image-to-image searches. - **High Performance**: Achieves state-of-the-art (SOTA) results in our universal multimodal retrieval benchmark (**UMRB**) and demonstrates strong evaluation scores on the Massive Text Embedding Benchmark (**MTEB**). - **Dynamic Image Resolution**: Benefiting from `Qwen2-VL` and our training data, GME models support dynamic resolution image input. - **Strong Visual Retrieval Performance**: Enhanced by the Qwen2-VL model series, our models excel in visual document retrieval tasks that require a nuanced understanding of document screenshots. This capability is particularly beneficial for complex document understanding scenarios, such as multimodal retrieval-augmented generation (RAG) applications focused on academic papers. **Developed by**: Tongyi Lab, Alibaba Group **Paper**: [GME: Improving Universal Multimodal Retrieval by Multimodal LLMs](http://arxiv.org/abs/2412.16855) ## Model List | Models | Model Size | Max Seq. Length | Dimension | MTEB-en | MTEB-zh | UMRB | |:-----: | :-----: |:-----: |:-----: |:-----: | :-----: | :-----: | |[`gme-Qwen2-VL-2B`](https://huggingface.co/Alibaba-NLP/gme-Qwen2-VL-2B-Instruct) | 2.21B | 32768 | 1536 | 65.27 | 66.92 | 64.45 | |[`gme-Qwen2-VL-7B`](https://huggingface.co/Alibaba-NLP/gme-Qwen2-VL-7B-Instruct) | 8.29B | 32768 | 3584 | 67.48 | 69.73 | 67.44 | ## Usage **Use with custom code** ```python # You can find the script gme_inference.py in https://huggingface.co/Alibaba-NLP/gme-Qwen2-VL-2B-Instruct/blob/main/gme_inference.py from gme_inference import GmeQwen2VL texts = [ "What kind of car is this?", "The Tesla Cybertruck is a battery electric pickup truck built by Tesla, Inc. since 2023." ] images = [ 'https://en.wikipedia.org/wiki/File:Tesla_Cybertruck_damaged_window.jpg', 'https://en.wikipedia.org/wiki/File:2024_Tesla_Cybertruck_Foundation_Series,_front_left_(Greenwich).jpg', ] gme = GmeQwen2VL("Alibaba-NLP/gme-Qwen2-VL-2B-Instruct") # Single-modal embedding e_text = gme.get_text_embeddings(texts=texts) e_image = gme.get_image_embeddings(images=images) print((e_text * e_image).sum(-1)) ## tensor([0.2281, 0.6001], dtype=torch.float16) # How to set embedding instruction e_query = gme.get_text_embeddings(texts=texts, instruction='Find an image that matches the given text.') # If is_query=False, we always use the default instruction. e_corpus = gme.get_image_embeddings(images=images, is_query=False) print((e_query * e_corpus).sum(-1)) ## tensor([0.2433, 0.7051], dtype=torch.float16) # Fused-modal embedding e_fused = gme.get_fused_embeddings(texts=texts, images=images) print((e_fused[0] * e_fused[1]).sum()) ## tensor(0.6108, dtype=torch.float16) ``` ## Evaluation We validated the performance on our universal multimodal retrieval benchmark (**UMRB**), among others. | | | Single-modal | | Cross-modal | | | Fused-modal | | | | Avg.
| |--------------------|------|:------------:|:---------:|:-----------:|:-----------:|:---------:|:-----------:|:----------:|:----------:|:-----------:|:----------:| | | | T→T (16) | I→I (1) | T→I (4) | T→VD (10) | I→T (4) | T→IT (2) | IT→T (5) | IT→I (2) | IT→IT (3) | (47) | | VISTA | 0.2B | 55.15 | **31.98** | 32.88 | 10.12 | 31.23 | 45.81 | 53.32 | 8.97 | 26.26 | 37.32 | | CLIP-SF | 0.4B | 39.75 | 31.42 | 59.05 | 24.09 | 62.95 | 66.41 | 53.32 | 34.9 | 55.65 | 43.66 | | One-Peace | 4B | 43.54 | 31.27 | 61.38 | 42.9 | 65.59 | 42.72 | 28.29 | 6.73 | 23.41 | 42.01 | | DSE | 4.2B | 48.94 | 27.92 | 40.75 | 78.21 | 52.54 | 49.62 | 35.44 | 8.36 | 40.18 | 50.04 | | E5-V | 8.4B | 52.41 | 27.36 | 46.56 | 41.22 | 47.95 | 54.13 | 32.9 | 23.17 | 7.23 | 42.52 | | **[GME-Qwen2-VL-2B](https://huggingface.co/Alibaba-NLP/gme-Qwen2-VL-2B-Instruct)** | 2.2B | 55.93 | 29.86 | 57.36 | 87.84 | 61.93 | 76.47 | 64.58 | 37.02 | 66.47 | 64.45 | | **[GME-Qwen2-VL-7B](https://huggingface.co/Alibaba-NLP/gme-Qwen2-VL-7B-Instruct)** | 8.3B | **58.19** | 31.89 | **61.35** | **89.92** | **65.83** | **80.94** | **66.18** | **42.56** | **73.62** | **67.44** | The English tab of the [MTEB Leaderboard](https://huggingface.co/spaces/mteb/leaderboard) shows the text embedding performance of our model. **More detailed experimental results can be found in the [paper](http://arxiv.org/abs/2412.16855)**. ## Community support ### Fine-tuning GME models can be fine-tuned with SWIFT: ```shell pip install ms-swift -U ``` ```shell # MAX_PIXELS settings to reduce memory usage # check: https://swift.readthedocs.io/en/latest/BestPractices/Embedding.html nproc_per_node=8 MAX_PIXELS=1003520 \ USE_HF=1 \ NPROC_PER_NODE=$nproc_per_node \ swift sft \ --model Alibaba-NLP/gme-Qwen2-VL-2B-Instruct \ --train_type lora \ --dataset 'HuggingFaceM4/TextCaps:emb' \ --torch_dtype bfloat16 \ --num_train_epochs 1 \ --per_device_train_batch_size 2 \ --per_device_eval_batch_size 2 \ --gradient_accumulation_steps $(expr 64 / $nproc_per_node) \ --eval_steps 100 \ --save_steps 100 \ --eval_strategy steps \ --save_total_limit 5 \ --logging_steps 5 \ --output_dir output \ --lazy_tokenize true \ --warmup_ratio 0.05 \ --learning_rate 5e-6 \ --deepspeed zero3 \ --dataloader_num_workers 4 \ --task_type embedding \ --loss_type infonce \ --dataloader_drop_last true ``` ## Limitations - **Single Image Input**: In `Qwen2-VL`, an image can be converted into a very large number of visual tokens. We limit the number of visual tokens to 1024 to maintain good training efficiency. Due to the lack of relevant data, our models and evaluations are currently limited to a single image input. - **English-only Training**: Our models are trained on English data only. Although the `Qwen2-VL` models are multilingual, multilingual multimodal embedding performance is not guaranteed. We will extend to multi-image input, image-text interleaved data, and multilingual data in future versions. ## Redistribution and Use We encourage and value diverse applications of GME models and continuous enhancements to the models themselves. - If you distribute or make GME models (or any derivative works) available, or if you create a product or service (including another AI model) that incorporates them, you must prominently display `Built with GME` on your website, user interface, blog post, About page, or product documentation.
- If you utilize GME models or their outputs to develop, train, fine-tune, or improve an AI model that is distributed or made available, you must prefix the name of any such AI model with `GME`. ## Cloud API Services In addition to the open-source [GME](https://huggingface.co/collections/Alibaba-NLP/gme-models-67667e092da3491f630964d6) series models, the GME series is also available as commercial API services on Alibaba Cloud. - [MultiModal Embedding Models](https://help.aliyun.com/zh/model-studio/developer-reference/multimodal-embedding-api-reference?spm=a2c4g.11186623.0.0.321c1d1cqmoJ5C): The `multimodal-embedding-v1` model service is available. Note that the models behind the commercial APIs are not entirely identical to the open-source models. ## Hiring We have open positions for Research Interns and Full-Time Researchers to join our team at Tongyi Lab. We are seeking passionate individuals with expertise in representation learning, LLM-driven information retrieval, Retrieval-Augmented Generation (RAG), and agent-based systems. Our team is located in the vibrant cities of Beijing and Hangzhou, offering a collaborative and dynamic work environment where you can contribute to cutting-edge advancements in artificial intelligence and machine learning. If you are driven by curiosity and eager to make a meaningful impact through your work, we would love to hear from you. Please submit your resume along with a brief introduction to <a href="mailto:[email protected]">[email protected]</a>. ## Citation If you find our paper or models helpful, please consider citing: ``` @misc{zhang2024gme, title={GME: Improving Universal Multimodal Retrieval by Multimodal LLMs}, author={Zhang, Xin and Zhang, Yanzhao and Xie, Wen and Li, Mingxin and Dai, Ziqi and Long, Dingkun and Xie, Pengjun and Zhang, Meishan and Li, Wenjie and Zhang, Min}, year={2024}, eprint={2412.16855}, archivePrefix={arXiv}, primaryClass={cs.CL}, url={http://arxiv.org/abs/2412.16855}, } ```
[ "BIOSSES", "SCIFACT" ]
EleutherAI/pythia-1b
EleutherAI
text-generation
[ "transformers", "pytorch", "safetensors", "gpt_neox", "text-generation", "causal-lm", "pythia", "en", "dataset:the_pile", "arxiv:2304.01373", "arxiv:2101.00027", "arxiv:2201.07311", "license:apache-2.0", "autotrain_compatible", "text-generation-inference", "endpoints_compatible", "region:us" ]
"2023-03-10T21:42:46Z"
2023-07-09T16:05:58+00:00
64,849
37
--- datasets: - the_pile language: - en license: apache-2.0 tags: - pytorch - causal-lm - pythia --- The *Pythia Scaling Suite* is a collection of models developed to facilitate interpretability research [(see paper)](https://arxiv.org/pdf/2304.01373.pdf). It contains two sets of eight models of sizes 70M, 160M, 410M, 1B, 1.4B, 2.8B, 6.9B, and 12B. For each size, there are two models: one trained on the Pile, and one trained on the Pile after the dataset has been globally deduplicated. All 8 model sizes are trained on the exact same data, in the exact same order. We also provide 154 intermediate checkpoints per model, hosted on Hugging Face as branches (a short example of listing these checkpoint branches is shown after the Model Details section below). The Pythia model suite was deliberately designed to promote scientific research on large language models, especially interpretability research. Despite not centering downstream performance as a design goal, we find the models <a href="#evaluations">match or exceed</a> the performance of similar and same-sized models, such as those in the OPT and GPT-Neo suites. <details> <summary style="font-weight:600">Details on previous early release and naming convention.</summary> Previously, we released an early version of the Pythia suite to the public. However, we decided to retrain the model suite to address a few hyperparameter discrepancies. This model card <a href="#changelog">lists the changes</a>; see appendix B in the Pythia paper for further discussion. We found no difference in benchmark performance between the two Pythia versions. The old models are [still available](https://huggingface.co/models?other=pythia_v0), but we suggest the retrained suite if you are just starting to use Pythia.<br> **This is the current release.** Please note that all models in the *Pythia* suite were renamed in January 2023. For clarity, a <a href="#naming-convention-and-parameter-count">table comparing the old and new names</a> is provided in this model card, together with exact parameter counts. </details> <br> # Pythia-1B ## Model Details - Developed by: [EleutherAI](http://eleuther.ai) - Model type: Transformer-based Language Model - Language: English - Learn more: [Pythia's GitHub repository](https://github.com/EleutherAI/pythia) for training procedure, config files, and details on how to use it. [See paper](https://arxiv.org/pdf/2304.01373.pdf) for more evals and implementation details. - Library: [GPT-NeoX](https://github.com/EleutherAI/gpt-neox) - License: Apache 2.0 - Contact: to ask questions about this model, join the [EleutherAI Discord](https://discord.gg/zBGx3azzUn), and post them in `#release-discussion`. Please read the existing *Pythia* documentation before asking about it in the EleutherAI Discord. For general correspondence: [[email protected]](mailto:[email protected]).
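The 154 intermediate checkpoints mentioned above are ordinary git branches of this repository, named `step0` through `step143000`. As a minimal sketch (assuming a recent version of the `huggingface_hub` client is installed), you can enumerate them before choosing a `revision` to load:

```python
from huggingface_hub import list_repo_refs

# List every branch of the Pythia-1B repository; the checkpoint branches are
# named "step0", "step1", ..., "step143000" alongside "main".
refs = list_repo_refs("EleutherAI/pythia-1b")
step_branches = sorted(
    (branch.name for branch in refs.branches if branch.name.startswith("step")),
    key=lambda name: int(name.removeprefix("step")),
)
print(len(step_branches))                    # the card documents 154 checkpoints
print(step_branches[:5], step_branches[-1])  # earliest branches and the final one
```

Any of these branch names can then be passed as the `revision` argument shown in the Quickstart section below to load the model exactly as it was at that point in training.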
<figure> | Pythia model | Non-Embedding Params | Layers | Model Dim | Heads | Batch Size | Learning Rate | Equivalent Models | | -----------: | -------------------: | :----: | :-------: | :---: | :--------: | :-------------------: | :--------------------: | | 70M | 18,915,328 | 6 | 512 | 8 | 2M | 1.0 x 10<sup>-3</sup> | — | | 160M | 85,056,000 | 12 | 768 | 12 | 2M | 6.0 x 10<sup>-4</sup> | GPT-Neo 125M, OPT-125M | | 410M | 302,311,424 | 24 | 1024 | 16 | 2M | 3.0 x 10<sup>-4</sup> | OPT-350M | | 1.0B | 805,736,448 | 16 | 2048 | 8 | 2M | 3.0 x 10<sup>-4</sup> | — | | 1.4B | 1,208,602,624 | 24 | 2048 | 16 | 2M | 2.0 x 10<sup>-4</sup> | GPT-Neo 1.3B, OPT-1.3B | | 2.8B | 2,517,652,480 | 32 | 2560 | 32 | 2M | 1.6 x 10<sup>-4</sup> | GPT-Neo 2.7B, OPT-2.7B | | 6.9B | 6,444,163,072 | 32 | 4096 | 32 | 2M | 1.2 x 10<sup>-4</sup> | OPT-6.7B | | 12B | 11,327,027,200 | 36 | 5120 | 40 | 2M | 1.2 x 10<sup>-4</sup> | — | <figcaption>Engineering details for the <i>Pythia Suite</i>. Deduped and non-deduped models of a given size have the same hyperparameters. “Equivalent” models have <b>exactly</b> the same architecture, and the same number of non-embedding parameters.</figcaption> </figure> ## Uses and Limitations ### Intended Use The primary intended use of Pythia is research on the behavior, functionality, and limitations of large language models. This suite is intended to provide a controlled setting for performing scientific experiments. We also provide 154 checkpoints per model: initial `step0`, 10 log-spaced checkpoints `step{1,2,4...512}`, and 143 evenly-spaced checkpoints from `step1000` to `step143000`. These checkpoints are hosted on Hugging Face as branches. Note that branch `143000` corresponds exactly to the model checkpoint on the `main` branch of each model. You may also further fine-tune and adapt Pythia-1B for deployment, as long as your use is in accordance with the Apache 2.0 license. Pythia models work with the Hugging Face [Transformers Library](https://huggingface.co/docs/transformers/index). If you decide to use pre-trained Pythia-1B as a basis for your fine-tuned model, please conduct your own risk and bias assessment. ### Out-of-scope use The Pythia Suite is **not** intended for deployment. It is not in itself a product and cannot be used for human-facing interactions. For example, the model may generate harmful or offensive text. Please evaluate the risks associated with your particular use case. Pythia models are English-language only, and are not suitable for translation or generating text in other languages. Pythia-1B has not been fine-tuned for downstream contexts in which language models are commonly deployed, such as writing genre prose or commercial chatbots. This means Pythia-1B will **not** respond to a given prompt the way a product like ChatGPT does. This is because, unlike this model, ChatGPT was fine-tuned using methods such as Reinforcement Learning from Human Feedback (RLHF) to better “follow” human instructions. ### Limitations and biases The core functionality of a large language model is to take a string of text and predict the next token. The token the model deems statistically most likely need not produce the most “accurate” text. Never rely on Pythia-1B to produce factually accurate output. This model was trained on [the Pile](https://pile.eleuther.ai/), a dataset known to contain profanity and texts that are lewd or otherwise offensive.
See [Section 6 of the Pile paper](https://arxiv.org/abs/2101.00027) for a discussion of documented biases with regards to gender, religion, and race. Pythia-1B may produce socially unacceptable or undesirable text, *even if* the prompt itself does not include anything explicitly offensive.

If you plan on using text generated through, for example, the Hosted Inference API, we recommend having a human curate the outputs of this language model before presenting it to other people. Please inform your audience that the text was generated by Pythia-1B.

### Quickstart

Pythia models can be loaded and used via the following code, demonstrated here for the third `pythia-70m-deduped` checkpoint:

```python
from transformers import GPTNeoXForCausalLM, AutoTokenizer

# Load the model weights from the "step3000" checkpoint branch.
model = GPTNeoXForCausalLM.from_pretrained(
  "EleutherAI/pythia-70m-deduped",
  revision="step3000",
  cache_dir="./pythia-70m-deduped/step3000",
)

# The tokenizer is the same across checkpoints and model sizes.
tokenizer = AutoTokenizer.from_pretrained(
  "EleutherAI/pythia-70m-deduped",
  revision="step3000",
  cache_dir="./pythia-70m-deduped/step3000",
)

# Tokenize a prompt, generate a continuation, and decode it back to text.
inputs = tokenizer("Hello, I am", return_tensors="pt")
tokens = model.generate(**inputs)
tokenizer.decode(tokens[0])
```

Revision/branch `step143000` corresponds exactly to the model checkpoint on the `main` branch of each model.<br>
For more information on how to use all Pythia models, see [documentation on GitHub](https://github.com/EleutherAI/pythia).

## Training

### Training data

[The Pile](https://pile.eleuther.ai/) is an 825GiB general-purpose dataset in English. It was created by EleutherAI specifically for training large language models. It contains texts from 22 diverse sources, roughly broken down into five categories: academic writing (e.g. arXiv), internet (e.g. CommonCrawl), prose (e.g. Project Gutenberg), dialogue (e.g. YouTube subtitles), and miscellaneous (e.g. GitHub, Enron Emails). See [the Pile paper](https://arxiv.org/abs/2101.00027) for a breakdown of all data sources, methodology, and a discussion of ethical implications. Consult [the datasheet](https://arxiv.org/abs/2201.07311) for more detailed documentation about the Pile and its component datasets. The Pile can be downloaded from the [official website](https://pile.eleuther.ai/), or from a [community mirror](https://the-eye.eu/public/AI/pile/).<br>
The Pile was **not** deduplicated before being used to train Pythia-1B.

### Training procedure

All models were trained on the exact same data, in the exact same order. Each model saw 299,892,736,000 tokens during training, and 143 checkpoints for each model are saved every 2,097,152,000 tokens, spaced evenly throughout training, from `step1000` to `step143000` (which is the same as `main`). In addition, we also provide frequent early checkpoints: `step0` and `step{1,2,4...512}`. This corresponds to training for just under 1 epoch on the Pile for non-deduplicated models, and about 1.5 epochs on the deduplicated Pile.

All *Pythia* models trained for 143,000 steps at a batch size of 2M (2,097,152 tokens), which works out to the 299,892,736,000 training tokens noted above (143,000 × 2,097,152).<br>
See [GitHub](https://github.com/EleutherAI/pythia) for more details on training procedure, including [how to reproduce it](https://github.com/EleutherAI/pythia/blob/main/README.md#reproducing-training).<br>
Pythia uses the same tokenizer as [GPT-NeoX-20B](https://huggingface.co/EleutherAI/gpt-neox-20b).

## Evaluations

All 16 *Pythia* models were evaluated using the [LM Evaluation Harness](https://github.com/EleutherAI/lm-evaluation-harness).
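
As an illustration only, and not part of the official evaluation setup documented in the Pythia repository, a minimal programmatic run might look roughly like the sketch below. It assumes a recent v0.4-style release of the harness; the `simple_evaluate` entry point and its argument names may differ between versions, and the task selection here is arbitrary:

```python
import lm_eval

# Evaluate the final Pythia-1B checkpoint on two of the tasks plotted below.
results = lm_eval.simple_evaluate(
    model="hf",
    model_args="pretrained=EleutherAI/pythia-1b,revision=step143000",
    tasks=["lambada_openai", "sciq"],
    batch_size=8,
)
print(results["results"])
```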
You can access the results by model and step at `results/json/*` in the [GitHub repository](https://github.com/EleutherAI/pythia/tree/main/results/json/).<br>
Expand the sections below to see plots of evaluation results for all Pythia and Pythia-deduped models compared with OPT and BLOOM.

<details>
<summary>LAMBADA – OpenAI</summary>
<img src="/EleutherAI/pythia-12b/resolve/main/eval_plots/lambada_openai_v1.png" style="width:auto"/>
</details>

<details>
<summary>Physical Interaction: Question Answering (PIQA)</summary>
<img src="/EleutherAI/pythia-12b/resolve/main/eval_plots/piqa_v1.png" style="width:auto"/>
</details>

<details>
<summary>WinoGrande</summary>
<img src="/EleutherAI/pythia-12b/resolve/main/eval_plots/winogrande_v1.png" style="width:auto"/>
</details>

<details>
<summary>AI2 Reasoning Challenge—Easy Set</summary>
<img src="/EleutherAI/pythia-12b/resolve/main/eval_plots/arc_easy_v1.png" style="width:auto"/>
</details>

<details>
<summary>SciQ</summary>
<img src="/EleutherAI/pythia-12b/resolve/main/eval_plots/sciq_v1.png" style="width:auto"/>
</details>

## Changelog

This section compares differences between the previously released [Pythia v0](https://huggingface.co/models?other=pythia_v0) and the current models. See Appendix B of the Pythia paper for further discussion of these changes and the motivation behind them. We found that retraining Pythia had no impact on benchmark performance.

- All model sizes are now trained with a uniform batch size of 2M tokens. Previously, the models of size 160M, 410M, and 1.4B parameters were trained with batch sizes of 4M tokens.
- We added checkpoints at initialization (step 0) and steps {1,2,4,8,16,32,64,128,256,512} in addition to every 1000 training steps.
- Flash Attention was used in the new retrained suite.
- We remedied a minor inconsistency that existed in the original suite: all models of size 2.8B parameters or smaller had a learning rate (LR) schedule which decayed to a minimum LR of 10% of the starting LR, but the 6.9B and 12B models all used an LR schedule which decayed to a minimum LR of 0. In the redone training runs, we rectified this inconsistency: all models were trained with an LR decaying to a minimum of 0.1× their maximum LR.

### Naming convention and parameter count

*Pythia* models were renamed in January 2023. It is possible that the old naming convention still persists in some documentation by accident. The current naming convention (70M, 160M, etc.) is based on total parameter count.

<figure style="width:32em">

| current Pythia suffix | old suffix | total params | non-embedding params |
| --------------------: | ---------: | -------------: | -------------------: |
| 70M | 19M | 70,426,624 | 18,915,328 |
| 160M | 125M | 162,322,944 | 85,056,000 |
| 410M | 350M | 405,334,016 | 302,311,424 |
| 1B | 800M | 1,011,781,632 | 805,736,448 |
| 1.4B | 1.3B | 1,414,647,808 | 1,208,602,624 |
| 2.8B | 2.7B | 2,775,208,960 | 2,517,652,480 |
| 6.9B | 6.7B | 6,857,302,016 | 6,444,163,072 |
| 12B | 13B | 11,846,072,320 | 11,327,027,200 |
</figure>
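
Finally, as a hedged sketch of the kind of training-dynamics question the checkpoint branches make easy to ask (the prompt, the step selection, and the use of greedy next-token prediction here are arbitrary illustrative choices, not part of the original card):

```python
import torch
from transformers import AutoTokenizer, GPTNeoXForCausalLM

prompt = "The capital of France is"
tokenizer = AutoTokenizer.from_pretrained("EleutherAI/pythia-1b")
inputs = tokenizer(prompt, return_tensors="pt")

# Compare the model's most likely next token at a few points during training.
for step in (1000, 36000, 143000):
    model = GPTNeoXForCausalLM.from_pretrained(
        "EleutherAI/pythia-1b", revision=f"step{step}"
    )
    with torch.no_grad():
        logits = model(**inputs).logits[0, -1]
    top_token = tokenizer.decode([int(logits.argmax())])
    print(f"step{step:>6}: most likely next token = {top_token!r}")
    del model  # free memory before loading the next checkpoint
```

Each iteration downloads the weights for that branch (roughly 2 GB for Pythia-1B), so in practice you would likely cache checkpoints locally or restrict the sweep to a few steps of interest.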
[ "SCIQ" ]
Snowflake/snowflake-arctic-embed-m-v1.5
Snowflake
sentence-similarity
[ "sentence-transformers", "onnx", "safetensors", "gguf", "bert", "feature-extraction", "sentence-similarity", "mteb", "arctic", "snowflake-arctic-embed", "transformers.js", "arxiv:2412.04506", "arxiv:2407.18887", "arxiv:2405.05374", "arxiv:2205.13147", "license:apache-2.0", "model-index", "autotrain_compatible", "text-embeddings-inference", "endpoints_compatible", "region:us" ]
"2024-07-03T18:46:29Z"
2024-12-13T22:45:35+00:00
63,055
57
--- license: apache-2.0 pipeline_tag: sentence-similarity tags: - sentence-transformers - feature-extraction - sentence-similarity - mteb - arctic - snowflake-arctic-embed - transformers.js model-index: - name: snowflake-arctic-embed-m-v1.5 results: - task: type: Retrieval dataset: name: MTEB ArguAna type: mteb/arguana config: default split: test revision: c22ab2a51041ffd869aaddef7af8d8215647e41a metrics: - type: main_score value: 59.53000000000001 - type: map_at_1 value: 34.282000000000004 - type: map_at_10 value: 50.613 - type: map_at_100 value: 51.269 - type: map_at_1000 value: 51.271 - type: map_at_20 value: 51.158 - type: map_at_3 value: 45.626 - type: map_at_5 value: 48.638 - type: mrr_at_1 value: 34.92176386913229 - type: mrr_at_10 value: 50.856081645555406 - type: mrr_at_100 value: 51.510739437069034 - type: mrr_at_1000 value: 51.51299498830165 - type: mrr_at_20 value: 51.39987941081724 - type: mrr_at_3 value: 45.993361782835514 - type: mrr_at_5 value: 48.88098624940742 - type: nauc_map_at_1000_diff1 value: 10.628675774160785 - type: nauc_map_at_1000_max value: -10.11742589992339 - type: nauc_map_at_1000_std value: -18.29277379812427 - type: nauc_map_at_100_diff1 value: 10.63250240035489 - type: nauc_map_at_100_max value: -10.112078786734363 - type: nauc_map_at_100_std value: -18.288524872706834 - type: nauc_map_at_10_diff1 value: 10.476494913081712 - type: nauc_map_at_10_max value: -9.890937746734037 - type: nauc_map_at_10_std value: -18.279750514750443 - type: nauc_map_at_1_diff1 value: 14.549204048461151 - type: nauc_map_at_1_max value: -12.230560087701225 - type: nauc_map_at_1_std value: -19.469903650130362 - type: nauc_map_at_20_diff1 value: 10.586564571825674 - type: nauc_map_at_20_max value: -10.00292720526217 - type: nauc_map_at_20_std value: -18.258077347878064 - type: nauc_map_at_3_diff1 value: 10.378663968090372 - type: nauc_map_at_3_max value: -10.458896171786185 - type: nauc_map_at_3_std value: -18.38852760333766 - type: nauc_map_at_5_diff1 value: 10.235960275925581 - type: nauc_map_at_5_max value: -10.239496080409058 - type: nauc_map_at_5_std value: -18.817023479445886 - type: nauc_mrr_at_1000_diff1 value: 8.718212649575722 - type: nauc_mrr_at_1000_max value: -10.81022794038691 - type: nauc_mrr_at_1000_std value: -17.87669499555167 - type: nauc_mrr_at_100_diff1 value: 8.722174171165133 - type: nauc_mrr_at_100_max value: -10.804840985713525 - type: nauc_mrr_at_100_std value: -17.872487099359986 - type: nauc_mrr_at_10_diff1 value: 8.609421635870238 - type: nauc_mrr_at_10_max value: -10.568644717548432 - type: nauc_mrr_at_10_std value: -17.872968762635814 - type: nauc_mrr_at_1_diff1 value: 12.69590006263834 - type: nauc_mrr_at_1_max value: -12.082056561238321 - type: nauc_mrr_at_1_std value: -18.036424092186657 - type: nauc_mrr_at_20_diff1 value: 8.684842497970315 - type: nauc_mrr_at_20_max value: -10.691578914627286 - type: nauc_mrr_at_20_std value: -17.84350301434992 - type: nauc_mrr_at_3_diff1 value: 8.649761557556763 - type: nauc_mrr_at_3_max value: -11.104694428047496 - type: nauc_mrr_at_3_std value: -18.149917948370344 - type: nauc_mrr_at_5_diff1 value: 8.433489750038396 - type: nauc_mrr_at_5_max value: -10.917772454397436 - type: nauc_mrr_at_5_std value: -18.4094211134111 - type: nauc_ndcg_at_1000_diff1 value: 10.19041067807956 - type: nauc_ndcg_at_1000_max value: -9.54328201605796 - type: nauc_ndcg_at_1000_std value: -17.824620427456633 - type: nauc_ndcg_at_100_diff1 value: 10.289491087585963 - type: nauc_ndcg_at_100_max value: -9.357214331420337 - type: 
nauc_ndcg_at_100_std value: -17.657600653632873 - type: nauc_ndcg_at_10_diff1 value: 9.435530877596092 - type: nauc_ndcg_at_10_max value: -8.182581635383546 - type: nauc_ndcg_at_10_std value: -17.603156479980388 - type: nauc_ndcg_at_1_diff1 value: 14.549204048461151 - type: nauc_ndcg_at_1_max value: -12.230560087701225 - type: nauc_ndcg_at_1_std value: -19.469903650130362 - type: nauc_ndcg_at_20_diff1 value: 9.885227087275197 - type: nauc_ndcg_at_20_max value: -8.52362662391439 - type: nauc_ndcg_at_20_std value: -17.441705436231764 - type: nauc_ndcg_at_3_diff1 value: 9.22542769998547 - type: nauc_ndcg_at_3_max value: -9.903590564219288 - type: nauc_ndcg_at_3_std value: -18.357220221111593 - type: nauc_ndcg_at_5_diff1 value: 8.8756720745828 - type: nauc_ndcg_at_5_max value: -9.269764943861245 - type: nauc_ndcg_at_5_std value: -19.009229433187784 - type: nauc_precision_at_1000_diff1 value: 3.733355117431035 - type: nauc_precision_at_1000_max value: 3.9603571352517393 - type: nauc_precision_at_1000_std value: 70.07345061131439 - type: nauc_precision_at_100_diff1 value: 29.019032142462457 - type: nauc_precision_at_100_max value: 40.75153328286103 - type: nauc_precision_at_100_std value: 62.634249549126594 - type: nauc_precision_at_10_diff1 value: 2.5762677254910353 - type: nauc_precision_at_10_max value: 6.096298633773051 - type: nauc_precision_at_10_std value: -11.507400451348587 - type: nauc_precision_at_1_diff1 value: 14.549204048461151 - type: nauc_precision_at_1_max value: -12.230560087701225 - type: nauc_precision_at_1_std value: -19.469903650130362 - type: nauc_precision_at_20_diff1 value: 1.715540124567996 - type: nauc_precision_at_20_max value: 21.53546453945913 - type: nauc_precision_at_20_std value: 1.537961142195571 - type: nauc_precision_at_3_diff1 value: 5.701850652555737 - type: nauc_precision_at_3_max value: -8.180345365085552 - type: nauc_precision_at_3_std value: -18.37033750502482 - type: nauc_precision_at_5_diff1 value: 3.6053552181042843 - type: nauc_precision_at_5_max value: -5.207647070615612 - type: nauc_precision_at_5_std value: -19.89491085427258 - type: nauc_recall_at_1000_diff1 value: 3.733355117431255 - type: nauc_recall_at_1000_max value: 3.9603571352482194 - type: nauc_recall_at_1000_std value: 70.07345061131205 - type: nauc_recall_at_100_diff1 value: 29.01903214246288 - type: nauc_recall_at_100_max value: 40.7515332828621 - type: nauc_recall_at_100_std value: 62.63424954912607 - type: nauc_recall_at_10_diff1 value: 2.5762677254911988 - type: nauc_recall_at_10_max value: 6.0962986337729905 - type: nauc_recall_at_10_std value: -11.507400451348577 - type: nauc_recall_at_1_diff1 value: 14.549204048461151 - type: nauc_recall_at_1_max value: -12.230560087701225 - type: nauc_recall_at_1_std value: -19.469903650130362 - type: nauc_recall_at_20_diff1 value: 1.7155401245682675 - type: nauc_recall_at_20_max value: 21.535464539459632 - type: nauc_recall_at_20_std value: 1.5379611421957025 - type: nauc_recall_at_3_diff1 value: 5.7018506525557875 - type: nauc_recall_at_3_max value: -8.180345365085538 - type: nauc_recall_at_3_std value: -18.370337505024796 - type: nauc_recall_at_5_diff1 value: 3.6053552181043913 - type: nauc_recall_at_5_max value: -5.207647070615579 - type: nauc_recall_at_5_std value: -19.894910854272492 - type: ndcg_at_1 value: 34.282000000000004 - type: ndcg_at_10 value: 59.53000000000001 - type: ndcg_at_100 value: 62.187000000000005 - type: ndcg_at_1000 value: 62.243 - type: ndcg_at_20 value: 61.451 - type: ndcg_at_3 value: 49.393 - type: ndcg_at_5 value: 
54.771 - type: precision_at_1 value: 34.282000000000004 - type: precision_at_10 value: 8.791 - type: precision_at_100 value: 0.992 - type: precision_at_1000 value: 0.1 - type: precision_at_20 value: 4.769 - type: precision_at_3 value: 20.104 - type: precision_at_5 value: 14.651 - type: recall_at_1 value: 34.282000000000004 - type: recall_at_10 value: 87.909 - type: recall_at_100 value: 99.21799999999999 - type: recall_at_1000 value: 99.644 - type: recall_at_20 value: 95.377 - type: recall_at_3 value: 60.313 - type: recall_at_5 value: 73.257 - task: type: Retrieval dataset: name: MTEB CQADupstackAndroidRetrieval type: mteb/cqadupstack-android config: default split: test revision: f46a197baaae43b4f621051089b82a364682dfeb metrics: - type: main_score value: 53.885000000000005 - type: map_at_1 value: 35.429 - type: map_at_10 value: 47.469 - type: map_at_100 value: 48.997 - type: map_at_1000 value: 49.117 - type: map_at_20 value: 48.324 - type: map_at_3 value: 43.835 - type: map_at_5 value: 46.043 - type: mrr_at_1 value: 43.34763948497854 - type: mrr_at_10 value: 53.258623430297234 - type: mrr_at_100 value: 53.99123884299005 - type: mrr_at_1000 value: 54.02458101713216 - type: mrr_at_20 value: 53.695964669618945 - type: mrr_at_3 value: 50.81068192656173 - type: mrr_at_5 value: 52.45588936576058 - type: nauc_map_at_1000_diff1 value: 51.55382824218782 - type: nauc_map_at_1000_max value: 31.855350695084606 - type: nauc_map_at_1000_std value: -5.465862008150992 - type: nauc_map_at_100_diff1 value: 51.55889312452534 - type: nauc_map_at_100_max value: 31.88429637207401 - type: nauc_map_at_100_std value: -5.40805152544196 - type: nauc_map_at_10_diff1 value: 51.6592677505875 - type: nauc_map_at_10_max value: 31.554425233617543 - type: nauc_map_at_10_std value: -6.125756131339046 - type: nauc_map_at_1_diff1 value: 55.6889617582672 - type: nauc_map_at_1_max value: 27.821166966868176 - type: nauc_map_at_1_std value: -5.778838498211728 - type: nauc_map_at_20_diff1 value: 51.70520970992564 - type: nauc_map_at_20_max value: 31.811676633900465 - type: nauc_map_at_20_std value: -5.463596751904718 - type: nauc_map_at_3_diff1 value: 53.206169626589606 - type: nauc_map_at_3_max value: 31.64373830824983 - type: nauc_map_at_3_std value: -6.054761451312827 - type: nauc_map_at_5_diff1 value: 52.37308971673694 - type: nauc_map_at_5_max value: 31.974302019633644 - type: nauc_map_at_5_std value: -6.302653399940531 - type: nauc_mrr_at_1000_diff1 value: 49.345152231490616 - type: nauc_mrr_at_1000_max value: 33.49789501712511 - type: nauc_mrr_at_1000_std value: -6.054730861163538 - type: nauc_mrr_at_100_diff1 value: 49.3387577601307 - type: nauc_mrr_at_100_max value: 33.48149992464187 - type: nauc_mrr_at_100_std value: -6.061177137579308 - type: nauc_mrr_at_10_diff1 value: 49.08312288449718 - type: nauc_mrr_at_10_max value: 33.470393322577465 - type: nauc_mrr_at_10_std value: -6.180286430216975 - type: nauc_mrr_at_1_diff1 value: 52.43364978537192 - type: nauc_mrr_at_1_max value: 31.521755633355713 - type: nauc_mrr_at_1_std value: -7.002499524130836 - type: nauc_mrr_at_20_diff1 value: 49.311059224991766 - type: nauc_mrr_at_20_max value: 33.538523037692144 - type: nauc_mrr_at_20_std value: -6.034619474981136 - type: nauc_mrr_at_3_diff1 value: 49.90489868439366 - type: nauc_mrr_at_3_max value: 34.400493912164606 - type: nauc_mrr_at_3_std value: -6.028875320994629 - type: nauc_mrr_at_5_diff1 value: 49.033661898983475 - type: nauc_mrr_at_5_max value: 33.732315350193936 - type: nauc_mrr_at_5_std value: -6.272548556330368 - type: 
nauc_ndcg_at_1000_diff1 value: 49.81681892539247 - type: nauc_ndcg_at_1000_max value: 33.06518006062093 - type: nauc_ndcg_at_1000_std value: -4.282105713014755 - type: nauc_ndcg_at_100_diff1 value: 49.42362108857786 - type: nauc_ndcg_at_100_max value: 32.92024325540483 - type: nauc_ndcg_at_100_std value: -3.7786765305496717 - type: nauc_ndcg_at_10_diff1 value: 48.83102435475594 - type: nauc_ndcg_at_10_max value: 31.898404563611958 - type: nauc_ndcg_at_10_std value: -6.2024003866707 - type: nauc_ndcg_at_1_diff1 value: 52.43364978537192 - type: nauc_ndcg_at_1_max value: 31.521755633355713 - type: nauc_ndcg_at_1_std value: -7.002499524130836 - type: nauc_ndcg_at_20_diff1 value: 49.466526454438316 - type: nauc_ndcg_at_20_max value: 32.424462698701674 - type: nauc_ndcg_at_20_std value: -4.520809563712905 - type: nauc_ndcg_at_3_diff1 value: 50.997884562583884 - type: nauc_ndcg_at_3_max value: 33.26787046916917 - type: nauc_ndcg_at_3_std value: -6.340699471083753 - type: nauc_ndcg_at_5_diff1 value: 49.68314458398097 - type: nauc_ndcg_at_5_max value: 32.80910071143984 - type: nauc_ndcg_at_5_std value: -6.734495576445887 - type: nauc_precision_at_1000_diff1 value: -24.18940012795299 - type: nauc_precision_at_1000_max value: -10.995343674356896 - type: nauc_precision_at_1000_std value: -8.298841004724856 - type: nauc_precision_at_100_diff1 value: -18.104939577865935 - type: nauc_precision_at_100_max value: -1.3757613100627637 - type: nauc_precision_at_100_std value: 0.07661922190466432 - type: nauc_precision_at_10_diff1 value: 3.9624459059275967 - type: nauc_precision_at_10_max value: 14.841561593450391 - type: nauc_precision_at_10_std value: -2.485374333613117 - type: nauc_precision_at_1_diff1 value: 52.43364978537192 - type: nauc_precision_at_1_max value: 31.521755633355713 - type: nauc_precision_at_1_std value: -7.002499524130836 - type: nauc_precision_at_20_diff1 value: -4.4791763436505265 - type: nauc_precision_at_20_max value: 9.157872836996276 - type: nauc_precision_at_20_std value: 2.086903518342088 - type: nauc_precision_at_3_diff1 value: 28.480888018235568 - type: nauc_precision_at_3_max value: 30.34526267718485 - type: nauc_precision_at_3_std value: -6.3006706923866025 - type: nauc_precision_at_5_diff1 value: 16.488039195453517 - type: nauc_precision_at_5_max value: 24.593477099241852 - type: nauc_precision_at_5_std value: -5.316448107840636 - type: nauc_recall_at_1000_diff1 value: 34.715187316533076 - type: nauc_recall_at_1000_max value: 58.2266544684947 - type: nauc_recall_at_1000_std value: 63.85237636398278 - type: nauc_recall_at_100_diff1 value: 36.08623826028132 - type: nauc_recall_at_100_max value: 33.05011429439473 - type: nauc_recall_at_100_std value: 16.559545021212564 - type: nauc_recall_at_10_diff1 value: 39.76738610714205 - type: nauc_recall_at_10_max value: 28.233045706945997 - type: nauc_recall_at_10_std value: -5.13243784043598 - type: nauc_recall_at_1_diff1 value: 55.6889617582672 - type: nauc_recall_at_1_max value: 27.821166966868176 - type: nauc_recall_at_1_std value: -5.778838498211728 - type: nauc_recall_at_20_diff1 value: 41.18682480073759 - type: nauc_recall_at_20_max value: 29.525993239296945 - type: nauc_recall_at_20_std value: 1.5003598438954298 - type: nauc_recall_at_3_diff1 value: 48.31879460301157 - type: nauc_recall_at_3_max value: 32.93751306970167 - type: nauc_recall_at_3_std value: -5.28070084211707 - type: nauc_recall_at_5_diff1 value: 44.327686388315435 - type: nauc_recall_at_5_max value: 32.04823486234599 - type: nauc_recall_at_5_std value: 
-6.4221525602778256 - type: ndcg_at_1 value: 43.348 - type: ndcg_at_10 value: 53.885000000000005 - type: ndcg_at_100 value: 59.204 - type: ndcg_at_1000 value: 60.744 - type: ndcg_at_20 value: 55.995 - type: ndcg_at_3 value: 49.112 - type: ndcg_at_5 value: 51.61900000000001 - type: precision_at_1 value: 43.348 - type: precision_at_10 value: 10.242999999999999 - type: precision_at_100 value: 1.6150000000000002 - type: precision_at_1000 value: 0.203 - type: precision_at_20 value: 6.066 - type: precision_at_3 value: 23.605 - type: precision_at_5 value: 17.024 - type: recall_at_1 value: 35.429 - type: recall_at_10 value: 65.77199999999999 - type: recall_at_100 value: 87.89 - type: recall_at_1000 value: 97.13000000000001 - type: recall_at_20 value: 73.299 - type: recall_at_3 value: 52.034000000000006 - type: recall_at_5 value: 58.96 - task: type: Retrieval dataset: name: MTEB CQADupstackEnglishRetrieval type: mteb/cqadupstack-english config: default split: test revision: ad9991cb51e31e31e430383c75ffb2885547b5f0 metrics: - type: main_score value: 49.55 - type: map_at_1 value: 31.684 - type: map_at_10 value: 43.258 - type: map_at_100 value: 44.628 - type: map_at_1000 value: 44.761 - type: map_at_20 value: 44.015 - type: map_at_3 value: 39.778000000000006 - type: map_at_5 value: 41.643 - type: mrr_at_1 value: 39.87261146496815 - type: mrr_at_10 value: 49.31978566373469 - type: mrr_at_100 value: 49.94922739445482 - type: mrr_at_1000 value: 49.990325601254106 - type: mrr_at_20 value: 49.70597468576704 - type: mrr_at_3 value: 47.070063694267546 - type: mrr_at_5 value: 48.23248407643316 - type: nauc_map_at_1000_diff1 value: 53.44044712371752 - type: nauc_map_at_1000_max value: 34.5651440062204 - type: nauc_map_at_1000_std value: -0.9814384609230475 - type: nauc_map_at_100_diff1 value: 53.429004435388464 - type: nauc_map_at_100_max value: 34.52038957273436 - type: nauc_map_at_100_std value: -1.1021936362699805 - type: nauc_map_at_10_diff1 value: 53.879128574022005 - type: nauc_map_at_10_max value: 33.74771524140917 - type: nauc_map_at_10_std value: -2.945132777205236 - type: nauc_map_at_1_diff1 value: 60.25159799695403 - type: nauc_map_at_1_max value: 26.843892985235808 - type: nauc_map_at_1_std value: -9.618702739509093 - type: nauc_map_at_20_diff1 value: 53.56789898225283 - type: nauc_map_at_20_max value: 34.11628845872402 - type: nauc_map_at_20_std value: -2.024376635870884 - type: nauc_map_at_3_diff1 value: 54.45882099014072 - type: nauc_map_at_3_max value: 31.29495446507793 - type: nauc_map_at_3_std value: -6.391948228781555 - type: nauc_map_at_5_diff1 value: 54.20536489050697 - type: nauc_map_at_5_max value: 32.31001487256826 - type: nauc_map_at_5_std value: -5.050953263346934 - type: nauc_mrr_at_1000_diff1 value: 50.835858995999125 - type: nauc_mrr_at_1000_max value: 38.20717381701079 - type: nauc_mrr_at_1000_std value: 4.174163368228787 - type: nauc_mrr_at_100_diff1 value: 50.827072441041224 - type: nauc_mrr_at_100_max value: 38.21077622034756 - type: nauc_mrr_at_100_std value: 4.1951082737013365 - type: nauc_mrr_at_10_diff1 value: 50.90578491570948 - type: nauc_mrr_at_10_max value: 38.19229691746408 - type: nauc_mrr_at_10_std value: 3.8290750066335546 - type: nauc_mrr_at_1_diff1 value: 54.807021746871186 - type: nauc_mrr_at_1_max value: 37.09225642043841 - type: nauc_mrr_at_1_std value: 0.5654547513131355 - type: nauc_mrr_at_20_diff1 value: 50.86247832095378 - type: nauc_mrr_at_20_max value: 38.19277867384178 - type: nauc_mrr_at_20_std value: 4.098932316791841 - type: nauc_mrr_at_3_diff1 value: 
50.788934370903036 - type: nauc_mrr_at_3_max value: 37.72130561895659 - type: nauc_mrr_at_3_std value: 2.7339370381517583 - type: nauc_mrr_at_5_diff1 value: 50.72543792525547 - type: nauc_mrr_at_5_max value: 37.57740908475375 - type: nauc_mrr_at_5_std value: 2.742881431085094 - type: nauc_ndcg_at_1000_diff1 value: 50.89692885407576 - type: nauc_ndcg_at_1000_max value: 37.250583054716955 - type: nauc_ndcg_at_1000_std value: 5.552279826578831 - type: nauc_ndcg_at_100_diff1 value: 50.624606875496944 - type: nauc_ndcg_at_100_max value: 37.1024514234627 - type: nauc_ndcg_at_100_std value: 5.495892760032762 - type: nauc_ndcg_at_10_diff1 value: 51.910387255793445 - type: nauc_ndcg_at_10_max value: 36.71168418905039 - type: nauc_ndcg_at_10_std value: 2.3064115117905217 - type: nauc_ndcg_at_1_diff1 value: 54.807021746871186 - type: nauc_ndcg_at_1_max value: 37.09225642043841 - type: nauc_ndcg_at_1_std value: 0.5654547513131355 - type: nauc_ndcg_at_20_diff1 value: 51.43416588546778 - type: nauc_ndcg_at_20_max value: 36.76387180172346 - type: nauc_ndcg_at_20_std value: 3.7012798827049718 - type: nauc_ndcg_at_3_diff1 value: 50.91198494475423 - type: nauc_ndcg_at_3_max value: 34.92770670756687 - type: nauc_ndcg_at_3_std value: -0.9071486759887368 - type: nauc_ndcg_at_5_diff1 value: 51.63559468683886 - type: nauc_ndcg_at_5_max value: 34.86849679864564 - type: nauc_ndcg_at_5_std value: -0.734837221224976 - type: nauc_precision_at_1000_diff1 value: -13.43645457127175 - type: nauc_precision_at_1000_max value: 12.71162105198664 - type: nauc_precision_at_1000_std value: 33.175399007040255 - type: nauc_precision_at_100_diff1 value: -8.549834785105412 - type: nauc_precision_at_100_max value: 22.47383497331883 - type: nauc_precision_at_100_std value: 39.09108761430844 - type: nauc_precision_at_10_diff1 value: 7.556572451100043 - type: nauc_precision_at_10_max value: 35.35285122987575 - type: nauc_precision_at_10_std value: 29.417466305615967 - type: nauc_precision_at_1_diff1 value: 54.807021746871186 - type: nauc_precision_at_1_max value: 37.09225642043841 - type: nauc_precision_at_1_std value: 0.5654547513131355 - type: nauc_precision_at_20_diff1 value: -0.550158641635712 - type: nauc_precision_at_20_max value: 29.9068430006187 - type: nauc_precision_at_20_std value: 33.920603132821185 - type: nauc_precision_at_3_diff1 value: 25.551264664276687 - type: nauc_precision_at_3_max value: 37.59463225854679 - type: nauc_precision_at_3_std value: 13.707295021359043 - type: nauc_precision_at_5_diff1 value: 17.76136129817151 - type: nauc_precision_at_5_max value: 35.85363807255972 - type: nauc_precision_at_5_std value: 19.48470876841111 - type: nauc_recall_at_1000_diff1 value: 37.1593620123866 - type: nauc_recall_at_1000_max value: 46.29322536951135 - type: nauc_recall_at_1000_std value: 51.47312657083967 - type: nauc_recall_at_100_diff1 value: 37.7542224949536 - type: nauc_recall_at_100_max value: 38.84120637703135 - type: nauc_recall_at_100_std value: 28.839672572221925 - type: nauc_recall_at_10_diff1 value: 46.24130302658384 - type: nauc_recall_at_10_max value: 35.89001724712849 - type: nauc_recall_at_10_std value: 6.985137790828618 - type: nauc_recall_at_1_diff1 value: 60.25159799695403 - type: nauc_recall_at_1_max value: 26.843892985235808 - type: nauc_recall_at_1_std value: -9.618702739509093 - type: nauc_recall_at_20_diff1 value: 43.63576680886187 - type: nauc_recall_at_20_max value: 36.79079644708101 - type: nauc_recall_at_20_std value: 13.81561928605839 - type: nauc_recall_at_3_diff1 value: 48.2299322140522 - 
type: nauc_recall_at_3_max value: 30.038088484376203 - type: nauc_recall_at_3_std value: -4.871116183843762 - type: nauc_recall_at_5_diff1 value: 47.22331872695983 - type: nauc_recall_at_5_max value: 30.398541477173136 - type: nauc_recall_at_5_std value: -3.2038541888528957 - type: ndcg_at_1 value: 39.873 - type: ndcg_at_10 value: 49.55 - type: ndcg_at_100 value: 53.809 - type: ndcg_at_1000 value: 55.767999999999994 - type: ndcg_at_20 value: 51.275999999999996 - type: ndcg_at_3 value: 44.91 - type: ndcg_at_5 value: 46.855999999999995 - type: precision_at_1 value: 39.873 - type: precision_at_10 value: 9.65 - type: precision_at_100 value: 1.522 - type: precision_at_1000 value: 0.196 - type: precision_at_20 value: 5.701 - type: precision_at_3 value: 22.166 - type: precision_at_5 value: 15.643 - type: recall_at_1 value: 31.684 - type: recall_at_10 value: 60.69 - type: recall_at_100 value: 78.521 - type: recall_at_1000 value: 91.02900000000001 - type: recall_at_20 value: 66.973 - type: recall_at_3 value: 46.807 - type: recall_at_5 value: 52.402 - task: type: Retrieval dataset: name: MTEB CQADupstackGamingRetrieval type: mteb/cqadupstack-gaming config: default split: test revision: 4885aa143210c98657558c04aaf3dc47cfb54340 metrics: - type: main_score value: 62.686 - type: map_at_1 value: 43.856 - type: map_at_10 value: 57.056 - type: map_at_100 value: 58.048 - type: map_at_1000 value: 58.092 - type: map_at_20 value: 57.684000000000005 - type: map_at_3 value: 53.958 - type: map_at_5 value: 55.80500000000001 - type: mrr_at_1 value: 50.03134796238244 - type: mrr_at_10 value: 60.31022043091019 - type: mrr_at_100 value: 60.91892338857461 - type: mrr_at_1000 value: 60.93770463536649 - type: mrr_at_20 value: 60.705642387392736 - type: mrr_at_3 value: 58.286311389759746 - type: mrr_at_5 value: 59.49320794148393 - type: nauc_map_at_1000_diff1 value: 54.849140197256695 - type: nauc_map_at_1000_max value: 38.978448968260224 - type: nauc_map_at_1000_std value: 0.4955439383268162 - type: nauc_map_at_100_diff1 value: 54.824334747823364 - type: nauc_map_at_100_max value: 38.959443109450994 - type: nauc_map_at_100_std value: 0.49626092018886037 - type: nauc_map_at_10_diff1 value: 54.778189277103394 - type: nauc_map_at_10_max value: 38.20972191654546 - type: nauc_map_at_10_std value: -0.7239823837455759 - type: nauc_map_at_1_diff1 value: 58.74017164752485 - type: nauc_map_at_1_max value: 31.528974862589585 - type: nauc_map_at_1_std value: -3.273824691929492 - type: nauc_map_at_20_diff1 value: 54.78943693416187 - type: nauc_map_at_20_max value: 38.77930316443076 - type: nauc_map_at_20_std value: 0.25607460088355544 - type: nauc_map_at_3_diff1 value: 55.68313410225767 - type: nauc_map_at_3_max value: 36.22847284104399 - type: nauc_map_at_3_std value: -3.010979639100503 - type: nauc_map_at_5_diff1 value: 55.11385094420661 - type: nauc_map_at_5_max value: 37.319681045490924 - type: nauc_map_at_5_std value: -2.156640733221061 - type: nauc_mrr_at_1000_diff1 value: 54.504759468380705 - type: nauc_mrr_at_1000_max value: 40.58849492650406 - type: nauc_mrr_at_1000_std value: 1.8226622175866118 - type: nauc_mrr_at_100_diff1 value: 54.4918034449886 - type: nauc_mrr_at_100_max value: 40.59202728933427 - type: nauc_mrr_at_100_std value: 1.8276428096536335 - type: nauc_mrr_at_10_diff1 value: 54.33603399493329 - type: nauc_mrr_at_10_max value: 40.58896878978089 - type: nauc_mrr_at_10_std value: 1.5733340909114375 - type: nauc_mrr_at_1_diff1 value: 58.062410036466105 - type: nauc_mrr_at_1_max value: 37.660958859966506 - type: 
nauc_mrr_at_1_std value: 0.029007600674170648 - type: nauc_mrr_at_20_diff1 value: 54.43793386924358 - type: nauc_mrr_at_20_max value: 40.66773423875307 - type: nauc_mrr_at_20_std value: 1.891967891797154 - type: nauc_mrr_at_3_diff1 value: 54.77901284537966 - type: nauc_mrr_at_3_max value: 40.182219821206964 - type: nauc_mrr_at_3_std value: 0.8911935034597871 - type: nauc_mrr_at_5_diff1 value: 54.466068837163675 - type: nauc_mrr_at_5_max value: 40.334996916684126 - type: nauc_mrr_at_5_std value: 0.9460830492892364 - type: nauc_ndcg_at_1000_diff1 value: 53.8465376860938 - type: nauc_ndcg_at_1000_max value: 41.63158111016696 - type: nauc_ndcg_at_1000_std value: 3.864205884257578 - type: nauc_ndcg_at_100_diff1 value: 53.4025864436944 - type: nauc_ndcg_at_100_max value: 41.805453995307914 - type: nauc_ndcg_at_100_std value: 4.36777557904857 - type: nauc_ndcg_at_10_diff1 value: 52.96034987157544 - type: nauc_ndcg_at_10_max value: 40.7601173480795 - type: nauc_ndcg_at_10_std value: 1.905824035879141 - type: nauc_ndcg_at_1_diff1 value: 58.062410036466105 - type: nauc_ndcg_at_1_max value: 37.660958859966506 - type: nauc_ndcg_at_1_std value: 0.029007600674170648 - type: nauc_ndcg_at_20_diff1 value: 53.2834771889242 - type: nauc_ndcg_at_20_max value: 41.713541932946406 - type: nauc_ndcg_at_20_std value: 3.865102828793311 - type: nauc_ndcg_at_3_diff1 value: 54.03389464372289 - type: nauc_ndcg_at_3_max value: 38.41449914649933 - type: nauc_ndcg_at_3_std value: -0.886276189886313 - type: nauc_ndcg_at_5_diff1 value: 53.456413320299 - type: nauc_ndcg_at_5_max value: 39.49048882649335 - type: nauc_ndcg_at_5_std value: -0.42692690160443814 - type: nauc_precision_at_1000_diff1 value: -14.770791653274824 - type: nauc_precision_at_1000_max value: 21.479874538905246 - type: nauc_precision_at_1000_std value: 28.607024261300207 - type: nauc_precision_at_100_diff1 value: -12.189696449878126 - type: nauc_precision_at_100_max value: 26.69785787492456 - type: nauc_precision_at_100_std value: 33.59098307467553 - type: nauc_precision_at_10_diff1 value: 6.922968330978399 - type: nauc_precision_at_10_max value: 34.52138344123087 - type: nauc_precision_at_10_std value: 21.768427637079952 - type: nauc_precision_at_1_diff1 value: 58.062410036466105 - type: nauc_precision_at_1_max value: 37.660958859966506 - type: nauc_precision_at_1_std value: 0.029007600674170648 - type: nauc_precision_at_20_diff1 value: -0.6837867902179278 - type: nauc_precision_at_20_max value: 33.98683709011133 - type: nauc_precision_at_20_std value: 30.8845561918902 - type: nauc_precision_at_3_diff1 value: 28.195043041120847 - type: nauc_precision_at_3_max value: 37.659916094938836 - type: nauc_precision_at_3_std value: 7.226520146634867 - type: nauc_precision_at_5_diff1 value: 16.633667288096245 - type: nauc_precision_at_5_max value: 34.90176597404891 - type: nauc_precision_at_5_std value: 12.421585442334088 - type: nauc_recall_at_1000_diff1 value: 45.20743732415397 - type: nauc_recall_at_1000_max value: 72.77115913579242 - type: nauc_recall_at_1000_std value: 70.48328496679083 - type: nauc_recall_at_100_diff1 value: 38.56282680810794 - type: nauc_recall_at_100_max value: 55.46797683321103 - type: nauc_recall_at_100_std value: 36.878791151929136 - type: nauc_recall_at_10_diff1 value: 44.18252051452362 - type: nauc_recall_at_10_max value: 43.33391810040086 - type: nauc_recall_at_10_std value: 6.663378192277723 - type: nauc_recall_at_1_diff1 value: 58.74017164752485 - type: nauc_recall_at_1_max value: 31.528974862589585 - type: nauc_recall_at_1_std 
value: -3.273824691929492 - type: nauc_recall_at_20_diff1 value: 44.19944231642417 - type: nauc_recall_at_20_max value: 49.401101483915866 - type: nauc_recall_at_20_std value: 18.97803841673839 - type: nauc_recall_at_3_diff1 value: 49.56378985428704 - type: nauc_recall_at_3_max value: 36.434210616870224 - type: nauc_recall_at_3_std value: -2.850559971607616 - type: nauc_recall_at_5_diff1 value: 47.37107217086109 - type: nauc_recall_at_5_max value: 39.0236745509895 - type: nauc_recall_at_5_std value: -1.7402454457937195 - type: ndcg_at_1 value: 50.031000000000006 - type: ndcg_at_10 value: 62.686 - type: ndcg_at_100 value: 66.403 - type: ndcg_at_1000 value: 67.241 - type: ndcg_at_20 value: 64.37899999999999 - type: ndcg_at_3 value: 57.859 - type: ndcg_at_5 value: 60.375 - type: precision_at_1 value: 50.031000000000006 - type: precision_at_10 value: 9.856 - type: precision_at_100 value: 1.266 - type: precision_at_1000 value: 0.13799999999999998 - type: precision_at_20 value: 5.489 - type: precision_at_3 value: 25.746999999999996 - type: precision_at_5 value: 17.492 - type: recall_at_1 value: 43.856 - type: recall_at_10 value: 75.824 - type: recall_at_100 value: 91.622 - type: recall_at_1000 value: 97.538 - type: recall_at_20 value: 81.951 - type: recall_at_3 value: 63.016000000000005 - type: recall_at_5 value: 69.18299999999999 - task: type: Retrieval dataset: name: MTEB CQADupstackGisRetrieval type: mteb/cqadupstack-gis config: default split: test revision: 5003b3064772da1887988e05400cf3806fe491f2 metrics: - type: main_score value: 43.983 - type: map_at_1 value: 28.942 - type: map_at_10 value: 38.621 - type: map_at_100 value: 39.7 - type: map_at_1000 value: 39.766 - type: map_at_20 value: 39.262 - type: map_at_3 value: 35.719 - type: map_at_5 value: 37.378 - type: mrr_at_1 value: 31.29943502824859 - type: mrr_at_10 value: 40.76463994260603 - type: mrr_at_100 value: 41.67073617629083 - type: mrr_at_1000 value: 41.717446259457105 - type: mrr_at_20 value: 41.32577374689195 - type: mrr_at_3 value: 37.984934086628996 - type: mrr_at_5 value: 39.64595103578152 - type: nauc_map_at_1000_diff1 value: 43.64461679688985 - type: nauc_map_at_1000_max value: 31.53717883948204 - type: nauc_map_at_1000_std value: 1.193745788248017 - type: nauc_map_at_100_diff1 value: 43.63847825079489 - type: nauc_map_at_100_max value: 31.536602619279165 - type: nauc_map_at_100_std value: 1.2001240243342401 - type: nauc_map_at_10_diff1 value: 43.845991987142014 - type: nauc_map_at_10_max value: 31.27509937344113 - type: nauc_map_at_10_std value: 0.7327934840520994 - type: nauc_map_at_1_diff1 value: 50.62269273984579 - type: nauc_map_at_1_max value: 30.16325757909521 - type: nauc_map_at_1_std value: -0.6398875136233392 - type: nauc_map_at_20_diff1 value: 43.630758403790914 - type: nauc_map_at_20_max value: 31.408258098047703 - type: nauc_map_at_20_std value: 1.12616034652217 - type: nauc_map_at_3_diff1 value: 44.823493567359456 - type: nauc_map_at_3_max value: 31.075886347614496 - type: nauc_map_at_3_std value: -0.25126874515735426 - type: nauc_map_at_5_diff1 value: 43.79768853087658 - type: nauc_map_at_5_max value: 31.091080995725324 - type: nauc_map_at_5_std value: 0.16440771782544047 - type: nauc_mrr_at_1000_diff1 value: 42.7865400752329 - type: nauc_mrr_at_1000_max value: 32.84731670326893 - type: nauc_mrr_at_1000_std value: 2.6067637582013825 - type: nauc_mrr_at_100_diff1 value: 42.771741548331065 - type: nauc_mrr_at_100_max value: 32.85324232845987 - type: nauc_mrr_at_100_std value: 2.6092786694308376 - type: 
nauc_mrr_at_10_diff1 value: 42.82969738870672 - type: nauc_mrr_at_10_max value: 32.69407549631432 - type: nauc_mrr_at_10_std value: 2.302903910016054 - type: nauc_mrr_at_1_diff1 value: 49.05638333657571 - type: nauc_mrr_at_1_max value: 33.12030717171514 - type: nauc_mrr_at_1_std value: 1.3278035087690774 - type: nauc_mrr_at_20_diff1 value: 42.74267239536286 - type: nauc_mrr_at_20_max value: 32.78571108973092 - type: nauc_mrr_at_20_std value: 2.5932669908758643 - type: nauc_mrr_at_3_diff1 value: 43.69963426089187 - type: nauc_mrr_at_3_max value: 32.78193126956233 - type: nauc_mrr_at_3_std value: 1.634874463134699 - type: nauc_mrr_at_5_diff1 value: 42.838630647832524 - type: nauc_mrr_at_5_max value: 32.459318735260545 - type: nauc_mrr_at_5_std value: 1.9412518283209172 - type: nauc_ndcg_at_1000_diff1 value: 41.01253839851583 - type: nauc_ndcg_at_1000_max value: 32.69570568894237 - type: nauc_ndcg_at_1000_std value: 3.4254737113410343 - type: nauc_ndcg_at_100_diff1 value: 40.62589243745832 - type: nauc_ndcg_at_100_max value: 32.664990655736126 - type: nauc_ndcg_at_100_std value: 3.799569445326048 - type: nauc_ndcg_at_10_diff1 value: 41.31658753735306 - type: nauc_ndcg_at_10_max value: 31.511946320339295 - type: nauc_ndcg_at_10_std value: 2.0492930500796662 - type: nauc_ndcg_at_1_diff1 value: 49.05638333657571 - type: nauc_ndcg_at_1_max value: 33.12030717171514 - type: nauc_ndcg_at_1_std value: 1.3278035087690774 - type: nauc_ndcg_at_20_diff1 value: 40.66188223212841 - type: nauc_ndcg_at_20_max value: 31.926240431497476 - type: nauc_ndcg_at_20_std value: 3.370398664595343 - type: nauc_ndcg_at_3_diff1 value: 43.035580180241 - type: nauc_ndcg_at_3_max value: 31.363874129878404 - type: nauc_ndcg_at_3_std value: 0.1422507242819929 - type: nauc_ndcg_at_5_diff1 value: 41.29049003955878 - type: nauc_ndcg_at_5_max value: 31.112034994977737 - type: nauc_ndcg_at_5_std value: 0.860179279828966 - type: nauc_precision_at_1000_diff1 value: -12.41854465881981 - type: nauc_precision_at_1000_max value: 14.706779246590548 - type: nauc_precision_at_1000_std value: 9.812804367375206 - type: nauc_precision_at_100_diff1 value: 2.797520107808461 - type: nauc_precision_at_100_max value: 24.335873541811406 - type: nauc_precision_at_100_std value: 12.87186398750545 - type: nauc_precision_at_10_diff1 value: 24.530962799265847 - type: nauc_precision_at_10_max value: 31.00772010798733 - type: nauc_precision_at_10_std value: 6.696733001548185 - type: nauc_precision_at_1_diff1 value: 49.05638333657571 - type: nauc_precision_at_1_max value: 33.12030717171514 - type: nauc_precision_at_1_std value: 1.3278035087690774 - type: nauc_precision_at_20_diff1 value: 16.25028416351204 - type: nauc_precision_at_20_max value: 29.629326492027342 - type: nauc_precision_at_20_std value: 11.085888573121679 - type: nauc_precision_at_3_diff1 value: 33.923667689694256 - type: nauc_precision_at_3_max value: 33.5859782361996 - type: nauc_precision_at_3_std value: 1.9468331086918693 - type: nauc_precision_at_5_diff1 value: 27.917827233088875 - type: nauc_precision_at_5_max value: 33.13290043423535 - type: nauc_precision_at_5_std value: 3.800870695945311 - type: nauc_recall_at_1000_diff1 value: 9.680283388428789 - type: nauc_recall_at_1000_max value: 49.479399284871235 - type: nauc_recall_at_1000_std value: 31.506985071436088 - type: nauc_recall_at_100_diff1 value: 23.607673377885448 - type: nauc_recall_at_100_max value: 36.637750366403935 - type: nauc_recall_at_100_std value: 18.30770690564224 - type: nauc_recall_at_10_diff1 value: 
33.199683418312446 - type: nauc_recall_at_10_max value: 29.63115497012312 - type: nauc_recall_at_10_std value: 4.813200391480566 - type: nauc_recall_at_1_diff1 value: 50.62269273984579 - type: nauc_recall_at_1_max value: 30.16325757909521 - type: nauc_recall_at_1_std value: -0.6398875136233392 - type: nauc_recall_at_20_diff1 value: 29.16488387844995 - type: nauc_recall_at_20_max value: 30.788019479459 - type: nauc_recall_at_20_std value: 11.031953917298853 - type: nauc_recall_at_3_diff1 value: 38.215351600417065 - type: nauc_recall_at_3_max value: 29.619887154236128 - type: nauc_recall_at_3_std value: -0.13237298980339363 - type: nauc_recall_at_5_diff1 value: 33.93788042633265 - type: nauc_recall_at_5_max value: 28.67185092656741 - type: nauc_recall_at_5_std value: 1.316700201091445 - type: ndcg_at_1 value: 31.299 - type: ndcg_at_10 value: 43.983 - type: ndcg_at_100 value: 48.992999999999995 - type: ndcg_at_1000 value: 50.757 - type: ndcg_at_20 value: 46.152 - type: ndcg_at_3 value: 38.367000000000004 - type: ndcg_at_5 value: 41.171 - type: precision_at_1 value: 31.299 - type: precision_at_10 value: 6.734 - type: precision_at_100 value: 0.972 - type: precision_at_1000 value: 0.11499999999999999 - type: precision_at_20 value: 3.898 - type: precision_at_3 value: 16.121 - type: precision_at_5 value: 11.344999999999999 - type: recall_at_1 value: 28.942 - type: recall_at_10 value: 58.343999999999994 - type: recall_at_100 value: 80.82300000000001 - type: recall_at_1000 value: 94.348 - type: recall_at_20 value: 66.449 - type: recall_at_3 value: 43.415 - type: recall_at_5 value: 50.007999999999996 - task: type: Retrieval dataset: name: MTEB CQADupstackMathematicaRetrieval type: mteb/cqadupstack-mathematica config: default split: test revision: 90fceea13679c63fe563ded68f3b6f06e50061de metrics: - type: main_score value: 33.144 - type: map_at_1 value: 19.41 - type: map_at_10 value: 27.802 - type: map_at_100 value: 29.157 - type: map_at_1000 value: 29.274 - type: map_at_20 value: 28.549000000000003 - type: map_at_3 value: 25.052999999999997 - type: map_at_5 value: 26.521 - type: mrr_at_1 value: 23.756218905472636 - type: mrr_at_10 value: 32.3623450209271 - type: mrr_at_100 value: 33.3648208444617 - type: mrr_at_1000 value: 33.427688215162185 - type: mrr_at_20 value: 32.93723485575758 - type: mrr_at_3 value: 29.539800995024883 - type: mrr_at_5 value: 31.156716417910452 - type: nauc_map_at_1000_diff1 value: 36.196391248081284 - type: nauc_map_at_1000_max value: 25.650644367091495 - type: nauc_map_at_1000_std value: 6.130340697729844 - type: nauc_map_at_100_diff1 value: 36.138890642411376 - type: nauc_map_at_100_max value: 25.587124763888518 - type: nauc_map_at_100_std value: 6.129336379055536 - type: nauc_map_at_10_diff1 value: 36.254426743566775 - type: nauc_map_at_10_max value: 25.465599906543034 - type: nauc_map_at_10_std value: 5.880280378112879 - type: nauc_map_at_1_diff1 value: 42.890551563179976 - type: nauc_map_at_1_max value: 25.813805281076956 - type: nauc_map_at_1_std value: 5.150718386163028 - type: nauc_map_at_20_diff1 value: 35.98551587974314 - type: nauc_map_at_20_max value: 25.501540521726636 - type: nauc_map_at_20_std value: 5.858703157458749 - type: nauc_map_at_3_diff1 value: 37.646558039577734 - type: nauc_map_at_3_max value: 26.138491471124247 - type: nauc_map_at_3_std value: 6.0487505175540734 - type: nauc_map_at_5_diff1 value: 36.817582976153695 - type: nauc_map_at_5_max value: 25.398200211121146 - type: nauc_map_at_5_std value: 6.31126763919522 - type: nauc_mrr_at_1000_diff1 
value: 37.313544952847835 - type: nauc_mrr_at_1000_max value: 26.96218532078988 - type: nauc_mrr_at_1000_std value: 6.814359224654042 - type: nauc_mrr_at_100_diff1 value: 37.28104407653679 - type: nauc_mrr_at_100_max value: 26.931243040477256 - type: nauc_mrr_at_100_std value: 6.800500150841733 - type: nauc_mrr_at_10_diff1 value: 37.315832621275895 - type: nauc_mrr_at_10_max value: 26.941454225978372 - type: nauc_mrr_at_10_std value: 6.837046527796884 - type: nauc_mrr_at_1_diff1 value: 43.19904188582958 - type: nauc_mrr_at_1_max value: 26.975620445904795 - type: nauc_mrr_at_1_std value: 4.52071008581395 - type: nauc_mrr_at_20_diff1 value: 37.2200524790774 - type: nauc_mrr_at_20_max value: 26.971494160765847 - type: nauc_mrr_at_20_std value: 6.716431228783282 - type: nauc_mrr_at_3_diff1 value: 38.46236387340654 - type: nauc_mrr_at_3_max value: 27.846812992192056 - type: nauc_mrr_at_3_std value: 6.550711872569794 - type: nauc_mrr_at_5_diff1 value: 37.620346007658476 - type: nauc_mrr_at_5_max value: 27.031025952102038 - type: nauc_mrr_at_5_std value: 7.32343760231163 - type: nauc_ndcg_at_1000_diff1 value: 34.95081314840592 - type: nauc_ndcg_at_1000_max value: 26.89265465124325 - type: nauc_ndcg_at_1000_std value: 7.854154466831975 - type: nauc_ndcg_at_100_diff1 value: 34.01417812563093 - type: nauc_ndcg_at_100_max value: 25.792737746436835 - type: nauc_ndcg_at_100_std value: 7.726584165493833 - type: nauc_ndcg_at_10_diff1 value: 33.895122516474466 - type: nauc_ndcg_at_10_max value: 25.388442204589612 - type: nauc_ndcg_at_10_std value: 6.359560223645991 - type: nauc_ndcg_at_1_diff1 value: 43.19904188582958 - type: nauc_ndcg_at_1_max value: 26.975620445904795 - type: nauc_ndcg_at_1_std value: 4.52071008581395 - type: nauc_ndcg_at_20_diff1 value: 33.36078689830245 - type: nauc_ndcg_at_20_max value: 25.531794610571563 - type: nauc_ndcg_at_20_std value: 6.136658608653248 - type: nauc_ndcg_at_3_diff1 value: 36.44505602530781 - type: nauc_ndcg_at_3_max value: 26.9104071983157 - type: nauc_ndcg_at_3_std value: 6.427178520371878 - type: nauc_ndcg_at_5_diff1 value: 35.01384323197442 - type: nauc_ndcg_at_5_max value: 25.5560447088692 - type: nauc_ndcg_at_5_std value: 7.3676236760360485 - type: nauc_precision_at_1000_diff1 value: 2.8903331041804514 - type: nauc_precision_at_1000_max value: 4.059662742366004 - type: nauc_precision_at_1000_std value: -1.5891687644008334 - type: nauc_precision_at_100_diff1 value: 8.437726471693766 - type: nauc_precision_at_100_max value: 11.250588557568427 - type: nauc_precision_at_100_std value: 4.231571164627862 - type: nauc_precision_at_10_diff1 value: 19.57085237210294 - type: nauc_precision_at_10_max value: 20.973093492003905 - type: nauc_precision_at_10_std value: 3.197416248152466 - type: nauc_precision_at_1_diff1 value: 43.19904188582958 - type: nauc_precision_at_1_max value: 26.975620445904795 - type: nauc_precision_at_1_std value: 4.52071008581395 - type: nauc_precision_at_20_diff1 value: 15.67136554192724 - type: nauc_precision_at_20_max value: 17.706882621057858 - type: nauc_precision_at_20_std value: 1.9363472182867714 - type: nauc_precision_at_3_diff1 value: 30.38035695042325 - type: nauc_precision_at_3_max value: 26.48218693244094 - type: nauc_precision_at_3_std value: 6.424657705785632 - type: nauc_precision_at_5_diff1 value: 25.272543315171458 - type: nauc_precision_at_5_max value: 22.32441421311652 - type: nauc_precision_at_5_std value: 7.4912569081905716 - type: nauc_recall_at_1000_diff1 value: 25.5748044137675 - type: nauc_recall_at_1000_max value: 
43.85796585370269 - type: nauc_recall_at_1000_std value: 30.0338086596789 - type: nauc_recall_at_100_diff1 value: 22.577080638885093 - type: nauc_recall_at_100_max value: 23.224511700617477 - type: nauc_recall_at_100_std value: 15.187963852289313 - type: nauc_recall_at_10_diff1 value: 25.058592299355908 - type: nauc_recall_at_10_max value: 22.24448483279841 - type: nauc_recall_at_10_std value: 6.3179089740052765 - type: nauc_recall_at_1_diff1 value: 42.890551563179976 - type: nauc_recall_at_1_max value: 25.813805281076956 - type: nauc_recall_at_1_std value: 5.150718386163028 - type: nauc_recall_at_20_diff1 value: 22.433865123187307 - type: nauc_recall_at_20_max value: 22.739695641511762 - type: nauc_recall_at_20_std value: 5.362005125538497 - type: nauc_recall_at_3_diff1 value: 32.17919168998616 - type: nauc_recall_at_3_max value: 26.044028436867357 - type: nauc_recall_at_3_std value: 7.420349884006329 - type: nauc_recall_at_5_diff1 value: 28.967104573649138 - type: nauc_recall_at_5_max value: 23.40865848168201 - type: nauc_recall_at_5_std value: 9.174406147723621 - type: ndcg_at_1 value: 23.756 - type: ndcg_at_10 value: 33.144 - type: ndcg_at_100 value: 39.261 - type: ndcg_at_1000 value: 41.881 - type: ndcg_at_20 value: 35.56 - type: ndcg_at_3 value: 27.927999999999997 - type: ndcg_at_5 value: 30.293999999999997 - type: precision_at_1 value: 23.756 - type: precision_at_10 value: 5.995 - type: precision_at_100 value: 1.053 - type: precision_at_1000 value: 0.14100000000000001 - type: precision_at_20 value: 3.688 - type: precision_at_3 value: 13.059999999999999 - type: precision_at_5 value: 9.602 - type: recall_at_1 value: 19.41 - type: recall_at_10 value: 45.074 - type: recall_at_100 value: 71.131 - type: recall_at_1000 value: 89.604 - type: recall_at_20 value: 53.673 - type: recall_at_3 value: 31.055 - type: recall_at_5 value: 36.714999999999996 - task: type: Retrieval dataset: name: MTEB CQADupstackPhysicsRetrieval type: mteb/cqadupstack-physics config: default split: test revision: 79531abbd1fb92d06c6d6315a0cbbbf5bb247ea4 metrics: - type: main_score value: 49.675000000000004 - type: map_at_1 value: 33.178999999999995 - type: map_at_10 value: 43.807 - type: map_at_100 value: 45.17 - type: map_at_1000 value: 45.271 - type: map_at_20 value: 44.516 - type: map_at_3 value: 40.813 - type: map_at_5 value: 42.457 - type: mrr_at_1 value: 40.32723772858518 - type: mrr_at_10 value: 49.646867409138814 - type: mrr_at_100 value: 50.493686101426285 - type: mrr_at_1000 value: 50.525386961808834 - type: mrr_at_20 value: 50.120274354884586 - type: mrr_at_3 value: 47.49759384023096 - type: mrr_at_5 value: 48.72473532242535 - type: nauc_map_at_1000_diff1 value: 49.5947127786396 - type: nauc_map_at_1000_max value: 33.39720045844929 - type: nauc_map_at_1000_std value: -3.131428593252271 - type: nauc_map_at_100_diff1 value: 49.57797867324617 - type: nauc_map_at_100_max value: 33.356927974709464 - type: nauc_map_at_100_std value: -3.1661365376766337 - type: nauc_map_at_10_diff1 value: 49.59294630598952 - type: nauc_map_at_10_max value: 32.86647346990462 - type: nauc_map_at_10_std value: -4.1582043443386745 - type: nauc_map_at_1_diff1 value: 53.98646767288695 - type: nauc_map_at_1_max value: 29.45629077638936 - type: nauc_map_at_1_std value: -5.621187380771589 - type: nauc_map_at_20_diff1 value: 49.486982890447074 - type: nauc_map_at_20_max value: 33.11681933406332 - type: nauc_map_at_20_std value: -3.5826433195146854 - type: nauc_map_at_3_diff1 value: 50.81807107491861 - type: nauc_map_at_3_max value: 
32.32552291988859 - type: nauc_map_at_3_std value: -3.952946504088928 - type: nauc_map_at_5_diff1 value: 49.70201354274439 - type: nauc_map_at_5_max value: 32.831846031004886 - type: nauc_map_at_5_std value: -3.8330488624207737 - type: nauc_mrr_at_1000_diff1 value: 49.04159472507738 - type: nauc_mrr_at_1000_max value: 35.617600171138676 - type: nauc_mrr_at_1000_std value: -1.5975830757486646 - type: nauc_mrr_at_100_diff1 value: 49.03848471692094 - type: nauc_mrr_at_100_max value: 35.61936748662614 - type: nauc_mrr_at_100_std value: -1.5922053398594729 - type: nauc_mrr_at_10_diff1 value: 48.92463964652612 - type: nauc_mrr_at_10_max value: 35.37757708992045 - type: nauc_mrr_at_10_std value: -2.2052028139567303 - type: nauc_mrr_at_1_diff1 value: 52.23915787290734 - type: nauc_mrr_at_1_max value: 34.393531787632334 - type: nauc_mrr_at_1_std value: -1.452007661016969 - type: nauc_mrr_at_20_diff1 value: 48.91168438018404 - type: nauc_mrr_at_20_max value: 35.478962544421876 - type: nauc_mrr_at_20_std value: -1.8246048423555414 - type: nauc_mrr_at_3_diff1 value: 50.115432665442164 - type: nauc_mrr_at_3_max value: 35.89093796085569 - type: nauc_mrr_at_3_std value: -1.4895016313153366 - type: nauc_mrr_at_5_diff1 value: 49.04321261351915 - type: nauc_mrr_at_5_max value: 35.85730520949451 - type: nauc_mrr_at_5_std value: -1.68790556880753 - type: nauc_ndcg_at_1000_diff1 value: 48.294697499154374 - type: nauc_ndcg_at_1000_max value: 35.167410242367595 - type: nauc_ndcg_at_1000_std value: -0.6346078535914157 - type: nauc_ndcg_at_100_diff1 value: 48.025525283449014 - type: nauc_ndcg_at_100_max value: 34.79288511776105 - type: nauc_ndcg_at_100_std value: -0.7823403044086993 - type: nauc_ndcg_at_10_diff1 value: 47.70793258015258 - type: nauc_ndcg_at_10_max value: 33.09558927880104 - type: nauc_ndcg_at_10_std value: -4.7793864166260605 - type: nauc_ndcg_at_1_diff1 value: 52.23915787290734 - type: nauc_ndcg_at_1_max value: 34.393531787632334 - type: nauc_ndcg_at_1_std value: -1.452007661016969 - type: nauc_ndcg_at_20_diff1 value: 47.354286045074815 - type: nauc_ndcg_at_20_max value: 33.686648806027975 - type: nauc_ndcg_at_20_std value: -3.0189085132476556 - type: nauc_ndcg_at_3_diff1 value: 49.68805334316908 - type: nauc_ndcg_at_3_max value: 34.196077748056496 - type: nauc_ndcg_at_3_std value: -2.7167289163768436 - type: nauc_ndcg_at_5_diff1 value: 47.94474868912989 - type: nauc_ndcg_at_5_max value: 34.00261603413051 - type: nauc_ndcg_at_5_std value: -3.3541028103046115 - type: nauc_precision_at_1000_diff1 value: -12.0150100710755 - type: nauc_precision_at_1000_max value: 5.332942816568796 - type: nauc_precision_at_1000_std value: 14.543288479130458 - type: nauc_precision_at_100_diff1 value: -4.920332181588838 - type: nauc_precision_at_100_max value: 14.42313332017491 - type: nauc_precision_at_100_std value: 17.821953321018384 - type: nauc_precision_at_10_diff1 value: 14.70509089079217 - type: nauc_precision_at_10_max value: 25.381887131649716 - type: nauc_precision_at_10_std value: 5.226419288645675 - type: nauc_precision_at_1_diff1 value: 52.23915787290734 - type: nauc_precision_at_1_max value: 34.393531787632334 - type: nauc_precision_at_1_std value: -1.452007661016969 - type: nauc_precision_at_20_diff1 value: 6.312827641507564 - type: nauc_precision_at_20_max value: 22.483038562271933 - type: nauc_precision_at_20_std value: 11.368419856892416 - type: nauc_precision_at_3_diff1 value: 33.271443420273606 - type: nauc_precision_at_3_max value: 33.571078182106675 - type: nauc_precision_at_3_std value: 
4.47382265155717 - type: nauc_precision_at_5_diff1 value: 23.43287104284656 - type: nauc_precision_at_5_max value: 30.909085068105313 - type: nauc_precision_at_5_std value: 5.545672049452433 - type: nauc_recall_at_1000_diff1 value: 35.22615594677707 - type: nauc_recall_at_1000_max value: 52.0710533173532 - type: nauc_recall_at_1000_std value: 45.17683523786464 - type: nauc_recall_at_100_diff1 value: 36.2169056956332 - type: nauc_recall_at_100_max value: 35.02435003210817 - type: nauc_recall_at_100_std value: 15.833632946282508 - type: nauc_recall_at_10_diff1 value: 39.12440292974848 - type: nauc_recall_at_10_max value: 28.0546011979648 - type: nauc_recall_at_10_std value: -9.620558638092172 - type: nauc_recall_at_1_diff1 value: 53.98646767288695 - type: nauc_recall_at_1_max value: 29.45629077638936 - type: nauc_recall_at_1_std value: -5.621187380771589 - type: nauc_recall_at_20_diff1 value: 36.39254630768161 - type: nauc_recall_at_20_max value: 29.277856508751967 - type: nauc_recall_at_20_std value: -3.048007490798412 - type: nauc_recall_at_3_diff1 value: 45.64706642644958 - type: nauc_recall_at_3_max value: 31.003050159737413 - type: nauc_recall_at_3_std value: -4.849763876930667 - type: nauc_recall_at_5_diff1 value: 40.918108859971746 - type: nauc_recall_at_5_max value: 30.69907335071493 - type: nauc_recall_at_5_std value: -6.1445436251916865 - type: ndcg_at_1 value: 40.327 - type: ndcg_at_10 value: 49.675000000000004 - type: ndcg_at_100 value: 55.364000000000004 - type: ndcg_at_1000 value: 56.992 - type: ndcg_at_20 value: 51.803999999999995 - type: ndcg_at_3 value: 45.227000000000004 - type: ndcg_at_5 value: 47.244 - type: precision_at_1 value: 40.327 - type: precision_at_10 value: 8.826 - type: precision_at_100 value: 1.354 - type: precision_at_1000 value: 0.167 - type: precision_at_20 value: 5.115 - type: precision_at_3 value: 21.303 - type: precision_at_5 value: 14.726 - type: recall_at_1 value: 33.178999999999995 - type: recall_at_10 value: 61.087 - type: recall_at_100 value: 85.099 - type: recall_at_1000 value: 95.14099999999999 - type: recall_at_20 value: 68.623 - type: recall_at_3 value: 48.245 - type: recall_at_5 value: 53.832 - task: type: Retrieval dataset: name: MTEB CQADupstackProgrammersRetrieval type: mteb/cqadupstack-programmers config: default split: test revision: 6184bc1440d2dbc7612be22b50686b8826d22b32 metrics: - type: main_score value: 44.99 - type: map_at_1 value: 28.089 - type: map_at_10 value: 38.98 - type: map_at_100 value: 40.339000000000006 - type: map_at_1000 value: 40.441 - type: map_at_20 value: 39.702 - type: map_at_3 value: 35.620000000000005 - type: map_at_5 value: 37.657000000000004 - type: mrr_at_1 value: 35.15981735159817 - type: mrr_at_10 value: 44.54075161266937 - type: mrr_at_100 value: 45.435730392436646 - type: mrr_at_1000 value: 45.47673849356812 - type: mrr_at_20 value: 45.05949613726918 - type: mrr_at_3 value: 42.00913242009131 - type: mrr_at_5 value: 43.52739726027392 - type: nauc_map_at_1000_diff1 value: 42.6375513442399 - type: nauc_map_at_1000_max value: 35.83899956589522 - type: nauc_map_at_1000_std value: 5.798620017712549 - type: nauc_map_at_100_diff1 value: 42.609712253881504 - type: nauc_map_at_100_max value: 35.85401871065736 - type: nauc_map_at_100_std value: 5.829007296755533 - type: nauc_map_at_10_diff1 value: 42.90931172127824 - type: nauc_map_at_10_max value: 35.46694204511423 - type: nauc_map_at_10_std value: 5.131477704152026 - type: nauc_map_at_1_diff1 value: 48.066312177855956 - type: nauc_map_at_1_max value: 
30.67745267941573 - type: nauc_map_at_1_std value: -1.4170737991670943 - type: nauc_map_at_20_diff1 value: 42.730423700784 - type: nauc_map_at_20_max value: 35.710039616497085 - type: nauc_map_at_20_std value: 5.363961887475162 - type: nauc_map_at_3_diff1 value: 43.499223646579935 - type: nauc_map_at_3_max value: 33.872570039621564 - type: nauc_map_at_3_std value: 3.0787571843453008 - type: nauc_map_at_5_diff1 value: 43.28963642946521 - type: nauc_map_at_5_max value: 35.18327408279892 - type: nauc_map_at_5_std value: 4.516467154662473 - type: nauc_mrr_at_1000_diff1 value: 42.71279871641341 - type: nauc_mrr_at_1000_max value: 37.48825064817496 - type: nauc_mrr_at_1000_std value: 8.10015025024314 - type: nauc_mrr_at_100_diff1 value: 42.694777404773376 - type: nauc_mrr_at_100_max value: 37.476741768741086 - type: nauc_mrr_at_100_std value: 8.11525130417229 - type: nauc_mrr_at_10_diff1 value: 42.954194054560176 - type: nauc_mrr_at_10_max value: 37.606138578797506 - type: nauc_mrr_at_10_std value: 8.092519513302399 - type: nauc_mrr_at_1_diff1 value: 48.350790286038574 - type: nauc_mrr_at_1_max value: 33.97992759739641 - type: nauc_mrr_at_1_std value: 1.8332987018664093 - type: nauc_mrr_at_20_diff1 value: 42.664983701783044 - type: nauc_mrr_at_20_max value: 37.47450702110784 - type: nauc_mrr_at_20_std value: 8.001067634745462 - type: nauc_mrr_at_3_diff1 value: 42.921968602737955 - type: nauc_mrr_at_3_max value: 37.19599728791262 - type: nauc_mrr_at_3_std value: 7.4692697422507575 - type: nauc_mrr_at_5_diff1 value: 42.96028546491891 - type: nauc_mrr_at_5_max value: 37.688350071295915 - type: nauc_mrr_at_5_std value: 8.213017954012372 - type: nauc_ndcg_at_1000_diff1 value: 40.70763263942397 - type: nauc_ndcg_at_1000_max value: 37.87768319167602 - type: nauc_ndcg_at_1000_std value: 9.908807071686738 - type: nauc_ndcg_at_100_diff1 value: 39.97828438221707 - type: nauc_ndcg_at_100_max value: 37.7723393835996 - type: nauc_ndcg_at_100_std value: 10.666779466040097 - type: nauc_ndcg_at_10_diff1 value: 41.172233451172936 - type: nauc_ndcg_at_10_max value: 37.12252131573939 - type: nauc_ndcg_at_10_std value: 8.273798754436639 - type: nauc_ndcg_at_1_diff1 value: 48.350790286038574 - type: nauc_ndcg_at_1_max value: 33.97992759739641 - type: nauc_ndcg_at_1_std value: 1.8332987018664093 - type: nauc_ndcg_at_20_diff1 value: 40.33325895172716 - type: nauc_ndcg_at_20_max value: 37.36015594019951 - type: nauc_ndcg_at_20_std value: 8.818556108749302 - type: nauc_ndcg_at_3_diff1 value: 41.652701699747254 - type: nauc_ndcg_at_3_max value: 35.499109874223294 - type: nauc_ndcg_at_3_std value: 5.831784865606119 - type: nauc_ndcg_at_5_diff1 value: 41.856346892595475 - type: nauc_ndcg_at_5_max value: 36.940681835687194 - type: nauc_ndcg_at_5_std value: 7.507798515093516 - type: nauc_precision_at_1000_diff1 value: -2.4605367806784866 - type: nauc_precision_at_1000_max value: -0.3538142127162922 - type: nauc_precision_at_1000_std value: 8.369794961833236 - type: nauc_precision_at_100_diff1 value: -0.34954522096524704 - type: nauc_precision_at_100_max value: 13.159909603146458 - type: nauc_precision_at_100_std value: 19.425561514133996 - type: nauc_precision_at_10_diff1 value: 17.048304710148145 - type: nauc_precision_at_10_max value: 29.816041846806375 - type: nauc_precision_at_10_std value: 18.358893367243798 - type: nauc_precision_at_1_diff1 value: 48.350790286038574 - type: nauc_precision_at_1_max value: 33.97992759739641 - type: nauc_precision_at_1_std value: 1.8332987018664093 - type: nauc_precision_at_20_diff1 value: 
10.450903599411344 - type: nauc_precision_at_20_max value: 25.228916373799127 - type: nauc_precision_at_20_std value: 18.46893569529936 - type: nauc_precision_at_3_diff1 value: 29.181236567048636 - type: nauc_precision_at_3_max value: 35.64918262500281 - type: nauc_precision_at_3_std value: 13.347538222514968 - type: nauc_precision_at_5_diff1 value: 23.693323840550345 - type: nauc_precision_at_5_max value: 33.972399735191225 - type: nauc_precision_at_5_std value: 17.107012760554618 - type: nauc_recall_at_1000_diff1 value: 20.297340483227945 - type: nauc_recall_at_1000_max value: 63.084305970127275 - type: nauc_recall_at_1000_std value: 63.04655000858784 - type: nauc_recall_at_100_diff1 value: 22.587332148979723 - type: nauc_recall_at_100_max value: 40.740968468024775 - type: nauc_recall_at_100_std value: 34.120423684507124 - type: nauc_recall_at_10_diff1 value: 33.361195948673675 - type: nauc_recall_at_10_max value: 37.1411402410262 - type: nauc_recall_at_10_std value: 13.475407196166259 - type: nauc_recall_at_1_diff1 value: 48.066312177855956 - type: nauc_recall_at_1_max value: 30.67745267941573 - type: nauc_recall_at_1_std value: -1.4170737991670943 - type: nauc_recall_at_20_diff1 value: 28.703982984383984 - type: nauc_recall_at_20_max value: 37.32929431193496 - type: nauc_recall_at_20_std value: 16.139135347989903 - type: nauc_recall_at_3_diff1 value: 36.53346179134789 - type: nauc_recall_at_3_max value: 34.11397914899309 - type: nauc_recall_at_3_std value: 7.19358019807132 - type: nauc_recall_at_5_diff1 value: 36.24058894947452 - type: nauc_recall_at_5_max value: 37.00990358651097 - type: nauc_recall_at_5_std value: 11.074645476821619 - type: ndcg_at_1 value: 35.160000000000004 - type: ndcg_at_10 value: 44.99 - type: ndcg_at_100 value: 50.661 - type: ndcg_at_1000 value: 52.599 - type: ndcg_at_20 value: 47.154 - type: ndcg_at_3 value: 39.843 - type: ndcg_at_5 value: 42.486000000000004 - type: precision_at_1 value: 35.160000000000004 - type: precision_at_10 value: 8.299 - type: precision_at_100 value: 1.2850000000000001 - type: precision_at_1000 value: 0.16199999999999998 - type: precision_at_20 value: 4.84 - type: precision_at_3 value: 19.178 - type: precision_at_5 value: 13.927 - type: recall_at_1 value: 28.089 - type: recall_at_10 value: 57.158 - type: recall_at_100 value: 81.461 - type: recall_at_1000 value: 94.46900000000001 - type: recall_at_20 value: 64.927 - type: recall_at_3 value: 42.775999999999996 - type: recall_at_5 value: 49.719 - task: type: Retrieval dataset: name: MTEB CQADupstackRetrieval type: mteb/cqadupstack config: default split: test revision: CQADupstackRetrieval is a combined dataset metrics: - type: main_score value: 44.989166666666655 - type: ndcg_at_10 value: 44.989166666666655 - task: type: Retrieval dataset: name: MTEB CQADupstackStatsRetrieval type: mteb/cqadupstack-stats config: default split: test revision: 65ac3a16b8e91f9cee4c9828cc7c335575432a2a metrics: - type: main_score value: 39.586 - type: map_at_1 value: 27.301 - type: map_at_10 value: 35.022 - type: map_at_100 value: 36.061 - type: map_at_1000 value: 36.146 - type: map_at_20 value: 35.608000000000004 - type: map_at_3 value: 32.978 - type: map_at_5 value: 33.994 - type: mrr_at_1 value: 30.67484662576687 - type: mrr_at_10 value: 38.1696124257474 - type: mrr_at_100 value: 38.99730898994137 - type: mrr_at_1000 value: 39.049871007408136 - type: mrr_at_20 value: 38.62424051396064 - type: mrr_at_3 value: 36.40081799591004 - type: mrr_at_5 value: 37.23670756646219 - type: nauc_map_at_1000_diff1 value: 
50.4395097150819 - type: nauc_map_at_1000_max value: 42.36231476768413 - type: nauc_map_at_1000_std value: 1.0739414045485742 - type: nauc_map_at_100_diff1 value: 50.4253775421283 - type: nauc_map_at_100_max value: 42.34508969348633 - type: nauc_map_at_100_std value: 1.0590256535050135 - type: nauc_map_at_10_diff1 value: 50.74196619464362 - type: nauc_map_at_10_max value: 42.354326434590284 - type: nauc_map_at_10_std value: 0.6330167542705694 - type: nauc_map_at_1_diff1 value: 55.7404810490963 - type: nauc_map_at_1_max value: 40.7676941648045 - type: nauc_map_at_1_std value: -5.021772566610674 - type: nauc_map_at_20_diff1 value: 50.39792463598886 - type: nauc_map_at_20_max value: 42.25768760228577 - type: nauc_map_at_20_std value: 0.8979017700131807 - type: nauc_map_at_3_diff1 value: 51.53267996170815 - type: nauc_map_at_3_max value: 41.78801756883417 - type: nauc_map_at_3_std value: -0.6652383024396911 - type: nauc_map_at_5_diff1 value: 50.992783683271504 - type: nauc_map_at_5_max value: 41.8607977828188 - type: nauc_map_at_5_std value: 0.3484379897869807 - type: nauc_mrr_at_1000_diff1 value: 48.952907124445126 - type: nauc_mrr_at_1000_max value: 42.93563741482114 - type: nauc_mrr_at_1000_std value: 3.0791495753556424 - type: nauc_mrr_at_100_diff1 value: 48.941921107360805 - type: nauc_mrr_at_100_max value: 42.94419657374061 - type: nauc_mrr_at_100_std value: 3.075397087180154 - type: nauc_mrr_at_10_diff1 value: 49.098926306303056 - type: nauc_mrr_at_10_max value: 42.941857820499806 - type: nauc_mrr_at_10_std value: 2.8184474174054372 - type: nauc_mrr_at_1_diff1 value: 54.428109877009334 - type: nauc_mrr_at_1_max value: 42.50273386972492 - type: nauc_mrr_at_1_std value: -2.1811826216412187 - type: nauc_mrr_at_20_diff1 value: 48.82502192775839 - type: nauc_mrr_at_20_max value: 42.92227277257095 - type: nauc_mrr_at_20_std value: 2.975812634368533 - type: nauc_mrr_at_3_diff1 value: 49.440009227591176 - type: nauc_mrr_at_3_max value: 42.95503176290712 - type: nauc_mrr_at_3_std value: 2.2997128945013796 - type: nauc_mrr_at_5_diff1 value: 49.09846782701398 - type: nauc_mrr_at_5_max value: 42.51449168285772 - type: nauc_mrr_at_5_std value: 2.7785816484421297 - type: nauc_ndcg_at_1000_diff1 value: 48.14680758187888 - type: nauc_ndcg_at_1000_max value: 43.57465718500695 - type: nauc_ndcg_at_1000_std value: 5.287435676678261 - type: nauc_ndcg_at_100_diff1 value: 47.66081605743284 - type: nauc_ndcg_at_100_max value: 43.28156751251163 - type: nauc_ndcg_at_100_std value: 4.959626409663624 - type: nauc_ndcg_at_10_diff1 value: 48.25075619623878 - type: nauc_ndcg_at_10_max value: 43.00688660666578 - type: nauc_ndcg_at_10_std value: 3.2319193368891637 - type: nauc_ndcg_at_1_diff1 value: 54.428109877009334 - type: nauc_ndcg_at_1_max value: 42.50273386972492 - type: nauc_ndcg_at_1_std value: -2.1811826216412187 - type: nauc_ndcg_at_20_diff1 value: 47.1943098627403 - type: nauc_ndcg_at_20_max value: 42.86954491768707 - type: nauc_ndcg_at_20_std value: 4.08583080150737 - type: nauc_ndcg_at_3_diff1 value: 49.32681523192246 - type: nauc_ndcg_at_3_max value: 42.46898641470274 - type: nauc_ndcg_at_3_std value: 1.7416962407725236 - type: nauc_ndcg_at_5_diff1 value: 48.59647012439291 - type: nauc_ndcg_at_5_max value: 42.07098889846439 - type: nauc_ndcg_at_5_std value: 2.979621233356828 - type: nauc_precision_at_1000_diff1 value: -1.7366334161587105 - type: nauc_precision_at_1000_max value: 17.70969166396819 - type: nauc_precision_at_1000_std value: 17.50619975322144 - type: nauc_precision_at_100_diff1 value: 
10.082579982582155 - type: nauc_precision_at_100_max value: 28.024893516091776 - type: nauc_precision_at_100_std value: 18.41413013357596 - type: nauc_precision_at_10_diff1 value: 28.796167732373657 - type: nauc_precision_at_10_max value: 40.37340024485382 - type: nauc_precision_at_10_std value: 13.718572711091733 - type: nauc_precision_at_1_diff1 value: 54.428109877009334 - type: nauc_precision_at_1_max value: 42.50273386972492 - type: nauc_precision_at_1_std value: -2.1811826216412187 - type: nauc_precision_at_20_diff1 value: 19.82691920771315 - type: nauc_precision_at_20_max value: 34.45075390159975 - type: nauc_precision_at_20_std value: 16.410812072348058 - type: nauc_precision_at_3_diff1 value: 40.85430254962678 - type: nauc_precision_at_3_max value: 43.63016056067074 - type: nauc_precision_at_3_std value: 9.322014634477581 - type: nauc_precision_at_5_diff1 value: 35.830272848975795 - type: nauc_precision_at_5_max value: 41.30047691620363 - type: nauc_precision_at_5_std value: 13.145693992266565 - type: nauc_recall_at_1000_diff1 value: 35.532000545890504 - type: nauc_recall_at_1000_max value: 50.714223194510325 - type: nauc_recall_at_1000_std value: 43.09037309139045 - type: nauc_recall_at_100_diff1 value: 35.11024488875192 - type: nauc_recall_at_100_max value: 43.0874566265193 - type: nauc_recall_at_100_std value: 19.70628521846854 - type: nauc_recall_at_10_diff1 value: 40.36203726741153 - type: nauc_recall_at_10_max value: 42.581482582576726 - type: nauc_recall_at_10_std value: 8.642553371022348 - type: nauc_recall_at_1_diff1 value: 55.7404810490963 - type: nauc_recall_at_1_max value: 40.7676941648045 - type: nauc_recall_at_1_std value: -5.021772566610674 - type: nauc_recall_at_20_diff1 value: 35.97348868186562 - type: nauc_recall_at_20_max value: 41.82695933305065 - type: nauc_recall_at_20_std value: 11.444957541593585 - type: nauc_recall_at_3_diff1 value: 44.20020470014979 - type: nauc_recall_at_3_max value: 40.84130855296979 - type: nauc_recall_at_3_std value: 5.004883338558809 - type: nauc_recall_at_5_diff1 value: 42.08756885472078 - type: nauc_recall_at_5_max value: 39.90323783606852 - type: nauc_recall_at_5_std value: 8.085182534171127 - type: ndcg_at_1 value: 30.675 - type: ndcg_at_10 value: 39.586 - type: ndcg_at_100 value: 44.737 - type: ndcg_at_1000 value: 46.863 - type: ndcg_at_20 value: 41.495 - type: ndcg_at_3 value: 35.8 - type: ndcg_at_5 value: 37.3 - type: precision_at_1 value: 30.675 - type: precision_at_10 value: 6.196 - type: precision_at_100 value: 0.9570000000000001 - type: precision_at_1000 value: 0.122 - type: precision_at_20 value: 3.6350000000000002 - type: precision_at_3 value: 15.337 - type: precision_at_5 value: 10.337 - type: recall_at_1 value: 27.301 - type: recall_at_10 value: 50.346999999999994 - type: recall_at_100 value: 74.459 - type: recall_at_1000 value: 90.018 - type: recall_at_20 value: 57.473 - type: recall_at_3 value: 39.672000000000004 - type: recall_at_5 value: 43.383 - task: type: Retrieval dataset: name: MTEB CQADupstackTexRetrieval type: mteb/cqadupstack-tex config: default split: test revision: 46989137a86843e03a6195de44b09deda022eec7 metrics: - type: main_score value: 32.842 - type: map_at_1 value: 19.527 - type: map_at_10 value: 27.711999999999996 - type: map_at_100 value: 28.98 - type: map_at_1000 value: 29.108 - type: map_at_20 value: 28.407 - type: map_at_3 value: 25.023 - type: map_at_5 value: 26.528000000000002 - type: mrr_at_1 value: 23.675154852030282 - type: mrr_at_10 value: 31.810676323752784 - type: mrr_at_100 value: 
32.788970614380716 - type: mrr_at_1000 value: 32.86028758975889 - type: mrr_at_20 value: 32.35935756676056 - type: mrr_at_3 value: 29.41615049323246 - type: mrr_at_5 value: 30.785730672172633 - type: nauc_map_at_1000_diff1 value: 35.597766688968015 - type: nauc_map_at_1000_max value: 26.295790183159845 - type: nauc_map_at_1000_std value: -0.04229904865958209 - type: nauc_map_at_100_diff1 value: 35.568782622469925 - type: nauc_map_at_100_max value: 26.27850795471227 - type: nauc_map_at_100_std value: -0.04944875782811099 - type: nauc_map_at_10_diff1 value: 35.63760937893694 - type: nauc_map_at_10_max value: 26.130094042028233 - type: nauc_map_at_10_std value: -0.6896882769027717 - type: nauc_map_at_1_diff1 value: 41.759098341890976 - type: nauc_map_at_1_max value: 23.918885427783326 - type: nauc_map_at_1_std value: -2.1383574897865074 - type: nauc_map_at_20_diff1 value: 35.55706530442612 - type: nauc_map_at_20_max value: 26.23339626569677 - type: nauc_map_at_20_std value: -0.162172033918129 - type: nauc_map_at_3_diff1 value: 37.22183376355153 - type: nauc_map_at_3_max value: 25.770512522122186 - type: nauc_map_at_3_std value: -1.3105892187778403 - type: nauc_map_at_5_diff1 value: 36.205913161663084 - type: nauc_map_at_5_max value: 25.953300641502064 - type: nauc_map_at_5_std value: -0.7987363137547906 - type: nauc_mrr_at_1000_diff1 value: 34.864016559617646 - type: nauc_mrr_at_1000_max value: 26.8689525348564 - type: nauc_mrr_at_1000_std value: -0.5839923973914446 - type: nauc_mrr_at_100_diff1 value: 34.83820469598538 - type: nauc_mrr_at_100_max value: 26.864669056231282 - type: nauc_mrr_at_100_std value: -0.5785645654158633 - type: nauc_mrr_at_10_diff1 value: 34.81868397381981 - type: nauc_mrr_at_10_max value: 26.79988560460627 - type: nauc_mrr_at_10_std value: -1.1113808365827318 - type: nauc_mrr_at_1_diff1 value: 40.0281507903504 - type: nauc_mrr_at_1_max value: 25.036735941806583 - type: nauc_mrr_at_1_std value: -2.508700799268523 - type: nauc_mrr_at_20_diff1 value: 34.81954537357966 - type: nauc_mrr_at_20_max value: 26.877673033315453 - type: nauc_mrr_at_20_std value: -0.6706028107452919 - type: nauc_mrr_at_3_diff1 value: 35.87313782549696 - type: nauc_mrr_at_3_max value: 26.776261693392335 - type: nauc_mrr_at_3_std value: -1.8010591328112908 - type: nauc_mrr_at_5_diff1 value: 35.31673912159536 - type: nauc_mrr_at_5_max value: 26.78720786106881 - type: nauc_mrr_at_5_std value: -1.3096326953900546 - type: nauc_ndcg_at_1000_diff1 value: 33.43105244339048 - type: nauc_ndcg_at_1000_max value: 27.52195065724684 - type: nauc_ndcg_at_1000_std value: 2.8376056562675744 - type: nauc_ndcg_at_100_diff1 value: 32.90916846420573 - type: nauc_ndcg_at_100_max value: 27.27161017736065 - type: nauc_ndcg_at_100_std value: 2.8703122625872126 - type: nauc_ndcg_at_10_diff1 value: 33.12714979317447 - type: nauc_ndcg_at_10_max value: 26.67762031747992 - type: nauc_ndcg_at_10_std value: -0.1341345572932233 - type: nauc_ndcg_at_1_diff1 value: 40.0281507903504 - type: nauc_ndcg_at_1_max value: 25.036735941806583 - type: nauc_ndcg_at_1_std value: -2.508700799268523 - type: nauc_ndcg_at_20_diff1 value: 32.891656138688546 - type: nauc_ndcg_at_20_max value: 26.991976404027163 - type: nauc_ndcg_at_20_std value: 1.6050741106677746 - type: nauc_ndcg_at_3_diff1 value: 35.576958713955484 - type: nauc_ndcg_at_3_max value: 26.41687745899445 - type: nauc_ndcg_at_3_std value: -1.5326687067002291 - type: nauc_ndcg_at_5_diff1 value: 34.27335619067276 - type: nauc_ndcg_at_5_max value: 26.479515412084208 - type: 
nauc_ndcg_at_5_std value: -0.5597648935666003 - type: nauc_precision_at_1000_diff1 value: -0.18660914306684007 - type: nauc_precision_at_1000_max value: 7.268255385799229 - type: nauc_precision_at_1000_std value: -0.1968875268478991 - type: nauc_precision_at_100_diff1 value: 7.386701205054449 - type: nauc_precision_at_100_max value: 15.477735603019607 - type: nauc_precision_at_100_std value: 4.753153414679307 - type: nauc_precision_at_10_diff1 value: 18.4668296945938 - type: nauc_precision_at_10_max value: 25.457144217779597 - type: nauc_precision_at_10_std value: 0.40165373733963605 - type: nauc_precision_at_1_diff1 value: 40.0281507903504 - type: nauc_precision_at_1_max value: 25.036735941806583 - type: nauc_precision_at_1_std value: -2.508700799268523 - type: nauc_precision_at_20_diff1 value: 14.751135844289335 - type: nauc_precision_at_20_max value: 22.763373329576293 - type: nauc_precision_at_20_std value: 4.360731801761864 - type: nauc_precision_at_3_diff1 value: 28.154753888265393 - type: nauc_precision_at_3_max value: 27.838427033527147 - type: nauc_precision_at_3_std value: -1.0042621266717804 - type: nauc_precision_at_5_diff1 value: 23.549026872711423 - type: nauc_precision_at_5_max value: 27.192214745385044 - type: nauc_precision_at_5_std value: 0.4455206110174471 - type: nauc_recall_at_1000_diff1 value: 17.905404210815632 - type: nauc_recall_at_1000_max value: 32.8674418535776 - type: nauc_recall_at_1000_std value: 35.187050415735435 - type: nauc_recall_at_100_diff1 value: 20.903609751984757 - type: nauc_recall_at_100_max value: 27.180306691518364 - type: nauc_recall_at_100_std value: 17.553030959393297 - type: nauc_recall_at_10_diff1 value: 25.615147693464387 - type: nauc_recall_at_10_max value: 25.97062699453565 - type: nauc_recall_at_10_std value: 2.2181702899826576 - type: nauc_recall_at_1_diff1 value: 41.759098341890976 - type: nauc_recall_at_1_max value: 23.918885427783326 - type: nauc_recall_at_1_std value: -2.1383574897865074 - type: nauc_recall_at_20_diff1 value: 23.922775940094386 - type: nauc_recall_at_20_max value: 26.384627814902785 - type: nauc_recall_at_20_std value: 7.944532403561578 - type: nauc_recall_at_3_diff1 value: 32.26543270634743 - type: nauc_recall_at_3_max value: 26.36357710828272 - type: nauc_recall_at_3_std value: -0.42723331708340706 - type: nauc_recall_at_5_diff1 value: 29.080464141763336 - type: nauc_recall_at_5_max value: 25.81238438303652 - type: nauc_recall_at_5_std value: 1.1649311168287726 - type: ndcg_at_1 value: 23.674999999999997 - type: ndcg_at_10 value: 32.842 - type: ndcg_at_100 value: 38.64 - type: ndcg_at_1000 value: 41.367 - type: ndcg_at_20 value: 35.032999999999994 - type: ndcg_at_3 value: 28.166000000000004 - type: ndcg_at_5 value: 30.407 - type: precision_at_1 value: 23.674999999999997 - type: precision_at_10 value: 6.005 - type: precision_at_100 value: 1.053 - type: precision_at_1000 value: 0.146 - type: precision_at_20 value: 3.6580000000000004 - type: precision_at_3 value: 13.352 - type: precision_at_5 value: 9.718 - type: recall_at_1 value: 19.527 - type: recall_at_10 value: 44.096999999999994 - type: recall_at_100 value: 69.962 - type: recall_at_1000 value: 89.035 - type: recall_at_20 value: 52.166000000000004 - type: recall_at_3 value: 30.946 - type: recall_at_5 value: 36.789 - task: type: Retrieval dataset: name: MTEB CQADupstackUnixRetrieval type: mteb/cqadupstack-unix config: default split: test revision: 6c6430d3a6d36f8d2a829195bc5dc94d7e063e53 metrics: - type: main_score value: 46.54 - type: map_at_1 value: 
29.953999999999997 - type: map_at_10 value: 40.742 - type: map_at_100 value: 41.964 - type: map_at_1000 value: 42.059999999999995 - type: map_at_20 value: 41.426 - type: map_at_3 value: 37.378 - type: map_at_5 value: 39.267 - type: mrr_at_1 value: 34.701492537313435 - type: mrr_at_10 value: 44.29978085761664 - type: mrr_at_100 value: 45.205551401915486 - type: mrr_at_1000 value: 45.24735017384963 - type: mrr_at_20 value: 44.85338423755729 - type: mrr_at_3 value: 41.57338308457707 - type: mrr_at_5 value: 43.19185323383077 - type: nauc_map_at_1000_diff1 value: 48.45170522932164 - type: nauc_map_at_1000_max value: 31.544164363591204 - type: nauc_map_at_1000_std value: 0.8661088818146858 - type: nauc_map_at_100_diff1 value: 48.47347800061323 - type: nauc_map_at_100_max value: 31.568637596620313 - type: nauc_map_at_100_std value: 0.9252699336843858 - type: nauc_map_at_10_diff1 value: 48.64849891585432 - type: nauc_map_at_10_max value: 31.40371265579746 - type: nauc_map_at_10_std value: 0.7088016563713089 - type: nauc_map_at_1_diff1 value: 53.57918993108331 - type: nauc_map_at_1_max value: 31.392632653740993 - type: nauc_map_at_1_std value: -2.857306170463933 - type: nauc_map_at_20_diff1 value: 48.49084353023969 - type: nauc_map_at_20_max value: 31.470313174779374 - type: nauc_map_at_20_std value: 0.8950296035234309 - type: nauc_map_at_3_diff1 value: 49.273481161619806 - type: nauc_map_at_3_max value: 31.101471509782826 - type: nauc_map_at_3_std value: -0.886510096257905 - type: nauc_map_at_5_diff1 value: 48.85344288229106 - type: nauc_map_at_5_max value: 31.32633663238284 - type: nauc_map_at_5_std value: -0.44752909698881177 - type: nauc_mrr_at_1000_diff1 value: 46.27593166906613 - type: nauc_mrr_at_1000_max value: 31.637594372116336 - type: nauc_mrr_at_1000_std value: 0.8444917550670064 - type: nauc_mrr_at_100_diff1 value: 46.27161543033672 - type: nauc_mrr_at_100_max value: 31.64330655339695 - type: nauc_mrr_at_100_std value: 0.8717446416398773 - type: nauc_mrr_at_10_diff1 value: 46.100348481312864 - type: nauc_mrr_at_10_max value: 31.594271897882237 - type: nauc_mrr_at_10_std value: 0.8807168907688873 - type: nauc_mrr_at_1_diff1 value: 51.35163098909763 - type: nauc_mrr_at_1_max value: 31.99084441327899 - type: nauc_mrr_at_1_std value: -2.688594880742662 - type: nauc_mrr_at_20_diff1 value: 46.18178546174727 - type: nauc_mrr_at_20_max value: 31.639111674119448 - type: nauc_mrr_at_20_std value: 0.9855008641374622 - type: nauc_mrr_at_3_diff1 value: 46.307484835305864 - type: nauc_mrr_at_3_max value: 31.35563850804847 - type: nauc_mrr_at_3_std value: -0.3419536587707561 - type: nauc_mrr_at_5_diff1 value: 46.17646418781234 - type: nauc_mrr_at_5_max value: 31.313474270239833 - type: nauc_mrr_at_5_std value: -0.08656550526568331 - type: nauc_ndcg_at_1000_diff1 value: 46.12095795101613 - type: nauc_ndcg_at_1000_max value: 31.989083597726314 - type: nauc_ndcg_at_1000_std value: 3.2965704707660763 - type: nauc_ndcg_at_100_diff1 value: 46.05376249841318 - type: nauc_ndcg_at_100_max value: 32.39195988574972 - type: nauc_ndcg_at_100_std value: 4.518018135593347 - type: nauc_ndcg_at_10_diff1 value: 46.133631183744875 - type: nauc_ndcg_at_10_max value: 31.45358876172339 - type: nauc_ndcg_at_10_std value: 3.4254370918871055 - type: nauc_ndcg_at_1_diff1 value: 51.35163098909763 - type: nauc_ndcg_at_1_max value: 31.99084441327899 - type: nauc_ndcg_at_1_std value: -2.688594880742662 - type: nauc_ndcg_at_20_diff1 value: 45.94584949766954 - type: nauc_ndcg_at_20_max value: 31.689777515111295 - type: 
nauc_ndcg_at_20_std value: 4.189082428922442 - type: nauc_ndcg_at_3_diff1 value: 46.5057835389752 - type: nauc_ndcg_at_3_max value: 30.941407592082047 - type: nauc_ndcg_at_3_std value: -0.042473944857831535 - type: nauc_ndcg_at_5_diff1 value: 46.369027395136136 - type: nauc_ndcg_at_5_max value: 31.057841776505352 - type: nauc_ndcg_at_5_std value: 0.6878993420489522 - type: nauc_precision_at_1000_diff1 value: -17.30759714093202 - type: nauc_precision_at_1000_max value: -4.441155558458858 - type: nauc_precision_at_1000_std value: 1.5537300718220326 - type: nauc_precision_at_100_diff1 value: -7.18920438222021 - type: nauc_precision_at_100_max value: 8.017878121399253 - type: nauc_precision_at_100_std value: 11.357132919349102 - type: nauc_precision_at_10_diff1 value: 15.202451884794076 - type: nauc_precision_at_10_max value: 19.077295902881417 - type: nauc_precision_at_10_std value: 9.885526867355805 - type: nauc_precision_at_1_diff1 value: 51.35163098909763 - type: nauc_precision_at_1_max value: 31.99084441327899 - type: nauc_precision_at_1_std value: -2.688594880742662 - type: nauc_precision_at_20_diff1 value: 6.827461091494899 - type: nauc_precision_at_20_max value: 15.27268633497114 - type: nauc_precision_at_20_std value: 11.515826649647384 - type: nauc_precision_at_3_diff1 value: 31.043021807472027 - type: nauc_precision_at_3_max value: 26.22457157531548 - type: nauc_precision_at_3_std value: 1.788215968301994 - type: nauc_precision_at_5_diff1 value: 25.030185818513235 - type: nauc_precision_at_5_max value: 23.680129160901537 - type: nauc_precision_at_5_std value: 4.303018899688115 - type: nauc_recall_at_1000_diff1 value: 28.68826642607512 - type: nauc_recall_at_1000_max value: 42.33849804103852 - type: nauc_recall_at_1000_std value: 42.67413575876864 - type: nauc_recall_at_100_diff1 value: 36.51494878715 - type: nauc_recall_at_100_max value: 37.4764995034434 - type: nauc_recall_at_100_std value: 28.295671266661017 - type: nauc_recall_at_10_diff1 value: 39.416721111463524 - type: nauc_recall_at_10_max value: 29.95985608454179 - type: nauc_recall_at_10_std value: 12.423335839786201 - type: nauc_recall_at_1_diff1 value: 53.57918993108331 - type: nauc_recall_at_1_max value: 31.392632653740993 - type: nauc_recall_at_1_std value: -2.857306170463933 - type: nauc_recall_at_20_diff1 value: 38.228803480194046 - type: nauc_recall_at_20_max value: 30.87261362975955 - type: nauc_recall_at_20_std value: 16.977113091834095 - type: nauc_recall_at_3_diff1 value: 43.154348566653155 - type: nauc_recall_at_3_max value: 29.54536633744803 - type: nauc_recall_at_3_std value: 2.02842672250621 - type: nauc_recall_at_5_diff1 value: 41.00436246072242 - type: nauc_recall_at_5_max value: 29.413569555348023 - type: nauc_recall_at_5_std value: 3.845214021958289 - type: ndcg_at_1 value: 34.701 - type: ndcg_at_10 value: 46.54 - type: ndcg_at_100 value: 51.754999999999995 - type: ndcg_at_1000 value: 53.71 - type: ndcg_at_20 value: 48.679 - type: ndcg_at_3 value: 40.892 - type: ndcg_at_5 value: 43.595 - type: precision_at_1 value: 34.701 - type: precision_at_10 value: 8.004 - type: precision_at_100 value: 1.185 - type: precision_at_1000 value: 0.145 - type: precision_at_20 value: 4.632 - type: precision_at_3 value: 18.719 - type: precision_at_5 value: 13.245999999999999 - type: recall_at_1 value: 29.953999999999997 - type: recall_at_10 value: 60.246 - type: recall_at_100 value: 82.128 - type: recall_at_1000 value: 95.622 - type: recall_at_20 value: 67.756 - type: recall_at_3 value: 45.096000000000004 - type: recall_at_5 
value: 51.9 - task: type: Retrieval dataset: name: MTEB CQADupstackWebmastersRetrieval type: mteb/cqadupstack-webmasters config: default split: test revision: 160c094312a0e1facb97e55eeddb698c0abe3571 metrics: - type: main_score value: 44.718999999999994 - type: map_at_1 value: 28.383999999999997 - type: map_at_10 value: 38.422 - type: map_at_100 value: 40.058 - type: map_at_1000 value: 40.276 - type: map_at_20 value: 39.301 - type: map_at_3 value: 35.205 - type: map_at_5 value: 36.803999999999995 - type: mrr_at_1 value: 33.59683794466403 - type: mrr_at_10 value: 42.837536859275986 - type: mrr_at_100 value: 43.7501703455481 - type: mrr_at_1000 value: 43.79258407771123 - type: mrr_at_20 value: 43.36044710445095 - type: mrr_at_3 value: 40.15151515151516 - type: mrr_at_5 value: 41.74242424242425 - type: nauc_map_at_1000_diff1 value: 47.934826596875304 - type: nauc_map_at_1000_max value: 32.39759438116062 - type: nauc_map_at_1000_std value: 0.9489007346763054 - type: nauc_map_at_100_diff1 value: 47.94844822157888 - type: nauc_map_at_100_max value: 32.51485845519537 - type: nauc_map_at_100_std value: 0.8094339925545622 - type: nauc_map_at_10_diff1 value: 48.251456404874645 - type: nauc_map_at_10_max value: 31.412906399154245 - type: nauc_map_at_10_std value: -0.7024825737369933 - type: nauc_map_at_1_diff1 value: 55.81906101970174 - type: nauc_map_at_1_max value: 31.811715334193796 - type: nauc_map_at_1_std value: -6.17056859281584 - type: nauc_map_at_20_diff1 value: 47.80902650237369 - type: nauc_map_at_20_max value: 32.22465403023091 - type: nauc_map_at_20_std value: 0.20706526946705656 - type: nauc_map_at_3_diff1 value: 49.97333984346632 - type: nauc_map_at_3_max value: 31.58195498640799 - type: nauc_map_at_3_std value: -2.577539707727459 - type: nauc_map_at_5_diff1 value: 49.40005767350608 - type: nauc_map_at_5_max value: 30.998435600377434 - type: nauc_map_at_5_std value: -2.1231771618690307 - type: nauc_mrr_at_1000_diff1 value: 46.86811371969663 - type: nauc_mrr_at_1000_max value: 31.25147138171024 - type: nauc_mrr_at_1000_std value: 1.9954422477585918 - type: nauc_mrr_at_100_diff1 value: 46.855870345882195 - type: nauc_mrr_at_100_max value: 31.263524035665966 - type: nauc_mrr_at_100_std value: 2.0160751193806568 - type: nauc_mrr_at_10_diff1 value: 46.93294772825783 - type: nauc_mrr_at_10_max value: 30.927002048701663 - type: nauc_mrr_at_10_std value: 1.6538220080908224 - type: nauc_mrr_at_1_diff1 value: 52.416386548395664 - type: nauc_mrr_at_1_max value: 32.28582003787206 - type: nauc_mrr_at_1_std value: -2.154991145714492 - type: nauc_mrr_at_20_diff1 value: 46.71796185319694 - type: nauc_mrr_at_20_max value: 31.16219902794994 - type: nauc_mrr_at_20_std value: 1.8590646572728409 - type: nauc_mrr_at_3_diff1 value: 47.697100317669914 - type: nauc_mrr_at_3_max value: 30.821806030159383 - type: nauc_mrr_at_3_std value: 1.1927626358099177 - type: nauc_mrr_at_5_diff1 value: 47.065272061365704 - type: nauc_mrr_at_5_max value: 30.299230962805023 - type: nauc_mrr_at_5_std value: 1.3225842862629529 - type: nauc_ndcg_at_1000_diff1 value: 45.20612583136058 - type: nauc_ndcg_at_1000_max value: 33.51931869947315 - type: nauc_ndcg_at_1000_std value: 4.923707509620363 - type: nauc_ndcg_at_100_diff1 value: 44.76206243393775 - type: nauc_ndcg_at_100_max value: 33.57771606755598 - type: nauc_ndcg_at_100_std value: 5.30915563331338 - type: nauc_ndcg_at_10_diff1 value: 45.12714032463827 - type: nauc_ndcg_at_10_max value: 30.351909495610492 - type: nauc_ndcg_at_10_std value: 2.3972947289996873 - type: 
nauc_ndcg_at_1_diff1 value: 52.416386548395664 - type: nauc_ndcg_at_1_max value: 32.28582003787206 - type: nauc_ndcg_at_1_std value: -2.154991145714492 - type: nauc_ndcg_at_20_diff1 value: 44.20281844000005 - type: nauc_ndcg_at_20_max value: 32.14112739396226 - type: nauc_ndcg_at_20_std value: 3.3971385462591916 - type: nauc_ndcg_at_3_diff1 value: 47.0633767031858 - type: nauc_ndcg_at_3_max value: 31.032896053733435 - type: nauc_ndcg_at_3_std value: 0.6827544906310201 - type: nauc_ndcg_at_5_diff1 value: 46.735352294106484 - type: nauc_ndcg_at_5_max value: 29.784992270528544 - type: nauc_ndcg_at_5_std value: 0.8685943819516141 - type: nauc_precision_at_1000_diff1 value: -12.223330179860852 - type: nauc_precision_at_1000_max value: -9.266492213777273 - type: nauc_precision_at_1000_std value: 19.0569899587788 - type: nauc_precision_at_100_diff1 value: -5.803751085072067 - type: nauc_precision_at_100_max value: 3.448932057044294 - type: nauc_precision_at_100_std value: 23.470863527030627 - type: nauc_precision_at_10_diff1 value: 8.887357341361907 - type: nauc_precision_at_10_max value: 18.67165390928126 - type: nauc_precision_at_10_std value: 19.158543337955404 - type: nauc_precision_at_1_diff1 value: 52.416386548395664 - type: nauc_precision_at_1_max value: 32.28582003787206 - type: nauc_precision_at_1_std value: -2.154991145714492 - type: nauc_precision_at_20_diff1 value: 0.942496138409553 - type: nauc_precision_at_20_max value: 18.86957127610774 - type: nauc_precision_at_20_std value: 24.075503903246496 - type: nauc_precision_at_3_diff1 value: 28.15363877307106 - type: nauc_precision_at_3_max value: 27.064928137991824 - type: nauc_precision_at_3_std value: 8.632807104504753 - type: nauc_precision_at_5_diff1 value: 20.805862332497973 - type: nauc_precision_at_5_max value: 21.420201475758404 - type: nauc_precision_at_5_std value: 12.380239645425714 - type: nauc_recall_at_1000_diff1 value: 18.478341468055547 - type: nauc_recall_at_1000_max value: 56.293560115074506 - type: nauc_recall_at_1000_std value: 64.31607185065428 - type: nauc_recall_at_100_diff1 value: 26.737267337771886 - type: nauc_recall_at_100_max value: 38.011889141496326 - type: nauc_recall_at_100_std value: 30.44904690114732 - type: nauc_recall_at_10_diff1 value: 35.22772732735716 - type: nauc_recall_at_10_max value: 26.000054115159486 - type: nauc_recall_at_10_std value: 5.174264254271206 - type: nauc_recall_at_1_diff1 value: 55.81906101970174 - type: nauc_recall_at_1_max value: 31.811715334193796 - type: nauc_recall_at_1_std value: -6.17056859281584 - type: nauc_recall_at_20_diff1 value: 30.48493302415641 - type: nauc_recall_at_20_max value: 31.05487040370753 - type: nauc_recall_at_20_std value: 10.319948318834136 - type: nauc_recall_at_3_diff1 value: 43.12289512340243 - type: nauc_recall_at_3_max value: 28.176279771026135 - type: nauc_recall_at_3_std value: -0.1775154523381921 - type: nauc_recall_at_5_diff1 value: 40.9934933741234 - type: nauc_recall_at_5_max value: 25.569156290584733 - type: nauc_recall_at_5_std value: 0.21166696686855038 - type: ndcg_at_1 value: 33.597 - type: ndcg_at_10 value: 44.718999999999994 - type: ndcg_at_100 value: 50.324000000000005 - type: ndcg_at_1000 value: 52.468 - type: ndcg_at_20 value: 46.822 - type: ndcg_at_3 value: 39.558 - type: ndcg_at_5 value: 41.827999999999996 - type: precision_at_1 value: 33.597 - type: precision_at_10 value: 8.735 - type: precision_at_100 value: 1.6420000000000001 - type: precision_at_1000 value: 0.246 - type: precision_at_20 value: 5.375 - type: precision_at_3 
value: 18.511 - type: precision_at_5 value: 13.399 - type: recall_at_1 value: 28.383999999999997 - type: recall_at_10 value: 56.425000000000004 - type: recall_at_100 value: 82.01899999999999 - type: recall_at_1000 value: 95.285 - type: recall_at_20 value: 64.615 - type: recall_at_3 value: 42.171 - type: recall_at_5 value: 48.296 - task: type: Retrieval dataset: name: MTEB CQADupstackWordpressRetrieval type: mteb/cqadupstack-wordpress config: default split: test revision: 4ffe81d471b1924886b33c7567bfb200e9eec5c4 metrics: - type: main_score value: 38.269999999999996 - type: map_at_1 value: 25.324999999999996 - type: map_at_10 value: 33.263 - type: map_at_100 value: 34.304 - type: map_at_1000 value: 34.394000000000005 - type: map_at_20 value: 33.827 - type: map_at_3 value: 30.259999999999998 - type: map_at_5 value: 31.832 - type: mrr_at_1 value: 27.171903881700555 - type: mrr_at_10 value: 35.334991051257234 - type: mrr_at_100 value: 36.251283465952355 - type: mrr_at_1000 value: 36.316236092511055 - type: mrr_at_20 value: 35.87141909945257 - type: mrr_at_3 value: 32.71719038817007 - type: mrr_at_5 value: 34.19593345656194 - type: nauc_map_at_1000_diff1 value: 39.614836211522714 - type: nauc_map_at_1000_max value: 22.019768626310192 - type: nauc_map_at_1000_std value: -1.5238708712112499 - type: nauc_map_at_100_diff1 value: 39.63008548572307 - type: nauc_map_at_100_max value: 22.044756063752345 - type: nauc_map_at_100_std value: -1.4869190221494792 - type: nauc_map_at_10_diff1 value: 39.73025012395569 - type: nauc_map_at_10_max value: 22.117710178892107 - type: nauc_map_at_10_std value: -2.5129984871932973 - type: nauc_map_at_1_diff1 value: 45.015617718902654 - type: nauc_map_at_1_max value: 19.313800263189638 - type: nauc_map_at_1_std value: -4.763931386681675 - type: nauc_map_at_20_diff1 value: 39.53678019013766 - type: nauc_map_at_20_max value: 21.880316719428258 - type: nauc_map_at_20_std value: -1.882003994523355 - type: nauc_map_at_3_diff1 value: 40.37307665298228 - type: nauc_map_at_3_max value: 20.851976075322533 - type: nauc_map_at_3_std value: -2.429569082966531 - type: nauc_map_at_5_diff1 value: 39.763015635086 - type: nauc_map_at_5_max value: 22.010102196900725 - type: nauc_map_at_5_std value: -2.654896415670943 - type: nauc_mrr_at_1000_diff1 value: 39.74071733680025 - type: nauc_mrr_at_1000_max value: 21.67309640681989 - type: nauc_mrr_at_1000_std value: -1.4003373135477462 - type: nauc_mrr_at_100_diff1 value: 39.730614151966485 - type: nauc_mrr_at_100_max value: 21.678390048971767 - type: nauc_mrr_at_100_std value: -1.3655362623563931 - type: nauc_mrr_at_10_diff1 value: 39.7900031013241 - type: nauc_mrr_at_10_max value: 21.73643491725051 - type: nauc_mrr_at_10_std value: -2.1175389838696312 - type: nauc_mrr_at_1_diff1 value: 46.165736140679776 - type: nauc_mrr_at_1_max value: 20.071083446822147 - type: nauc_mrr_at_1_std value: -5.018909100858311 - type: nauc_mrr_at_20_diff1 value: 39.6371295762885 - type: nauc_mrr_at_20_max value: 21.659557440270973 - type: nauc_mrr_at_20_std value: -1.4909603958341686 - type: nauc_mrr_at_3_diff1 value: 40.351150322758876 - type: nauc_mrr_at_3_max value: 20.83706249041544 - type: nauc_mrr_at_3_std value: -1.956027373253151 - type: nauc_mrr_at_5_diff1 value: 39.57759107791911 - type: nauc_mrr_at_5_max value: 21.79552045204151 - type: nauc_mrr_at_5_std value: -2.1507013120951126 - type: nauc_ndcg_at_1000_diff1 value: 37.717619356839016 - type: nauc_ndcg_at_1000_max value: 22.545375504379805 - type: nauc_ndcg_at_1000_std value: 1.682348628141016 - 
type: nauc_ndcg_at_100_diff1 value: 37.656027803682626 - type: nauc_ndcg_at_100_max value: 22.49278246383637 - type: nauc_ndcg_at_100_std value: 2.6818118152357773 - type: nauc_ndcg_at_10_diff1 value: 37.834954205539766 - type: nauc_ndcg_at_10_max value: 22.655839885558443 - type: nauc_ndcg_at_10_std value: -1.97159619786231 - type: nauc_ndcg_at_1_diff1 value: 46.165736140679776 - type: nauc_ndcg_at_1_max value: 20.071083446822147 - type: nauc_ndcg_at_1_std value: -5.018909100858311 - type: nauc_ndcg_at_20_diff1 value: 37.171914857454304 - type: nauc_ndcg_at_20_max value: 21.858904801745897 - type: nauc_ndcg_at_20_std value: 0.3809854859496657 - type: nauc_ndcg_at_3_diff1 value: 38.4460623883955 - type: nauc_ndcg_at_3_max value: 20.95244159463402 - type: nauc_ndcg_at_3_std value: -1.2685011660086651 - type: nauc_ndcg_at_5_diff1 value: 37.48831054573054 - type: nauc_ndcg_at_5_max value: 22.625921624640526 - type: nauc_ndcg_at_5_std value: -2.049221092724925 - type: nauc_precision_at_1000_diff1 value: -19.120500628263994 - type: nauc_precision_at_1000_max value: -6.650707109047473 - type: nauc_precision_at_1000_std value: 15.71193179253002 - type: nauc_precision_at_100_diff1 value: 6.254606806876069 - type: nauc_precision_at_100_max value: 14.601826922181823 - type: nauc_precision_at_100_std value: 28.38299592246453 - type: nauc_precision_at_10_diff1 value: 22.978614338670816 - type: nauc_precision_at_10_max value: 23.04146766323557 - type: nauc_precision_at_10_std value: 6.226264308612577 - type: nauc_precision_at_1_diff1 value: 46.165736140679776 - type: nauc_precision_at_1_max value: 20.071083446822147 - type: nauc_precision_at_1_std value: -5.018909100858311 - type: nauc_precision_at_20_diff1 value: 17.681032853225602 - type: nauc_precision_at_20_max value: 18.66680304585122 - type: nauc_precision_at_20_std value: 15.34896796713905 - type: nauc_precision_at_3_diff1 value: 31.359396694559194 - type: nauc_precision_at_3_max value: 22.279263308973274 - type: nauc_precision_at_3_std value: 3.6302537979529035 - type: nauc_precision_at_5_diff1 value: 26.32257879892933 - type: nauc_precision_at_5_max value: 25.402524493181026 - type: nauc_precision_at_5_std value: 4.731450603747359 - type: nauc_recall_at_1000_diff1 value: 23.562925244967875 - type: nauc_recall_at_1000_max value: 30.737399333586797 - type: nauc_recall_at_1000_std value: 34.19418935008663 - type: nauc_recall_at_100_diff1 value: 28.703574970574824 - type: nauc_recall_at_100_max value: 22.448663600170278 - type: nauc_recall_at_100_std value: 24.53297349042035 - type: nauc_recall_at_10_diff1 value: 31.73603907811882 - type: nauc_recall_at_10_max value: 23.453183748640765 - type: nauc_recall_at_10_std value: -1.8279054407176274 - type: nauc_recall_at_1_diff1 value: 45.015617718902654 - type: nauc_recall_at_1_max value: 19.313800263189638 - type: nauc_recall_at_1_std value: -4.763931386681675 - type: nauc_recall_at_20_diff1 value: 28.74169081866096 - type: nauc_recall_at_20_max value: 20.035509169577324 - type: nauc_recall_at_20_std value: 7.371615811227748 - type: nauc_recall_at_3_diff1 value: 34.09890157333362 - type: nauc_recall_at_3_max value: 20.46565842748346 - type: nauc_recall_at_3_std value: -0.4337283067447526 - type: nauc_recall_at_5_diff1 value: 30.974580787842402 - type: nauc_recall_at_5_max value: 23.76379349487105 - type: nauc_recall_at_5_std value: -1.8407515927979428 - type: ndcg_at_1 value: 27.172 - type: ndcg_at_10 value: 38.269999999999996 - type: ndcg_at_100 value: 43.338 - type: ndcg_at_1000 value: 45.594 - type: 
ndcg_at_20 value: 40.256 - type: ndcg_at_3 value: 32.673 - type: ndcg_at_5 value: 35.224 - type: precision_at_1 value: 27.172 - type: precision_at_10 value: 6.063000000000001 - type: precision_at_100 value: 0.9259999999999999 - type: precision_at_1000 value: 0.123 - type: precision_at_20 value: 3.5029999999999997 - type: precision_at_3 value: 13.74 - type: precision_at_5 value: 9.797 - type: recall_at_1 value: 25.324999999999996 - type: recall_at_10 value: 51.634 - type: recall_at_100 value: 74.687 - type: recall_at_1000 value: 91.412 - type: recall_at_20 value: 59.207 - type: recall_at_3 value: 36.678 - type: recall_at_5 value: 42.742999999999995 - task: type: Retrieval dataset: name: MTEB ClimateFEVER type: mteb/climate-fever config: default split: test revision: 47f2ac6acb640fc46020b02a5b59fdda04d39380 metrics: - type: main_score value: 36.853 - type: map_at_1 value: 15.371000000000002 - type: map_at_10 value: 27.122 - type: map_at_100 value: 29.226000000000003 - type: map_at_1000 value: 29.409999999999997 - type: map_at_20 value: 28.274 - type: map_at_3 value: 22.431 - type: map_at_5 value: 24.877 - type: mrr_at_1 value: 34.13680781758958 - type: mrr_at_10 value: 47.265911793599145 - type: mrr_at_100 value: 48.028369995763846 - type: mrr_at_1000 value: 48.05317022537804 - type: mrr_at_20 value: 47.75785292259516 - type: mrr_at_3 value: 43.887079261672156 - type: mrr_at_5 value: 45.906623235613544 - type: nauc_map_at_1000_diff1 value: 24.949211292921547 - type: nauc_map_at_1000_max value: 38.69844483304584 - type: nauc_map_at_1000_std value: 18.336359440844753 - type: nauc_map_at_100_diff1 value: 24.8951732982492 - type: nauc_map_at_100_max value: 38.65049158594052 - type: nauc_map_at_100_std value: 18.28935278388095 - type: nauc_map_at_10_diff1 value: 24.606032216798273 - type: nauc_map_at_10_max value: 38.00608351559887 - type: nauc_map_at_10_std value: 16.61261615173358 - type: nauc_map_at_1_diff1 value: 30.83614944448221 - type: nauc_map_at_1_max value: 33.757528532809 - type: nauc_map_at_1_std value: 8.880622713261126 - type: nauc_map_at_20_diff1 value: 24.75491310922017 - type: nauc_map_at_20_max value: 38.353679076398834 - type: nauc_map_at_20_std value: 17.58637493443171 - type: nauc_map_at_3_diff1 value: 25.563085273287083 - type: nauc_map_at_3_max value: 35.14515679047155 - type: nauc_map_at_3_std value: 11.75594869817732 - type: nauc_map_at_5_diff1 value: 24.815807517691614 - type: nauc_map_at_5_max value: 36.25905426665983 - type: nauc_map_at_5_std value: 14.516391726180697 - type: nauc_mrr_at_1000_diff1 value: 27.948233427121274 - type: nauc_mrr_at_1000_max value: 37.5893640945859 - type: nauc_mrr_at_1000_std value: 19.588442449629763 - type: nauc_mrr_at_100_diff1 value: 27.947962345854037 - type: nauc_mrr_at_100_max value: 37.60375479481945 - type: nauc_mrr_at_100_std value: 19.614791576283793 - type: nauc_mrr_at_10_diff1 value: 27.882311310262136 - type: nauc_mrr_at_10_max value: 37.58580968074054 - type: nauc_mrr_at_10_std value: 19.49875186170201 - type: nauc_mrr_at_1_diff1 value: 28.017413073648477 - type: nauc_mrr_at_1_max value: 32.87710191514022 - type: nauc_mrr_at_1_std value: 14.04889142608459 - type: nauc_mrr_at_20_diff1 value: 27.89129925771968 - type: nauc_mrr_at_20_max value: 37.6142863106945 - type: nauc_mrr_at_20_std value: 19.645390143394163 - type: nauc_mrr_at_3_diff1 value: 27.99609559690795 - type: nauc_mrr_at_3_max value: 36.87362332456197 - type: nauc_mrr_at_3_std value: 18.598416821915333 - type: nauc_mrr_at_5_diff1 value: 27.68306089976716 - type: 
nauc_mrr_at_5_max value: 37.12264485659723 - type: nauc_mrr_at_5_std value: 19.18875305730564 - type: nauc_ndcg_at_1000_diff1 value: 25.736779186453777 - type: nauc_ndcg_at_1000_max value: 41.93281139456004 - type: nauc_ndcg_at_1000_std value: 25.179038422659993 - type: nauc_ndcg_at_100_diff1 value: 25.144796623848322 - type: nauc_ndcg_at_100_max value: 41.72820916876173 - type: nauc_ndcg_at_100_std value: 25.12851686850754 - type: nauc_ndcg_at_10_diff1 value: 24.321249191226652 - type: nauc_ndcg_at_10_max value: 40.23711916935706 - type: nauc_ndcg_at_10_std value: 20.89060972334557 - type: nauc_ndcg_at_1_diff1 value: 28.017413073648477 - type: nauc_ndcg_at_1_max value: 32.87710191514022 - type: nauc_ndcg_at_1_std value: 14.04889142608459 - type: nauc_ndcg_at_20_diff1 value: 24.5090484877482 - type: nauc_ndcg_at_20_max value: 40.752854032983606 - type: nauc_ndcg_at_20_std value: 22.70331074781384 - type: nauc_ndcg_at_3_diff1 value: 25.13499057756147 - type: nauc_ndcg_at_3_max value: 35.8325682137567 - type: nauc_ndcg_at_3_std value: 15.23768392706637 - type: nauc_ndcg_at_5_diff1 value: 24.614105695451116 - type: nauc_ndcg_at_5_max value: 37.68089587624492 - type: nauc_ndcg_at_5_std value: 17.946406099261708 - type: nauc_precision_at_1000_diff1 value: -2.022340544774227 - type: nauc_precision_at_1000_max value: 6.070578645067797 - type: nauc_precision_at_1000_std value: 22.15132728777549 - type: nauc_precision_at_100_diff1 value: 4.544144474504255 - type: nauc_precision_at_100_max value: 19.780392159848574 - type: nauc_precision_at_100_std value: 31.107111186002438 - type: nauc_precision_at_10_diff1 value: 10.107015022955848 - type: nauc_precision_at_10_max value: 30.779709099060465 - type: nauc_precision_at_10_std value: 27.324148451668602 - type: nauc_precision_at_1_diff1 value: 28.017413073648477 - type: nauc_precision_at_1_max value: 32.87710191514022 - type: nauc_precision_at_1_std value: 14.04889142608459 - type: nauc_precision_at_20_diff1 value: 8.270881053079405 - type: nauc_precision_at_20_max value: 27.26753946078481 - type: nauc_precision_at_20_std value: 29.156725822074204 - type: nauc_precision_at_3_diff1 value: 17.82468940497632 - type: nauc_precision_at_3_max value: 31.490021174215155 - type: nauc_precision_at_3_std value: 18.73818985054394 - type: nauc_precision_at_5_diff1 value: 13.24803141673961 - type: nauc_precision_at_5_max value: 29.94926240784298 - type: nauc_precision_at_5_std value: 23.2940906142919 - type: nauc_recall_at_1000_diff1 value: 19.09850333580471 - type: nauc_recall_at_1000_max value: 46.026306142840596 - type: nauc_recall_at_1000_std value: 46.50391519568263 - type: nauc_recall_at_100_diff1 value: 16.739384224869738 - type: nauc_recall_at_100_max value: 40.68987136431252 - type: nauc_recall_at_100_std value: 36.01609750485591 - type: nauc_recall_at_10_diff1 value: 17.51796617221814 - type: nauc_recall_at_10_max value: 39.47453129444401 - type: nauc_recall_at_10_std value: 23.79239002974899 - type: nauc_recall_at_1_diff1 value: 30.83614944448221 - type: nauc_recall_at_1_max value: 33.757528532809 - type: nauc_recall_at_1_std value: 8.880622713261126 - type: nauc_recall_at_20_diff1 value: 16.978668307251652 - type: nauc_recall_at_20_max value: 39.09115357303713 - type: nauc_recall_at_20_std value: 27.278668534187524 - type: nauc_recall_at_3_diff1 value: 22.55937738994021 - type: nauc_recall_at_3_max value: 36.25055459395638 - type: nauc_recall_at_3_std value: 14.828905168761247 - type: nauc_recall_at_5_diff1 value: 19.32656748627199 - type: 
nauc_recall_at_5_max value: 36.28836228620816 - type: nauc_recall_at_5_std value: 19.264352933914278 - type: ndcg_at_1 value: 34.137 - type: ndcg_at_10 value: 36.853 - type: ndcg_at_100 value: 44.279 - type: ndcg_at_1000 value: 47.336 - type: ndcg_at_20 value: 39.815 - type: ndcg_at_3 value: 30.253999999999998 - type: ndcg_at_5 value: 32.649 - type: precision_at_1 value: 34.137 - type: precision_at_10 value: 11.655 - type: precision_at_100 value: 1.9619999999999997 - type: precision_at_1000 value: 0.254 - type: precision_at_20 value: 7.1209999999999996 - type: precision_at_3 value: 22.823 - type: precision_at_5 value: 17.655 - type: recall_at_1 value: 15.371000000000002 - type: recall_at_10 value: 43.718 - type: recall_at_100 value: 68.81 - type: recall_at_1000 value: 85.69600000000001 - type: recall_at_20 value: 51.94 - type: recall_at_3 value: 27.694000000000003 - type: recall_at_5 value: 34.469 - task: type: Retrieval dataset: name: MTEB DBPedia type: mteb/dbpedia config: default split: test revision: c0f706b76e590d620bd6618b3ca8efdd34e2d659 metrics: - type: main_score value: 45.553 - type: map_at_1 value: 9.168999999999999 - type: map_at_10 value: 22.154 - type: map_at_100 value: 32.174 - type: map_at_1000 value: 33.974 - type: map_at_20 value: 25.899 - type: map_at_3 value: 15.275 - type: map_at_5 value: 18.291 - type: mrr_at_1 value: 70.75 - type: mrr_at_10 value: 78.39662698412697 - type: mrr_at_100 value: 78.56221458977012 - type: mrr_at_1000 value: 78.56669970642338 - type: mrr_at_20 value: 78.49688805346696 - type: mrr_at_3 value: 76.33333333333333 - type: mrr_at_5 value: 77.70833333333333 - type: nauc_map_at_1000_diff1 value: 18.465085922071346 - type: nauc_map_at_1000_max value: 24.29804638788498 - type: nauc_map_at_1000_std value: 22.380463943423514 - type: nauc_map_at_100_diff1 value: 19.37585410674523 - type: nauc_map_at_100_max value: 22.56424042509462 - type: nauc_map_at_100_std value: 19.672237275984426 - type: nauc_map_at_10_diff1 value: 23.597788166305577 - type: nauc_map_at_10_max value: 9.157316105122925 - type: nauc_map_at_10_std value: -3.8881247055786807 - type: nauc_map_at_1_diff1 value: 43.96699602275052 - type: nauc_map_at_1_max value: -0.7577088440873263 - type: nauc_map_at_1_std value: -17.732463891968404 - type: nauc_map_at_20_diff1 value: 22.326759054850097 - type: nauc_map_at_20_max value: 14.879191412167703 - type: nauc_map_at_20_std value: 5.405751236575241 - type: nauc_map_at_3_diff1 value: 28.73583545428074 - type: nauc_map_at_3_max value: 1.5986597211018239 - type: nauc_map_at_3_std value: -16.512455883681515 - type: nauc_map_at_5_diff1 value: 25.401810959155057 - type: nauc_map_at_5_max value: 4.418875376978587 - type: nauc_map_at_5_std value: -12.296750992013052 - type: nauc_mrr_at_1000_diff1 value: 51.228801807498584 - type: nauc_mrr_at_1000_max value: 61.040998883279585 - type: nauc_mrr_at_1000_std value: 40.93983887257123 - type: nauc_mrr_at_100_diff1 value: 51.23715338435314 - type: nauc_mrr_at_100_max value: 61.03971408781317 - type: nauc_mrr_at_100_std value: 40.91796923590573 - type: nauc_mrr_at_10_diff1 value: 51.1214868552331 - type: nauc_mrr_at_10_max value: 61.03069045590881 - type: nauc_mrr_at_10_std value: 40.661621199704264 - type: nauc_mrr_at_1_diff1 value: 50.84660003035892 - type: nauc_mrr_at_1_max value: 60.692091499960895 - type: nauc_mrr_at_1_std value: 42.126228731502955 - type: nauc_mrr_at_20_diff1 value: 51.0402624284872 - type: nauc_mrr_at_20_max value: 60.94577844338166 - type: nauc_mrr_at_20_std value: 40.89505950503613 - 
type: nauc_mrr_at_3_diff1 value: 51.771113665996516 - type: nauc_mrr_at_3_max value: 61.65264793077224 - type: nauc_mrr_at_3_std value: 41.75781827057092 - type: nauc_mrr_at_5_diff1 value: 51.0656793772882 - type: nauc_mrr_at_5_max value: 61.08042065139715 - type: nauc_mrr_at_5_std value: 41.11203271084835 - type: nauc_ndcg_at_1000_diff1 value: 22.347978262245107 - type: nauc_ndcg_at_1000_max value: 36.56458763955002 - type: nauc_ndcg_at_1000_std value: 35.99616144258822 - type: nauc_ndcg_at_100_diff1 value: 23.1120990977162 - type: nauc_ndcg_at_100_max value: 30.79663306311657 - type: nauc_ndcg_at_100_std value: 27.387572106784297 - type: nauc_ndcg_at_10_diff1 value: 23.329746066899656 - type: nauc_ndcg_at_10_max value: 28.69246947084685 - type: nauc_ndcg_at_10_std value: 21.457736188325345 - type: nauc_ndcg_at_1_diff1 value: 39.99399153456974 - type: nauc_ndcg_at_1_max value: 38.12447856470389 - type: nauc_ndcg_at_1_std value: 27.768869260384676 - type: nauc_ndcg_at_20_diff1 value: 24.945374175339907 - type: nauc_ndcg_at_20_max value: 27.67836982165295 - type: nauc_ndcg_at_20_std value: 19.7933631060578 - type: nauc_ndcg_at_3_diff1 value: 26.063492354398527 - type: nauc_ndcg_at_3_max value: 33.06541959550656 - type: nauc_ndcg_at_3_std value: 23.278902797288726 - type: nauc_ndcg_at_5_diff1 value: 22.521596060750035 - type: nauc_ndcg_at_5_max value: 31.210005673730784 - type: nauc_ndcg_at_5_std value: 22.893106456317927 - type: nauc_precision_at_1000_diff1 value: -19.845356495096006 - type: nauc_precision_at_1000_max value: 4.163819381816099 - type: nauc_precision_at_1000_std value: 7.612952884590339 - type: nauc_precision_at_100_diff1 value: -8.2679285153361 - type: nauc_precision_at_100_max value: 29.78018175573565 - type: nauc_precision_at_100_std value: 41.07244463956215 - type: nauc_precision_at_10_diff1 value: -3.2451428407349057 - type: nauc_precision_at_10_max value: 36.92563008274906 - type: nauc_precision_at_10_std value: 45.06962043489777 - type: nauc_precision_at_1_diff1 value: 50.84660003035892 - type: nauc_precision_at_1_max value: 60.692091499960895 - type: nauc_precision_at_1_std value: 42.126228731502955 - type: nauc_precision_at_20_diff1 value: -3.432279149061878 - type: nauc_precision_at_20_max value: 37.013592483974875 - type: nauc_precision_at_20_std value: 46.47324739428665 - type: nauc_precision_at_3_diff1 value: 7.28495481051025 - type: nauc_precision_at_3_max value: 38.66372411741402 - type: nauc_precision_at_3_std value: 35.23163993723955 - type: nauc_precision_at_5_diff1 value: -0.16540230063716202 - type: nauc_precision_at_5_max value: 37.322494255721715 - type: nauc_precision_at_5_std value: 39.666653561269754 - type: nauc_recall_at_1000_diff1 value: 11.388326469283681 - type: nauc_recall_at_1000_max value: 32.698146308591674 - type: nauc_recall_at_1000_std value: 49.48830488070777 - type: nauc_recall_at_100_diff1 value: 11.497443532756819 - type: nauc_recall_at_100_max value: 20.196970431621615 - type: nauc_recall_at_100_std value: 23.688772100803433 - type: nauc_recall_at_10_diff1 value: 16.519851398596003 - type: nauc_recall_at_10_max value: 0.774066845071221 - type: nauc_recall_at_10_std value: -10.89514647001814 - type: nauc_recall_at_1_diff1 value: 43.96699602275052 - type: nauc_recall_at_1_max value: -0.7577088440873263 - type: nauc_recall_at_1_std value: -17.732463891968404 - type: nauc_recall_at_20_diff1 value: 15.202960269878258 - type: nauc_recall_at_20_max value: 7.067263295590253 - type: nauc_recall_at_20_std value: -0.06050108222640702 - type: 
nauc_recall_at_3_diff1 value: 24.066741361525125 - type: nauc_recall_at_3_max value: -2.1961525860488424 - type: nauc_recall_at_3_std value: -19.48307077749568 - type: nauc_recall_at_5_diff1 value: 20.086330794102707 - type: nauc_recall_at_5_max value: -0.8866528062747986 - type: nauc_recall_at_5_std value: -16.53799173962747 - type: ndcg_at_1 value: 57.99999999999999 - type: ndcg_at_10 value: 45.553 - type: ndcg_at_100 value: 51.014 - type: ndcg_at_1000 value: 58.226 - type: ndcg_at_20 value: 44.98 - type: ndcg_at_3 value: 48.981 - type: ndcg_at_5 value: 46.794999999999995 - type: precision_at_1 value: 70.75 - type: precision_at_10 value: 36.85 - type: precision_at_100 value: 11.955 - type: precision_at_1000 value: 2.247 - type: precision_at_20 value: 28.075 - type: precision_at_3 value: 52.666999999999994 - type: precision_at_5 value: 45.85 - type: recall_at_1 value: 9.168999999999999 - type: recall_at_10 value: 28.796 - type: recall_at_100 value: 58.892999999999994 - type: recall_at_1000 value: 81.644 - type: recall_at_20 value: 36.659000000000006 - type: recall_at_3 value: 16.709 - type: recall_at_5 value: 21.387 - task: type: Retrieval dataset: name: MTEB FEVER type: mteb/fever config: default split: test revision: bea83ef9e8fb933d90a2f1d5515737465d613e12 metrics: - type: main_score value: 88.41 - type: map_at_1 value: 75.637 - type: map_at_10 value: 84.674 - type: map_at_100 value: 84.909 - type: map_at_1000 value: 84.92 - type: map_at_20 value: 84.836 - type: map_at_3 value: 83.44200000000001 - type: map_at_5 value: 84.28099999999999 - type: mrr_at_1 value: 81.56315631563157 - type: mrr_at_10 value: 88.89571695264748 - type: mrr_at_100 value: 88.93671417216285 - type: mrr_at_1000 value: 88.93708016011664 - type: mrr_at_20 value: 88.9311652665256 - type: mrr_at_3 value: 88.20882088208805 - type: mrr_at_5 value: 88.72937293729349 - type: nauc_map_at_1000_diff1 value: 54.41216035074026 - type: nauc_map_at_1000_max value: 13.346153003554361 - type: nauc_map_at_1000_std value: -6.721664416152164 - type: nauc_map_at_100_diff1 value: 54.36538350995795 - type: nauc_map_at_100_max value: 13.355583381471298 - type: nauc_map_at_100_std value: -6.696921015641016 - type: nauc_map_at_10_diff1 value: 54.0389127730555 - type: nauc_map_at_10_max value: 13.387802159150663 - type: nauc_map_at_10_std value: -6.73514381731833 - type: nauc_map_at_1_diff1 value: 57.99489574836453 - type: nauc_map_at_1_max value: 7.830032589171654 - type: nauc_map_at_1_std value: -10.140208285080295 - type: nauc_map_at_20_diff1 value: 54.16841004736076 - type: nauc_map_at_20_max value: 13.345607363689746 - type: nauc_map_at_20_std value: -6.663119775158465 - type: nauc_map_at_3_diff1 value: 53.82879543599303 - type: nauc_map_at_3_max value: 12.716952288433902 - type: nauc_map_at_3_std value: -7.746102082835598 - type: nauc_map_at_5_diff1 value: 53.82838395350109 - type: nauc_map_at_5_max value: 13.487373534211702 - type: nauc_map_at_5_std value: -6.869504398693434 - type: nauc_mrr_at_1000_diff1 value: 68.92783546581906 - type: nauc_mrr_at_1000_max value: 12.076297180596592 - type: nauc_mrr_at_1000_std value: -13.306257067567998 - type: nauc_mrr_at_100_diff1 value: 68.92780219775517 - type: nauc_mrr_at_100_max value: 12.078449805054374 - type: nauc_mrr_at_100_std value: -13.303524852703719 - type: nauc_mrr_at_10_diff1 value: 68.92686206881258 - type: nauc_mrr_at_10_max value: 12.273295656884873 - type: nauc_mrr_at_10_std value: -13.222483496603965 - type: nauc_mrr_at_1_diff1 value: 70.1738022073041 - type: nauc_mrr_at_1_max 
value: 9.378639533482806 - type: nauc_mrr_at_1_std value: -13.444033823202348 - type: nauc_mrr_at_20_diff1 value: 68.91161304905303 - type: nauc_mrr_at_20_max value: 12.117091514817885 - type: nauc_mrr_at_20_std value: -13.258261750160239 - type: nauc_mrr_at_3_diff1 value: 68.61982455945467 - type: nauc_mrr_at_3_max value: 12.608213879734578 - type: nauc_mrr_at_3_std value: -13.558003431587839 - type: nauc_mrr_at_5_diff1 value: 68.81439097457242 - type: nauc_mrr_at_5_max value: 12.54025598903624 - type: nauc_mrr_at_5_std value: -13.199231514972093 - type: nauc_ndcg_at_1000_diff1 value: 56.47563443877495 - type: nauc_ndcg_at_1000_max value: 14.508331783439466 - type: nauc_ndcg_at_1000_std value: -6.206829736668775 - type: nauc_ndcg_at_100_diff1 value: 55.54015515673474 - type: nauc_ndcg_at_100_max value: 14.753595778278136 - type: nauc_ndcg_at_100_std value: -5.638517949568802 - type: nauc_ndcg_at_10_diff1 value: 54.220845223257996 - type: nauc_ndcg_at_10_max value: 15.265309648490021 - type: nauc_ndcg_at_10_std value: -5.516276098929109 - type: nauc_ndcg_at_1_diff1 value: 70.1738022073041 - type: nauc_ndcg_at_1_max value: 9.378639533482806 - type: nauc_ndcg_at_1_std value: -13.444033823202348 - type: nauc_ndcg_at_20_diff1 value: 54.481406100854635 - type: nauc_ndcg_at_20_max value: 14.868763583210498 - type: nauc_ndcg_at_20_std value: -5.328097380018734 - type: nauc_ndcg_at_3_diff1 value: 54.94411725607744 - type: nauc_ndcg_at_3_max value: 14.27186734506607 - type: nauc_ndcg_at_3_std value: -7.894724962312474 - type: nauc_ndcg_at_5_diff1 value: 54.08048166974806 - type: nauc_ndcg_at_5_max value: 15.528233170721006 - type: nauc_ndcg_at_5_std value: -5.984768714537104 - type: nauc_precision_at_1000_diff1 value: -8.744323640074445 - type: nauc_precision_at_1000_max value: -0.01881224392053465 - type: nauc_precision_at_1000_std value: 3.8721477979260635 - type: nauc_precision_at_100_diff1 value: -11.86150156952171 - type: nauc_precision_at_100_max value: 3.2736651314552314 - type: nauc_precision_at_100_std value: 8.12687620615509 - type: nauc_precision_at_10_diff1 value: -10.360708676781178 - type: nauc_precision_at_10_max value: 10.945552490433458 - type: nauc_precision_at_10_std value: 11.016707653014485 - type: nauc_precision_at_1_diff1 value: 70.1738022073041 - type: nauc_precision_at_1_max value: 9.378639533482806 - type: nauc_precision_at_1_std value: -13.444033823202348 - type: nauc_precision_at_20_diff1 value: -13.557721925696583 - type: nauc_precision_at_20_max value: 6.331386521718574 - type: nauc_precision_at_20_std value: 10.322188778142388 - type: nauc_precision_at_3_diff1 value: 15.139456770248968 - type: nauc_precision_at_3_max value: 17.10220985600708 - type: nauc_precision_at_3_std value: 3.0448183682558074 - type: nauc_precision_at_5_diff1 value: -1.9825577548111102 - type: nauc_precision_at_5_max value: 17.139148127012625 - type: nauc_precision_at_5_std value: 10.598435750554753 - type: nauc_recall_at_1000_diff1 value: 15.641740744283005 - type: nauc_recall_at_1000_max value: 44.65315702195612 - type: nauc_recall_at_1000_std value: 52.34265862835513 - type: nauc_recall_at_100_diff1 value: 5.254385435323394 - type: nauc_recall_at_100_max value: 38.53577774395794 - type: nauc_recall_at_100_std value: 43.47744274335829 - type: nauc_recall_at_10_diff1 value: 19.135735476268042 - type: nauc_recall_at_10_max value: 30.05417445923848 - type: nauc_recall_at_10_std value: 18.3988023241141 - type: nauc_recall_at_1_diff1 value: 57.99489574836453 - type: nauc_recall_at_1_max value: 
7.830032589171654 - type: nauc_recall_at_1_std value: -10.140208285080295 - type: nauc_recall_at_20_diff1 value: 9.444797759735126 - type: nauc_recall_at_20_max value: 31.001311675371017 - type: nauc_recall_at_20_std value: 29.351418893822178 - type: nauc_recall_at_3_diff1 value: 36.88862653262064 - type: nauc_recall_at_3_max value: 19.845892741607823 - type: nauc_recall_at_3_std value: -1.0584273105890794 - type: nauc_recall_at_5_diff1 value: 27.360718561944974 - type: nauc_recall_at_5_max value: 26.698311215441738 - type: nauc_recall_at_5_std value: 8.97113997755362 - type: ndcg_at_1 value: 81.563 - type: ndcg_at_10 value: 88.41 - type: ndcg_at_100 value: 89.101 - type: ndcg_at_1000 value: 89.25800000000001 - type: ndcg_at_20 value: 88.79 - type: ndcg_at_3 value: 86.599 - type: ndcg_at_5 value: 87.74 - type: precision_at_1 value: 81.563 - type: precision_at_10 value: 10.699 - type: precision_at_100 value: 1.13 - type: precision_at_1000 value: 0.116 - type: precision_at_20 value: 5.479 - type: precision_at_3 value: 33.238 - type: precision_at_5 value: 20.744 - type: recall_at_1 value: 75.637 - type: recall_at_10 value: 95.57600000000001 - type: recall_at_100 value: 98.072 - type: recall_at_1000 value: 98.951 - type: recall_at_20 value: 96.792 - type: recall_at_3 value: 90.79599999999999 - type: recall_at_5 value: 93.674 - task: type: Retrieval dataset: name: MTEB FiQA2018 type: mteb/fiqa config: default split: test revision: 27a168819829fe9bcd655c2df245fb19452e8e06 metrics: - type: main_score value: 42.396 - type: map_at_1 value: 21.711 - type: map_at_10 value: 34.628 - type: map_at_100 value: 36.549 - type: map_at_1000 value: 36.719 - type: map_at_20 value: 35.673 - type: map_at_3 value: 30.585 - type: map_at_5 value: 32.875 - type: mrr_at_1 value: 41.82098765432099 - type: mrr_at_10 value: 50.69505682931607 - type: mrr_at_100 value: 51.50556608727901 - type: mrr_at_1000 value: 51.53870583208304 - type: mrr_at_20 value: 51.15345764364655 - type: mrr_at_3 value: 48.35390946502059 - type: mrr_at_5 value: 49.87397119341563 - type: nauc_map_at_1000_diff1 value: 45.182252919583895 - type: nauc_map_at_1000_max value: 35.66124930024801 - type: nauc_map_at_1000_std value: -0.6925562638650965 - type: nauc_map_at_100_diff1 value: 45.116964706960125 - type: nauc_map_at_100_max value: 35.54990469525889 - type: nauc_map_at_100_std value: -0.6667263852859368 - type: nauc_map_at_10_diff1 value: 45.39189096228184 - type: nauc_map_at_10_max value: 34.780111261901 - type: nauc_map_at_10_std value: -1.8169859294150819 - type: nauc_map_at_1_diff1 value: 47.72764937952259 - type: nauc_map_at_1_max value: 24.83306559709341 - type: nauc_map_at_1_std value: -4.714128457297418 - type: nauc_map_at_20_diff1 value: 45.17073365898278 - type: nauc_map_at_20_max value: 35.0938403469058 - type: nauc_map_at_20_std value: -1.373412631183604 - type: nauc_map_at_3_diff1 value: 46.525724305731295 - type: nauc_map_at_3_max value: 31.042538866512597 - type: nauc_map_at_3_std value: -4.119355935975354 - type: nauc_map_at_5_diff1 value: 45.79569633383187 - type: nauc_map_at_5_max value: 32.88779656647293 - type: nauc_map_at_5_std value: -3.2518474739335312 - type: nauc_mrr_at_1000_diff1 value: 52.83619185487903 - type: nauc_mrr_at_1000_max value: 42.30310720405186 - type: nauc_mrr_at_1000_std value: -1.1487703348518024 - type: nauc_mrr_at_100_diff1 value: 52.82248853996664 - type: nauc_mrr_at_100_max value: 42.30549701564678 - type: nauc_mrr_at_100_std value: -1.1240113031894834 - type: nauc_mrr_at_10_diff1 value: 
52.74644276642243 - type: nauc_mrr_at_10_max value: 42.39103029476398 - type: nauc_mrr_at_10_std value: -1.1043413237848576 - type: nauc_mrr_at_1_diff1 value: 54.810335521617326 - type: nauc_mrr_at_1_max value: 40.733260207843394 - type: nauc_mrr_at_1_std value: -4.452554921565855 - type: nauc_mrr_at_20_diff1 value: 52.788257862499954 - type: nauc_mrr_at_20_max value: 42.32658875363406 - type: nauc_mrr_at_20_std value: -1.2209728080684497 - type: nauc_mrr_at_3_diff1 value: 53.43281175319808 - type: nauc_mrr_at_3_max value: 41.735942650867926 - type: nauc_mrr_at_3_std value: -2.462688102468019 - type: nauc_mrr_at_5_diff1 value: 52.874037126566606 - type: nauc_mrr_at_5_max value: 41.93740449458822 - type: nauc_mrr_at_5_std value: -1.2928874908441947 - type: nauc_ndcg_at_1000_diff1 value: 46.5532425476402 - type: nauc_ndcg_at_1000_max value: 40.369611603370515 - type: nauc_ndcg_at_1000_std value: 3.472567588386994 - type: nauc_ndcg_at_100_diff1 value: 45.75244404695404 - type: nauc_ndcg_at_100_max value: 39.36470550675439 - type: nauc_ndcg_at_100_std value: 4.356189041115731 - type: nauc_ndcg_at_10_diff1 value: 46.005135323539704 - type: nauc_ndcg_at_10_max value: 37.89018165334218 - type: nauc_ndcg_at_10_std value: 0.7129618297768014 - type: nauc_ndcg_at_1_diff1 value: 54.810335521617326 - type: nauc_ndcg_at_1_max value: 40.733260207843394 - type: nauc_ndcg_at_1_std value: -4.452554921565855 - type: nauc_ndcg_at_20_diff1 value: 45.841552790490034 - type: nauc_ndcg_at_20_max value: 38.04992825472661 - type: nauc_ndcg_at_20_std value: 1.2748305707955212 - type: nauc_ndcg_at_3_diff1 value: 46.683033449357744 - type: nauc_ndcg_at_3_max value: 37.46397870760607 - type: nauc_ndcg_at_3_std value: -2.3421854966319824 - type: nauc_ndcg_at_5_diff1 value: 45.82409645378457 - type: nauc_ndcg_at_5_max value: 36.27588234096716 - type: nauc_ndcg_at_5_std value: -1.5141197170944254 - type: nauc_precision_at_1000_diff1 value: -3.137944321071885 - type: nauc_precision_at_1000_max value: 24.12803166253776 - type: nauc_precision_at_1000_std value: 11.076454789944101 - type: nauc_precision_at_100_diff1 value: 3.9896283891401048 - type: nauc_precision_at_100_max value: 31.00198316788829 - type: nauc_precision_at_100_std value: 15.725887643803063 - type: nauc_precision_at_10_diff1 value: 20.493420889888394 - type: nauc_precision_at_10_max value: 41.689699671507405 - type: nauc_precision_at_10_std value: 9.374983385669914 - type: nauc_precision_at_1_diff1 value: 54.810335521617326 - type: nauc_precision_at_1_max value: 40.733260207843394 - type: nauc_precision_at_1_std value: -4.452554921565855 - type: nauc_precision_at_20_diff1 value: 15.02911800246446 - type: nauc_precision_at_20_max value: 39.227068888505 - type: nauc_precision_at_20_std value: 11.755558515319404 - type: nauc_precision_at_3_diff1 value: 34.044986535461746 - type: nauc_precision_at_3_max value: 40.96605829831656 - type: nauc_precision_at_3_std value: 1.1903535705688038 - type: nauc_precision_at_5_diff1 value: 26.617002443432707 - type: nauc_precision_at_5_max value: 40.60413785916794 - type: nauc_precision_at_5_std value: 3.6984531670502814 - type: nauc_recall_at_1000_diff1 value: 26.96489389440101 - type: nauc_recall_at_1000_max value: 41.811583968523955 - type: nauc_recall_at_1000_std value: 41.5719519496712 - type: nauc_recall_at_100_diff1 value: 28.50851434908223 - type: nauc_recall_at_100_max value: 32.19528060706322 - type: nauc_recall_at_100_std value: 25.56935294258179 - type: nauc_recall_at_10_diff1 value: 35.139582891180964 - type: 
nauc_recall_at_10_max value: 32.15221840434225 - type: nauc_recall_at_10_std value: 5.550434611582702 - type: nauc_recall_at_1_diff1 value: 47.72764937952259 - type: nauc_recall_at_1_max value: 24.83306559709341 - type: nauc_recall_at_1_std value: -4.714128457297418 - type: nauc_recall_at_20_diff1 value: 32.78604811055205 - type: nauc_recall_at_20_max value: 29.62940720700254 - type: nauc_recall_at_20_std value: 6.769941491859872 - type: nauc_recall_at_3_diff1 value: 40.76090616138699 - type: nauc_recall_at_3_max value: 27.506425490226867 - type: nauc_recall_at_3_std value: -2.608872693119243 - type: nauc_recall_at_5_diff1 value: 37.06532485024711 - type: nauc_recall_at_5_max value: 27.704150556658448 - type: nauc_recall_at_5_std value: 0.4718707152343872 - type: ndcg_at_1 value: 41.821000000000005 - type: ndcg_at_10 value: 42.396 - type: ndcg_at_100 value: 49.370000000000005 - type: ndcg_at_1000 value: 52.251000000000005 - type: ndcg_at_20 value: 45.097 - type: ndcg_at_3 value: 39.028 - type: ndcg_at_5 value: 40.222 - type: precision_at_1 value: 41.821000000000005 - type: precision_at_10 value: 11.451 - type: precision_at_100 value: 1.863 - type: precision_at_1000 value: 0.23900000000000002 - type: precision_at_20 value: 6.798 - type: precision_at_3 value: 25.823 - type: precision_at_5 value: 18.735 - type: recall_at_1 value: 21.711 - type: recall_at_10 value: 48.862 - type: recall_at_100 value: 74.708 - type: recall_at_1000 value: 91.865 - type: recall_at_20 value: 57.50999999999999 - type: recall_at_3 value: 35.85 - type: recall_at_5 value: 41.976 - task: type: Retrieval dataset: name: MTEB HotpotQA type: mteb/hotpotqa config: default split: test revision: ab518f4d6fcca38d87c25209f94beba119d02014 metrics: - type: main_score value: 72.21 - type: map_at_1 value: 39.487 - type: map_at_10 value: 63.949999999999996 - type: map_at_100 value: 64.873 - type: map_at_1000 value: 64.927 - type: map_at_20 value: 64.529 - type: map_at_3 value: 60.243 - type: map_at_5 value: 62.613 - type: mrr_at_1 value: 78.97366644159351 - type: mrr_at_10 value: 84.84600173627825 - type: mrr_at_100 value: 85.0172804866798 - type: mrr_at_1000 value: 85.02245651152857 - type: mrr_at_20 value: 84.9625577788225 - type: mrr_at_3 value: 83.90276839972962 - type: mrr_at_5 value: 84.48278190411845 - type: nauc_map_at_1000_diff1 value: 19.825004700775164 - type: nauc_map_at_1000_max value: 19.943221724164182 - type: nauc_map_at_1000_std value: 10.068951166560058 - type: nauc_map_at_100_diff1 value: 19.80139472181137 - type: nauc_map_at_100_max value: 19.938006132804347 - type: nauc_map_at_100_std value: 10.100008107666842 - type: nauc_map_at_10_diff1 value: 19.53604502514735 - type: nauc_map_at_10_max value: 19.62768870331064 - type: nauc_map_at_10_std value: 9.446859074725705 - type: nauc_map_at_1_diff1 value: 67.7764270505257 - type: nauc_map_at_1_max value: 38.45166604737058 - type: nauc_map_at_1_std value: 1.9919181988552352 - type: nauc_map_at_20_diff1 value: 19.635871913149913 - type: nauc_map_at_20_max value: 19.812838965919155 - type: nauc_map_at_20_std value: 9.905163140101845 - type: nauc_map_at_3_diff1 value: 18.965707122532212 - type: nauc_map_at_3_max value: 17.878860313056517 - type: nauc_map_at_3_std value: 6.189378752019195 - type: nauc_map_at_5_diff1 value: 19.493354049675954 - type: nauc_map_at_5_max value: 19.24527088109141 - type: nauc_map_at_5_std value: 8.283883139680066 - type: nauc_mrr_at_1000_diff1 value: 66.87150374356781 - type: nauc_mrr_at_1000_max value: 41.413456443203984 - type: 
nauc_mrr_at_1000_std value: 4.140387282484357 - type: nauc_mrr_at_100_diff1 value: 66.87178015619061 - type: nauc_mrr_at_100_max value: 41.419754763150834 - type: nauc_mrr_at_100_std value: 4.15222235416704 - type: nauc_mrr_at_10_diff1 value: 66.89720586892301 - type: nauc_mrr_at_10_max value: 41.56353878125211 - type: nauc_mrr_at_10_std value: 4.213376519922392 - type: nauc_mrr_at_1_diff1 value: 67.7764270505257 - type: nauc_mrr_at_1_max value: 38.45166604737058 - type: nauc_mrr_at_1_std value: 1.9919181988552352 - type: nauc_mrr_at_20_diff1 value: 66.8714688713149 - type: nauc_mrr_at_20_max value: 41.46170778986735 - type: nauc_mrr_at_20_std value: 4.165154741309859 - type: nauc_mrr_at_3_diff1 value: 66.31615462679144 - type: nauc_mrr_at_3_max value: 41.419637693259936 - type: nauc_mrr_at_3_std value: 3.814834551396097 - type: nauc_mrr_at_5_diff1 value: 66.7289413087213 - type: nauc_mrr_at_5_max value: 41.668346356371586 - type: nauc_mrr_at_5_std value: 4.116331539882484 - type: nauc_ndcg_at_1000_diff1 value: 26.37325375970598 - type: nauc_ndcg_at_1000_max value: 24.850915174721735 - type: nauc_ndcg_at_1000_std value: 13.37585683440429 - type: nauc_ndcg_at_100_diff1 value: 25.591771178059503 - type: nauc_ndcg_at_100_max value: 24.562820829532473 - type: nauc_ndcg_at_100_std value: 14.093690500501541 - type: nauc_ndcg_at_10_diff1 value: 24.64600598115805 - type: nauc_ndcg_at_10_max value: 23.543499404760023 - type: nauc_ndcg_at_10_std value: 11.55823632781553 - type: nauc_ndcg_at_1_diff1 value: 67.7764270505257 - type: nauc_ndcg_at_1_max value: 38.45166604737058 - type: nauc_ndcg_at_1_std value: 1.9919181988552352 - type: nauc_ndcg_at_20_diff1 value: 24.757843275306726 - type: nauc_ndcg_at_20_max value: 23.951154200380827 - type: nauc_ndcg_at_20_std value: 12.931320453044886 - type: nauc_ndcg_at_3_diff1 value: 24.37742630418847 - type: nauc_ndcg_at_3_max value: 21.310512304883723 - type: nauc_ndcg_at_3_std value: 6.503993200818077 - type: nauc_ndcg_at_5_diff1 value: 24.813706829269716 - type: nauc_ndcg_at_5_max value: 22.993657212898 - type: nauc_ndcg_at_5_std value: 9.34462052506809 - type: nauc_precision_at_1000_diff1 value: -0.6506415756958156 - type: nauc_precision_at_1000_max value: 28.039755644694875 - type: nauc_precision_at_1000_std value: 53.46474329623814 - type: nauc_precision_at_100_diff1 value: 3.78462668236152 - type: nauc_precision_at_100_max value: 22.501700881673862 - type: nauc_precision_at_100_std value: 40.56672716474142 - type: nauc_precision_at_10_diff1 value: 9.156113228907534 - type: nauc_precision_at_10_max value: 19.734206254833254 - type: nauc_precision_at_10_std value: 19.986282545779602 - type: nauc_precision_at_1_diff1 value: 67.7764270505257 - type: nauc_precision_at_1_max value: 38.45166604737058 - type: nauc_precision_at_1_std value: 1.9919181988552352 - type: nauc_precision_at_20_diff1 value: 6.6164335644470125 - type: nauc_precision_at_20_max value: 20.29343459608317 - type: nauc_precision_at_20_std value: 26.51115475333977 - type: nauc_precision_at_3_diff1 value: 12.476520554399546 - type: nauc_precision_at_3_max value: 16.69401409858964 - type: nauc_precision_at_3_std value: 8.165880294907444 - type: nauc_precision_at_5_diff1 value: 11.783242828320958 - type: nauc_precision_at_5_max value: 19.0679467875759 - type: nauc_precision_at_5_std value: 13.615358345509884 - type: nauc_recall_at_1000_diff1 value: -0.6506415756960168 - type: nauc_recall_at_1000_max value: 28.039755644694786 - type: nauc_recall_at_1000_std value: 53.46474329623801 - type: 
nauc_recall_at_100_diff1 value: 3.7846266823613877 - type: nauc_recall_at_100_max value: 22.501700881674008 - type: nauc_recall_at_100_std value: 40.566727164741366 - type: nauc_recall_at_10_diff1 value: 9.15611322890755 - type: nauc_recall_at_10_max value: 19.73420625483318 - type: nauc_recall_at_10_std value: 19.98628254577951 - type: nauc_recall_at_1_diff1 value: 67.7764270505257 - type: nauc_recall_at_1_max value: 38.45166604737058 - type: nauc_recall_at_1_std value: 1.9919181988552352 - type: nauc_recall_at_20_diff1 value: 6.616433564446929 - type: nauc_recall_at_20_max value: 20.293434596083248 - type: nauc_recall_at_20_std value: 26.5111547533396 - type: nauc_recall_at_3_diff1 value: 12.476520554399531 - type: nauc_recall_at_3_max value: 16.69401409858966 - type: nauc_recall_at_3_std value: 8.165880294907438 - type: nauc_recall_at_5_diff1 value: 11.783242828320999 - type: nauc_recall_at_5_max value: 19.067946787575845 - type: nauc_recall_at_5_std value: 13.61535834550991 - type: ndcg_at_1 value: 78.974 - type: ndcg_at_10 value: 72.21 - type: ndcg_at_100 value: 75.264 - type: ndcg_at_1000 value: 76.259 - type: ndcg_at_20 value: 73.628 - type: ndcg_at_3 value: 67.047 - type: ndcg_at_5 value: 69.974 - type: precision_at_1 value: 78.974 - type: precision_at_10 value: 15.267 - type: precision_at_100 value: 1.762 - type: precision_at_1000 value: 0.189 - type: precision_at_20 value: 8.09 - type: precision_at_3 value: 43.309 - type: precision_at_5 value: 28.294000000000004 - type: recall_at_1 value: 39.487 - type: recall_at_10 value: 76.334 - type: recall_at_100 value: 88.076 - type: recall_at_1000 value: 94.59100000000001 - type: recall_at_20 value: 80.898 - type: recall_at_3 value: 64.96300000000001 - type: recall_at_5 value: 70.736 - task: type: Retrieval dataset: name: MTEB MSMARCO type: mteb/msmarco config: default split: dev revision: c5a29a104738b98a9e76336939199e264163d4a0 metrics: - type: main_score value: 42.027 - type: map_at_1 value: 22.118 - type: map_at_10 value: 34.816 - type: map_at_100 value: 35.983 - type: map_at_1000 value: 36.028999999999996 - type: map_at_20 value: 35.545 - type: map_at_3 value: 30.752000000000002 - type: map_at_5 value: 33.114 - type: mrr_at_1 value: 22.793696275071635 - type: mrr_at_10 value: 35.47250079592483 - type: mrr_at_100 value: 36.576471512902856 - type: mrr_at_1000 value: 36.616205680509786 - type: mrr_at_20 value: 36.16557033864942 - type: mrr_at_3 value: 31.48758357211065 - type: mrr_at_5 value: 33.80563514804202 - type: nauc_map_at_1000_diff1 value: 32.89234100489284 - type: nauc_map_at_1000_max value: 1.1802816553581001 - type: nauc_map_at_1000_std value: -20.187692925732446 - type: nauc_map_at_100_diff1 value: 32.88694493681772 - type: nauc_map_at_100_max value: 1.1732717578080365 - type: nauc_map_at_100_std value: -20.164165529035245 - type: nauc_map_at_10_diff1 value: 32.826182211848796 - type: nauc_map_at_10_max value: 1.1551262165737235 - type: nauc_map_at_10_std value: -20.88326292319754 - type: nauc_map_at_1_diff1 value: 36.12732122790642 - type: nauc_map_at_1_max value: 1.8197550109156913 - type: nauc_map_at_1_std value: -17.205625720792167 - type: nauc_map_at_20_diff1 value: 32.83333177195551 - type: nauc_map_at_20_max value: 1.0937431645506202 - type: nauc_map_at_20_std value: -20.503956514646145 - type: nauc_map_at_3_diff1 value: 32.76264193805814 - type: nauc_map_at_3_max value: 0.8560962042500389 - type: nauc_map_at_3_std value: -20.608930717315577 - type: nauc_map_at_5_diff1 value: 32.78673238978775 - type: nauc_map_at_5_max 
value: 1.0511863039329437 - type: nauc_map_at_5_std value: -21.02164728626011 - type: nauc_mrr_at_1000_diff1 value: 32.610323934702286 - type: nauc_mrr_at_1000_max value: 1.276669121901405 - type: nauc_mrr_at_1000_std value: -19.908120615285043 - type: nauc_mrr_at_100_diff1 value: 32.601373758102795 - type: nauc_mrr_at_100_max value: 1.2752735149992132 - type: nauc_mrr_at_100_std value: -19.87937042610101 - type: nauc_mrr_at_10_diff1 value: 32.55795432078168 - type: nauc_mrr_at_10_max value: 1.2881786969258637 - type: nauc_mrr_at_10_std value: -20.54564519015977 - type: nauc_mrr_at_1_diff1 value: 35.596301376443726 - type: nauc_mrr_at_1_max value: 1.7633238037306902 - type: nauc_mrr_at_1_std value: -17.1999420019887 - type: nauc_mrr_at_20_diff1 value: 32.57185739111023 - type: nauc_mrr_at_20_max value: 1.2212620853201877 - type: nauc_mrr_at_20_std value: -20.179517281041264 - type: nauc_mrr_at_3_diff1 value: 32.42681377099514 - type: nauc_mrr_at_3_max value: 0.8745921708861145 - type: nauc_mrr_at_3_std value: -20.41017687790572 - type: nauc_mrr_at_5_diff1 value: 32.499107129648266 - type: nauc_mrr_at_5_max value: 1.1159673851851573 - type: nauc_mrr_at_5_std value: -20.695143502133824 - type: nauc_ndcg_at_1000_diff1 value: 32.16957965806702 - type: nauc_ndcg_at_1000_max value: 1.6763998947980905 - type: nauc_ndcg_at_1000_std value: -18.970592350332893 - type: nauc_ndcg_at_100_diff1 value: 31.977550102558872 - type: nauc_ndcg_at_100_max value: 1.5625858650110014 - type: nauc_ndcg_at_100_std value: -17.990456766123835 - type: nauc_ndcg_at_10_diff1 value: 31.82738932481356 - type: nauc_ndcg_at_10_max value: 1.1661362042692103 - type: nauc_ndcg_at_10_std value: -21.872680193994217 - type: nauc_ndcg_at_1_diff1 value: 35.596301376443726 - type: nauc_ndcg_at_1_max value: 1.7633238037306902 - type: nauc_ndcg_at_1_std value: -17.1999420019887 - type: nauc_ndcg_at_20_diff1 value: 31.749656399266264 - type: nauc_ndcg_at_20_max value: 0.9629024493088691 - type: nauc_ndcg_at_20_std value: -20.4379403899277 - type: nauc_ndcg_at_3_diff1 value: 31.731361436850836 - type: nauc_ndcg_at_3_max value: 0.531749791578849 - type: nauc_ndcg_at_3_std value: -21.551112910698674 - type: nauc_ndcg_at_5_diff1 value: 31.785373941157303 - type: nauc_ndcg_at_5_max value: 0.86207769368333 - type: nauc_ndcg_at_5_std value: -22.24923399160171 - type: nauc_precision_at_1000_diff1 value: -3.841288331986519 - type: nauc_precision_at_1000_max value: 13.558041371634976 - type: nauc_precision_at_1000_std value: 15.181510484512827 - type: nauc_precision_at_100_diff1 value: 12.441154582709053 - type: nauc_precision_at_100_max value: 8.428136255841935 - type: nauc_precision_at_100_std value: 14.710391839731656 - type: nauc_precision_at_10_diff1 value: 26.185854813986705 - type: nauc_precision_at_10_max value: 1.6348387310504464 - type: nauc_precision_at_10_std value: -23.448927004357298 - type: nauc_precision_at_1_diff1 value: 35.596301376443726 - type: nauc_precision_at_1_max value: 1.7633238037306902 - type: nauc_precision_at_1_std value: -17.1999420019887 - type: nauc_precision_at_20_diff1 value: 22.69194179544158 - type: nauc_precision_at_20_max value: 1.2972015009169306 - type: nauc_precision_at_20_std value: -15.751482380060269 - type: nauc_precision_at_3_diff1 value: 28.255531512125188 - type: nauc_precision_at_3_max value: -0.3715575458464333 - type: nauc_precision_at_3_std value: -24.227970454057697 - type: nauc_precision_at_5_diff1 value: 27.65497951098847 - type: nauc_precision_at_5_max value: 0.449773375292472 - type: 
nauc_precision_at_5_std value: -25.37445450938601 - type: nauc_recall_at_1000_diff1 value: 15.243948516763819 - type: nauc_recall_at_1000_max value: 41.821227805251375 - type: nauc_recall_at_1000_std value: 61.66297794838101 - type: nauc_recall_at_100_diff1 value: 24.516543685029994 - type: nauc_recall_at_100_max value: 7.093972966253228 - type: nauc_recall_at_100_std value: 17.244452321212282 - type: nauc_recall_at_10_diff1 value: 28.404243095182828 - type: nauc_recall_at_10_max value: 1.0805210480930945 - type: nauc_recall_at_10_std value: -24.885018657039527 - type: nauc_recall_at_1_diff1 value: 36.12732122790642 - type: nauc_recall_at_1_max value: 1.8197550109156913 - type: nauc_recall_at_1_std value: -17.205625720792167 - type: nauc_recall_at_20_diff1 value: 26.956250169438512 - type: nauc_recall_at_20_max value: 0.023973408161285917 - type: nauc_recall_at_20_std value: -18.32944444428131 - type: nauc_recall_at_3_diff1 value: 28.9894205130054 - type: nauc_recall_at_3_max value: -0.36140658021466865 - type: nauc_recall_at_3_std value: -24.022505107768364 - type: nauc_recall_at_5_diff1 value: 28.907023434955104 - type: nauc_recall_at_5_max value: 0.2501037567297729 - type: nauc_recall_at_5_std value: -25.719919602271496 - type: ndcg_at_1 value: 22.794 - type: ndcg_at_10 value: 42.027 - type: ndcg_at_100 value: 47.601 - type: ndcg_at_1000 value: 48.713 - type: ndcg_at_20 value: 44.623000000000005 - type: ndcg_at_3 value: 33.772999999999996 - type: ndcg_at_5 value: 37.991 - type: precision_at_1 value: 22.794 - type: precision_at_10 value: 6.711 - type: precision_at_100 value: 0.9490000000000001 - type: precision_at_1000 value: 0.105 - type: precision_at_20 value: 3.8920000000000003 - type: precision_at_3 value: 14.46 - type: precision_at_5 value: 10.822 - type: recall_at_1 value: 22.118 - type: recall_at_10 value: 64.201 - type: recall_at_100 value: 89.878 - type: recall_at_1000 value: 98.259 - type: recall_at_20 value: 74.34100000000001 - type: recall_at_3 value: 41.8 - type: recall_at_5 value: 51.959 - task: type: Retrieval dataset: name: MTEB NFCorpus type: mteb/nfcorpus config: default split: test revision: ec0fa4fe99da2ff19ca1214b7966684033a58814 metrics: - type: main_score value: 36.201 - type: map_at_1 value: 5.654 - type: map_at_10 value: 13.402 - type: map_at_100 value: 16.849 - type: map_at_1000 value: 18.264 - type: map_at_20 value: 14.832 - type: map_at_3 value: 9.619 - type: map_at_5 value: 11.483 - type: mrr_at_1 value: 47.6780185758514 - type: mrr_at_10 value: 56.47906531033466 - type: mrr_at_100 value: 57.04539749991402 - type: mrr_at_1000 value: 57.08810157607369 - type: mrr_at_20 value: 56.88003170105462 - type: mrr_at_3 value: 54.43756449948401 - type: mrr_at_5 value: 55.660474716202266 - type: nauc_map_at_1000_diff1 value: 31.134615238698192 - type: nauc_map_at_1000_max value: 36.09522002487132 - type: nauc_map_at_1000_std value: 14.72627666649002 - type: nauc_map_at_100_diff1 value: 32.777473351864444 - type: nauc_map_at_100_max value: 35.25391471621035 - type: nauc_map_at_100_std value: 12.024428973861083 - type: nauc_map_at_10_diff1 value: 36.46466466148528 - type: nauc_map_at_10_max value: 29.707805406826722 - type: nauc_map_at_10_std value: 2.0678757794226335 - type: nauc_map_at_1_diff1 value: 54.30208426149679 - type: nauc_map_at_1_max value: 18.69125148481608 - type: nauc_map_at_1_std value: -8.970955660291802 - type: nauc_map_at_20_diff1 value: 34.76513311600623 - type: nauc_map_at_20_max value: 32.20666003570514 - type: nauc_map_at_20_std value: 
5.924889441518581 - type: nauc_map_at_3_diff1 value: 45.73465176835491 - type: nauc_map_at_3_max value: 23.492291524989106 - type: nauc_map_at_3_std value: -5.0123536561688855 - type: nauc_map_at_5_diff1 value: 39.7128319374107 - type: nauc_map_at_5_max value: 25.84231729559691 - type: nauc_map_at_5_std value: -2.0861428981140344 - type: nauc_mrr_at_1000_diff1 value: 33.0997881703397 - type: nauc_mrr_at_1000_max value: 52.7089709923531 - type: nauc_mrr_at_1000_std value: 28.8517952674151 - type: nauc_mrr_at_100_diff1 value: 33.1094984027438 - type: nauc_mrr_at_100_max value: 52.74301398138847 - type: nauc_mrr_at_100_std value: 28.897997840300892 - type: nauc_mrr_at_10_diff1 value: 33.300713655464925 - type: nauc_mrr_at_10_max value: 52.572139698742184 - type: nauc_mrr_at_10_std value: 28.66875615527188 - type: nauc_mrr_at_1_diff1 value: 32.57632582147155 - type: nauc_mrr_at_1_max value: 46.020072246328816 - type: nauc_mrr_at_1_std value: 20.99097889820076 - type: nauc_mrr_at_20_diff1 value: 33.04083904518949 - type: nauc_mrr_at_20_max value: 52.597451362456994 - type: nauc_mrr_at_20_std value: 28.681527293587898 - type: nauc_mrr_at_3_diff1 value: 33.64864656322754 - type: nauc_mrr_at_3_max value: 51.82256412011279 - type: nauc_mrr_at_3_std value: 27.241260746740686 - type: nauc_mrr_at_5_diff1 value: 33.53201325467246 - type: nauc_mrr_at_5_max value: 52.79440885773516 - type: nauc_mrr_at_5_std value: 28.663081392086028 - type: nauc_ndcg_at_1000_diff1 value: 28.632650542040714 - type: nauc_ndcg_at_1000_max value: 51.24103069835822 - type: nauc_ndcg_at_1000_std value: 35.05503784757999 - type: nauc_ndcg_at_100_diff1 value: 29.082177715298503 - type: nauc_ndcg_at_100_max value: 45.24750203464315 - type: nauc_ndcg_at_100_std value: 27.146548925680914 - type: nauc_ndcg_at_10_diff1 value: 25.123554466093594 - type: nauc_ndcg_at_10_max value: 42.74355537806512 - type: nauc_ndcg_at_10_std value: 22.234407997803935 - type: nauc_ndcg_at_1_diff1 value: 33.75083940012058 - type: nauc_ndcg_at_1_max value: 44.44319402133161 - type: nauc_ndcg_at_1_std value: 19.146499358406487 - type: nauc_ndcg_at_20_diff1 value: 24.954207968331872 - type: nauc_ndcg_at_20_max value: 41.25991844405748 - type: nauc_ndcg_at_20_std value: 22.169009285868864 - type: nauc_ndcg_at_3_diff1 value: 28.186539942033516 - type: nauc_ndcg_at_3_max value: 44.40790009754965 - type: nauc_ndcg_at_3_std value: 20.99226576085115 - type: nauc_ndcg_at_5_diff1 value: 25.498387899376706 - type: nauc_ndcg_at_5_max value: 43.174709766261316 - type: nauc_ndcg_at_5_std value: 21.88111962672031 - type: nauc_precision_at_1000_diff1 value: -16.22321012507648 - type: nauc_precision_at_1000_max value: 5.808852256649677 - type: nauc_precision_at_1000_std value: 19.875641776698824 - type: nauc_precision_at_100_diff1 value: -10.248089374355486 - type: nauc_precision_at_100_max value: 19.29065415127588 - type: nauc_precision_at_100_std value: 31.75019665627339 - type: nauc_precision_at_10_diff1 value: 3.6783257583955056 - type: nauc_precision_at_10_max value: 39.22286010695767 - type: nauc_precision_at_10_std value: 31.225485732801022 - type: nauc_precision_at_1_diff1 value: 32.57632582147155 - type: nauc_precision_at_1_max value: 46.020072246328816 - type: nauc_precision_at_1_std value: 20.99097889820076 - type: nauc_precision_at_20_diff1 value: -3.1632510833242784 - type: nauc_precision_at_20_max value: 31.575496762405734 - type: nauc_precision_at_20_std value: 31.576283324468115 - type: nauc_precision_at_3_diff1 value: 17.78864585545647 - type: 
nauc_precision_at_3_max value: 44.201289661125585 - type: nauc_precision_at_3_std value: 25.447840649726693 - type: nauc_precision_at_5_diff1 value: 9.986748662091358 - type: nauc_precision_at_5_max value: 41.214164860776755 - type: nauc_precision_at_5_std value: 28.22551704127726 - type: nauc_recall_at_1000_diff1 value: 10.984331766850506 - type: nauc_recall_at_1000_max value: 24.641216018034104 - type: nauc_recall_at_1000_std value: 26.91064221008446 - type: nauc_recall_at_100_diff1 value: 23.7009352078473 - type: nauc_recall_at_100_max value: 30.176031609451297 - type: nauc_recall_at_100_std value: 20.360365243211564 - type: nauc_recall_at_10_diff1 value: 28.11831737650638 - type: nauc_recall_at_10_max value: 24.21539670487414 - type: nauc_recall_at_10_std value: 2.245504974150148 - type: nauc_recall_at_1_diff1 value: 54.30208426149679 - type: nauc_recall_at_1_max value: 18.69125148481608 - type: nauc_recall_at_1_std value: -8.970955660291802 - type: nauc_recall_at_20_diff1 value: 26.199425305139908 - type: nauc_recall_at_20_max value: 24.66704097503736 - type: nauc_recall_at_20_std value: 5.86052107206246 - type: nauc_recall_at_3_diff1 value: 42.88348677575622 - type: nauc_recall_at_3_max value: 21.189371077603308 - type: nauc_recall_at_3_std value: -4.537510127238226 - type: nauc_recall_at_5_diff1 value: 30.7936756722569 - type: nauc_recall_at_5_max value: 21.06136406164962 - type: nauc_recall_at_5_std value: -1.4113804735229794 - type: ndcg_at_1 value: 45.975 - type: ndcg_at_10 value: 36.201 - type: ndcg_at_100 value: 32.736 - type: ndcg_at_1000 value: 41.099000000000004 - type: ndcg_at_20 value: 33.724 - type: ndcg_at_3 value: 42.242000000000004 - type: ndcg_at_5 value: 40.137 - type: precision_at_1 value: 47.678 - type: precision_at_10 value: 26.904 - type: precision_at_100 value: 8.368 - type: precision_at_1000 value: 2.078 - type: precision_at_20 value: 19.845 - type: precision_at_3 value: 40.351 - type: precision_at_5 value: 35.108 - type: recall_at_1 value: 5.654 - type: recall_at_10 value: 17.793 - type: recall_at_100 value: 32.483000000000004 - type: recall_at_1000 value: 63.294 - type: recall_at_20 value: 21.754 - type: recall_at_3 value: 10.771 - type: recall_at_5 value: 14.084 - task: type: Retrieval dataset: name: MTEB NQ type: mteb/nq config: default split: test revision: b774495ed302d8c44a3a7ea25c90dbce03968f31 metrics: - type: main_score value: 62.464 - type: map_at_1 value: 38.0 - type: map_at_10 value: 54.806 - type: map_at_100 value: 55.599 - type: map_at_1000 value: 55.617000000000004 - type: map_at_20 value: 55.336 - type: map_at_3 value: 50.58200000000001 - type: map_at_5 value: 53.181 - type: mrr_at_1 value: 42.46813441483198 - type: mrr_at_10 value: 57.060710147326446 - type: mrr_at_100 value: 57.60978373431328 - type: mrr_at_1000 value: 57.62192762809547 - type: mrr_at_20 value: 57.43431796174232 - type: mrr_at_3 value: 53.78041714947835 - type: mrr_at_5 value: 55.81257242178437 - type: nauc_map_at_1000_diff1 value: 38.337572188308194 - type: nauc_map_at_1000_max value: 27.550035254787197 - type: nauc_map_at_1000_std value: -7.5513729587308145 - type: nauc_map_at_100_diff1 value: 38.335337794455015 - type: nauc_map_at_100_max value: 27.56919614414171 - type: nauc_map_at_100_std value: -7.526017855405723 - type: nauc_map_at_10_diff1 value: 38.308131361353816 - type: nauc_map_at_10_max value: 27.691849580929933 - type: nauc_map_at_10_std value: -7.971461731555123 - type: nauc_map_at_1_diff1 value: 42.721072690634884 - type: nauc_map_at_1_max value: 
21.750451486885332 - type: nauc_map_at_1_std value: -9.99540950522643 - type: nauc_map_at_20_diff1 value: 38.25792874982169 - type: nauc_map_at_20_max value: 27.68877906159661 - type: nauc_map_at_20_std value: -7.560753583212102 - type: nauc_map_at_3_diff1 value: 37.950570055936254 - type: nauc_map_at_3_max value: 26.257969511794858 - type: nauc_map_at_3_std value: -9.236868658300553 - type: nauc_map_at_5_diff1 value: 37.99893219450212 - type: nauc_map_at_5_max value: 27.293454259158057 - type: nauc_map_at_5_std value: -8.734089449603806 - type: nauc_mrr_at_1000_diff1 value: 37.777767467474774 - type: nauc_mrr_at_1000_max value: 27.39507603748298 - type: nauc_mrr_at_1000_std value: -5.554754076870114 - type: nauc_mrr_at_100_diff1 value: 37.77981674583538 - type: nauc_mrr_at_100_max value: 27.411100989441557 - type: nauc_mrr_at_100_std value: -5.539061231412731 - type: nauc_mrr_at_10_diff1 value: 37.72399003363479 - type: nauc_mrr_at_10_max value: 27.618142546685416 - type: nauc_mrr_at_10_std value: -5.6819843907448195 - type: nauc_mrr_at_1_diff1 value: 41.17596078958236 - type: nauc_mrr_at_1_max value: 23.32588591818617 - type: nauc_mrr_at_1_std value: -7.126628034623689 - type: nauc_mrr_at_20_diff1 value: 37.695136721588 - type: nauc_mrr_at_20_max value: 27.52850676467322 - type: nauc_mrr_at_20_std value: -5.50667995515647 - type: nauc_mrr_at_3_diff1 value: 37.23845700908964 - type: nauc_mrr_at_3_max value: 26.69389772971012 - type: nauc_mrr_at_3_std value: -6.31868405989011 - type: nauc_mrr_at_5_diff1 value: 37.33757394192838 - type: nauc_mrr_at_5_max value: 27.42091593836207 - type: nauc_mrr_at_5_std value: -5.993243330132065 - type: nauc_ndcg_at_1000_diff1 value: 37.74836061640332 - type: nauc_ndcg_at_1000_max value: 29.03148916289089 - type: nauc_ndcg_at_1000_std value: -5.543065770074502 - type: nauc_ndcg_at_100_diff1 value: 37.75593955089626 - type: nauc_ndcg_at_100_max value: 29.67109480272493 - type: nauc_ndcg_at_100_std value: -4.773697596687493 - type: nauc_ndcg_at_10_diff1 value: 37.41701174824348 - type: nauc_ndcg_at_10_max value: 30.448703434043445 - type: nauc_ndcg_at_10_std value: -6.306202666419071 - type: nauc_ndcg_at_1_diff1 value: 41.17596078958236 - type: nauc_ndcg_at_1_max value: 23.32588591818617 - type: nauc_ndcg_at_1_std value: -7.126628034623689 - type: nauc_ndcg_at_20_diff1 value: 37.17445197824622 - type: nauc_ndcg_at_20_max value: 30.47378561555209 - type: nauc_ndcg_at_20_std value: -4.921584853993488 - type: nauc_ndcg_at_3_diff1 value: 36.5261976812068 - type: nauc_ndcg_at_3_max value: 27.560538820208926 - type: nauc_ndcg_at_3_std value: -8.556686332882931 - type: nauc_ndcg_at_5_diff1 value: 36.571462759614526 - type: nauc_ndcg_at_5_max value: 29.363401730752585 - type: nauc_ndcg_at_5_std value: -7.825739170420347 - type: nauc_precision_at_1000_diff1 value: -12.588899483401223 - type: nauc_precision_at_1000_max value: 2.641097890578701 - type: nauc_precision_at_1000_std value: 17.643107625788748 - type: nauc_precision_at_100_diff1 value: -8.40579874206785 - type: nauc_precision_at_100_max value: 9.725496771040037 - type: nauc_precision_at_100_std value: 21.558582760191243 - type: nauc_precision_at_10_diff1 value: 6.619157191854486 - type: nauc_precision_at_10_max value: 23.767406373688402 - type: nauc_precision_at_10_std value: 10.428535003478808 - type: nauc_precision_at_1_diff1 value: 41.17596078958236 - type: nauc_precision_at_1_max value: 23.32588591818617 - type: nauc_precision_at_1_std value: -7.126628034623689 - type: nauc_precision_at_20_diff1 value: 
-0.6449974218292859 - type: nauc_precision_at_20_max value: 20.211503851418783 - type: nauc_precision_at_20_std value: 17.922745410142575 - type: nauc_precision_at_3_diff1 value: 19.710276097428657 - type: nauc_precision_at_3_max value: 26.768918044758706 - type: nauc_precision_at_3_std value: -1.0636448912049246 - type: nauc_precision_at_5_diff1 value: 13.073181337982613 - type: nauc_precision_at_5_max value: 26.418340338971024 - type: nauc_precision_at_5_std value: 2.9842078949528688 - type: nauc_recall_at_1000_diff1 value: 30.52411148739828 - type: nauc_recall_at_1000_max value: 90.96409807536762 - type: nauc_recall_at_1000_std value: 83.94857830921949 - type: nauc_recall_at_100_diff1 value: 36.936303690592155 - type: nauc_recall_at_100_max value: 71.91515014325869 - type: nauc_recall_at_100_std value: 48.93061263403371 - type: nauc_recall_at_10_diff1 value: 32.84292362076269 - type: nauc_recall_at_10_max value: 44.27252783122478 - type: nauc_recall_at_10_std value: -1.5981198975612385 - type: nauc_recall_at_1_diff1 value: 42.721072690634884 - type: nauc_recall_at_1_max value: 21.750451486885332 - type: nauc_recall_at_1_std value: -9.99540950522643 - type: nauc_recall_at_20_diff1 value: 29.36724417081702 - type: nauc_recall_at_20_max value: 52.035846390214715 - type: nauc_recall_at_20_std value: 11.967264191332818 - type: nauc_recall_at_3_diff1 value: 31.634923771936098 - type: nauc_recall_at_3_max value: 30.225743369869473 - type: nauc_recall_at_3_std value: -9.253665347118615 - type: nauc_recall_at_5_diff1 value: 30.66271853090737 - type: nauc_recall_at_5_max value: 35.70815715994996 - type: nauc_recall_at_5_std value: -7.836012956078996 - type: ndcg_at_1 value: 42.468 - type: ndcg_at_10 value: 62.464 - type: ndcg_at_100 value: 65.618 - type: ndcg_at_1000 value: 66.014 - type: ndcg_at_20 value: 64.12 - type: ndcg_at_3 value: 54.790000000000006 - type: ndcg_at_5 value: 58.992 - type: precision_at_1 value: 42.468 - type: precision_at_10 value: 9.959 - type: precision_at_100 value: 1.174 - type: precision_at_1000 value: 0.121 - type: precision_at_20 value: 5.380999999999999 - type: precision_at_3 value: 24.73 - type: precision_at_5 value: 17.299999999999997 - type: recall_at_1 value: 38.0 - type: recall_at_10 value: 83.22699999999999 - type: recall_at_100 value: 96.584 - type: recall_at_1000 value: 99.512 - type: recall_at_20 value: 89.291 - type: recall_at_3 value: 63.666 - type: recall_at_5 value: 73.27900000000001 - task: type: Retrieval dataset: name: MTEB QuoraRetrieval type: mteb/quora config: default split: test revision: e4e08e0b7dbe3c8700f0daef558ff32256715259 metrics: - type: main_score value: 87.366 - type: map_at_1 value: 69.95700000000001 - type: map_at_10 value: 83.55 - type: map_at_100 value: 84.196 - type: map_at_1000 value: 84.21600000000001 - type: map_at_20 value: 83.982 - type: map_at_3 value: 80.647 - type: map_at_5 value: 82.443 - type: mrr_at_1 value: 80.39 - type: mrr_at_10 value: 86.65646031746004 - type: mrr_at_100 value: 86.7852113210373 - type: mrr_at_1000 value: 86.78651118354796 - type: mrr_at_20 value: 86.75772838878498 - type: mrr_at_3 value: 85.67499999999971 - type: mrr_at_5 value: 86.33749999999962 - type: nauc_map_at_1000_diff1 value: 76.68189702770007 - type: nauc_map_at_1000_max value: 36.19988239025682 - type: nauc_map_at_1000_std value: -26.231691135645736 - type: nauc_map_at_100_diff1 value: 76.68832712120171 - type: nauc_map_at_100_max value: 36.18627717337547 - type: nauc_map_at_100_std value: -26.28243886166 - type: nauc_map_at_10_diff1 value: 
76.88888516032657 - type: nauc_map_at_10_max value: 35.69809861085124 - type: nauc_map_at_10_std value: -27.859425473864224 - type: nauc_map_at_1_diff1 value: 79.5243725217315 - type: nauc_map_at_1_max value: 27.092773841207002 - type: nauc_map_at_1_std value: -26.223200911204543 - type: nauc_map_at_20_diff1 value: 76.74938996155176 - type: nauc_map_at_20_max value: 36.07373781351406 - type: nauc_map_at_20_std value: -26.891400098628015 - type: nauc_map_at_3_diff1 value: 77.29604745045076 - type: nauc_map_at_3_max value: 33.11431059356283 - type: nauc_map_at_3_std value: -29.555237195931085 - type: nauc_map_at_5_diff1 value: 77.14069217901078 - type: nauc_map_at_5_max value: 34.68656073526487 - type: nauc_map_at_5_std value: -28.945053669861508 - type: nauc_mrr_at_1000_diff1 value: 76.66087451567746 - type: nauc_mrr_at_1000_max value: 38.78133177265328 - type: nauc_mrr_at_1000_std value: -23.75726541774991 - type: nauc_mrr_at_100_diff1 value: 76.66117078261013 - type: nauc_mrr_at_100_max value: 38.782533036423885 - type: nauc_mrr_at_100_std value: -23.752587601473568 - type: nauc_mrr_at_10_diff1 value: 76.65866401411019 - type: nauc_mrr_at_10_max value: 38.87950311049704 - type: nauc_mrr_at_10_std value: -23.873660706680578 - type: nauc_mrr_at_1_diff1 value: 77.42633506487041 - type: nauc_mrr_at_1_max value: 37.93973722217786 - type: nauc_mrr_at_1_std value: -23.3984130771317 - type: nauc_mrr_at_20_diff1 value: 76.66210684923414 - type: nauc_mrr_at_20_max value: 38.81293033048911 - type: nauc_mrr_at_20_std value: -23.736590746133736 - type: nauc_mrr_at_3_diff1 value: 76.33711764736019 - type: nauc_mrr_at_3_max value: 38.5659231830368 - type: nauc_mrr_at_3_std value: -23.99588149124865 - type: nauc_mrr_at_5_diff1 value: 76.57123830226054 - type: nauc_mrr_at_5_max value: 38.97947097392977 - type: nauc_mrr_at_5_std value: -23.943668957974246 - type: nauc_ndcg_at_1000_diff1 value: 76.38447339050585 - type: nauc_ndcg_at_1000_max value: 37.756822792877934 - type: nauc_ndcg_at_1000_std value: -24.046995734357164 - type: nauc_ndcg_at_100_diff1 value: 76.44058018066822 - type: nauc_ndcg_at_100_max value: 37.72948294169218 - type: nauc_ndcg_at_100_std value: -24.083432140741795 - type: nauc_ndcg_at_10_diff1 value: 76.56246287923074 - type: nauc_ndcg_at_10_max value: 37.0329253490553 - type: nauc_ndcg_at_10_std value: -26.6495163705961 - type: nauc_ndcg_at_1_diff1 value: 77.4085129990432 - type: nauc_ndcg_at_1_max value: 38.06139172214421 - type: nauc_ndcg_at_1_std value: -23.656477126977386 - type: nauc_ndcg_at_20_diff1 value: 76.50192496743098 - type: nauc_ndcg_at_20_max value: 37.51759311013985 - type: nauc_ndcg_at_20_std value: -25.45517058360004 - type: nauc_ndcg_at_3_diff1 value: 75.94398494081794 - type: nauc_ndcg_at_3_max value: 35.7666711547279 - type: nauc_ndcg_at_3_std value: -26.866022682361578 - type: nauc_ndcg_at_5_diff1 value: 76.47334274088344 - type: nauc_ndcg_at_5_max value: 36.40830331490731 - type: nauc_ndcg_at_5_std value: -27.170121189572765 - type: nauc_precision_at_1000_diff1 value: -43.33672630765437 - type: nauc_precision_at_1000_max value: -5.089751329149161 - type: nauc_precision_at_1000_std value: 30.6241447847051 - type: nauc_precision_at_100_diff1 value: -42.736833035629864 - type: nauc_precision_at_100_max value: -4.060198408346224 - type: nauc_precision_at_100_std value: 29.807050266205344 - type: nauc_precision_at_10_diff1 value: -35.90810562245906 - type: nauc_precision_at_10_max value: 1.1633204529249133 - type: nauc_precision_at_10_std value: 20.129691203276018 - 
type: nauc_precision_at_1_diff1 value: 77.4085129990432 - type: nauc_precision_at_1_max value: 38.06139172214421 - type: nauc_precision_at_1_std value: -23.656477126977386 - type: nauc_precision_at_20_diff1 value: -40.2132286912738 - type: nauc_precision_at_20_max value: -1.3004735030734194 - type: nauc_precision_at_20_std value: 25.15612293757488 - type: nauc_precision_at_3_diff1 value: -13.873825299883904 - type: nauc_precision_at_3_max value: 11.038689278907233 - type: nauc_precision_at_3_std value: 5.4276449621706 - type: nauc_precision_at_5_diff1 value: -27.151668633894737 - type: nauc_precision_at_5_max value: 5.795130010163115 - type: nauc_precision_at_5_std value: 13.220722167587375 - type: nauc_recall_at_1000_diff1 value: 83.903950427863 - type: nauc_recall_at_1000_max value: 37.82919000897223 - type: nauc_recall_at_1000_std value: 70.65670846771707 - type: nauc_recall_at_100_diff1 value: 75.23306095335836 - type: nauc_recall_at_100_max value: 37.54281648247423 - type: nauc_recall_at_100_std value: 8.434289114377373 - type: nauc_recall_at_10_diff1 value: 72.7872912723047 - type: nauc_recall_at_10_max value: 34.261519652104184 - type: nauc_recall_at_10_std value: -34.60101950810808 - type: nauc_recall_at_1_diff1 value: 79.5243725217315 - type: nauc_recall_at_1_max value: 27.092773841207002 - type: nauc_recall_at_1_std value: -26.223200911204543 - type: nauc_recall_at_20_diff1 value: 72.8297963091964 - type: nauc_recall_at_20_max value: 36.070220569670916 - type: nauc_recall_at_20_std value: -27.20897179168245 - type: nauc_recall_at_3_diff1 value: 73.47456374650459 - type: nauc_recall_at_3_max value: 29.901663407294816 - type: nauc_recall_at_3_std value: -32.83329537040381 - type: nauc_recall_at_5_diff1 value: 73.05025750827126 - type: nauc_recall_at_5_max value: 32.35733470860963 - type: nauc_recall_at_5_std value: -34.32357558493091 - type: ndcg_at_1 value: 80.4 - type: ndcg_at_10 value: 87.366 - type: ndcg_at_100 value: 88.7 - type: ndcg_at_1000 value: 88.842 - type: ndcg_at_20 value: 88.11 - type: ndcg_at_3 value: 84.52499999999999 - type: ndcg_at_5 value: 86.047 - type: precision_at_1 value: 80.4 - type: precision_at_10 value: 13.235 - type: precision_at_100 value: 1.516 - type: precision_at_1000 value: 0.156 - type: precision_at_20 value: 7.037 - type: precision_at_3 value: 36.9 - type: precision_at_5 value: 24.236 - type: recall_at_1 value: 69.95700000000001 - type: recall_at_10 value: 94.535 - type: recall_at_100 value: 99.164 - type: recall_at_1000 value: 99.855 - type: recall_at_20 value: 96.974 - type: recall_at_3 value: 86.33800000000001 - type: recall_at_5 value: 90.69 - task: type: Retrieval dataset: name: MTEB SCIDOCS type: mteb/scidocs config: default split: test revision: f8c2fcf00f625baaa80f62ec5bd9e1fff3b8ae88 metrics: - type: main_score value: 21.492 - type: map_at_1 value: 5.192 - type: map_at_10 value: 12.959000000000001 - type: map_at_100 value: 14.963999999999999 - type: map_at_1000 value: 15.261 - type: map_at_20 value: 13.988999999999999 - type: map_at_3 value: 9.235 - type: map_at_5 value: 11.042 - type: mrr_at_1 value: 25.5 - type: mrr_at_10 value: 36.37313492063491 - type: mrr_at_100 value: 37.36517957347626 - type: mrr_at_1000 value: 37.42538601073437 - type: mrr_at_20 value: 36.987896404421136 - type: mrr_at_3 value: 32.966666666666654 - type: mrr_at_5 value: 34.95166666666664 - type: nauc_map_at_1000_diff1 value: 13.635120934154395 - type: nauc_map_at_1000_max value: 28.03542983005195 - type: nauc_map_at_1000_std value: 17.07156940311778 - type: 
nauc_map_at_100_diff1 value: 13.59237295184475 - type: nauc_map_at_100_max value: 27.992291365051237 - type: nauc_map_at_100_std value: 16.926533467400464 - type: nauc_map_at_10_diff1 value: 14.149193235999993 - type: nauc_map_at_10_max value: 26.520643811139305 - type: nauc_map_at_10_std value: 13.168673602548925 - type: nauc_map_at_1_diff1 value: 20.096094508148465 - type: nauc_map_at_1_max value: 17.41582245576302 - type: nauc_map_at_1_std value: 5.771729007558897 - type: nauc_map_at_20_diff1 value: 13.977726400526427 - type: nauc_map_at_20_max value: 27.2322235491895 - type: nauc_map_at_20_std value: 14.972781677750435 - type: nauc_map_at_3_diff1 value: 17.371153027460355 - type: nauc_map_at_3_max value: 24.457758503208254 - type: nauc_map_at_3_std value: 7.719726821179824 - type: nauc_map_at_5_diff1 value: 14.600442843442574 - type: nauc_map_at_5_max value: 25.899736370856296 - type: nauc_map_at_5_std value: 10.125349354853359 - type: nauc_mrr_at_1000_diff1 value: 18.70342821390236 - type: nauc_mrr_at_1000_max value: 23.365194520549114 - type: nauc_mrr_at_1000_std value: 12.185114294903236 - type: nauc_mrr_at_100_diff1 value: 18.677858738015907 - type: nauc_mrr_at_100_max value: 23.372641996726742 - type: nauc_mrr_at_100_std value: 12.216130561991909 - type: nauc_mrr_at_10_diff1 value: 18.79094453090232 - type: nauc_mrr_at_10_max value: 23.511686337006466 - type: nauc_mrr_at_10_std value: 11.879716687008134 - type: nauc_mrr_at_1_diff1 value: 20.10455171810408 - type: nauc_mrr_at_1_max value: 17.741566234315428 - type: nauc_mrr_at_1_std value: 6.1676764583652215 - type: nauc_mrr_at_20_diff1 value: 18.70143648544655 - type: nauc_mrr_at_20_max value: 23.45603239095019 - type: nauc_mrr_at_20_std value: 12.244613576686202 - type: nauc_mrr_at_3_diff1 value: 18.894662528857374 - type: nauc_mrr_at_3_max value: 23.3739038101588 - type: nauc_mrr_at_3_std value: 10.4709044796543 - type: nauc_mrr_at_5_diff1 value: 18.877786065095563 - type: nauc_mrr_at_5_max value: 23.78061081203872 - type: nauc_mrr_at_5_std value: 11.847882917869622 - type: nauc_ndcg_at_1000_diff1 value: 13.99159027398115 - type: nauc_ndcg_at_1000_max value: 29.44766808611483 - type: nauc_ndcg_at_1000_std value: 24.289749574699915 - type: nauc_ndcg_at_100_diff1 value: 13.164020363258746 - type: nauc_ndcg_at_100_max value: 29.642442997167723 - type: nauc_ndcg_at_100_std value: 23.761764515453866 - type: nauc_ndcg_at_10_diff1 value: 14.839883268638546 - type: nauc_ndcg_at_10_max value: 27.21043708455449 - type: nauc_ndcg_at_10_std value: 15.56110419291775 - type: nauc_ndcg_at_1_diff1 value: 20.10455171810408 - type: nauc_ndcg_at_1_max value: 17.741566234315428 - type: nauc_ndcg_at_1_std value: 6.1676764583652215 - type: nauc_ndcg_at_20_diff1 value: 14.27998110295395 - type: nauc_ndcg_at_20_max value: 28.2492026337839 - type: nauc_ndcg_at_20_std value: 18.822356982979105 - type: nauc_ndcg_at_3_diff1 value: 17.659263157535445 - type: nauc_ndcg_at_3_max value: 25.416706421591396 - type: nauc_ndcg_at_3_std value: 9.650689638152636 - type: nauc_ndcg_at_5_diff1 value: 15.38459833918123 - type: nauc_ndcg_at_5_max value: 26.92495519416969 - type: nauc_ndcg_at_5_std value: 12.71017696809276 - type: nauc_precision_at_1000_diff1 value: 6.128490135458364 - type: nauc_precision_at_1000_max value: 23.52693893261883 - type: nauc_precision_at_1000_std value: 36.280432732819925 - type: nauc_precision_at_100_diff1 value: 5.306163791220436 - type: nauc_precision_at_100_max value: 27.67851033239246 - type: nauc_precision_at_100_std value: 
34.29821573752515 - type: nauc_precision_at_10_diff1 value: 10.829686435425472 - type: nauc_precision_at_10_max value: 27.201648684015318 - type: nauc_precision_at_10_std value: 19.376999508233254 - type: nauc_precision_at_1_diff1 value: 20.10455171810408 - type: nauc_precision_at_1_max value: 17.741566234315428 - type: nauc_precision_at_1_std value: 6.1676764583652215 - type: nauc_precision_at_20_diff1 value: 9.416169626702048 - type: nauc_precision_at_20_max value: 27.65257998670333 - type: nauc_precision_at_20_std value: 24.761868509805826 - type: nauc_precision_at_3_diff1 value: 16.666456902017348 - type: nauc_precision_at_3_max value: 27.9969730961105 - type: nauc_precision_at_3_std value: 10.991562741393231 - type: nauc_precision_at_5_diff1 value: 12.26205064462843 - type: nauc_precision_at_5_max value: 29.083848730874095 - type: nauc_precision_at_5_std value: 15.66630836555747 - type: nauc_recall_at_1000_diff1 value: 5.600277836894063 - type: nauc_recall_at_1000_max value: 23.228705161815526 - type: nauc_recall_at_1000_std value: 36.822431061799485 - type: nauc_recall_at_100_diff1 value: 4.991781244867178 - type: nauc_recall_at_100_max value: 27.70095625483475 - type: nauc_recall_at_100_std value: 34.67168431597854 - type: nauc_recall_at_10_diff1 value: 10.580860425931972 - type: nauc_recall_at_10_max value: 27.145829414223666 - type: nauc_recall_at_10_std value: 19.330630157067382 - type: nauc_recall_at_1_diff1 value: 20.096094508148465 - type: nauc_recall_at_1_max value: 17.41582245576302 - type: nauc_recall_at_1_std value: 5.771729007558897 - type: nauc_recall_at_20_diff1 value: 9.06945331260344 - type: nauc_recall_at_20_max value: 27.56725251066482 - type: nauc_recall_at_20_std value: 24.77644509886098 - type: nauc_recall_at_3_diff1 value: 16.660507676429322 - type: nauc_recall_at_3_max value: 27.816546386536434 - type: nauc_recall_at_3_std value: 10.687824478247007 - type: nauc_recall_at_5_diff1 value: 11.992514446369388 - type: nauc_recall_at_5_max value: 28.789031176671948 - type: nauc_recall_at_5_std value: 15.422118990090805 - type: ndcg_at_1 value: 25.5 - type: ndcg_at_10 value: 21.492 - type: ndcg_at_100 value: 29.022 - type: ndcg_at_1000 value: 34.298 - type: ndcg_at_20 value: 24.237000000000002 - type: ndcg_at_3 value: 20.392 - type: ndcg_at_5 value: 17.801000000000002 - type: precision_at_1 value: 25.5 - type: precision_at_10 value: 11.09 - type: precision_at_100 value: 2.1919999999999997 - type: precision_at_1000 value: 0.346 - type: precision_at_20 value: 7.135 - type: precision_at_3 value: 18.933 - type: precision_at_5 value: 15.52 - type: recall_at_1 value: 5.192 - type: recall_at_10 value: 22.512999999999998 - type: recall_at_100 value: 44.505 - type: recall_at_1000 value: 70.267 - type: recall_at_20 value: 28.965000000000003 - type: recall_at_3 value: 11.522 - type: recall_at_5 value: 15.751999999999999 - task: type: Retrieval dataset: name: MTEB SciFact type: mteb/scifact config: default split: test revision: 0228b52cf27578f30900b9e5271d331663a030d7 metrics: - type: main_score value: 71.586 - type: map_at_1 value: 56.760999999999996 - type: map_at_10 value: 66.893 - type: map_at_100 value: 67.42 - type: map_at_1000 value: 67.44200000000001 - type: map_at_20 value: 67.232 - type: map_at_3 value: 64.193 - type: map_at_5 value: 65.73400000000001 - type: mrr_at_1 value: 60.0 - type: mrr_at_10 value: 68.20383597883595 - type: mrr_at_100 value: 68.58867453733343 - type: mrr_at_1000 value: 68.61117469977329 - type: mrr_at_20 value: 68.43973740684265 - type: mrr_at_3 
value: 66.11111111111111 - type: mrr_at_5 value: 67.44444444444446 - type: nauc_map_at_1000_diff1 value: 72.66688261123035 - type: nauc_map_at_1000_max value: 61.02926282006283 - type: nauc_map_at_1000_std value: 11.084549829740526 - type: nauc_map_at_100_diff1 value: 72.66226192320828 - type: nauc_map_at_100_max value: 61.04393223108811 - type: nauc_map_at_100_std value: 11.101529343291695 - type: nauc_map_at_10_diff1 value: 72.66732266693091 - type: nauc_map_at_10_max value: 61.24124296311832 - type: nauc_map_at_10_std value: 10.91179451961794 - type: nauc_map_at_1_diff1 value: 74.2356464256346 - type: nauc_map_at_1_max value: 54.06962758957632 - type: nauc_map_at_1_std value: 0.8037891907963532 - type: nauc_map_at_20_diff1 value: 72.65198594061253 - type: nauc_map_at_20_max value: 61.130159351448185 - type: nauc_map_at_20_std value: 11.2246899245522 - type: nauc_map_at_3_diff1 value: 72.78578673303954 - type: nauc_map_at_3_max value: 59.19073262936321 - type: nauc_map_at_3_std value: 8.460301560522968 - type: nauc_map_at_5_diff1 value: 72.55004168261968 - type: nauc_map_at_5_max value: 59.75181935082357 - type: nauc_map_at_5_std value: 9.440299527201889 - type: nauc_mrr_at_1000_diff1 value: 72.82720348470325 - type: nauc_mrr_at_1000_max value: 62.344231223741446 - type: nauc_mrr_at_1000_std value: 12.60196558488974 - type: nauc_mrr_at_100_diff1 value: 72.82236849255094 - type: nauc_mrr_at_100_max value: 62.35799491393125 - type: nauc_mrr_at_100_std value: 12.617900773655673 - type: nauc_mrr_at_10_diff1 value: 72.7722847495086 - type: nauc_mrr_at_10_max value: 62.66642401155435 - type: nauc_mrr_at_10_std value: 12.906381237738746 - type: nauc_mrr_at_1_diff1 value: 74.71208073612343 - type: nauc_mrr_at_1_max value: 59.50430394775893 - type: nauc_mrr_at_1_std value: 8.129514198080512 - type: nauc_mrr_at_20_diff1 value: 72.78312367361772 - type: nauc_mrr_at_20_max value: 62.421122493761885 - type: nauc_mrr_at_20_std value: 12.693437522498588 - type: nauc_mrr_at_3_diff1 value: 73.50670156385345 - type: nauc_mrr_at_3_max value: 62.01717537699209 - type: nauc_mrr_at_3_std value: 11.926548252191182 - type: nauc_mrr_at_5_diff1 value: 72.62204028549876 - type: nauc_mrr_at_5_max value: 62.319358766312085 - type: nauc_mrr_at_5_std value: 13.081257923284342 - type: nauc_ndcg_at_1000_diff1 value: 72.29960539074736 - type: nauc_ndcg_at_1000_max value: 62.75096959221402 - type: nauc_ndcg_at_1000_std value: 13.81528462505362 - type: nauc_ndcg_at_100_diff1 value: 72.19985782073529 - type: nauc_ndcg_at_100_max value: 63.18837705326287 - type: nauc_ndcg_at_100_std value: 14.506479655117138 - type: nauc_ndcg_at_10_diff1 value: 71.85759847832983 - type: nauc_ndcg_at_10_max value: 64.150996056865 - type: nauc_ndcg_at_10_std value: 14.580606901634278 - type: nauc_ndcg_at_1_diff1 value: 74.71208073612343 - type: nauc_ndcg_at_1_max value: 59.50430394775893 - type: nauc_ndcg_at_1_std value: 8.129514198080512 - type: nauc_ndcg_at_20_diff1 value: 71.80987178228351 - type: nauc_ndcg_at_20_max value: 63.56269460865743 - type: nauc_ndcg_at_20_std value: 15.024978004625922 - type: nauc_ndcg_at_3_diff1 value: 72.35095651602592 - type: nauc_ndcg_at_3_max value: 61.60548011855679 - type: nauc_ndcg_at_3_std value: 12.048248788835263 - type: nauc_ndcg_at_5_diff1 value: 71.48615621881864 - type: nauc_ndcg_at_5_max value: 61.72870035979784 - type: nauc_ndcg_at_5_std value: 12.83048357446691 - type: nauc_precision_at_1000_diff1 value: -14.743011420972 - type: nauc_precision_at_1000_max value: 19.281995763080158 - type: 
nauc_precision_at_1000_std value: 49.6140660398164 - type: nauc_precision_at_100_diff1 value: 0.11278174806205563 - type: nauc_precision_at_100_max value: 29.704511820077332 - type: nauc_precision_at_100_std value: 47.84916954122579 - type: nauc_precision_at_10_diff1 value: 20.498227967235728 - type: nauc_precision_at_10_max value: 47.883119365891595 - type: nauc_precision_at_10_std value: 45.182178693450595 - type: nauc_precision_at_1_diff1 value: 74.71208073612343 - type: nauc_precision_at_1_max value: 59.50430394775893 - type: nauc_precision_at_1_std value: 8.129514198080512 - type: nauc_precision_at_20_diff1 value: 12.551737222341455 - type: nauc_precision_at_20_max value: 40.618899501225634 - type: nauc_precision_at_20_std value: 48.5598454249067 - type: nauc_precision_at_3_diff1 value: 47.67720764601145 - type: nauc_precision_at_3_max value: 56.50632017305064 - type: nauc_precision_at_3_std value: 31.14175140162157 - type: nauc_precision_at_5_diff1 value: 35.10058622792819 - type: nauc_precision_at_5_max value: 51.88948872657981 - type: nauc_precision_at_5_std value: 37.62796957461928 - type: nauc_recall_at_1000_diff1 value: 79.57516339869238 - type: nauc_recall_at_1000_max value: 86.11111111111035 - type: nauc_recall_at_1000_std value: 79.57516339869238 - type: nauc_recall_at_100_diff1 value: 70.50859559510081 - type: nauc_recall_at_100_max value: 79.17009941231396 - type: nauc_recall_at_100_std value: 44.32910419069595 - type: nauc_recall_at_10_diff1 value: 66.16118569361245 - type: nauc_recall_at_10_max value: 74.73542948302286 - type: nauc_recall_at_10_std value: 27.680330939810037 - type: nauc_recall_at_1_diff1 value: 74.2356464256346 - type: nauc_recall_at_1_max value: 54.06962758957632 - type: nauc_recall_at_1_std value: 0.8037891907963532 - type: nauc_recall_at_20_diff1 value: 65.4748436545527 - type: nauc_recall_at_20_max value: 73.81532199081235 - type: nauc_recall_at_20_std value: 33.59324708196253 - type: nauc_recall_at_3_diff1 value: 68.83194804473622 - type: nauc_recall_at_3_max value: 61.77722610439669 - type: nauc_recall_at_3_std value: 13.984923756556714 - type: nauc_recall_at_5_diff1 value: 65.51467417209523 - type: nauc_recall_at_5_max value: 64.08276291427661 - type: nauc_recall_at_5_std value: 19.976472037847167 - type: ndcg_at_1 value: 60.0 - type: ndcg_at_10 value: 71.586 - type: ndcg_at_100 value: 73.76899999999999 - type: ndcg_at_1000 value: 74.386 - type: ndcg_at_20 value: 72.612 - type: ndcg_at_3 value: 66.944 - type: ndcg_at_5 value: 69.333 - type: precision_at_1 value: 60.0 - type: precision_at_10 value: 9.6 - type: precision_at_100 value: 1.073 - type: precision_at_1000 value: 0.11199999999999999 - type: precision_at_20 value: 5.033 - type: precision_at_3 value: 26.333000000000002 - type: precision_at_5 value: 17.4 - type: recall_at_1 value: 56.760999999999996 - type: recall_at_10 value: 84.589 - type: recall_at_100 value: 94.333 - type: recall_at_1000 value: 99.333 - type: recall_at_20 value: 88.43299999999999 - type: recall_at_3 value: 72.10600000000001 - type: recall_at_5 value: 78.194 - task: type: Retrieval dataset: name: MTEB TRECCOVID type: mteb/trec-covid config: default split: test revision: bb9466bac8153a0349341eb1b22e06409e78ef4e metrics: - type: main_score value: 84.60600000000001 - type: map_at_1 value: 0.257 - type: map_at_10 value: 2.196 - type: map_at_100 value: 13.252 - type: map_at_1000 value: 31.473000000000003 - type: map_at_20 value: 4.023000000000001 - type: map_at_3 value: 0.722 - type: map_at_5 value: 1.146 - type: mrr_at_1 value: 
94.0 - type: mrr_at_10 value: 97.0 - type: mrr_at_100 value: 97.0 - type: mrr_at_1000 value: 97.0 - type: mrr_at_20 value: 97.0 - type: mrr_at_3 value: 97.0 - type: mrr_at_5 value: 97.0 - type: nauc_map_at_1000_diff1 value: -30.674816554207062 - type: nauc_map_at_1000_max value: 53.18598689657068 - type: nauc_map_at_1000_std value: 78.88325309469121 - type: nauc_map_at_100_diff1 value: -17.6877824653978 - type: nauc_map_at_100_max value: 19.584159765315658 - type: nauc_map_at_100_std value: 48.051154190992726 - type: nauc_map_at_10_diff1 value: 20.076631089898626 - type: nauc_map_at_10_max value: -8.642556160185636 - type: nauc_map_at_10_std value: -5.768698617334298 - type: nauc_map_at_1_diff1 value: 27.342260509653798 - type: nauc_map_at_1_max value: -23.400451210297994 - type: nauc_map_at_1_std value: -21.152006353733853 - type: nauc_map_at_20_diff1 value: 8.019321726240506 - type: nauc_map_at_20_max value: -1.4826378210544222 - type: nauc_map_at_20_std value: 5.698208117745366 - type: nauc_map_at_3_diff1 value: 32.073377946749446 - type: nauc_map_at_3_max value: -13.099353983204654 - type: nauc_map_at_3_std value: -15.36319127398037 - type: nauc_map_at_5_diff1 value: 22.500045815797876 - type: nauc_map_at_5_max value: -8.548135411428023 - type: nauc_map_at_5_std value: -8.547850460331334 - type: nauc_mrr_at_1000_diff1 value: -6.022408963585526 - type: nauc_mrr_at_1000_max value: 4.481792717087155 - type: nauc_mrr_at_1000_std value: 51.6962340491753 - type: nauc_mrr_at_100_diff1 value: -6.022408963585526 - type: nauc_mrr_at_100_max value: 4.481792717087155 - type: nauc_mrr_at_100_std value: 51.6962340491753 - type: nauc_mrr_at_10_diff1 value: -6.022408963585526 - type: nauc_mrr_at_10_max value: 4.481792717087155 - type: nauc_mrr_at_10_std value: 51.6962340491753 - type: nauc_mrr_at_1_diff1 value: -6.022408963585076 - type: nauc_mrr_at_1_max value: 4.481792717087146 - type: nauc_mrr_at_1_std value: 51.69623404917518 - type: nauc_mrr_at_20_diff1 value: -6.022408963585526 - type: nauc_mrr_at_20_max value: 4.481792717087155 - type: nauc_mrr_at_20_std value: 51.6962340491753 - type: nauc_mrr_at_3_diff1 value: -6.022408963585526 - type: nauc_mrr_at_3_max value: 4.481792717087155 - type: nauc_mrr_at_3_std value: 51.6962340491753 - type: nauc_mrr_at_5_diff1 value: -6.022408963585526 - type: nauc_mrr_at_5_max value: 4.481792717087155 - type: nauc_mrr_at_5_std value: 51.6962340491753 - type: nauc_ndcg_at_1000_diff1 value: -20.79697283984295 - type: nauc_ndcg_at_1000_max value: 52.97671908009218 - type: nauc_ndcg_at_1000_std value: 75.43907707019758 - type: nauc_ndcg_at_100_diff1 value: -38.620752706946455 - type: nauc_ndcg_at_100_max value: 49.41307462381511 - type: nauc_ndcg_at_100_std value: 81.33299379244252 - type: nauc_ndcg_at_10_diff1 value: -18.611906363037356 - type: nauc_ndcg_at_10_max value: 44.20544651664479 - type: nauc_ndcg_at_10_std value: 61.322552829935816 - type: nauc_ndcg_at_1_diff1 value: 18.625935567849073 - type: nauc_ndcg_at_1_max value: -10.104132769280879 - type: nauc_ndcg_at_1_std value: 22.449560689879743 - type: nauc_ndcg_at_20_diff1 value: -30.61130208138771 - type: nauc_ndcg_at_20_max value: 52.68851710375231 - type: nauc_ndcg_at_20_std value: 69.72357683382992 - type: nauc_ndcg_at_3_diff1 value: 5.695394821691213 - type: nauc_ndcg_at_3_max value: 37.909122367102135 - type: nauc_ndcg_at_3_std value: 46.2366603255159 - type: nauc_ndcg_at_5_diff1 value: -15.273067832464731 - type: nauc_ndcg_at_5_max value: 49.7054639475091 - type: nauc_ndcg_at_5_std value: 
58.83754007826166 - type: nauc_precision_at_1000_diff1 value: -31.565302588492035 - type: nauc_precision_at_1000_max value: 52.56214379514724 - type: nauc_precision_at_1000_std value: 53.40618234326055 - type: nauc_precision_at_100_diff1 value: -44.67273120709088 - type: nauc_precision_at_100_max value: 48.30381155522576 - type: nauc_precision_at_100_std value: 82.1984661602578 - type: nauc_precision_at_10_diff1 value: -24.737383556860145 - type: nauc_precision_at_10_max value: 52.816815002878556 - type: nauc_precision_at_10_std value: 67.99052410030845 - type: nauc_precision_at_1_diff1 value: -6.022408963585076 - type: nauc_precision_at_1_max value: 4.481792717087146 - type: nauc_precision_at_1_std value: 51.69623404917518 - type: nauc_precision_at_20_diff1 value: -40.23628054967093 - type: nauc_precision_at_20_max value: 56.980056980057014 - type: nauc_precision_at_20_std value: 76.60976777785895 - type: nauc_precision_at_3_diff1 value: -4.661784068466279 - type: nauc_precision_at_3_max value: 59.052007899934125 - type: nauc_precision_at_3_std value: 58.187952600394986 - type: nauc_precision_at_5_diff1 value: -38.11848143512736 - type: nauc_precision_at_5_max value: 68.6149353358365 - type: nauc_precision_at_5_std value: 73.55652899457661 - type: nauc_recall_at_1000_diff1 value: -14.886527444436345 - type: nauc_recall_at_1000_max value: 48.07492302795808 - type: nauc_recall_at_1000_std value: 65.05623212485906 - type: nauc_recall_at_100_diff1 value: -8.148385729388195 - type: nauc_recall_at_100_max value: 8.041615364614533 - type: nauc_recall_at_100_std value: 33.77187914574611 - type: nauc_recall_at_10_diff1 value: 24.333628413035942 - type: nauc_recall_at_10_max value: -14.577877145192078 - type: nauc_recall_at_10_std value: -12.131819145098557 - type: nauc_recall_at_1_diff1 value: 27.342260509653798 - type: nauc_recall_at_1_max value: -23.400451210297994 - type: nauc_recall_at_1_std value: -21.152006353733853 - type: nauc_recall_at_20_diff1 value: 13.695556376785564 - type: nauc_recall_at_20_max value: -8.872009346408264 - type: nauc_recall_at_20_std value: -3.163199444247112 - type: nauc_recall_at_3_diff1 value: 32.00442538217753 - type: nauc_recall_at_3_max value: -15.159737942664552 - type: nauc_recall_at_3_std value: -17.530833132440645 - type: nauc_recall_at_5_diff1 value: 22.64740552912405 - type: nauc_recall_at_5_max value: -12.947090597010414 - type: nauc_recall_at_5_std value: -12.914478822476807 - type: ndcg_at_1 value: 88.0 - type: ndcg_at_10 value: 84.60600000000001 - type: ndcg_at_100 value: 64.31700000000001 - type: ndcg_at_1000 value: 56.40500000000001 - type: ndcg_at_20 value: 80.561 - type: ndcg_at_3 value: 87.87700000000001 - type: ndcg_at_5 value: 86.641 - type: precision_at_1 value: 94.0 - type: precision_at_10 value: 88.2 - type: precision_at_100 value: 65.9 - type: precision_at_1000 value: 25.019999999999996 - type: precision_at_20 value: 84.7 - type: precision_at_3 value: 92.0 - type: precision_at_5 value: 90.0 - type: recall_at_1 value: 0.257 - type: recall_at_10 value: 2.338 - type: recall_at_100 value: 15.831999999999999 - type: recall_at_1000 value: 52.519000000000005 - type: recall_at_20 value: 4.367 - type: recall_at_3 value: 0.74 - type: recall_at_5 value: 1.196 - task: type: Retrieval dataset: name: MTEB Touche2020 type: mteb/touche2020 config: default split: test revision: a34f9a33db75fa0cbb21bb5cfc3dae8dc8bec93f metrics: - type: main_score value: 31.426 - type: map_at_1 value: 3.4709999999999996 - type: map_at_10 value: 13.236999999999998 - type: 
map_at_100 value: 19.521 - type: map_at_1000 value: 21.224 - type: map_at_20 value: 15.626000000000001 - type: map_at_3 value: 7.152 - type: map_at_5 value: 9.914000000000001 - type: mrr_at_1 value: 44.89795918367347 - type: mrr_at_10 value: 57.54373177842565 - type: mrr_at_100 value: 57.855267710139536 - type: mrr_at_1000 value: 57.855267710139536 - type: mrr_at_20 value: 57.70071764969724 - type: mrr_at_3 value: 52.72108843537414 - type: mrr_at_5 value: 55.06802721088435 - type: nauc_map_at_1000_diff1 value: 21.148857552115558 - type: nauc_map_at_1000_max value: 2.0837572569021323 - type: nauc_map_at_1000_std value: 3.203419709665347 - type: nauc_map_at_100_diff1 value: 21.383778167597878 - type: nauc_map_at_100_max value: 0.965767943155967 - type: nauc_map_at_100_std value: 0.3949924961020957 - type: nauc_map_at_10_diff1 value: 27.178555638086394 - type: nauc_map_at_10_max value: 4.480675175857958 - type: nauc_map_at_10_std value: -13.69553539513878 - type: nauc_map_at_1_diff1 value: 27.63901823865334 - type: nauc_map_at_1_max value: -18.6387233237763 - type: nauc_map_at_1_std value: -27.02164241863646 - type: nauc_map_at_20_diff1 value: 23.892104752374888 - type: nauc_map_at_20_max value: 3.5343136621362348 - type: nauc_map_at_20_std value: -8.765101188860816 - type: nauc_map_at_3_diff1 value: 22.065793929837493 - type: nauc_map_at_3_max value: 0.8063396680860568 - type: nauc_map_at_3_std value: -20.404849396621824 - type: nauc_map_at_5_diff1 value: 22.66626080580714 - type: nauc_map_at_5_max value: 5.423340658352383 - type: nauc_map_at_5_std value: -18.31523779843455 - type: nauc_mrr_at_1000_diff1 value: 30.520722269282665 - type: nauc_mrr_at_1000_max value: -16.644959497742267 - type: nauc_mrr_at_1000_std value: -16.3824126273053 - type: nauc_mrr_at_100_diff1 value: 30.520722269282665 - type: nauc_mrr_at_100_max value: -16.644959497742267 - type: nauc_mrr_at_100_std value: -16.3824126273053 - type: nauc_mrr_at_10_diff1 value: 30.428248939332974 - type: nauc_mrr_at_10_max value: -16.300183919261585 - type: nauc_mrr_at_10_std value: -15.404823235836309 - type: nauc_mrr_at_1_diff1 value: 27.041346572613474 - type: nauc_mrr_at_1_max value: -23.181309312755804 - type: nauc_mrr_at_1_std value: -24.33076726484014 - type: nauc_mrr_at_20_diff1 value: 30.676558567379303 - type: nauc_mrr_at_20_max value: -16.914268763031416 - type: nauc_mrr_at_20_std value: -15.77742854976336 - type: nauc_mrr_at_3_diff1 value: 31.718457109787096 - type: nauc_mrr_at_3_max value: -15.508391132202235 - type: nauc_mrr_at_3_std value: -20.33229438349494 - type: nauc_mrr_at_5_diff1 value: 28.73798376227693 - type: nauc_mrr_at_5_max value: -16.086295031060196 - type: nauc_mrr_at_5_std value: -15.644604635769321 - type: nauc_ndcg_at_1000_diff1 value: 22.158724660189606 - type: nauc_ndcg_at_1000_max value: -3.1755686809941475 - type: nauc_ndcg_at_1000_std value: 19.258386224159075 - type: nauc_ndcg_at_100_diff1 value: 21.83846748649288 - type: nauc_ndcg_at_100_max value: -10.939957598756036 - type: nauc_ndcg_at_100_std value: 14.729678880436623 - type: nauc_ndcg_at_10_diff1 value: 26.944882726098424 - type: nauc_ndcg_at_10_max value: -3.5176483833346617 - type: nauc_ndcg_at_10_std value: -5.400606773697211 - type: nauc_ndcg_at_1_diff1 value: 26.649410985172985 - type: nauc_ndcg_at_1_max value: -18.806716526067493 - type: nauc_ndcg_at_1_std value: -25.100244999343506 - type: nauc_ndcg_at_20_diff1 value: 24.860266153648315 - type: nauc_ndcg_at_20_max value: -7.521401821712892 - type: nauc_ndcg_at_20_std value: 
-3.3696577425983003 - type: nauc_ndcg_at_3_diff1 value: 23.9933326962406 - type: nauc_ndcg_at_3_max value: -0.4609479344284664 - type: nauc_ndcg_at_3_std value: -15.176459166869897 - type: nauc_ndcg_at_5_diff1 value: 22.50595978713142 - type: nauc_ndcg_at_5_max value: -2.1093870656000857 - type: nauc_ndcg_at_5_std value: -12.732197425528257 - type: nauc_precision_at_1000_diff1 value: -20.335120385950024 - type: nauc_precision_at_1000_max value: 26.95109729939765 - type: nauc_precision_at_1000_std value: 29.981685890622117 - type: nauc_precision_at_100_diff1 value: -2.782114329320704 - type: nauc_precision_at_100_max value: 2.9489322002048604 - type: nauc_precision_at_100_std value: 67.3074073674319 - type: nauc_precision_at_10_diff1 value: 21.385177180383383 - type: nauc_precision_at_10_max value: -2.4696365259422817 - type: nauc_precision_at_10_std value: 14.469784299536673 - type: nauc_precision_at_1_diff1 value: 27.041346572613474 - type: nauc_precision_at_1_max value: -23.181309312755804 - type: nauc_precision_at_1_std value: -24.33076726484014 - type: nauc_precision_at_20_diff1 value: 11.993846579997673 - type: nauc_precision_at_20_max value: -2.4792189693296227 - type: nauc_precision_at_20_std value: 28.581394687807745 - type: nauc_precision_at_3_diff1 value: 20.70568446328836 - type: nauc_precision_at_3_max value: 0.37326398699875984 - type: nauc_precision_at_3_std value: -12.983918676694389 - type: nauc_precision_at_5_diff1 value: 19.47466335828124 - type: nauc_precision_at_5_max value: -1.8921617684385994 - type: nauc_precision_at_5_std value: -6.533875294402164 - type: nauc_recall_at_1000_diff1 value: 7.611201305723156 - type: nauc_recall_at_1000_max value: 5.6416194035820055 - type: nauc_recall_at_1000_std value: 61.695208644278 - type: nauc_recall_at_100_diff1 value: 10.0183258158735 - type: nauc_recall_at_100_max value: -10.950612455698973 - type: nauc_recall_at_100_std value: 33.06069987640471 - type: nauc_recall_at_10_diff1 value: 24.738210305731535 - type: nauc_recall_at_10_max value: -2.6592454032071546 - type: nauc_recall_at_10_std value: -4.83987517793115 - type: nauc_recall_at_1_diff1 value: 27.63901823865334 - type: nauc_recall_at_1_max value: -18.6387233237763 - type: nauc_recall_at_1_std value: -27.02164241863646 - type: nauc_recall_at_20_diff1 value: 17.79601177409034 - type: nauc_recall_at_20_max value: -6.681637093148051 - type: nauc_recall_at_20_std value: 3.369193919932238 - type: nauc_recall_at_3_diff1 value: 24.9589431081204 - type: nauc_recall_at_3_max value: 2.4783640980500232 - type: nauc_recall_at_3_std value: -19.567415651090702 - type: nauc_recall_at_5_diff1 value: 23.71803410135437 - type: nauc_recall_at_5_max value: 1.6294309357641652 - type: nauc_recall_at_5_std value: -15.365511906408983 - type: ndcg_at_1 value: 40.816 - type: ndcg_at_10 value: 31.426 - type: ndcg_at_100 value: 41.558 - type: ndcg_at_1000 value: 53.042 - type: ndcg_at_20 value: 31.108999999999998 - type: ndcg_at_3 value: 35.518 - type: ndcg_at_5 value: 33.235 - type: precision_at_1 value: 44.897999999999996 - type: precision_at_10 value: 27.551 - type: precision_at_100 value: 8.204 - type: precision_at_1000 value: 1.582 - type: precision_at_20 value: 19.796 - type: precision_at_3 value: 36.735 - type: precision_at_5 value: 33.061 - type: recall_at_1 value: 3.4709999999999996 - type: recall_at_10 value: 19.563 - type: recall_at_100 value: 50.3 - type: recall_at_1000 value: 85.13199999999999 - type: recall_at_20 value: 26.738 - type: recall_at_3 value: 7.8420000000000005 - type: 
recall_at_5 value: 11.994 - task: type: Classification dataset: name: MTEB AmazonCounterfactualClassification (en) type: mteb/amazon_counterfactual config: en split: test revision: e8379541af4e31359cca9fbcf4b00f2671dba205 metrics: - type: accuracy value: 68.29850746268657 - type: ap value: 30.109785890841966 - type: ap_weighted value: 30.109785890841966 - type: f1 value: 61.76875915202924 - type: f1_weighted value: 71.32073190458556 - type: main_score value: 68.29850746268657 - task: type: Classification dataset: name: MTEB AmazonPolarityClassification (default) type: mteb/amazon_polarity config: default split: test revision: e2d317d38cd51312af73b3d32a06d1a08b442046 metrics: - type: accuracy value: 90.3068 - type: ap value: 86.17914339624038 - type: ap_weighted value: 86.17914339624038 - type: f1 value: 90.29716826358077 - type: f1_weighted value: 90.29716826358077 - type: main_score value: 90.3068 - task: type: Classification dataset: name: MTEB AmazonReviewsClassification (en) type: mteb/amazon_reviews_multi config: en split: test revision: 1399c76144fd37290681b995c656ef9b2e06e26d metrics: - type: accuracy value: 46.272000000000006 - type: f1 value: 45.57042543386915 - type: f1_weighted value: 45.57042543386915 - type: main_score value: 46.272000000000006 - task: type: Clustering dataset: name: MTEB ArxivClusteringP2P (default) type: mteb/arxiv-clustering-p2p config: default split: test revision: a122ad7f3f0291bf49cc6f4d32aa80929df69d5d metrics: - type: main_score value: 44.9469238081379 - type: v_measure value: 44.9469238081379 - type: v_measure_std value: 13.26811262671461 - task: type: Clustering dataset: name: MTEB ArxivClusteringS2S (default) type: mteb/arxiv-clustering-s2s config: default split: test revision: f910caf1a6075f7329cdf8c1a6135696f37dbd53 metrics: - type: main_score value: 34.12071448053325 - type: v_measure value: 34.12071448053325 - type: v_measure_std value: 13.7019879046405 - task: type: Reranking dataset: name: MTEB AskUbuntuDupQuestions (default) type: mteb/askubuntudupquestions-reranking config: default split: test revision: 2000358ca161889fa9c082cb41daa8dcfb161a54 metrics: - type: main_score value: 61.597667288657846 - type: map value: 61.597667288657846 - type: mrr value: 75.57940904893813 - type: nAUC_map_diff1 value: 8.745172077340095 - type: nAUC_map_max value: 20.114863024035493 - type: nAUC_map_std value: 15.991351189572192 - type: nAUC_mrr_diff1 value: 20.781369244159983 - type: nAUC_mrr_max value: 30.78542570228559 - type: nAUC_mrr_std value: 19.861484857303676 - task: type: STS dataset: name: MTEB BIOSSES (default) type: mteb/biosses-sts config: default split: test revision: d3fb88f8f02e40887cd149695127462bbcf29b4a metrics: - type: cosine_pearson value: 88.55587996301419 - type: cosine_spearman value: 86.40317357420093 - type: euclidean_pearson value: 86.93771958250231 - type: euclidean_spearman value: 86.40317357420093 - type: main_score value: 86.40317357420093 - type: manhattan_pearson value: 86.92196577117366 - type: manhattan_spearman value: 85.79834051556095 - type: pearson value: 88.55587996301419 - type: spearman value: 86.40317357420093 - task: type: Classification dataset: name: MTEB Banking77Classification (default) type: mteb/banking77 config: default split: test revision: 0fd18e25b25c072e09e0d92ab615fda904d66300 metrics: - type: accuracy value: 80.0064935064935 - type: f1 value: 79.29524254086299 - type: f1_weighted value: 79.295242540863 - type: main_score value: 80.0064935064935 - task: type: Clustering dataset: name: MTEB 
BiorxivClusteringP2P (default) type: mteb/biorxiv-clustering-p2p config: default split: test revision: 65b79d1d13f80053f67aca9498d9402c2d9f1f40 metrics: - type: main_score value: 35.27186813341181 - type: v_measure value: 35.27186813341181 - type: v_measure_std value: 0.8621482145872432 - task: type: Clustering dataset: name: MTEB BiorxivClusteringS2S (default) type: mteb/biorxiv-clustering-s2s config: default split: test revision: 258694dd0231531bc1fd9de6ceb52a0853c6d908 metrics: - type: main_score value: 28.411805064852295 - type: v_measure value: 28.411805064852295 - type: v_measure_std value: 0.7194290078011281 - task: type: Classification dataset: name: MTEB EmotionClassification (default) type: mteb/emotion config: default split: test revision: 4f58c6b202a23cf9a4da393831edf4f9183cad37 metrics: - type: accuracy value: 43.675 - type: f1 value: 40.15061931375577 - type: f1_weighted value: 45.714186572727066 - type: main_score value: 43.675 - task: type: Classification dataset: name: MTEB ImdbClassification (default) type: mteb/imdb config: default split: test revision: 3d86128a09e091d6018b6d26cad27f2739fc2db7 metrics: - type: accuracy value: 84.35640000000001 - type: ap value: 79.07507736685174 - type: ap_weighted value: 79.07507736685174 - type: f1 value: 84.32288494833531 - type: f1_weighted value: 84.32288494833531 - type: main_score value: 84.35640000000001 - task: type: Classification dataset: name: MTEB MTOPDomainClassification (en) type: mteb/mtop_domain config: en split: test revision: d80d48c1eb48d3562165c59d59d0034df9fff0bf metrics: - type: accuracy value: 91.35658914728684 - type: f1 value: 90.86877537911086 - type: f1_weighted value: 91.3282092774443 - type: main_score value: 91.35658914728684 - task: type: Classification dataset: name: MTEB MTOPIntentClassification (en) type: mteb/mtop_intent config: en split: test revision: ae001d0e6b1228650b7bd1c2c65fb50ad11a8aba metrics: - type: accuracy value: 60.63611491108071 - type: f1 value: 42.78886482112741 - type: f1_weighted value: 63.44208631840539 - type: main_score value: 60.63611491108071 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (en) type: mteb/amazon_massive_intent config: en split: test revision: 4672e20407010da34463acc759c162ca9734bca6 metrics: - type: accuracy value: 66.68796234028245 - type: f1 value: 64.44940791000278 - type: f1_weighted value: 65.77554417406792 - type: main_score value: 66.68796234028245 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (en) type: mteb/amazon_massive_scenario config: en split: test revision: fad2c6e8459f9e1c45d9315f4953d921437d70f8 metrics: - type: accuracy value: 73.0598520511096 - type: f1 value: 72.14267273884774 - type: f1_weighted value: 72.93345180137516 - type: main_score value: 73.0598520511096 - task: type: Clustering dataset: name: MTEB MedrxivClusteringP2P (default) type: mteb/medrxiv-clustering-p2p config: default split: test revision: e7a26af6f3ae46b30dde8737f02c07b1505bcc73 metrics: - type: main_score value: 31.143081341699606 - type: v_measure value: 31.143081341699606 - type: v_measure_std value: 1.5578716347076906 - task: type: Clustering dataset: name: MTEB MedrxivClusteringS2S (default) type: mteb/medrxiv-clustering-s2s config: default split: test revision: 35191c8c0dca72d8ff3efcd72aa802307d469663 metrics: - type: main_score value: 27.010818869829556 - type: v_measure value: 27.010818869829556 - type: v_measure_std value: 1.1771554540819378 - task: type: Reranking dataset: name: MTEB MindSmallReranking 
(default) type: mteb/mind_small config: default split: test revision: 59042f120c80e8afa9cdbb224f67076cec0fc9a7 metrics: - type: main_score value: 30.20503776754942 - type: map value: 30.20503776754942 - type: mrr value: 31.076636002733437 - type: nAUC_map_diff1 value: 7.290568655287842 - type: nAUC_map_max value: -21.381599355932945 - type: nAUC_map_std value: -7.709920607543168 - type: nAUC_mrr_diff1 value: 7.558397329284913 - type: nAUC_mrr_max value: -15.981397186427607 - type: nAUC_mrr_std value: -4.870495243168834 - task: type: Clustering dataset: name: MTEB RedditClustering (default) type: mteb/reddit-clustering config: default split: test revision: 24640382cdbf8abc73003fb0fa6d111a705499eb metrics: - type: main_score value: 51.85893476633338 - type: v_measure value: 51.85893476633338 - type: v_measure_std value: 4.704770139385852 - task: type: Clustering dataset: name: MTEB RedditClusteringP2P (default) type: mteb/reddit-clustering-p2p config: default split: test revision: 385e3cb46b4cfa89021f56c4380204149d0efe33 metrics: - type: main_score value: 61.8124222918822 - type: v_measure value: 61.8124222918822 - type: v_measure_std value: 11.994472578100165 - task: type: STS dataset: name: MTEB SICK-R (default) type: mteb/sickr-sts config: default split: test revision: 20a6d6f312dd54037fe07a32d58e5e168867909d metrics: - type: cosine_pearson value: 77.63310776935984 - type: cosine_spearman value: 69.86468291111039 - type: euclidean_pearson value: 73.91537077798837 - type: euclidean_spearman value: 69.86468376650203 - type: main_score value: 69.86468291111039 - type: manhattan_pearson value: 73.68616048370464 - type: manhattan_spearman value: 69.76232036206659 - type: pearson value: 77.63310776935984 - type: spearman value: 69.86468291111039 - task: type: STS dataset: name: MTEB STS12 (default) type: mteb/sts12-sts config: default split: test revision: a0d554a64d88156834ff5ae9920b964011b16384 metrics: - type: cosine_pearson value: 57.71716838245049 - type: cosine_spearman value: 61.797855543446424 - type: euclidean_pearson value: 58.22958675325848 - type: euclidean_spearman value: 61.797855543446424 - type: main_score value: 61.797855543446424 - type: manhattan_pearson value: 57.63117544997929 - type: manhattan_spearman value: 61.3629404350085 - type: pearson value: 57.71716838245049 - type: spearman value: 61.797855543446424 - task: type: STS dataset: name: MTEB STS13 (default) type: mteb/sts13-sts config: default split: test revision: 7e90230a92c190f1bf69ae9002b8cea547a64cca metrics: - type: cosine_pearson value: 82.30260026790903 - type: cosine_spearman value: 82.66959813070869 - type: euclidean_pearson value: 82.08383017580783 - type: euclidean_spearman value: 82.66959813070869 - type: main_score value: 82.66959813070869 - type: manhattan_pearson value: 81.77991451392153 - type: manhattan_spearman value: 82.3652534745606 - type: pearson value: 82.30260026790903 - type: spearman value: 82.66959813070869 - task: type: STS dataset: name: MTEB STS14 (default) type: mteb/sts14-sts config: default split: test revision: 6031580fec1f6af667f0bd2da0a551cf4f0b2375 metrics: - type: cosine_pearson value: 71.50608384084478 - type: cosine_spearman value: 68.94968064977785 - type: euclidean_pearson value: 70.73381299949564 - type: euclidean_spearman value: 68.94968064977785 - type: main_score value: 68.94968064977785 - type: manhattan_pearson value: 70.5385486953787 - type: manhattan_spearman value: 68.82132770672365 - type: pearson value: 71.50608384084478 - type: spearman value: 68.94968064977785 - 
task: type: STS dataset: name: MTEB STS15 (default) type: mteb/sts15-sts config: default split: test revision: ae752c7c21bf194d8b67fd573edf7ae58183cbe3 metrics: - type: cosine_pearson value: 73.66969825874907 - type: cosine_spearman value: 75.55374982088381 - type: euclidean_pearson value: 75.9339313749594 - type: euclidean_spearman value: 75.55374982088381 - type: main_score value: 75.55374982088381 - type: manhattan_pearson value: 75.88287553383817 - type: manhattan_spearman value: 75.50729812977688 - type: pearson value: 73.66969825874907 - type: spearman value: 75.55374982088381 - task: type: STS dataset: name: MTEB STS16 (default) type: mteb/sts16-sts config: default split: test revision: 4d8694f8f0e0100860b497b999b3dbed754a0513 metrics: - type: cosine_pearson value: 74.5954724414016 - type: cosine_spearman value: 77.2688820850505 - type: euclidean_pearson value: 77.19866353971555 - type: euclidean_spearman value: 77.2688820850505 - type: main_score value: 77.2688820850505 - type: manhattan_pearson value: 77.27072603680978 - type: manhattan_spearman value: 77.29408453673607 - type: pearson value: 74.5954724414016 - type: spearman value: 77.2688820850505 - task: type: STS dataset: name: MTEB STS17 (en-en) type: mteb/sts17-crosslingual-sts config: en-en split: test revision: faeb762787bd10488a50c8b5be4a3b82e411949c metrics: - type: cosine_pearson value: 71.52588722654055 - type: cosine_spearman value: 74.97235736456061 - type: euclidean_pearson value: 74.51952528854038 - type: euclidean_spearman value: 74.97235736456061 - type: main_score value: 74.97235736456061 - type: manhattan_pearson value: 74.48272300884209 - type: manhattan_spearman value: 74.80633649415176 - type: pearson value: 71.52588722654055 - type: spearman value: 74.97235736456061 - task: type: STS dataset: name: MTEB STS22 (en) type: mteb/sts22-crosslingual-sts config: en split: test revision: de9d86b3b84231dc21f76c7b7af1f28e2f57f6e3 metrics: - type: cosine_pearson value: 68.80031120401976 - type: cosine_spearman value: 69.07945196478491 - type: euclidean_pearson value: 68.99674496430792 - type: euclidean_spearman value: 69.07945196478491 - type: main_score value: 69.07945196478491 - type: manhattan_pearson value: 69.00236107775687 - type: manhattan_spearman value: 68.98064879049272 - type: pearson value: 68.80031120401976 - type: spearman value: 69.07945196478491 - task: type: STS dataset: name: MTEB STSBenchmark (default) type: mteb/stsbenchmark-sts config: default split: test revision: b0fddb56ed78048fa8b90373c8a3cfc37b684831 metrics: - type: cosine_pearson value: 65.6898007230089 - type: cosine_spearman value: 69.72386211803668 - type: euclidean_pearson value: 69.04523003701475 - type: euclidean_spearman value: 69.72386211803668 - type: main_score value: 69.72386211803668 - type: manhattan_pearson value: 68.80479743770702 - type: manhattan_spearman value: 69.43264575177459 - type: pearson value: 65.6898007230089 - type: spearman value: 69.72386211803668 - task: type: Reranking dataset: name: MTEB SciDocsRR (default) type: mteb/scidocs-reranking config: default split: test revision: d3c5e1fc0b855ab6097bf1cda04dd73947d7caab metrics: - type: main_score value: 79.74088066874383 - type: map value: 79.74088066874383 - type: mrr value: 94.47697455050397 - type: nAUC_map_diff1 value: 8.036086256905502 - type: nAUC_map_max value: 54.88199803816819 - type: nAUC_map_std value: 69.16267942176574 - type: nAUC_mrr_diff1 value: 50.020738477678115 - type: nAUC_mrr_max value: 83.28922770326483 - type: nAUC_mrr_std value: 
83.63973501802224 - task: type: PairClassification dataset: name: MTEB SprintDuplicateQuestions (default) type: mteb/sprintduplicatequestions-pairclassification config: default split: test revision: d66bd1f72af766a5cc4b0ca5e00c162f89e8cc46 metrics: - type: cosine_accuracy value: 99.83861386138614 - type: cosine_accuracy_threshold value: 74.75666999816895 - type: cosine_ap value: 96.15132792066652 - type: cosine_f1 value: 91.84890656063618 - type: cosine_f1_threshold value: 71.70594930648804 - type: cosine_precision value: 91.30434782608695 - type: cosine_recall value: 92.4 - type: dot_accuracy value: 99.83861386138614 - type: dot_accuracy_threshold value: 74.75666999816895 - type: dot_ap value: 96.15132792066653 - type: dot_f1 value: 91.84890656063618 - type: dot_f1_threshold value: 71.70596122741699 - type: dot_precision value: 91.30434782608695 - type: dot_recall value: 92.4 - type: euclidean_accuracy value: 99.83861386138614 - type: euclidean_accuracy_threshold value: 71.05395793914795 - type: euclidean_ap value: 96.15132792066652 - type: euclidean_f1 value: 91.84890656063618 - type: euclidean_f1_threshold value: 75.22505521774292 - type: euclidean_precision value: 91.30434782608695 - type: euclidean_recall value: 92.4 - type: main_score value: 96.15132792066653 - type: manhattan_accuracy value: 99.83564356435643 - type: manhattan_accuracy_threshold value: 1547.6950645446777 - type: manhattan_ap value: 96.06151211452136 - type: manhattan_f1 value: 91.61676646706587 - type: manhattan_f1_threshold value: 1626.3608932495117 - type: manhattan_precision value: 91.43426294820716 - type: manhattan_recall value: 91.8 - type: max_ap value: 96.15132792066653 - type: max_f1 value: 91.84890656063618 - type: max_precision value: 91.43426294820716 - type: max_recall value: 92.4 - type: similarity_accuracy value: 99.83861386138614 - type: similarity_accuracy_threshold value: 74.75666999816895 - type: similarity_ap value: 96.15132792066652 - type: similarity_f1 value: 91.84890656063618 - type: similarity_f1_threshold value: 71.70594930648804 - type: similarity_precision value: 91.30434782608695 - type: similarity_recall value: 92.4 - task: type: Clustering dataset: name: MTEB StackExchangeClustering (default) type: mteb/stackexchange-clustering config: default split: test revision: 6cbc1f7b2bc0622f2e39d2c77fa502909748c259 metrics: - type: main_score value: 61.24120328328453 - type: v_measure value: 61.24120328328453 - type: v_measure_std value: 3.9946560691100372 - task: type: Clustering dataset: name: MTEB StackExchangeClusteringP2P (default) type: mteb/stackexchange-clustering-p2p config: default split: test revision: 815ca46b2622cec33ccafc3735d572c266efdb44 metrics: - type: main_score value: 33.808268374864745 - type: v_measure value: 33.808268374864745 - type: v_measure_std value: 1.2212188701887239 - task: type: Reranking dataset: name: MTEB StackOverflowDupQuestions (default) type: mteb/stackoverflowdupquestions-reranking config: default split: test revision: e185fbe320c72810689fc5848eb6114e1ef5ec69 metrics: - type: main_score value: 52.19806018468037 - type: map value: 52.19806018468037 - type: mrr value: 52.98921462524404 - type: nAUC_map_diff1 value: 37.41443156995912 - type: nAUC_map_max value: 9.410262727675603 - type: nAUC_map_std value: 8.7094185014992 - type: nAUC_mrr_diff1 value: 37.78202772392581 - type: nAUC_mrr_max value: 10.517635536565816 - type: nAUC_mrr_std value: 8.509423813772491 - task: type: Summarization dataset: name: MTEB SummEval (default) type: mteb/summeval config: 
default split: test revision: cda12ad7615edc362dbf25a00fdd61d3b1eaf93c metrics: - type: cosine_pearson value: 30.48413700430812 - type: cosine_spearman value: 30.357162200875816 - type: dot_pearson value: 30.484140144824938 - type: dot_spearman value: 30.357162200875816 - type: main_score value: 30.357162200875816 - type: pearson value: 30.48413700430812 - type: spearman value: 30.357162200875816 - task: type: Classification dataset: name: MTEB ToxicConversationsClassification (default) type: mteb/toxic_conversations_50k config: default split: test revision: edfaf9da55d3dd50d43143d90c1ac476895ae6de metrics: - type: accuracy value: 66.8359375 - type: ap value: 12.482653786025985 - type: ap_weighted value: 12.482653786025985 - type: f1 value: 51.328608527332385 - type: f1_weighted value: 74.07974463955398 - type: main_score value: 66.8359375 - task: type: Classification dataset: name: MTEB TweetSentimentExtractionClassification (default) type: mteb/tweet_sentiment_extraction config: default split: test revision: d604517c81ca91fe16a244d1248fc021f9ecee7a metrics: - type: accuracy value: 53.907753254103 - type: f1 value: 54.22707647269581 - type: f1_weighted value: 53.611822984407695 - type: main_score value: 53.907753254103 - task: type: Clustering dataset: name: MTEB TwentyNewsgroupsClustering (default) type: mteb/twentynewsgroups-clustering config: default split: test revision: 6125ec4e24fa026cec8a478383ee943acfbd5449 metrics: - type: main_score value: 38.1364789307295 - type: v_measure value: 38.1364789307295 - type: v_measure_std value: 2.0731634966352077 - task: type: PairClassification dataset: name: MTEB TwitterSemEval2015 (default) type: mteb/twittersemeval2015-pairclassification config: default split: test revision: 70970daeab8776df92f5ea462b6173c0b46fd2d1 metrics: - type: cosine_accuracy value: 82.66674614054956 - type: cosine_accuracy_threshold value: 79.80123162269592 - type: cosine_ap value: 63.28209719072804 - type: cosine_f1 value: 60.16389710903711 - type: cosine_f1_threshold value: 72.22893834114075 - type: cosine_precision value: 52.90232185748599 - type: cosine_recall value: 69.73614775725594 - type: dot_accuracy value: 82.66674614054956 - type: dot_accuracy_threshold value: 79.8012375831604 - type: dot_ap value: 63.282103870645166 - type: dot_f1 value: 60.16389710903711 - type: dot_f1_threshold value: 72.22894430160522 - type: dot_precision value: 52.90232185748599 - type: dot_recall value: 69.73614775725594 - type: euclidean_accuracy value: 82.66674614054956 - type: euclidean_accuracy_threshold value: 63.55905532836914 - type: euclidean_ap value: 63.282095399953164 - type: euclidean_f1 value: 60.16389710903711 - type: euclidean_f1_threshold value: 74.5265781879425 - type: euclidean_precision value: 52.90232185748599 - type: euclidean_recall value: 69.73614775725594 - type: main_score value: 63.282103870645166 - type: manhattan_accuracy value: 82.74423317637242 - type: manhattan_accuracy_threshold value: 1415.380859375 - type: manhattan_ap value: 63.26931757839598 - type: manhattan_f1 value: 60.11014948859166 - type: manhattan_f1_threshold value: 1632.522201538086 - type: manhattan_precision value: 52.359506559624045 - type: manhattan_recall value: 70.55408970976254 - type: max_ap value: 63.282103870645166 - type: max_f1 value: 60.16389710903711 - type: max_precision value: 52.90232185748599 - type: max_recall value: 70.55408970976254 - type: similarity_accuracy value: 82.66674614054956 - type: similarity_accuracy_threshold value: 79.80123162269592 - type: similarity_ap 
value: 63.28209719072804 - type: similarity_f1 value: 60.16389710903711 - type: similarity_f1_threshold value: 72.22893834114075 - type: similarity_precision value: 52.90232185748599 - type: similarity_recall value: 69.73614775725594 - task: type: PairClassification dataset: name: MTEB TwitterURLCorpus (default) type: mteb/twitterurlcorpus-pairclassification config: default split: test revision: 8b6510b0b1fa4e4c4f879467980e9be563ec1cdf metrics: - type: cosine_accuracy value: 88.10105949470253 - type: cosine_accuracy_threshold value: 68.95147562026978 - type: cosine_ap value: 84.65516103854583 - type: cosine_f1 value: 76.54581123301605 - type: cosine_f1_threshold value: 63.92929553985596 - type: cosine_precision value: 72.46526344751685 - type: cosine_recall value: 81.11333538651063 - type: dot_accuracy value: 88.10105949470253 - type: dot_accuracy_threshold value: 68.95147562026978 - type: dot_ap value: 84.65516301437592 - type: dot_f1 value: 76.54581123301605 - type: dot_f1_threshold value: 63.92928957939148 - type: dot_precision value: 72.46526344751685 - type: dot_recall value: 81.11333538651063 - type: euclidean_accuracy value: 88.10105949470253 - type: euclidean_accuracy_threshold value: 78.80169153213501 - type: euclidean_ap value: 84.65517268264233 - type: euclidean_f1 value: 76.54581123301605 - type: euclidean_f1_threshold value: 84.93610620498657 - type: euclidean_precision value: 72.46526344751685 - type: euclidean_recall value: 81.11333538651063 - type: main_score value: 84.65517268264233 - type: manhattan_accuracy value: 88.08941669577366 - type: manhattan_accuracy_threshold value: 1739.3169403076172 - type: manhattan_ap value: 84.64592398855694 - type: manhattan_f1 value: 76.62890540443034 - type: manhattan_f1_threshold value: 1861.344337463379 - type: manhattan_precision value: 72.09775967413442 - type: manhattan_recall value: 81.76778564829073 - type: max_ap value: 84.65517268264233 - type: max_f1 value: 76.62890540443034 - type: max_precision value: 72.46526344751685 - type: max_recall value: 81.76778564829073 - type: similarity_accuracy value: 88.10105949470253 - type: similarity_accuracy_threshold value: 68.95147562026978 - type: similarity_ap value: 84.65516103854583 - type: similarity_f1 value: 76.54581123301605 - type: similarity_f1_threshold value: 63.92929553985596 - type: similarity_precision value: 72.46526344751685 - type: similarity_recall value: 81.11333538651063 --- <h1 align="center">Snowflake's Arctic-embed-m-v1.5</h1> <h4 align="center"> <p> <a href=#news>News</a> | <a href=#this-model>This Model</a> | <a href=#usage>Usage</a> | <a href="#faq">FAQ</a> | <a href="#contact">Contact</a> | <a href="#license">License</a> | <a href="#acknowledgement">Acknowledgement</a> <p> </h4> <img referrerpolicy="no-referrer-when-downgrade" src="https://static.scarf.sh/a.png?x-pxid=8ab1f2d9-8425-4212-9bf3-717f7ac637e4" /> ## News 12/11/2024: Release of [Technical Report for 2.0 model](https://arxiv.org/abs/2412.04506) 12/04/2024: Release of [L-2.0](https://huggingface.co/Snowflake/snowflake-arctic-embed-l-v2.0) and [M-2.0](https://huggingface.co/Snowflake/snowflake-arctic-embed-m-v2.0) 07/26/2024: Release preprint [[2407.18887] Embedding And Clustering Your Data Can Improve Contrastive Pretraining](https://arxiv.org/abs/2407.18887) on arXiv. 07/18/2024: Release of `snowflake-arctic-embed-m-v1.5`, capable of producing highly compressible embedding vectors that preserve quality even when squished as small as 128 bytes per vector. 
Details about the development of this model are available in the [launch post on the Snowflake engineering blog](https://www.snowflake.com/engineering-blog/arctic-embed-m-v1-5-enterprise-retrieval/). 05/10/2024: Release of the [technical report on Arctic Embed](https://arxiv.org/abs/2405.05374) 04/16/2024: Original release of the `snowflake-arctic-embed` family of text embedding models. ## This Model This model is an updated version of [snowflake-arctic-embed-m](https://huggingface.co/Snowflake/snowflake-arctic-embed-m/) designed to improve embedding vector compressibility. This model achieves slightly higher performance overall without compression, and it is additionally capable of retaining most of its retrieval quality even down to 128-byte embedding vectors through a combination of [Matryoshka Representation Learning (MRL)](https://arxiv.org/abs/2205.13147) and uniform scalar quantization. | Model Name | MTEB Retrieval Score (NDCG @ 10) | |:------------------------------------------------------------------------------------------------|:---------------------------------| | [snowflake-arctic-embed-m-v1.5](https://huggingface.co/Snowflake/snowflake-arctic-embed-m-v1.5) | 55.14 | | [snowflake-arctic-embed-m](https://huggingface.co/Snowflake/snowflake-arctic-embed-m/) | 54.91 | Compared to several other models trained with MRL to produce 256-dimensional embedding vectors, `snowflake-arctic-embed-m-v1.5` retains a higher degree of original model quality and delivers better retrieval quality on the MTEB Retrieval benchmark. | Model | Model Parameters | MTEB Retrieval Score at 256 Dimensions (fraction of arctic-embed-m-v1.5) | |:------------------------------|:-------------------|:---------------------------------------------------------------------------| | Snowflake arctic-embed-m-v1.5 | 109M | 54.2 (100%) | | Google gecko | 1200M | 52.4 (97%) | | OpenAI text-embedding-3-large | Not Published | 51.7 (95%) | | Nomic nomic-embed-text-v1.5 | 138M | 50.8 (94%) | Additionally, this model was designed to pair well with a corpus-independent scalar quantization scheme to achieve great performance even with as little as 128 bytes per vector (24x compression compared to 768-dimensional vectors stored in float32). | Model Version | Dimensionality | Scalar Quantization | Bytes Per Vector (fraction of baseline) | MTEB Retrieval Score (fraction of baseline) | Vectors Per GB (improvement over baseline) | |:----------------|-----------------:|:----------------------|:------------------------------------------|:----------------------------------------------|:---------------------------------------------| | v1 | 768 | None (float32) | 3072 (100%) | 54.9 (100%) | 0.33M (1.0x) | | v1 | 768 | int8 | 768 (25%) | 54.9 (100%) | 1.3M (4x) | | v1.5 | 768 | int8 | 768 (25%) | 55.1 (100%) | 1.3M (4x) | | v1.5 | 256 | int8 | 256 (8.3%) | 54.2 (99%) | 3.9M (12x) | | v1.5 | 256 | int4 | 128 (4.2%) | 53.7 (98%) | 7.8M (24x) | NOTE: Good uniform scalar quantization ranges to use with this model (and which were used in the eval above) are -0.18 to +0.18 for 4-bit and -0.3 to +0.3 for 8-bit. For a detailed walkthrough of using integer quantization with `snowflake-arctic-embed-m-v1.5`, check out our [example notebook on GitHub](https://github.com/Snowflake-Labs/arctic-embed/tree/main/compressed_embeddings_examples/score_arctic_embed_m_v1dot5_with_quantization.ipynb). ## Usage ### Using Sentence Transformers You can use the sentence-transformers package to use any of the snowflake-arctic-embed models. 
Here's an example for `snowflake-arctic-embed-m-v1.5`. ```python import torch from sentence_transformers import SentenceTransformer from torch.nn.functional import normalize # Model constant. MODEL_ID = "Snowflake/snowflake-arctic-embed-m-v1.5" # Your queries and docs. queries = ['what is snowflake?', 'Where can I get the best tacos?'] documents = ['The Data Cloud!', 'Mexico City of Course!'] # Load the model. model = SentenceTransformer(MODEL_ID) # Generate text embeddings. query_embeddings = model.encode(queries, prompt_name="query") document_embeddings = model.encode(documents) # Scores via dotproduct. scores = query_embeddings @ document_embeddings.T # Pretty-print the results. for query, query_scores in zip(queries, scores): doc_score_pairs = list(zip(documents, query_scores)) doc_score_pairs = sorted(doc_score_pairs, key=lambda x: x[1], reverse=True) print(f'Query: "{query}"') for document, score in doc_score_pairs: print(f'Score: {score:.4f} | Document: "{document}"') print() #### OUTPUT #### # Query: "what is snowflake?" # Score: 0.3521 | Document: "The Data Cloud!" # Score: 0.2358 | Document: "Mexico City of Course!" # Query: "Where can I get the best tacos?" # Score: 0.3884 | Document: "Mexico City of Course!" # Score: 0.2389 | Document: "The Data Cloud!" # #### Variation: Truncated Embeddings #### query_embeddings_256 = normalize(torch.from_numpy(query_embeddings)[:, :256]) document_embeddings_256 = normalize(torch.from_numpy(document_embeddings)[:, :256]) scores_256 = query_embeddings_256 @ document_embeddings_256.T # Pretty-print the results. for query, query_scores in zip(queries, scores_256): doc_score_pairs = sorted(zip(documents, query_scores), key=lambda x: x[1], reverse=True) print(f'Query: "{query}"') for document, score in doc_score_pairs: print(f'Score: {score:.4f} | Document: "{document}"') print() #### OUTPUT #### # Query: "what is snowflake?" # Score: 0.3852 | Document: "The Data Cloud!" # Score: 0.2721 | Document: "Mexico City of Course!" # Query: "Where can I get the best tacos?" # Score: 0.4337 | Document: "Mexico City of Course!" # Score: 0.2886 | Document: "The Data Cloud!" # ``` ### Using Huggingface transformers You can use the transformers package to use a snowflake-arctic-embed model, too. For optimal retrieval quality, remember to use the CLS token for embeddings and to use the query prefix below (just on the query). ```python import torch from torch.nn.functional import normalize from transformers import AutoModel, AutoTokenizer # Model constants. MODEL_ID = "Snowflake/snowflake-arctic-embed-m-v1.5" QUERY_PREFIX = 'Represent this sentence for searching relevant passages: ' # Your queries and docs. queries = ['what is snowflake?', 'Where can I get the best tacos?'] documents = ['The Data Cloud!', 'Mexico City of Course!'] # Load the model and tokenizer. tokenizer = AutoTokenizer.from_pretrained(MODEL_ID) model = AutoModel.from_pretrained(MODEL_ID, add_pooling_layer=False) model.eval() # Add query prefix and tokenize queries and docs. queries_with_prefix = [f"{QUERY_PREFIX}{q}" for q in queries] query_tokens = tokenizer(queries_with_prefix, padding=True, truncation=True, return_tensors='pt', max_length=512) document_tokens = tokenizer(documents, padding=True, truncation=True, return_tensors='pt', max_length=512) # Use the model to generate text embeddings. with torch.inference_mode(): query_embeddings = model(**query_tokens)[0][:, 0] document_embeddings = model(**document_tokens)[0][:, 0] # Remember to normalize embeddings. 
query_embeddings = normalize(query_embeddings) document_embeddings = normalize(document_embeddings) # Scores via dotproduct. scores = query_embeddings @ document_embeddings.T # Pretty-print the results. for query, query_scores in zip(queries, scores): doc_score_pairs = list(zip(documents, query_scores)) doc_score_pairs = sorted(doc_score_pairs, key=lambda x: x[1], reverse=True) print(f'Query: "{query}"') for document, score in doc_score_pairs: print(f'Score: {score:.4f} | Document: "{document}"') print() #### OUTPUT #### # Query: "what is snowflake?" # Score: 0.3521 | Document: "The Data Cloud!" # Score: 0.2358 | Document: "Mexico City of Course!" # Query: "Where can I get the best tacos?" # Score: 0.3884 | Document: "Mexico City of Course!" # Score: 0.2389 | Document: "The Data Cloud!" # #### Variation: Truncated Embeddings #### query_embeddings_256 = normalize(query_embeddings[:, :256]) document_embeddings_256 = normalize(document_embeddings[:, :256]) scores_256 = query_embeddings_256 @ document_embeddings_256.T # Pretty-print the results. for query, query_scores in zip(queries, scores_256): doc_score_pairs = sorted(zip(documents, query_scores), key=lambda x: x[1], reverse=True) print(f'Query: "{query}"') for document, score in doc_score_pairs: print(f'Score: {score:.4f} | Document: "{document}"') print() #### OUTPUT #### # Query: "what is snowflake?" # Score: 0.3852 | Document: "The Data Cloud!" # Score: 0.2721 | Document: "Mexico City of Course!" # Query: "Where can I get the best tacos?" # Score: 0.4337 | Document: "Mexico City of Course!" # Score: 0.2886 | Document: "The Data Cloud!" # ``` ### Using Transformers.js If you haven't already, you can install the [Transformers.js](https://huggingface.co/docs/transformers.js) JavaScript library from [NPM](https://www.npmjs.com/package/@xenova/transformers) by running: ```bash npm i @xenova/transformers ``` You can then use the model to compute embeddings as follows: ```js import { pipeline, dot } from '@xenova/transformers'; // Create feature extraction pipeline const extractor = await pipeline('feature-extraction', 'Snowflake/snowflake-arctic-embed-m-v1.5', { quantized: false, // Comment out this line to use the quantized version }); // Generate sentence embeddings const sentences = [ 'Represent this sentence for searching relevant passages: Where can I get the best tacos?', 'The Data Cloud!', 'Mexico City of Course!', ] const output = await extractor(sentences, { normalize: true, pooling: 'cls' }); // Compute similarity scores const [source_embeddings, ...document_embeddings ] = output.tolist(); const similarities = document_embeddings.map(x => dot(source_embeddings, x)); console.log(similarities); // [0.15664823859882132, 0.24481869975470627] ``` ### Compressing to 128 bytes This model is designed to generate embeddings which compress well down to 128 bytes via a two-part compression scheme: 1. Truncation and renormalization to 256 dimensions (a la Matryoshka Representation Learning, see [the original paper for reference](https://arxiv.org/abs/2205.13147)). 2. 4-bit uniform scalar quantization of all 256 values to the same range (-0.18 to +0.18). - For 8-bit uniform scalar quantization, the slightly wider range -0.3 to +0.3 tends to work slightly better given how much more granular 8-bit quantization is. For in-depth examples, check out our [arctic-embed GitHub repository](https://github.com/Snowflake-Labs/arctic-embed). 
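To make the two-part scheme above concrete, here is a minimal NumPy sketch of compressing a single 768-dimensional float32 embedding down to 128 bytes. The quantization ranges (-0.18 to +0.18 for 4-bit, -0.3 to +0.3 for 8-bit) come from the note earlier in this card; the function names, the linear level mapping, and the nibble-packing layout are illustrative assumptions rather than the exact implementation in the example notebook linked above, which remains the authoritative reference.

```python
import numpy as np

def compress_to_128_bytes(embedding: np.ndarray, lo: float = -0.18, hi: float = 0.18) -> np.ndarray:
    """Illustrative sketch: truncate to 256 dims, renormalize, then 4-bit uniform scalar quantization."""
    # 1. Matryoshka-style truncation and renormalization.
    truncated = embedding[:256].astype(np.float32)
    truncated /= np.linalg.norm(truncated)

    # 2. Clip to the recommended range and map linearly onto the 16 integer levels 0..15.
    clipped = np.clip(truncated, lo, hi)
    levels = np.round((clipped - lo) / (hi - lo) * 15).astype(np.uint8)

    # Pack two 4-bit levels per byte: 256 values -> 128 bytes.
    return (levels[0::2] << 4) | levels[1::2]

def decompress_from_128_bytes(packed: np.ndarray, lo: float = -0.18, hi: float = 0.18) -> np.ndarray:
    """Invert the packing and map the 4-bit levels back to approximate float values."""
    levels = np.empty(256, dtype=np.uint8)
    levels[0::2] = packed >> 4
    levels[1::2] = packed & 0x0F
    return levels.astype(np.float32) / 15 * (hi - lo) + lo

# Round-trip a random unit vector standing in for a real 768-dim embedding.
vector = np.random.randn(768).astype(np.float32)
vector /= np.linalg.norm(vector)
packed = compress_to_128_bytes(vector)
approx = decompress_from_128_bytes(packed)
truncated = vector[:256] / np.linalg.norm(vector[:256])
print(packed.nbytes)              # 128
print(float(approx @ truncated))  # typically close to 1.0
```

At query time you would typically decompress (or score directly in the integer domain), renormalize, and rank documents by dot product exactly as in the usage examples above.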
## FAQ TBD ## Contact Feel free to open an issue or pull request if you have any questions or suggestions about this project. You can also email Daniel Campos ([email protected]). ## License Arctic is licensed under the [Apache-2.0 license](https://www.apache.org/licenses/LICENSE-2.0). The released models can be used for commercial purposes free of charge. ## Acknowledgement We want to thank the open-source community, which has provided the great building blocks upon which we could make our models and the open models we build on top of; their work is what makes releases like these possible. We thank our modeling engineers, Danmei Xu, Luke Merrick, Gaurav Nuti, and Daniel Campos, for making these great models possible. We thank our leadership, Himabindu Pucha, Kelvin So, Vivek Raghunathan, and Sridhar Ramaswamy, for supporting this work. Finally, we thank the researchers who created the BEIR and MTEB benchmarks. It is largely thanks to their tireless work to define what better looks like that we could improve model performance.
[ "BIOSSES", "SCIFACT" ]
OrcaDB/gte-base-en-v1.5
OrcaDB
sentence-similarity
[ "transformers", "safetensors", "new", "feature-extraction", "sentence-transformers", "gte", "mteb", "transformers.js", "sentence-similarity", "custom_code", "en", "arxiv:2407.19669", "arxiv:2308.03281", "license:apache-2.0", "model-index", "text-embeddings-inference", "endpoints_compatible", "region:us" ]
"2024-11-08T19:45:52Z"
2024-11-08T22:34:51+00:00
62,202
0
--- language: - en library_name: transformers license: apache-2.0 tags: - sentence-transformers - gte - mteb - transformers.js - sentence-similarity model-index: - name: gte-base-en-v1.5 results: - task: type: Classification dataset: name: MTEB AmazonCounterfactualClassification (en) type: mteb/amazon_counterfactual config: en split: test revision: e8379541af4e31359cca9fbcf4b00f2671dba205 metrics: - type: accuracy value: 74.7910447761194 - type: ap value: 37.053785713650626 - type: f1 value: 68.51101510998551 - task: type: Classification dataset: name: MTEB AmazonPolarityClassification type: mteb/amazon_polarity config: default split: test revision: e2d317d38cd51312af73b3d32a06d1a08b442046 metrics: - type: accuracy value: 93.016875 - type: ap value: 89.17750268426342 - type: f1 value: 92.9970977240524 - task: type: Classification dataset: name: MTEB AmazonReviewsClassification (en) type: mteb/amazon_reviews_multi config: en split: test revision: 1399c76144fd37290681b995c656ef9b2e06e26d metrics: - type: accuracy value: 53.312000000000005 - type: f1 value: 52.98175784163017 - task: type: Retrieval dataset: name: MTEB ArguAna type: mteb/arguana config: default split: test revision: c22ab2a51041ffd869aaddef7af8d8215647e41a metrics: - type: map_at_1 value: 38.193 - type: map_at_10 value: 54.848 - type: map_at_100 value: 55.388000000000005 - type: map_at_1000 value: 55.388999999999996 - type: map_at_3 value: 50.427 - type: map_at_5 value: 53.105000000000004 - type: mrr_at_1 value: 39.047 - type: mrr_at_10 value: 55.153 - type: mrr_at_100 value: 55.686 - type: mrr_at_1000 value: 55.688 - type: mrr_at_3 value: 50.676 - type: mrr_at_5 value: 53.417 - type: ndcg_at_1 value: 38.193 - type: ndcg_at_10 value: 63.486 - type: ndcg_at_100 value: 65.58 - type: ndcg_at_1000 value: 65.61 - type: ndcg_at_3 value: 54.494 - type: ndcg_at_5 value: 59.339 - type: precision_at_1 value: 38.193 - type: precision_at_10 value: 9.075 - type: precision_at_100 value: 0.9939999999999999 - type: precision_at_1000 value: 0.1 - type: precision_at_3 value: 22.096 - type: precision_at_5 value: 15.619 - type: recall_at_1 value: 38.193 - type: recall_at_10 value: 90.754 - type: recall_at_100 value: 99.431 - type: recall_at_1000 value: 99.644 - type: recall_at_3 value: 66.28699999999999 - type: recall_at_5 value: 78.094 - task: type: Clustering dataset: name: MTEB ArxivClusteringP2P type: mteb/arxiv-clustering-p2p config: default split: test revision: a122ad7f3f0291bf49cc6f4d32aa80929df69d5d metrics: - type: v_measure value: 47.508221208908964 - task: type: Clustering dataset: name: MTEB ArxivClusteringS2S type: mteb/arxiv-clustering-s2s config: default split: test revision: f910caf1a6075f7329cdf8c1a6135696f37dbd53 metrics: - type: v_measure value: 42.04668382560096 - task: type: Reranking dataset: name: MTEB AskUbuntuDupQuestions type: mteb/askubuntudupquestions-reranking config: default split: test revision: 2000358ca161889fa9c082cb41daa8dcfb161a54 metrics: - type: map value: 61.828759903716815 - type: mrr value: 74.37343358395991 - task: type: STS dataset: name: MTEB BIOSSES type: mteb/biosses-sts config: default split: test revision: d3fb88f8f02e40887cd149695127462bbcf29b4a metrics: - type: cos_sim_pearson value: 85.03673698773017 - type: cos_sim_spearman value: 83.6470866785058 - type: euclidean_pearson value: 82.64048673096565 - type: euclidean_spearman value: 83.63142367101115 - type: manhattan_pearson value: 82.71493099760228 - type: manhattan_spearman value: 83.60491704294326 - task: type: Classification dataset: name: 
MTEB Banking77Classification type: mteb/banking77 config: default split: test revision: 0fd18e25b25c072e09e0d92ab615fda904d66300 metrics: - type: accuracy value: 86.73376623376623 - type: f1 value: 86.70294049278262 - task: type: Clustering dataset: name: MTEB BiorxivClusteringP2P type: mteb/biorxiv-clustering-p2p config: default split: test revision: 65b79d1d13f80053f67aca9498d9402c2d9f1f40 metrics: - type: v_measure value: 40.31923804167062 - task: type: Clustering dataset: name: MTEB BiorxivClusteringS2S type: mteb/biorxiv-clustering-s2s config: default split: test revision: 258694dd0231531bc1fd9de6ceb52a0853c6d908 metrics: - type: v_measure value: 37.552547125348454 - task: type: Retrieval dataset: name: MTEB CQADupstackAndroidRetrieval type: mteb/cqadupstack-android config: default split: test revision: f46a197baaae43b4f621051089b82a364682dfeb metrics: - type: map_at_1 value: 30.567 - type: map_at_10 value: 41.269 - type: map_at_100 value: 42.689 - type: map_at_1000 value: 42.84 - type: map_at_3 value: 37.567 - type: map_at_5 value: 39.706 - type: mrr_at_1 value: 37.053000000000004 - type: mrr_at_10 value: 46.900999999999996 - type: mrr_at_100 value: 47.662 - type: mrr_at_1000 value: 47.713 - type: mrr_at_3 value: 43.801 - type: mrr_at_5 value: 45.689 - type: ndcg_at_1 value: 37.053000000000004 - type: ndcg_at_10 value: 47.73 - type: ndcg_at_100 value: 53.128 - type: ndcg_at_1000 value: 55.300000000000004 - type: ndcg_at_3 value: 42.046 - type: ndcg_at_5 value: 44.782 - type: precision_at_1 value: 37.053000000000004 - type: precision_at_10 value: 9.142 - type: precision_at_100 value: 1.485 - type: precision_at_1000 value: 0.197 - type: precision_at_3 value: 20.076 - type: precision_at_5 value: 14.535 - type: recall_at_1 value: 30.567 - type: recall_at_10 value: 60.602999999999994 - type: recall_at_100 value: 83.22800000000001 - type: recall_at_1000 value: 96.696 - type: recall_at_3 value: 44.336999999999996 - type: recall_at_5 value: 51.949 - task: type: Retrieval dataset: name: MTEB CQADupstackEnglishRetrieval type: mteb/cqadupstack-english config: default split: test revision: ad9991cb51e31e31e430383c75ffb2885547b5f0 metrics: - type: map_at_1 value: 28.538000000000004 - type: map_at_10 value: 38.757999999999996 - type: map_at_100 value: 40.129 - type: map_at_1000 value: 40.262 - type: map_at_3 value: 35.866 - type: map_at_5 value: 37.417 - type: mrr_at_1 value: 36.051 - type: mrr_at_10 value: 44.868 - type: mrr_at_100 value: 45.568999999999996 - type: mrr_at_1000 value: 45.615 - type: mrr_at_3 value: 42.558 - type: mrr_at_5 value: 43.883 - type: ndcg_at_1 value: 36.051 - type: ndcg_at_10 value: 44.584 - type: ndcg_at_100 value: 49.356 - type: ndcg_at_1000 value: 51.39 - type: ndcg_at_3 value: 40.389 - type: ndcg_at_5 value: 42.14 - type: precision_at_1 value: 36.051 - type: precision_at_10 value: 8.446 - type: precision_at_100 value: 1.411 - type: precision_at_1000 value: 0.19 - type: precision_at_3 value: 19.639 - type: precision_at_5 value: 13.796 - type: recall_at_1 value: 28.538000000000004 - type: recall_at_10 value: 54.99000000000001 - type: recall_at_100 value: 75.098 - type: recall_at_1000 value: 87.848 - type: recall_at_3 value: 42.236000000000004 - type: recall_at_5 value: 47.377 - task: type: Retrieval dataset: name: MTEB CQADupstackGamingRetrieval type: mteb/cqadupstack-gaming config: default split: test revision: 4885aa143210c98657558c04aaf3dc47cfb54340 metrics: - type: map_at_1 value: 37.188 - type: map_at_10 value: 50.861000000000004 - type: map_at_100 value: 51.917 - 
type: map_at_1000 value: 51.964999999999996 - type: map_at_3 value: 47.144000000000005 - type: map_at_5 value: 49.417 - type: mrr_at_1 value: 42.571 - type: mrr_at_10 value: 54.086999999999996 - type: mrr_at_100 value: 54.739000000000004 - type: mrr_at_1000 value: 54.762 - type: mrr_at_3 value: 51.285000000000004 - type: mrr_at_5 value: 53.0 - type: ndcg_at_1 value: 42.571 - type: ndcg_at_10 value: 57.282 - type: ndcg_at_100 value: 61.477000000000004 - type: ndcg_at_1000 value: 62.426 - type: ndcg_at_3 value: 51.0 - type: ndcg_at_5 value: 54.346000000000004 - type: precision_at_1 value: 42.571 - type: precision_at_10 value: 9.467 - type: precision_at_100 value: 1.2550000000000001 - type: precision_at_1000 value: 0.13799999999999998 - type: precision_at_3 value: 23.114 - type: precision_at_5 value: 16.250999999999998 - type: recall_at_1 value: 37.188 - type: recall_at_10 value: 73.068 - type: recall_at_100 value: 91.203 - type: recall_at_1000 value: 97.916 - type: recall_at_3 value: 56.552 - type: recall_at_5 value: 64.567 - task: type: Retrieval dataset: name: MTEB CQADupstackGisRetrieval type: mteb/cqadupstack-gis config: default split: test revision: 5003b3064772da1887988e05400cf3806fe491f2 metrics: - type: map_at_1 value: 25.041000000000004 - type: map_at_10 value: 33.86 - type: map_at_100 value: 34.988 - type: map_at_1000 value: 35.064 - type: map_at_3 value: 31.049 - type: map_at_5 value: 32.845 - type: mrr_at_1 value: 26.893 - type: mrr_at_10 value: 35.594 - type: mrr_at_100 value: 36.617 - type: mrr_at_1000 value: 36.671 - type: mrr_at_3 value: 33.051 - type: mrr_at_5 value: 34.61 - type: ndcg_at_1 value: 26.893 - type: ndcg_at_10 value: 38.674 - type: ndcg_at_100 value: 44.178 - type: ndcg_at_1000 value: 46.089999999999996 - type: ndcg_at_3 value: 33.485 - type: ndcg_at_5 value: 36.402 - type: precision_at_1 value: 26.893 - type: precision_at_10 value: 5.989 - type: precision_at_100 value: 0.918 - type: precision_at_1000 value: 0.11100000000000002 - type: precision_at_3 value: 14.2 - type: precision_at_5 value: 10.26 - type: recall_at_1 value: 25.041000000000004 - type: recall_at_10 value: 51.666000000000004 - type: recall_at_100 value: 76.896 - type: recall_at_1000 value: 91.243 - type: recall_at_3 value: 38.035999999999994 - type: recall_at_5 value: 44.999 - task: type: Retrieval dataset: name: MTEB CQADupstackMathematicaRetrieval type: mteb/cqadupstack-mathematica config: default split: test revision: 90fceea13679c63fe563ded68f3b6f06e50061de metrics: - type: map_at_1 value: 15.909999999999998 - type: map_at_10 value: 23.901 - type: map_at_100 value: 25.165 - type: map_at_1000 value: 25.291000000000004 - type: map_at_3 value: 21.356 - type: map_at_5 value: 22.816 - type: mrr_at_1 value: 20.025000000000002 - type: mrr_at_10 value: 28.382 - type: mrr_at_100 value: 29.465000000000003 - type: mrr_at_1000 value: 29.535 - type: mrr_at_3 value: 25.933 - type: mrr_at_5 value: 27.332 - type: ndcg_at_1 value: 20.025000000000002 - type: ndcg_at_10 value: 29.099000000000004 - type: ndcg_at_100 value: 35.127 - type: ndcg_at_1000 value: 38.096000000000004 - type: ndcg_at_3 value: 24.464 - type: ndcg_at_5 value: 26.709 - type: precision_at_1 value: 20.025000000000002 - type: precision_at_10 value: 5.398 - type: precision_at_100 value: 0.9690000000000001 - type: precision_at_1000 value: 0.13699999999999998 - type: precision_at_3 value: 11.774 - type: precision_at_5 value: 8.632 - type: recall_at_1 value: 15.909999999999998 - type: recall_at_10 value: 40.672000000000004 - type: recall_at_100 
value: 66.855 - type: recall_at_1000 value: 87.922 - type: recall_at_3 value: 28.069 - type: recall_at_5 value: 33.812 - task: type: Retrieval dataset: name: MTEB CQADupstackPhysicsRetrieval type: mteb/cqadupstack-physics config: default split: test revision: 79531abbd1fb92d06c6d6315a0cbbbf5bb247ea4 metrics: - type: map_at_1 value: 30.175 - type: map_at_10 value: 41.36 - type: map_at_100 value: 42.701 - type: map_at_1000 value: 42.817 - type: map_at_3 value: 37.931 - type: map_at_5 value: 39.943 - type: mrr_at_1 value: 35.611 - type: mrr_at_10 value: 46.346 - type: mrr_at_100 value: 47.160000000000004 - type: mrr_at_1000 value: 47.203 - type: mrr_at_3 value: 43.712 - type: mrr_at_5 value: 45.367000000000004 - type: ndcg_at_1 value: 35.611 - type: ndcg_at_10 value: 47.532000000000004 - type: ndcg_at_100 value: 53.003 - type: ndcg_at_1000 value: 55.007 - type: ndcg_at_3 value: 42.043 - type: ndcg_at_5 value: 44.86 - type: precision_at_1 value: 35.611 - type: precision_at_10 value: 8.624 - type: precision_at_100 value: 1.332 - type: precision_at_1000 value: 0.169 - type: precision_at_3 value: 20.083000000000002 - type: precision_at_5 value: 14.437 - type: recall_at_1 value: 30.175 - type: recall_at_10 value: 60.5 - type: recall_at_100 value: 83.399 - type: recall_at_1000 value: 96.255 - type: recall_at_3 value: 45.448 - type: recall_at_5 value: 52.432 - task: type: Retrieval dataset: name: MTEB CQADupstackProgrammersRetrieval type: mteb/cqadupstack-programmers config: default split: test revision: 6184bc1440d2dbc7612be22b50686b8826d22b32 metrics: - type: map_at_1 value: 22.467000000000002 - type: map_at_10 value: 33.812999999999995 - type: map_at_100 value: 35.248000000000005 - type: map_at_1000 value: 35.359 - type: map_at_3 value: 30.316 - type: map_at_5 value: 32.233000000000004 - type: mrr_at_1 value: 28.310999999999996 - type: mrr_at_10 value: 38.979 - type: mrr_at_100 value: 39.937 - type: mrr_at_1000 value: 39.989999999999995 - type: mrr_at_3 value: 36.244 - type: mrr_at_5 value: 37.871 - type: ndcg_at_1 value: 28.310999999999996 - type: ndcg_at_10 value: 40.282000000000004 - type: ndcg_at_100 value: 46.22 - type: ndcg_at_1000 value: 48.507 - type: ndcg_at_3 value: 34.596 - type: ndcg_at_5 value: 37.267 - type: precision_at_1 value: 28.310999999999996 - type: precision_at_10 value: 7.831 - type: precision_at_100 value: 1.257 - type: precision_at_1000 value: 0.164 - type: precision_at_3 value: 17.275 - type: precision_at_5 value: 12.556999999999999 - type: recall_at_1 value: 22.467000000000002 - type: recall_at_10 value: 54.14099999999999 - type: recall_at_100 value: 79.593 - type: recall_at_1000 value: 95.063 - type: recall_at_3 value: 38.539 - type: recall_at_5 value: 45.403 - task: type: Retrieval dataset: name: MTEB CQADupstackRetrieval type: mteb/cqadupstack config: default split: test revision: 4ffe81d471b1924886b33c7567bfb200e9eec5c4 metrics: - type: map_at_1 value: 24.18591666666667 - type: map_at_10 value: 33.84258333333333 - type: map_at_100 value: 35.11391666666666 - type: map_at_1000 value: 35.23258333333333 - type: map_at_3 value: 30.764249999999997 - type: map_at_5 value: 32.52333333333334 - type: mrr_at_1 value: 28.54733333333333 - type: mrr_at_10 value: 37.81725 - type: mrr_at_100 value: 38.716499999999996 - type: mrr_at_1000 value: 38.77458333333333 - type: mrr_at_3 value: 35.157833333333336 - type: mrr_at_5 value: 36.69816666666667 - type: ndcg_at_1 value: 28.54733333333333 - type: ndcg_at_10 value: 39.51508333333334 - type: ndcg_at_100 value: 44.95316666666666 - type: 
ndcg_at_1000 value: 47.257083333333334 - type: ndcg_at_3 value: 34.205833333333324 - type: ndcg_at_5 value: 36.78266666666667 - type: precision_at_1 value: 28.54733333333333 - type: precision_at_10 value: 7.082583333333334 - type: precision_at_100 value: 1.1590833333333332 - type: precision_at_1000 value: 0.15516666666666662 - type: precision_at_3 value: 15.908750000000001 - type: precision_at_5 value: 11.505416666666669 - type: recall_at_1 value: 24.18591666666667 - type: recall_at_10 value: 52.38758333333333 - type: recall_at_100 value: 76.13666666666667 - type: recall_at_1000 value: 91.99066666666667 - type: recall_at_3 value: 37.78333333333334 - type: recall_at_5 value: 44.30141666666666 - task: type: Retrieval dataset: name: MTEB CQADupstackStatsRetrieval type: mteb/cqadupstack-stats config: default split: test revision: 65ac3a16b8e91f9cee4c9828cc7c335575432a2a metrics: - type: map_at_1 value: 21.975 - type: map_at_10 value: 29.781000000000002 - type: map_at_100 value: 30.847 - type: map_at_1000 value: 30.94 - type: map_at_3 value: 27.167 - type: map_at_5 value: 28.633999999999997 - type: mrr_at_1 value: 24.387 - type: mrr_at_10 value: 32.476 - type: mrr_at_100 value: 33.337 - type: mrr_at_1000 value: 33.403 - type: mrr_at_3 value: 29.881999999999998 - type: mrr_at_5 value: 31.339 - type: ndcg_at_1 value: 24.387 - type: ndcg_at_10 value: 34.596 - type: ndcg_at_100 value: 39.635 - type: ndcg_at_1000 value: 42.079 - type: ndcg_at_3 value: 29.516 - type: ndcg_at_5 value: 31.959 - type: precision_at_1 value: 24.387 - type: precision_at_10 value: 5.6129999999999995 - type: precision_at_100 value: 0.8909999999999999 - type: precision_at_1000 value: 0.117 - type: precision_at_3 value: 12.73 - type: precision_at_5 value: 9.171999999999999 - type: recall_at_1 value: 21.975 - type: recall_at_10 value: 46.826 - type: recall_at_100 value: 69.554 - type: recall_at_1000 value: 87.749 - type: recall_at_3 value: 33.016 - type: recall_at_5 value: 38.97 - task: type: Retrieval dataset: name: MTEB CQADupstackTexRetrieval type: mteb/cqadupstack-tex config: default split: test revision: 46989137a86843e03a6195de44b09deda022eec7 metrics: - type: map_at_1 value: 15.614 - type: map_at_10 value: 22.927 - type: map_at_100 value: 24.185000000000002 - type: map_at_1000 value: 24.319 - type: map_at_3 value: 20.596 - type: map_at_5 value: 21.854000000000003 - type: mrr_at_1 value: 18.858 - type: mrr_at_10 value: 26.535999999999998 - type: mrr_at_100 value: 27.582 - type: mrr_at_1000 value: 27.665 - type: mrr_at_3 value: 24.295 - type: mrr_at_5 value: 25.532 - type: ndcg_at_1 value: 18.858 - type: ndcg_at_10 value: 27.583000000000002 - type: ndcg_at_100 value: 33.635 - type: ndcg_at_1000 value: 36.647 - type: ndcg_at_3 value: 23.348 - type: ndcg_at_5 value: 25.257 - type: precision_at_1 value: 18.858 - type: precision_at_10 value: 5.158 - type: precision_at_100 value: 0.964 - type: precision_at_1000 value: 0.13999999999999999 - type: precision_at_3 value: 11.092 - type: precision_at_5 value: 8.1 - type: recall_at_1 value: 15.614 - type: recall_at_10 value: 37.916 - type: recall_at_100 value: 65.205 - type: recall_at_1000 value: 86.453 - type: recall_at_3 value: 26.137 - type: recall_at_5 value: 31.087999999999997 - task: type: Retrieval dataset: name: MTEB CQADupstackUnixRetrieval type: mteb/cqadupstack-unix config: default split: test revision: 6c6430d3a6d36f8d2a829195bc5dc94d7e063e53 metrics: - type: map_at_1 value: 23.078000000000003 - type: map_at_10 value: 31.941999999999997 - type: map_at_100 value: 
33.196999999999996 - type: map_at_1000 value: 33.303 - type: map_at_3 value: 28.927000000000003 - type: map_at_5 value: 30.707 - type: mrr_at_1 value: 26.866 - type: mrr_at_10 value: 35.557 - type: mrr_at_100 value: 36.569 - type: mrr_at_1000 value: 36.632 - type: mrr_at_3 value: 32.897999999999996 - type: mrr_at_5 value: 34.437 - type: ndcg_at_1 value: 26.866 - type: ndcg_at_10 value: 37.372 - type: ndcg_at_100 value: 43.248 - type: ndcg_at_1000 value: 45.632 - type: ndcg_at_3 value: 31.852999999999998 - type: ndcg_at_5 value: 34.582 - type: precision_at_1 value: 26.866 - type: precision_at_10 value: 6.511 - type: precision_at_100 value: 1.078 - type: precision_at_1000 value: 0.13899999999999998 - type: precision_at_3 value: 14.582999999999998 - type: precision_at_5 value: 10.634 - type: recall_at_1 value: 23.078000000000003 - type: recall_at_10 value: 50.334 - type: recall_at_100 value: 75.787 - type: recall_at_1000 value: 92.485 - type: recall_at_3 value: 35.386 - type: recall_at_5 value: 42.225 - task: type: Retrieval dataset: name: MTEB CQADupstackWebmastersRetrieval type: mteb/cqadupstack-webmasters config: default split: test revision: 160c094312a0e1facb97e55eeddb698c0abe3571 metrics: - type: map_at_1 value: 22.203999999999997 - type: map_at_10 value: 31.276 - type: map_at_100 value: 32.844 - type: map_at_1000 value: 33.062999999999995 - type: map_at_3 value: 27.733999999999998 - type: map_at_5 value: 29.64 - type: mrr_at_1 value: 27.272999999999996 - type: mrr_at_10 value: 36.083 - type: mrr_at_100 value: 37.008 - type: mrr_at_1000 value: 37.076 - type: mrr_at_3 value: 33.004 - type: mrr_at_5 value: 34.664 - type: ndcg_at_1 value: 27.272999999999996 - type: ndcg_at_10 value: 37.763000000000005 - type: ndcg_at_100 value: 43.566 - type: ndcg_at_1000 value: 46.356 - type: ndcg_at_3 value: 31.673000000000002 - type: ndcg_at_5 value: 34.501 - type: precision_at_1 value: 27.272999999999996 - type: precision_at_10 value: 7.470000000000001 - type: precision_at_100 value: 1.502 - type: precision_at_1000 value: 0.24 - type: precision_at_3 value: 14.756 - type: precision_at_5 value: 11.225 - type: recall_at_1 value: 22.203999999999997 - type: recall_at_10 value: 51.437999999999995 - type: recall_at_100 value: 76.845 - type: recall_at_1000 value: 94.38600000000001 - type: recall_at_3 value: 34.258 - type: recall_at_5 value: 41.512 - task: type: Retrieval dataset: name: MTEB CQADupstackWordpressRetrieval type: mteb/cqadupstack-wordpress config: default split: test revision: 4ffe81d471b1924886b33c7567bfb200e9eec5c4 metrics: - type: map_at_1 value: 17.474 - type: map_at_10 value: 26.362999999999996 - type: map_at_100 value: 27.456999999999997 - type: map_at_1000 value: 27.567999999999998 - type: map_at_3 value: 23.518 - type: map_at_5 value: 25.068 - type: mrr_at_1 value: 18.669 - type: mrr_at_10 value: 27.998 - type: mrr_at_100 value: 28.953 - type: mrr_at_1000 value: 29.03 - type: mrr_at_3 value: 25.230999999999998 - type: mrr_at_5 value: 26.654 - type: ndcg_at_1 value: 18.669 - type: ndcg_at_10 value: 31.684 - type: ndcg_at_100 value: 36.864999999999995 - type: ndcg_at_1000 value: 39.555 - type: ndcg_at_3 value: 26.057000000000002 - type: ndcg_at_5 value: 28.587 - type: precision_at_1 value: 18.669 - type: precision_at_10 value: 5.3420000000000005 - type: precision_at_100 value: 0.847 - type: precision_at_1000 value: 0.12 - type: precision_at_3 value: 11.583 - type: precision_at_5 value: 8.466 - type: recall_at_1 value: 17.474 - type: recall_at_10 value: 46.497 - type: recall_at_100 value: 
69.977 - type: recall_at_1000 value: 89.872 - type: recall_at_3 value: 31.385999999999996 - type: recall_at_5 value: 37.283 - task: type: Retrieval dataset: name: MTEB ClimateFEVER type: mteb/climate-fever config: default split: test revision: 47f2ac6acb640fc46020b02a5b59fdda04d39380 metrics: - type: map_at_1 value: 17.173 - type: map_at_10 value: 30.407 - type: map_at_100 value: 32.528 - type: map_at_1000 value: 32.698 - type: map_at_3 value: 25.523 - type: map_at_5 value: 28.038 - type: mrr_at_1 value: 38.958 - type: mrr_at_10 value: 51.515 - type: mrr_at_100 value: 52.214000000000006 - type: mrr_at_1000 value: 52.237 - type: mrr_at_3 value: 48.502 - type: mrr_at_5 value: 50.251000000000005 - type: ndcg_at_1 value: 38.958 - type: ndcg_at_10 value: 40.355000000000004 - type: ndcg_at_100 value: 47.68 - type: ndcg_at_1000 value: 50.370000000000005 - type: ndcg_at_3 value: 33.946 - type: ndcg_at_5 value: 36.057 - type: precision_at_1 value: 38.958 - type: precision_at_10 value: 12.508 - type: precision_at_100 value: 2.054 - type: precision_at_1000 value: 0.256 - type: precision_at_3 value: 25.581 - type: precision_at_5 value: 19.256999999999998 - type: recall_at_1 value: 17.173 - type: recall_at_10 value: 46.967 - type: recall_at_100 value: 71.47200000000001 - type: recall_at_1000 value: 86.238 - type: recall_at_3 value: 30.961 - type: recall_at_5 value: 37.539 - task: type: Retrieval dataset: name: MTEB DBPedia type: mteb/dbpedia config: default split: test revision: c0f706b76e590d620bd6618b3ca8efdd34e2d659 metrics: - type: map_at_1 value: 8.999 - type: map_at_10 value: 18.989 - type: map_at_100 value: 26.133 - type: map_at_1000 value: 27.666 - type: map_at_3 value: 13.918 - type: map_at_5 value: 16.473 - type: mrr_at_1 value: 66.25 - type: mrr_at_10 value: 74.161 - type: mrr_at_100 value: 74.516 - type: mrr_at_1000 value: 74.524 - type: mrr_at_3 value: 72.875 - type: mrr_at_5 value: 73.613 - type: ndcg_at_1 value: 54.37499999999999 - type: ndcg_at_10 value: 39.902 - type: ndcg_at_100 value: 44.212 - type: ndcg_at_1000 value: 51.62 - type: ndcg_at_3 value: 45.193 - type: ndcg_at_5 value: 42.541000000000004 - type: precision_at_1 value: 66.25 - type: precision_at_10 value: 30.425 - type: precision_at_100 value: 9.754999999999999 - type: precision_at_1000 value: 2.043 - type: precision_at_3 value: 48.25 - type: precision_at_5 value: 40.65 - type: recall_at_1 value: 8.999 - type: recall_at_10 value: 24.133 - type: recall_at_100 value: 49.138999999999996 - type: recall_at_1000 value: 72.639 - type: recall_at_3 value: 15.287999999999998 - type: recall_at_5 value: 19.415 - task: type: Classification dataset: name: MTEB EmotionClassification type: mteb/emotion config: default split: test revision: 4f58c6b202a23cf9a4da393831edf4f9183cad37 metrics: - type: accuracy value: 46.38999999999999 - type: f1 value: 41.444205512055234 - task: type: Retrieval dataset: name: MTEB FEVER type: mteb/fever config: default split: test revision: bea83ef9e8fb933d90a2f1d5515737465d613e12 metrics: - type: map_at_1 value: 87.35000000000001 - type: map_at_10 value: 92.837 - type: map_at_100 value: 92.996 - type: map_at_1000 value: 93.006 - type: map_at_3 value: 92.187 - type: map_at_5 value: 92.595 - type: mrr_at_1 value: 93.864 - type: mrr_at_10 value: 96.723 - type: mrr_at_100 value: 96.72500000000001 - type: mrr_at_1000 value: 96.72500000000001 - type: mrr_at_3 value: 96.64 - type: mrr_at_5 value: 96.71499999999999 - type: ndcg_at_1 value: 93.864 - type: ndcg_at_10 value: 94.813 - type: ndcg_at_100 value: 95.243 - 
type: ndcg_at_1000 value: 95.38600000000001 - type: ndcg_at_3 value: 94.196 - type: ndcg_at_5 value: 94.521 - type: precision_at_1 value: 93.864 - type: precision_at_10 value: 10.951 - type: precision_at_100 value: 1.1400000000000001 - type: precision_at_1000 value: 0.117 - type: precision_at_3 value: 35.114000000000004 - type: precision_at_5 value: 21.476 - type: recall_at_1 value: 87.35000000000001 - type: recall_at_10 value: 96.941 - type: recall_at_100 value: 98.397 - type: recall_at_1000 value: 99.21600000000001 - type: recall_at_3 value: 95.149 - type: recall_at_5 value: 96.131 - task: type: Retrieval dataset: name: MTEB FiQA2018 type: mteb/fiqa config: default split: test revision: 27a168819829fe9bcd655c2df245fb19452e8e06 metrics: - type: map_at_1 value: 24.476 - type: map_at_10 value: 40.11 - type: map_at_100 value: 42.229 - type: map_at_1000 value: 42.378 - type: map_at_3 value: 34.512 - type: map_at_5 value: 38.037 - type: mrr_at_1 value: 47.839999999999996 - type: mrr_at_10 value: 57.053 - type: mrr_at_100 value: 57.772 - type: mrr_at_1000 value: 57.799 - type: mrr_at_3 value: 54.552 - type: mrr_at_5 value: 56.011 - type: ndcg_at_1 value: 47.839999999999996 - type: ndcg_at_10 value: 48.650999999999996 - type: ndcg_at_100 value: 55.681000000000004 - type: ndcg_at_1000 value: 57.979 - type: ndcg_at_3 value: 43.923 - type: ndcg_at_5 value: 46.037 - type: precision_at_1 value: 47.839999999999996 - type: precision_at_10 value: 13.395000000000001 - type: precision_at_100 value: 2.0660000000000003 - type: precision_at_1000 value: 0.248 - type: precision_at_3 value: 29.064 - type: precision_at_5 value: 22.006 - type: recall_at_1 value: 24.476 - type: recall_at_10 value: 56.216 - type: recall_at_100 value: 81.798 - type: recall_at_1000 value: 95.48299999999999 - type: recall_at_3 value: 39.357 - type: recall_at_5 value: 47.802 - task: type: Retrieval dataset: name: MTEB HotpotQA type: mteb/hotpotqa config: default split: test revision: ab518f4d6fcca38d87c25209f94beba119d02014 metrics: - type: map_at_1 value: 42.728 - type: map_at_10 value: 57.737 - type: map_at_100 value: 58.531 - type: map_at_1000 value: 58.594 - type: map_at_3 value: 54.869 - type: map_at_5 value: 56.55 - type: mrr_at_1 value: 85.456 - type: mrr_at_10 value: 90.062 - type: mrr_at_100 value: 90.159 - type: mrr_at_1000 value: 90.16 - type: mrr_at_3 value: 89.37899999999999 - type: mrr_at_5 value: 89.81 - type: ndcg_at_1 value: 85.456 - type: ndcg_at_10 value: 67.755 - type: ndcg_at_100 value: 70.341 - type: ndcg_at_1000 value: 71.538 - type: ndcg_at_3 value: 63.735 - type: ndcg_at_5 value: 65.823 - type: precision_at_1 value: 85.456 - type: precision_at_10 value: 13.450000000000001 - type: precision_at_100 value: 1.545 - type: precision_at_1000 value: 0.16999999999999998 - type: precision_at_3 value: 38.861000000000004 - type: precision_at_5 value: 24.964 - type: recall_at_1 value: 42.728 - type: recall_at_10 value: 67.252 - type: recall_at_100 value: 77.265 - type: recall_at_1000 value: 85.246 - type: recall_at_3 value: 58.292 - type: recall_at_5 value: 62.41100000000001 - task: type: Classification dataset: name: MTEB ImdbClassification type: mteb/imdb config: default split: test revision: 3d86128a09e091d6018b6d26cad27f2739fc2db7 metrics: - type: accuracy value: 87.4836 - type: ap value: 82.29552224030336 - type: f1 value: 87.42791432227448 - task: type: Retrieval dataset: name: MTEB MSMARCO type: mteb/msmarco config: default split: dev revision: c5a29a104738b98a9e76336939199e264163d4a0 metrics: - type: map_at_1 value: 
23.015 - type: map_at_10 value: 35.621 - type: map_at_100 value: 36.809 - type: map_at_1000 value: 36.853 - type: map_at_3 value: 31.832 - type: map_at_5 value: 34.006 - type: mrr_at_1 value: 23.738999999999997 - type: mrr_at_10 value: 36.309999999999995 - type: mrr_at_100 value: 37.422 - type: mrr_at_1000 value: 37.461 - type: mrr_at_3 value: 32.592999999999996 - type: mrr_at_5 value: 34.736 - type: ndcg_at_1 value: 23.724999999999998 - type: ndcg_at_10 value: 42.617 - type: ndcg_at_100 value: 48.217999999999996 - type: ndcg_at_1000 value: 49.309 - type: ndcg_at_3 value: 34.905 - type: ndcg_at_5 value: 38.769 - type: precision_at_1 value: 23.724999999999998 - type: precision_at_10 value: 6.689 - type: precision_at_100 value: 0.9480000000000001 - type: precision_at_1000 value: 0.104 - type: precision_at_3 value: 14.89 - type: precision_at_5 value: 10.897 - type: recall_at_1 value: 23.015 - type: recall_at_10 value: 64.041 - type: recall_at_100 value: 89.724 - type: recall_at_1000 value: 98.00999999999999 - type: recall_at_3 value: 43.064 - type: recall_at_5 value: 52.31099999999999 - task: type: Classification dataset: name: MTEB MTOPDomainClassification (en) type: mteb/mtop_domain config: en split: test revision: d80d48c1eb48d3562165c59d59d0034df9fff0bf metrics: - type: accuracy value: 96.49794801641588 - type: f1 value: 96.28931114498003 - task: type: Classification dataset: name: MTEB MTOPIntentClassification (en) type: mteb/mtop_intent config: en split: test revision: ae001d0e6b1228650b7bd1c2c65fb50ad11a8aba metrics: - type: accuracy value: 82.81121751025992 - type: f1 value: 63.18740125901853 - task: type: Classification dataset: name: MTEB MassiveIntentClassification (en) type: mteb/amazon_massive_intent config: en split: test revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7 metrics: - type: accuracy value: 77.66644250168123 - type: f1 value: 74.93211186867839 - task: type: Classification dataset: name: MTEB MassiveScenarioClassification (en) type: mteb/amazon_massive_scenario config: en split: test revision: 7d571f92784cd94a019292a1f45445077d0ef634 metrics: - type: accuracy value: 81.77202420981843 - type: f1 value: 81.63681969283554 - task: type: Clustering dataset: name: MTEB MedrxivClusteringP2P type: mteb/medrxiv-clustering-p2p config: default split: test revision: e7a26af6f3ae46b30dde8737f02c07b1505bcc73 metrics: - type: v_measure value: 34.596687684870645 - task: type: Clustering dataset: name: MTEB MedrxivClusteringS2S type: mteb/medrxiv-clustering-s2s config: default split: test revision: 35191c8c0dca72d8ff3efcd72aa802307d469663 metrics: - type: v_measure value: 32.26965660101405 - task: type: Reranking dataset: name: MTEB MindSmallReranking type: mteb/mind_small config: default split: test revision: 3bdac13927fdc888b903db93b2ffdbd90b295a69 metrics: - type: map value: 31.33619694846802 - type: mrr value: 32.53719657720334 - task: type: Retrieval dataset: name: MTEB NFCorpus type: mteb/nfcorpus config: default split: test revision: ec0fa4fe99da2ff19ca1214b7966684033a58814 metrics: - type: map_at_1 value: 6.0729999999999995 - type: map_at_10 value: 13.245999999999999 - type: map_at_100 value: 16.747999999999998 - type: map_at_1000 value: 18.163 - type: map_at_3 value: 10.064 - type: map_at_5 value: 11.513 - type: mrr_at_1 value: 49.536 - type: mrr_at_10 value: 58.092 - type: mrr_at_100 value: 58.752 - type: mrr_at_1000 value: 58.78 - type: mrr_at_3 value: 56.398 - type: mrr_at_5 value: 57.389 - type: ndcg_at_1 value: 47.059 - type: ndcg_at_10 value: 35.881 - type: 
ndcg_at_100 value: 32.751999999999995 - type: ndcg_at_1000 value: 41.498000000000005 - type: ndcg_at_3 value: 42.518 - type: ndcg_at_5 value: 39.550999999999995 - type: precision_at_1 value: 49.536 - type: precision_at_10 value: 26.316 - type: precision_at_100 value: 8.084 - type: precision_at_1000 value: 2.081 - type: precision_at_3 value: 39.938 - type: precision_at_5 value: 34.056 - type: recall_at_1 value: 6.0729999999999995 - type: recall_at_10 value: 16.593 - type: recall_at_100 value: 32.883 - type: recall_at_1000 value: 64.654 - type: recall_at_3 value: 11.174000000000001 - type: recall_at_5 value: 13.528 - task: type: Retrieval dataset: name: MTEB NQ type: mteb/nq config: default split: test revision: b774495ed302d8c44a3a7ea25c90dbce03968f31 metrics: - type: map_at_1 value: 30.043 - type: map_at_10 value: 45.318999999999996 - type: map_at_100 value: 46.381 - type: map_at_1000 value: 46.412 - type: map_at_3 value: 40.941 - type: map_at_5 value: 43.662 - type: mrr_at_1 value: 33.98 - type: mrr_at_10 value: 47.870000000000005 - type: mrr_at_100 value: 48.681999999999995 - type: mrr_at_1000 value: 48.703 - type: mrr_at_3 value: 44.341 - type: mrr_at_5 value: 46.547 - type: ndcg_at_1 value: 33.98 - type: ndcg_at_10 value: 52.957 - type: ndcg_at_100 value: 57.434 - type: ndcg_at_1000 value: 58.103 - type: ndcg_at_3 value: 44.896 - type: ndcg_at_5 value: 49.353 - type: precision_at_1 value: 33.98 - type: precision_at_10 value: 8.786 - type: precision_at_100 value: 1.1280000000000001 - type: precision_at_1000 value: 0.11900000000000001 - type: precision_at_3 value: 20.577 - type: precision_at_5 value: 14.942 - type: recall_at_1 value: 30.043 - type: recall_at_10 value: 73.593 - type: recall_at_100 value: 93.026 - type: recall_at_1000 value: 97.943 - type: recall_at_3 value: 52.955 - type: recall_at_5 value: 63.132 - task: type: Retrieval dataset: name: MTEB QuoraRetrieval type: mteb/quora config: default split: test revision: None metrics: - type: map_at_1 value: 70.808 - type: map_at_10 value: 84.675 - type: map_at_100 value: 85.322 - type: map_at_1000 value: 85.33800000000001 - type: map_at_3 value: 81.68900000000001 - type: map_at_5 value: 83.543 - type: mrr_at_1 value: 81.5 - type: mrr_at_10 value: 87.59700000000001 - type: mrr_at_100 value: 87.705 - type: mrr_at_1000 value: 87.70599999999999 - type: mrr_at_3 value: 86.607 - type: mrr_at_5 value: 87.289 - type: ndcg_at_1 value: 81.51 - type: ndcg_at_10 value: 88.41799999999999 - type: ndcg_at_100 value: 89.644 - type: ndcg_at_1000 value: 89.725 - type: ndcg_at_3 value: 85.49900000000001 - type: ndcg_at_5 value: 87.078 - type: precision_at_1 value: 81.51 - type: precision_at_10 value: 13.438 - type: precision_at_100 value: 1.532 - type: precision_at_1000 value: 0.157 - type: precision_at_3 value: 37.363 - type: precision_at_5 value: 24.57 - type: recall_at_1 value: 70.808 - type: recall_at_10 value: 95.575 - type: recall_at_100 value: 99.667 - type: recall_at_1000 value: 99.98899999999999 - type: recall_at_3 value: 87.223 - type: recall_at_5 value: 91.682 - task: type: Clustering dataset: name: MTEB RedditClustering type: mteb/reddit-clustering config: default split: test revision: 24640382cdbf8abc73003fb0fa6d111a705499eb metrics: - type: v_measure value: 58.614831329137715 - task: type: Clustering dataset: name: MTEB RedditClusteringP2P type: mteb/reddit-clustering-p2p config: default split: test revision: 282350215ef01743dc01b456c7f5241fa8937f16 metrics: - type: v_measure value: 66.86580408560826 - task: type: Retrieval dataset: 
name: MTEB SCIDOCS type: mteb/scidocs config: default split: test revision: None metrics: - type: map_at_1 value: 5.093 - type: map_at_10 value: 13.014000000000001 - type: map_at_100 value: 15.412999999999998 - type: map_at_1000 value: 15.756999999999998 - type: map_at_3 value: 9.216000000000001 - type: map_at_5 value: 11.036999999999999 - type: mrr_at_1 value: 25.1 - type: mrr_at_10 value: 37.133 - type: mrr_at_100 value: 38.165 - type: mrr_at_1000 value: 38.198 - type: mrr_at_3 value: 33.217 - type: mrr_at_5 value: 35.732 - type: ndcg_at_1 value: 25.1 - type: ndcg_at_10 value: 21.918000000000003 - type: ndcg_at_100 value: 30.983 - type: ndcg_at_1000 value: 36.629 - type: ndcg_at_3 value: 20.544999999999998 - type: ndcg_at_5 value: 18.192 - type: precision_at_1 value: 25.1 - type: precision_at_10 value: 11.44 - type: precision_at_100 value: 2.459 - type: precision_at_1000 value: 0.381 - type: precision_at_3 value: 19.267 - type: precision_at_5 value: 16.16 - type: recall_at_1 value: 5.093 - type: recall_at_10 value: 23.215 - type: recall_at_100 value: 49.902 - type: recall_at_1000 value: 77.403 - type: recall_at_3 value: 11.733 - type: recall_at_5 value: 16.372999999999998 - task: type: STS dataset: name: MTEB SICK-R type: mteb/sickr-sts config: default split: test revision: a6ea5a8cab320b040a23452cc28066d9beae2cee metrics: - type: cos_sim_pearson value: 82.9365442977452 - type: cos_sim_spearman value: 79.36960687383745 - type: euclidean_pearson value: 79.6045204840714 - type: euclidean_spearman value: 79.26382712751337 - type: manhattan_pearson value: 79.4805084789529 - type: manhattan_spearman value: 79.21847863209523 - task: type: STS dataset: name: MTEB STS12 type: mteb/sts12-sts config: default split: test revision: a0d554a64d88156834ff5ae9920b964011b16384 metrics: - type: cos_sim_pearson value: 83.27906192961453 - type: cos_sim_spearman value: 74.38364712099211 - type: euclidean_pearson value: 78.54358927241223 - type: euclidean_spearman value: 74.22185560806376 - type: manhattan_pearson value: 78.50904327377751 - type: manhattan_spearman value: 74.2627500781748 - task: type: STS dataset: name: MTEB STS13 type: mteb/sts13-sts config: default split: test revision: 7e90230a92c190f1bf69ae9002b8cea547a64cca metrics: - type: cos_sim_pearson value: 84.66863742649639 - type: cos_sim_spearman value: 84.70630905216271 - type: euclidean_pearson value: 84.64498334705334 - type: euclidean_spearman value: 84.87204770690148 - type: manhattan_pearson value: 84.65774227976077 - type: manhattan_spearman value: 84.91251851797985 - task: type: STS dataset: name: MTEB STS14 type: mteb/sts14-sts config: default split: test revision: 6031580fec1f6af667f0bd2da0a551cf4f0b2375 metrics: - type: cos_sim_pearson value: 83.1577763924467 - type: cos_sim_spearman value: 80.10314039230198 - type: euclidean_pearson value: 81.51346991046043 - type: euclidean_spearman value: 80.08678485109435 - type: manhattan_pearson value: 81.57058914661894 - type: manhattan_spearman value: 80.1516230725106 - task: type: STS dataset: name: MTEB STS15 type: mteb/sts15-sts config: default split: test revision: ae752c7c21bf194d8b67fd573edf7ae58183cbe3 metrics: - type: cos_sim_pearson value: 86.40310839662533 - type: cos_sim_spearman value: 87.16293477217867 - type: euclidean_pearson value: 86.50688711184775 - type: euclidean_spearman value: 87.08651444923031 - type: manhattan_pearson value: 86.54674677557857 - type: manhattan_spearman value: 87.15079017870971 - task: type: STS dataset: name: MTEB STS16 type: mteb/sts16-sts config: 
default split: test revision: 4d8694f8f0e0100860b497b999b3dbed754a0513 metrics: - type: cos_sim_pearson value: 84.32886275207817 - type: cos_sim_spearman value: 85.0190460590732 - type: euclidean_pearson value: 84.42553652784679 - type: euclidean_spearman value: 85.20027364279328 - type: manhattan_pearson value: 84.42926246281078 - type: manhattan_spearman value: 85.20187419804306 - task: type: STS dataset: name: MTEB STS17 (en-en) type: mteb/sts17-crosslingual-sts config: en-en split: test revision: af5e6fb845001ecf41f4c1e033ce921939a2a68d metrics: - type: cos_sim_pearson value: 90.76732216967812 - type: cos_sim_spearman value: 90.63701653633909 - type: euclidean_pearson value: 90.26678186114682 - type: euclidean_spearman value: 90.67288073455427 - type: manhattan_pearson value: 90.20772020584582 - type: manhattan_spearman value: 90.60764863983702 - task: type: STS dataset: name: MTEB STS22 (en) type: mteb/sts22-crosslingual-sts config: en split: test revision: eea2b4fe26a775864c896887d910b76a8098ad3f metrics: - type: cos_sim_pearson value: 69.09280387698125 - type: cos_sim_spearman value: 68.62743151172162 - type: euclidean_pearson value: 69.89386398104689 - type: euclidean_spearman value: 68.71191066733556 - type: manhattan_pearson value: 69.92516500604872 - type: manhattan_spearman value: 68.80452846992576 - task: type: STS dataset: name: MTEB STSBenchmark type: mteb/stsbenchmark-sts config: default split: test revision: b0fddb56ed78048fa8b90373c8a3cfc37b684831 metrics: - type: cos_sim_pearson value: 86.13178592019887 - type: cos_sim_spearman value: 86.03947178806887 - type: euclidean_pearson value: 85.87029414285313 - type: euclidean_spearman value: 86.04960843306998 - type: manhattan_pearson value: 85.92946858580146 - type: manhattan_spearman value: 86.12575341860442 - task: type: Reranking dataset: name: MTEB SciDocsRR type: mteb/scidocs-reranking config: default split: test revision: d3c5e1fc0b855ab6097bf1cda04dd73947d7caab metrics: - type: map value: 85.16657063002837 - type: mrr value: 95.73671063867141 - task: type: Retrieval dataset: name: MTEB SciFact type: mteb/scifact config: default split: test revision: 0228b52cf27578f30900b9e5271d331663a030d7 metrics: - type: map_at_1 value: 63.510999999999996 - type: map_at_10 value: 72.76899999999999 - type: map_at_100 value: 73.303 - type: map_at_1000 value: 73.32499999999999 - type: map_at_3 value: 70.514 - type: map_at_5 value: 71.929 - type: mrr_at_1 value: 66.333 - type: mrr_at_10 value: 73.75 - type: mrr_at_100 value: 74.119 - type: mrr_at_1000 value: 74.138 - type: mrr_at_3 value: 72.222 - type: mrr_at_5 value: 73.122 - type: ndcg_at_1 value: 66.333 - type: ndcg_at_10 value: 76.774 - type: ndcg_at_100 value: 78.78500000000001 - type: ndcg_at_1000 value: 79.254 - type: ndcg_at_3 value: 73.088 - type: ndcg_at_5 value: 75.002 - type: precision_at_1 value: 66.333 - type: precision_at_10 value: 9.833 - type: precision_at_100 value: 1.093 - type: precision_at_1000 value: 0.11299999999999999 - type: precision_at_3 value: 28.222 - type: precision_at_5 value: 18.333 - type: recall_at_1 value: 63.510999999999996 - type: recall_at_10 value: 87.98899999999999 - type: recall_at_100 value: 96.5 - type: recall_at_1000 value: 100.0 - type: recall_at_3 value: 77.86699999999999 - type: recall_at_5 value: 82.73899999999999 - task: type: PairClassification dataset: name: MTEB SprintDuplicateQuestions type: mteb/sprintduplicatequestions-pairclassification config: default split: test revision: d66bd1f72af766a5cc4b0ca5e00c162f89e8cc46 metrics: - type: 
cos_sim_accuracy value: 99.78514851485149 - type: cos_sim_ap value: 94.94214383862038 - type: cos_sim_f1 value: 89.02255639097744 - type: cos_sim_precision value: 89.2462311557789 - type: cos_sim_recall value: 88.8 - type: dot_accuracy value: 99.78217821782178 - type: dot_ap value: 94.69965247836805 - type: dot_f1 value: 88.78695208970439 - type: dot_precision value: 90.54054054054053 - type: dot_recall value: 87.1 - type: euclidean_accuracy value: 99.78118811881188 - type: euclidean_ap value: 94.9865187695411 - type: euclidean_f1 value: 88.99950223992036 - type: euclidean_precision value: 88.60257680872151 - type: euclidean_recall value: 89.4 - type: manhattan_accuracy value: 99.78811881188119 - type: manhattan_ap value: 95.0021236766459 - type: manhattan_f1 value: 89.12071535022356 - type: manhattan_precision value: 88.54886475814413 - type: manhattan_recall value: 89.7 - type: max_accuracy value: 99.78811881188119 - type: max_ap value: 95.0021236766459 - type: max_f1 value: 89.12071535022356 - task: type: Clustering dataset: name: MTEB StackExchangeClustering type: mteb/stackexchange-clustering config: default split: test revision: 6cbc1f7b2bc0622f2e39d2c77fa502909748c259 metrics: - type: v_measure value: 68.93190546593995 - task: type: Clustering dataset: name: MTEB StackExchangeClusteringP2P type: mteb/stackexchange-clustering-p2p config: default split: test revision: 815ca46b2622cec33ccafc3735d572c266efdb44 metrics: - type: v_measure value: 37.602808534760655 - task: type: Reranking dataset: name: MTEB StackOverflowDupQuestions type: mteb/stackoverflowdupquestions-reranking config: default split: test revision: e185fbe320c72810689fc5848eb6114e1ef5ec69 metrics: - type: map value: 52.29214480978073 - type: mrr value: 53.123169722434426 - task: type: Summarization dataset: name: MTEB SummEval type: mteb/summeval config: default split: test revision: cda12ad7615edc362dbf25a00fdd61d3b1eaf93c metrics: - type: cos_sim_pearson value: 30.967800769650022 - type: cos_sim_spearman value: 31.168490040206926 - type: dot_pearson value: 30.888603021128553 - type: dot_spearman value: 31.028241262520385 - task: type: Retrieval dataset: name: MTEB TRECCOVID type: mteb/trec-covid config: default split: test revision: None metrics: - type: map_at_1 value: 0.22300000000000003 - type: map_at_10 value: 1.781 - type: map_at_100 value: 9.905999999999999 - type: map_at_1000 value: 23.455000000000002 - type: map_at_3 value: 0.569 - type: map_at_5 value: 0.918 - type: mrr_at_1 value: 84.0 - type: mrr_at_10 value: 91.067 - type: mrr_at_100 value: 91.067 - type: mrr_at_1000 value: 91.067 - type: mrr_at_3 value: 90.667 - type: mrr_at_5 value: 91.067 - type: ndcg_at_1 value: 78.0 - type: ndcg_at_10 value: 73.13499999999999 - type: ndcg_at_100 value: 55.32 - type: ndcg_at_1000 value: 49.532 - type: ndcg_at_3 value: 73.715 - type: ndcg_at_5 value: 72.74199999999999 - type: precision_at_1 value: 84.0 - type: precision_at_10 value: 78.8 - type: precision_at_100 value: 56.32 - type: precision_at_1000 value: 21.504 - type: precision_at_3 value: 77.333 - type: precision_at_5 value: 78.0 - type: recall_at_1 value: 0.22300000000000003 - type: recall_at_10 value: 2.049 - type: recall_at_100 value: 13.553 - type: recall_at_1000 value: 46.367999999999995 - type: recall_at_3 value: 0.604 - type: recall_at_5 value: 1.015 - task: type: Retrieval dataset: name: MTEB Touche2020 type: mteb/touche2020 config: default split: test revision: a34f9a33db75fa0cbb21bb5cfc3dae8dc8bec93f metrics: - type: map_at_1 value: 3.0380000000000003 - 
type: map_at_10 value: 10.188 - type: map_at_100 value: 16.395 - type: map_at_1000 value: 18.024 - type: map_at_3 value: 6.236 - type: map_at_5 value: 7.276000000000001 - type: mrr_at_1 value: 34.694 - type: mrr_at_10 value: 46.292 - type: mrr_at_100 value: 47.446 - type: mrr_at_1000 value: 47.446 - type: mrr_at_3 value: 41.156 - type: mrr_at_5 value: 44.32 - type: ndcg_at_1 value: 32.653 - type: ndcg_at_10 value: 25.219 - type: ndcg_at_100 value: 37.802 - type: ndcg_at_1000 value: 49.274 - type: ndcg_at_3 value: 28.605999999999998 - type: ndcg_at_5 value: 26.21 - type: precision_at_1 value: 34.694 - type: precision_at_10 value: 21.837 - type: precision_at_100 value: 7.776 - type: precision_at_1000 value: 1.522 - type: precision_at_3 value: 28.571 - type: precision_at_5 value: 25.306 - type: recall_at_1 value: 3.0380000000000003 - type: recall_at_10 value: 16.298000000000002 - type: recall_at_100 value: 48.712 - type: recall_at_1000 value: 83.16799999999999 - type: recall_at_3 value: 7.265000000000001 - type: recall_at_5 value: 9.551 - task: type: Classification dataset: name: MTEB ToxicConversationsClassification type: mteb/toxic_conversations_50k config: default split: test revision: d7c0de2777da35d6aae2200a62c6e0e5af397c4c metrics: - type: accuracy value: 83.978 - type: ap value: 24.751887949330015 - type: f1 value: 66.8685134049279 - task: type: Classification dataset: name: MTEB TweetSentimentExtractionClassification type: mteb/tweet_sentiment_extraction config: default split: test revision: d604517c81ca91fe16a244d1248fc021f9ecee7a metrics: - type: accuracy value: 61.573288058856825 - type: f1 value: 61.973261751726604 - task: type: Clustering dataset: name: MTEB TwentyNewsgroupsClustering type: mteb/twentynewsgroups-clustering config: default split: test revision: 6125ec4e24fa026cec8a478383ee943acfbd5449 metrics: - type: v_measure value: 48.75483298792469 - task: type: PairClassification dataset: name: MTEB TwitterSemEval2015 type: mteb/twittersemeval2015-pairclassification config: default split: test revision: 70970daeab8776df92f5ea462b6173c0b46fd2d1 metrics: - type: cos_sim_accuracy value: 86.36824223639506 - type: cos_sim_ap value: 75.53126388573047 - type: cos_sim_f1 value: 67.9912831688245 - type: cos_sim_precision value: 66.11817501869858 - type: cos_sim_recall value: 69.9736147757256 - type: dot_accuracy value: 86.39804494248078 - type: dot_ap value: 75.27598891718046 - type: dot_f1 value: 67.91146284159763 - type: dot_precision value: 63.90505003490807 - type: dot_recall value: 72.45382585751979 - type: euclidean_accuracy value: 86.36228169517793 - type: euclidean_ap value: 75.51438087434647 - type: euclidean_f1 value: 68.02370523061066 - type: euclidean_precision value: 66.46525679758308 - type: euclidean_recall value: 69.65699208443272 - type: manhattan_accuracy value: 86.46361089586935 - type: manhattan_ap value: 75.50800785730111 - type: manhattan_f1 value: 67.9220437187253 - type: manhattan_precision value: 67.79705573080967 - type: manhattan_recall value: 68.04749340369392 - type: max_accuracy value: 86.46361089586935 - type: max_ap value: 75.53126388573047 - type: max_f1 value: 68.02370523061066 - task: type: PairClassification dataset: name: MTEB TwitterURLCorpus type: mteb/twitterurlcorpus-pairclassification config: default split: test revision: 8b6510b0b1fa4e4c4f879467980e9be563ec1cdf metrics: - type: cos_sim_accuracy value: 88.80350836341057 - type: cos_sim_ap value: 85.51101933260743 - type: cos_sim_f1 value: 77.9152271629704 - type: cos_sim_precision value: 
75.27815662910056 - type: cos_sim_recall value: 80.74376347397599 - type: dot_accuracy value: 88.84425815966158 - type: dot_ap value: 85.49726945962519 - type: dot_f1 value: 77.94445269567801 - type: dot_precision value: 75.27251864601261 - type: dot_recall value: 80.81305820757623 - type: euclidean_accuracy value: 88.80350836341057 - type: euclidean_ap value: 85.4882880790211 - type: euclidean_f1 value: 77.87063284615103 - type: euclidean_precision value: 74.61022927689595 - type: euclidean_recall value: 81.42901139513397 - type: manhattan_accuracy value: 88.7161873714441 - type: manhattan_ap value: 85.45753871906821 - type: manhattan_f1 value: 77.8686401480111 - type: manhattan_precision value: 74.95903683123174 - type: manhattan_recall value: 81.01324299353249 - type: max_accuracy value: 88.84425815966158 - type: max_ap value: 85.51101933260743 - type: max_f1 value: 77.94445269567801 ---

<!-- **English** | [中文](./README_zh.md) -->

# gte-base-en-v1.5

We introduce the `gte-v1.5` series, upgraded `gte` embeddings that support a context length of up to **8192** while further enhancing model performance. The models are built upon the `transformer++` encoder [backbone](https://huggingface.co/Alibaba-NLP/new-impl) (BERT + RoPE + GLU).

The `gte-v1.5` series achieves state-of-the-art scores on the MTEB benchmark within the same model size category and provides competitive results on the LoCo long-context retrieval tests (refer to [Evaluation](#evaluation)).

We also present [`gte-Qwen1.5-7B-instruct`](https://huggingface.co/Alibaba-NLP/gte-Qwen1.5-7B-instruct), a SOTA instruction-tuned multilingual embedding model that ranked 2nd on MTEB and 1st on C-MTEB.

- **Developed by:** Institute for Intelligent Computing, Alibaba Group
- **Model type:** Text Embeddings
- **Paper:** [mGTE: Generalized Long-Context Text Representation and Reranking Models for Multilingual Text Retrieval](https://arxiv.org/pdf/2407.19669)

### Model list

| Models | Language | Model Size (M) | Max Seq. Length | Dimension | MTEB-en | LoCo |
|:-----: | :-----: |:-----: |:-----: |:-----: | :-----: | :-----: |
|[`gte-Qwen1.5-7B-instruct`](https://huggingface.co/Alibaba-NLP/gte-Qwen1.5-7B-instruct)| Multiple | 7720 | 32768 | 4096 | 67.34 | 87.57 |
|[`gte-large-en-v1.5`](https://huggingface.co/Alibaba-NLP/gte-large-en-v1.5) | English | 434 | 8192 | 1024 | 65.39 | 86.71 |
|[`gte-base-en-v1.5`](https://huggingface.co/Alibaba-NLP/gte-base-en-v1.5) | English | 137 | 8192 | 768 | 64.11 | 87.44 |

## How to Get Started with the Model

Use the code below to get started with the model.
```python
# Requires transformers>=4.36.0
import torch.nn.functional as F
from transformers import AutoModel, AutoTokenizer

input_texts = [
    "what is the capital of China?",
    "how to implement quick sort in python?",
    "Beijing",
    "sorting algorithms"
]

model_path = 'Alibaba-NLP/gte-base-en-v1.5'
tokenizer = AutoTokenizer.from_pretrained(model_path)
model = AutoModel.from_pretrained(model_path, trust_remote_code=True)

# Tokenize the input texts
batch_dict = tokenizer(input_texts, max_length=8192, padding=True, truncation=True, return_tensors='pt')

outputs = model(**batch_dict)
embeddings = outputs.last_hidden_state[:, 0]

# (Optionally) normalize embeddings
embeddings = F.normalize(embeddings, p=2, dim=1)
scores = (embeddings[:1] @ embeddings[1:].T) * 100
print(scores.tolist())
```

**It is recommended to install xformers and enable unpadding for acceleration; refer to [enable-unpadding-and-xformers](https://huggingface.co/Alibaba-NLP/new-impl#recommendation-enable-unpadding-and-acceleration-with-xformers).**

Use with `sentence-transformers`:

```python
# Requires sentence_transformers>=2.7.0
from sentence_transformers import SentenceTransformer
from sentence_transformers.util import cos_sim

sentences = ['That is a happy person', 'That is a very happy person']

model = SentenceTransformer('Alibaba-NLP/gte-base-en-v1.5', trust_remote_code=True)
embeddings = model.encode(sentences)
print(cos_sim(embeddings[0], embeddings[1]))
```

Use with `transformers.js`:

```js
// npm i @xenova/transformers
import { pipeline, dot } from '@xenova/transformers';

// Create feature extraction pipeline
const extractor = await pipeline('feature-extraction', 'Alibaba-NLP/gte-base-en-v1.5', {
    quantized: false, // Comment out this line to use the quantized version
});

// Generate sentence embeddings
const sentences = [
    "what is the capital of China?",
    "how to implement quick sort in python?",
    "Beijing",
    "sorting algorithms"
]
const output = await extractor(sentences, { normalize: true, pooling: 'cls' });

// Compute similarity scores
const [source_embeddings, ...document_embeddings] = output.tolist();
const similarities = document_embeddings.map(x => 100 * dot(source_embeddings, x));
console.log(similarities); // [34.504930869007296, 64.03973265120138, 19.520042686034362]
```

## Training Details

### Training Data

- Masked language modeling (MLM): `c4-en`
- Weakly-supervised contrastive pre-training (CPT): [GTE](https://arxiv.org/pdf/2308.03281.pdf) pre-training data
- Supervised contrastive fine-tuning: [GTE](https://arxiv.org/pdf/2308.03281.pdf) fine-tuning data

### Training Procedure

To enable the backbone model to support a context length of 8192, we adopted a multi-stage training strategy. The model first undergoes preliminary MLM pre-training on shorter lengths. Then we resample the data, reducing the proportion of short texts, and continue the MLM pre-training. The entire training process is as follows:

- MLM-2048: lr 5e-4, mlm_probability 0.3, batch_size 4096, num_steps 70000, rope_base 10000
- [MLM-8192](https://huggingface.co/Alibaba-NLP/gte-en-mlm-base): lr 5e-5, mlm_probability 0.3, batch_size 1024, num_steps 20000, rope_base 500000
- CPT: max_len 512, lr 2e-4, batch_size 32768, num_steps 100000
- Fine-tuning: TODO

## Evaluation

### MTEB

The results of other models are retrieved from the [MTEB leaderboard](https://huggingface.co/spaces/mteb/leaderboard). The gte evaluation setting: `mteb==1.2.0`, fp16 auto mixed precision, `max_length=8192`, and the NTK scaling factor set to 2 (equivalent to `rope_base * 2`).
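For example, a single MTEB task can be run roughly as follows. This is only a minimal sketch: the chosen task and output folder are illustrative, not the exact harness configuration behind the reported numbers.

```python
# Minimal sketch of an MTEB run (assumes mteb~=1.2 and sentence-transformers>=2.7 are installed).
# The task and output folder below are illustrative placeholders.
from mteb import MTEB
from sentence_transformers import SentenceTransformer

model = SentenceTransformer("Alibaba-NLP/gte-base-en-v1.5", trust_remote_code=True)

# Evaluate a single task as an example; pass the full task list to reproduce the benchmark.
evaluation = MTEB(tasks=["Banking77Classification"])
results = evaluation.run(model, output_folder="results/gte-base-en-v1.5")
print(results)
```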
| Model Name | Param Size (M) | Dimension | Sequence Length | Average (56) | Class. (12) | Clust. (11) | Pair Class. (3) | Reran. (4) | Retr. (15) | STS (10) | Summ. (1) |
|:----:|:---:|:---:|:---:|:---:|:---:|:---:|:---:|:---:|:---:|:---:|:---:|
| [**gte-large-en-v1.5**](https://huggingface.co/Alibaba-NLP/gte-large-en-v1.5) | 434 | 1024 | 8192 | **65.39** | 77.75 | 47.95 | 84.63 | 58.50 | 57.91 | 81.43 | 30.91 |
| [mxbai-embed-large-v1](https://huggingface.co/mixedbread-ai/mxbai-embed-large-v1) | 335 | 1024 | 512 | 64.68 | 75.64 | 46.71 | 87.2 | 60.11 | 54.39 | 85 | 32.71 |
| [multilingual-e5-large-instruct](https://huggingface.co/intfloat/multilingual-e5-large-instruct) | 560 | 1024 | 514 | 64.41 | 77.56 | 47.1 | 86.19 | 58.58 | 52.47 | 84.78 | 30.39 |
| [bge-large-en-v1.5](https://huggingface.co/BAAI/bge-large-en-v1.5)| 335 | 1024 | 512 | 64.23 | 75.97 | 46.08 | 87.12 | 60.03 | 54.29 | 83.11 | 31.61 |
| [**gte-base-en-v1.5**](https://huggingface.co/Alibaba-NLP/gte-base-en-v1.5) | 137 | 768 | 8192 | **64.11** | 77.17 | 46.82 | 85.33 | 57.66 | 54.09 | 81.97 | 31.17 |
| [bge-base-en-v1.5](https://huggingface.co/BAAI/bge-base-en-v1.5)| 109 | 768 | 512 | 63.55 | 75.53 | 45.77 | 86.55 | 58.86 | 53.25 | 82.4 | 31.07 |

### LoCo

| Model Name | Dimension | Sequence Length | Average (5) | QsmsumRetrieval | SummScreenRetrieval | QasperAbstractRetrieval | QasperTitleRetrieval | GovReportRetrieval |
|:----:|:---:|:---:|:---:|:---:|:---:|:---:|:---:|:---:|
| [gte-qwen1.5-7b](https://huggingface.co/Alibaba-NLP/gte-qwen1.5-7b) | 4096 | 32768 | 87.57 | 49.37 | 93.10 | 99.67 | 97.54 | 98.21 |
| [gte-large-v1.5](https://huggingface.co/Alibaba-NLP/gte-large-v1.5) | 1024 | 8192 | 86.71 | 44.55 | 92.61 | 99.82 | 97.81 | 98.74 |
| [gte-base-v1.5](https://huggingface.co/Alibaba-NLP/gte-base-v1.5) | 768 | 8192 | 87.44 | 49.91 | 91.78 | 99.82 | 97.13 | 98.58 |

## Citation

If you find our paper or models helpful, please consider citing them as follows:

```
@misc{zhang2024mgte,
  title={mGTE: Generalized Long-Context Text Representation and Reranking Models for Multilingual Text Retrieval},
  author={Xin Zhang and Yanzhao Zhang and Dingkun Long and Wen Xie and Ziqi Dai and Jialong Tang and Huan Lin and Baosong Yang and Pengjun Xie and Fei Huang and Meishan Zhang and Wenjie Li and Min Zhang},
  year={2024},
  eprint={2407.19669},
  archivePrefix={arXiv},
  primaryClass={cs.CL},
  url={https://arxiv.org/abs/2407.19669},
}

@misc{li2023gte,
  title={Towards General Text Embeddings with Multi-stage Contrastive Learning},
  author={Zehan Li and Xin Zhang and Yanzhao Zhang and Dingkun Long and Pengjun Xie and Meishan Zhang},
  year={2023},
  eprint={2308.03281},
  archivePrefix={arXiv},
  primaryClass={cs.CL},
  url={https://arxiv.org/abs/2308.03281},
}
```
[ "BIOSSES", "SCIFACT" ]
EleutherAI/pythia-160m-deduped
EleutherAI
text-generation
[ "transformers", "pytorch", "safetensors", "gpt_neox", "text-generation", "causal-lm", "pythia", "en", "dataset:EleutherAI/the_pile_deduplicated", "arxiv:2304.01373", "arxiv:2101.00027", "arxiv:2201.07311", "license:apache-2.0", "autotrain_compatible", "text-generation-inference", "endpoints_compatible", "region:us" ]
"2023-02-08T21:50:19Z"
2023-07-09T16:04:57+00:00
61,847
3
---
datasets:
- EleutherAI/the_pile_deduplicated
language:
- en
license: apache-2.0
tags:
- pytorch
- causal-lm
- pythia
---

The *Pythia Scaling Suite* is a collection of models developed to facilitate interpretability research [(see paper)](https://arxiv.org/pdf/2304.01373.pdf). It contains two sets of eight models of sizes 70M, 160M, 410M, 1B, 1.4B, 2.8B, 6.9B, and 12B. For each size, there are two models: one trained on the Pile, and one trained on the Pile after the dataset has been globally deduplicated. All 8 model sizes are trained on the exact same data, in the exact same order. We also provide 154 intermediate checkpoints per model, hosted on Hugging Face as branches.

The Pythia model suite was designed to promote scientific research on large language models, especially interpretability research. Despite not centering downstream performance as a design goal, we find the models <a href="#evaluations">match or exceed</a> the performance of similar and same-sized models, such as those in the OPT and GPT-Neo suites.

<details>
<summary style="font-weight:600">Details on previous early release and naming convention.</summary>

Previously, we released an early version of the Pythia suite to the public. However, we decided to retrain the model suite to address a few hyperparameter discrepancies. This model card <a href="#changelog">lists the changes</a>; see appendix B in the Pythia paper for further discussion. We found no difference in benchmark performance between the two Pythia versions. The old models are [still available](https://huggingface.co/models?other=pythia_v0), but we suggest the retrained suite if you are just starting to use Pythia.<br>
**This is the current release.**

Please note that all models in the *Pythia* suite were renamed in January 2023. For clarity, a <a href="#naming-convention-and-parameter-count">table comparing the old and new names</a> is provided in this model card, together with exact parameter counts.
</details>
<br>

# Pythia-160M-deduped

## Model Details

- Developed by: [EleutherAI](http://eleuther.ai)
- Model type: Transformer-based Language Model
- Language: English
- Learn more: [Pythia's GitHub repository](https://github.com/EleutherAI/pythia) for training procedure, config files, and details on how to use. [See paper](https://arxiv.org/pdf/2304.01373.pdf) for more evals and implementation details.
- Library: [GPT-NeoX](https://github.com/EleutherAI/gpt-neox)
- License: Apache 2.0
- Contact: to ask questions about this model, join the [EleutherAI Discord](https://discord.gg/zBGx3azzUn), and post them in `#release-discussion`. Please read the existing *Pythia* documentation before asking about it in the EleutherAI Discord. For general correspondence: [[email protected]](mailto:[email protected]).
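Because the intermediate checkpoints mentioned above are stored as Git branches of each model repository, they can be enumerated programmatically. A minimal sketch, assuming a reasonably recent `huggingface_hub` release that exposes `list_repo_refs`:

```python
# Sketch: list the checkpoint branches of a Pythia repository.
# Assumes a recent huggingface_hub release providing list_repo_refs.
from huggingface_hub import list_repo_refs

refs = list_repo_refs("EleutherAI/pythia-160m-deduped")
step_branches = sorted(
    (b.name for b in refs.branches if b.name.startswith("step")),
    key=lambda name: int(name.removeprefix("step")),
)
print(len(step_branches), "checkpoint branches")
print(step_branches[:5], "...", step_branches[-1])
```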
<figure>

| Pythia model | Non-Embedding Params | Layers | Model Dim | Heads | Batch Size | Learning Rate | Equivalent Models |
| -----------: | -------------------: | :----: | :-------: | :---: | :--------: | :-------------------: | :--------------------: |
| 70M | 18,915,328 | 6 | 512 | 8 | 2M | 1.0 x 10<sup>-3</sup> | — |
| 160M | 85,056,000 | 12 | 768 | 12 | 2M | 6.0 x 10<sup>-4</sup> | GPT-Neo 125M, OPT-125M |
| 410M | 302,311,424 | 24 | 1024 | 16 | 2M | 3.0 x 10<sup>-4</sup> | OPT-350M |
| 1.0B | 805,736,448 | 16 | 2048 | 8 | 2M | 3.0 x 10<sup>-4</sup> | — |
| 1.4B | 1,208,602,624 | 24 | 2048 | 16 | 2M | 2.0 x 10<sup>-4</sup> | GPT-Neo 1.3B, OPT-1.3B |
| 2.8B | 2,517,652,480 | 32 | 2560 | 32 | 2M | 1.6 x 10<sup>-4</sup> | GPT-Neo 2.7B, OPT-2.7B |
| 6.9B | 6,444,163,072 | 32 | 4096 | 32 | 2M | 1.2 x 10<sup>-4</sup> | OPT-6.7B |
| 12B | 11,327,027,200 | 36 | 5120 | 40 | 2M | 1.2 x 10<sup>-4</sup> | — |

<figcaption>Engineering details for the <i>Pythia Suite</i>. Deduped and non-deduped models of a given size have the same hyperparameters. “Equivalent” models have <b>exactly</b> the same architecture, and the same number of non-embedding parameters.</figcaption>
</figure>

## Uses and Limitations

### Intended Use

The primary intended use of Pythia is research on the behavior, functionality, and limitations of large language models. This suite is intended to provide a controlled setting for performing scientific experiments. We also provide 154 checkpoints per model: initial `step0`, 10 log-spaced checkpoints `step{1,2,4...512}`, and 143 evenly-spaced checkpoints from `step1000` to `step143000`. These checkpoints are hosted on Hugging Face as branches. Note that branch `143000` corresponds exactly to the model checkpoint on the `main` branch of each model.

You may also further fine-tune and adapt Pythia-160M-deduped for deployment, as long as your use is in accordance with the Apache 2.0 license. Pythia models work with the Hugging Face [Transformers Library](https://huggingface.co/docs/transformers/index). If you decide to use pre-trained Pythia-160M-deduped as a basis for your fine-tuned model, please conduct your own risk and bias assessment.

### Out-of-scope use

The Pythia Suite is **not** intended for deployment. It is not in itself a product and cannot be used for human-facing interactions. For example, the model may generate harmful or offensive text. Please evaluate the risks associated with your particular use case.

Pythia models are English-language only, and are not suitable for translation or generating text in other languages.

Pythia-160M-deduped has not been fine-tuned for downstream contexts in which language models are commonly deployed, such as writing genre prose or commercial chatbots. This means Pythia-160M-deduped will **not** respond to a given prompt the way a product like ChatGPT does. This is because, unlike this model, ChatGPT was fine-tuned using methods such as Reinforcement Learning from Human Feedback (RLHF) to better “follow” human instructions.

### Limitations and biases

The core functionality of a large language model is to take a string of text and predict the next token. The token used by the model need not produce the most “accurate” text. Never rely on Pythia-160M-deduped to produce factually accurate output.

This model was trained on [the Pile](https://pile.eleuther.ai/), a dataset known to contain profanity and texts that are lewd or otherwise offensive.
See [Section 6 of the Pile paper](https://arxiv.org/abs/2101.00027) for a discussion of documented biases with regard to gender, religion, and race. Pythia-160M-deduped may produce socially unacceptable or undesirable text, *even if* the prompt itself does not include anything explicitly offensive.

If you plan on using text generated through, for example, the Hosted Inference API, we recommend having a human curate the outputs of this language model before presenting it to other people. Please inform your audience that the text was generated by Pythia-160M-deduped.

### Quickstart

Pythia models can be loaded and used via the following code, demonstrated here for the third `pythia-70m-deduped` checkpoint:

```python
from transformers import GPTNeoXForCausalLM, AutoTokenizer

model = GPTNeoXForCausalLM.from_pretrained(
  "EleutherAI/pythia-70m-deduped",
  revision="step3000",
  cache_dir="./pythia-70m-deduped/step3000",
)

tokenizer = AutoTokenizer.from_pretrained(
  "EleutherAI/pythia-70m-deduped",
  revision="step3000",
  cache_dir="./pythia-70m-deduped/step3000",
)

inputs = tokenizer("Hello, I am", return_tensors="pt")
tokens = model.generate(**inputs)
tokenizer.decode(tokens[0])
```

Revision/branch `step143000` corresponds exactly to the model checkpoint on the `main` branch of each model.<br>
For more information on how to use all Pythia models, see [documentation on GitHub](https://github.com/EleutherAI/pythia).

## Training

### Training data

Pythia-160M-deduped was trained on the Pile **after the dataset has been globally deduplicated**.<br>
[The Pile](https://pile.eleuther.ai/) is an 825 GiB general-purpose dataset in English. It was created by EleutherAI specifically for training large language models. It contains texts from 22 diverse sources, roughly broken down into five categories: academic writing (e.g. arXiv), internet (e.g. CommonCrawl), prose (e.g. Project Gutenberg), dialogue (e.g. YouTube subtitles), and miscellaneous (e.g. GitHub, Enron Emails). See [the Pile paper](https://arxiv.org/abs/2101.00027) for a breakdown of all data sources, methodology, and a discussion of ethical implications. Consult [the datasheet](https://arxiv.org/abs/2201.07311) for more detailed documentation about the Pile and its component datasets. The Pile can be downloaded from the [official website](https://pile.eleuther.ai/), or from a [community mirror](https://the-eye.eu/public/AI/pile/).

### Training procedure

All models were trained on the exact same data, in the exact same order. Each model saw 299,892,736,000 tokens during training, and 143 checkpoints for each model are saved every 2,097,152,000 tokens, spaced evenly throughout training, from `step1000` to `step143000` (which is the same as `main`). In addition, we also provide frequent early checkpoints: `step0` and `step{1,2,4...512}`. This corresponds to training for just under 1 epoch on the Pile for non-deduplicated models, and about 1.5 epochs on the deduplicated Pile.

All *Pythia* models were trained for 143,000 steps at a batch size of 2M (2,097,152 tokens).<br>
See [GitHub](https://github.com/EleutherAI/pythia) for more details on training procedure, including [how to reproduce it](https://github.com/EleutherAI/pythia/blob/main/README.md#reproducing-training).<br>
Pythia uses the same tokenizer as [GPT-NeoX-20B](https://huggingface.co/EleutherAI/gpt-neox-20b).

## Evaluations

All 16 *Pythia* models were evaluated using the [LM Evaluation Harness](https://github.com/EleutherAI/lm-evaluation-harness).
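As a rough sketch of how such an evaluation could be reproduced for this checkpoint, the command below uses the harness's `lm_eval` command-line interface; the exact flags and task names depend on the harness version you install, so treat this invocation as an assumption and check the harness documentation.

```
pip install lm-eval
lm_eval --model hf \
  --model_args pretrained=EleutherAI/pythia-160m-deduped,revision=step143000 \
  --tasks lambada_openai,piqa,winogrande,arc_easy,sciq \
  --device cuda:0 \
  --batch_size 16
```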
You can access the results by model and step at `results/json/*` in the [GitHub repository](https://github.com/EleutherAI/pythia/tree/main/results/json/).<br>
Expand the sections below to see plots of evaluation results for all Pythia and Pythia-deduped models compared with OPT and BLOOM.

<details>
<summary>LAMBADA – OpenAI</summary>
<img src="/EleutherAI/pythia-12b/resolve/main/eval_plots/lambada_openai_v1.png" style="width:auto"/>
</details>

<details>
<summary>Physical Interaction: Question Answering (PIQA)</summary>
<img src="/EleutherAI/pythia-12b/resolve/main/eval_plots/piqa_v1.png" style="width:auto"/>
</details>

<details>
<summary>WinoGrande</summary>
<img src="/EleutherAI/pythia-12b/resolve/main/eval_plots/winogrande_v1.png" style="width:auto"/>
</details>

<details>
<summary>AI2 Reasoning Challenge—Easy Set</summary>
<img src="/EleutherAI/pythia-12b/resolve/main/eval_plots/arc_easy_v1.png" style="width:auto"/>
</details>

<details>
<summary>SciQ</summary>
<img src="/EleutherAI/pythia-12b/resolve/main/eval_plots/sciq_v1.png" style="width:auto"/>
</details>

## Changelog

This section compares differences between previously released [Pythia v0](https://huggingface.co/models?other=pythia_v0) and the current models. See Appendix B of the Pythia paper for further discussion of these changes and the motivation behind them. We found that retraining Pythia had no impact on benchmark performance.

- All model sizes are now trained with a uniform batch size of 2M tokens. Previously, the models of size 160M, 410M, and 1.4B parameters were trained with batch sizes of 4M tokens.
- We added checkpoints at initialization (step 0) and steps {1,2,4,8,16,32,64,128,256,512} in addition to every 1000 training steps.
- Flash Attention was used in the new retrained suite.
- We remedied a minor inconsistency that existed in the original suite: all models of size 2.8B parameters or smaller had a learning rate (LR) schedule which decayed to a minimum LR of 10% of the starting LR, but the 6.9B and 12B models all used an LR schedule which decayed to a minimum LR of 0. In the redone training runs, we rectified this inconsistency: all models were trained with the LR decaying to a minimum of 0.1× their maximum LR.

### Naming convention and parameter count

*Pythia* models were renamed in January 2023. It is possible that the old naming convention still persists in some documentation by accident. The current naming convention (70M, 160M, etc.) is based on total parameter count.

<figure style="width:32em">

| current Pythia suffix | old suffix | total params | non-embedding params |
| --------------------: | ---------: | -------------: | -------------------: |
| 70M | 19M | 70,426,624 | 18,915,328 |
| 160M | 125M | 162,322,944 | 85,056,000 |
| 410M | 350M | 405,334,016 | 302,311,424 |
| 1B | 800M | 1,011,781,632 | 805,736,448 |
| 1.4B | 1.3B | 1,414,647,808 | 1,208,602,624 |
| 2.8B | 2.7B | 2,775,208,960 | 2,517,652,480 |
| 6.9B | 6.7B | 6,857,302,016 | 6,444,163,072 |
| 12B | 13B | 11,846,072,320 | 11,327,027,200 |

</figure>
[ "SCIQ" ]
microsoft/maira-2
microsoft
text-generation
[ "transformers", "safetensors", "maira2", "text-generation", "conversational", "custom_code", "arxiv:2406.04449", "arxiv:1910.09700", "license:other", "autotrain_compatible", "region:us" ]
"2024-07-29T15:24:21Z"
2025-02-19T10:33:01+00:00
61,451
49
--- library_name: transformers license: other license_name: msrla license_link: https://huggingface.co/microsoft/maira-2/blob/main/LICENSE extra_gated_prompt: 'Please confirm that you have read and agree to the following disclaimer. The model(s) and/or software described in this repository are provided for research and development use only. The model(s) and/or software are not intended for use in clinical decision-making or for any other clinical use, and performance for clinical use has not been established. You bear sole responsibility for any use of these model(s) and/or software, including incorporation into any product intended for clinical use.' extra_gated_fields: I have read and agree to the disclaimer: checkbox --- # Model Card for MAIRA-2 <!-- Provide a quick summary of what the model is/does. --> MAIRA-2 is a multimodal transformer designed for the generation of grounded or non-grounded radiology reports from chest X-rays. It is described in more detail in [MAIRA-2: Grounded Radiology Report Generation (S. Bannur, K. Bouzid et al., 2024)](https://arxiv.org/abs/2406.04449). MAIRA-2 has been built for research purposes only and is being shared to facilitate comparison and further research. ## Model Details ### Model Description <!-- Provide a longer summary of what this model is. --> MAIRA-2 is composed of the image encoder [RAD-DINO-MAIRA-2](https://huggingface.co/microsoft/rad-dino-maira-2) (used frozen), a projection layer (trained from scratch), and the language model [vicuna-7b-v1.5](https://huggingface.co/lmsys/vicuna-7b-v1.5) (fully fine-tuned). - **Developed by:** Microsoft Research Health Futures - **Model type:** Multimodal transformer - **Language(s) (NLP):** English - **License:** [MSRLA](./LICENSE) - **Finetuned from model [optional]:** [vicuna-7b-1.5](https://huggingface.co/lmsys/vicuna-7b-v1.5), [RAD-DINO-MAIRA-2](https://huggingface.co/microsoft/rad-dino-maira-2) ## Uses <!-- Address questions around how the model is intended to be used, including the foreseeable users of the model and those affected by the model. --> MAIRA-2 is shared for research purposes only. It is **not meant to be used for clinical practice.** MAIRA-2 was not extensively tested for its capabilities and properties, including its accuracy and reliability in application settings, fairness across different demographics and uses, and security and privacy. ### Direct Use <!-- This section is for the model use without fine-tuning or plugging into a larger ecosystem/app. --> As inputs, MAIRA-2 takes a frontal chest X-ray, and any of the following: - A lateral view from the current study - A frontal view from the *prior* study, with accompanying prior report - The indication for the current study - The technique and comparison sections for the current study MAIRA-2 can generate the _findings_ section of the current study, in one of two forms: - Narrative text, without any image annotations (this is the typical report generation scenario). - As a grounded report, wherein all described findings are accompanied by zero or more bounding boxes indicating their location on the current frontal image. MAIRA-2 can also perform phrase grounding. In this case, it must also be provided with an input phrase. It will then repeat the phrase and generate a bounding box localising the finding described in the phrase. These use-cases are illustrated with [sample code below](README.md#use-case-1-and-2-findings-generation-with-or-without-grounding). 
### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the model will not work well for. --> MAIRA-2 was trained on chest X-rays from adults with English language reports only, and is not expected to work on any other imaging modality or anatomy. Variations in the input prompt (e.g. changing the instruction) are likely to degrade performance, as this model was *not* optimised for arbitrary user inputs. As above, this is a research model which should not be used in any real clinical or production scenario. ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> ### Data biases MAIRA-2 was trained on chest X-ray report datasets from Spain (translated from the original Spanish to English) and the USA, listed below. Reporting styles, patient demographics and disease prevalence, and image acquisition protocols can vary across health systems and regions. These factors will impact the generalisability of the model. ### Model errors (fabrication, omission) This model does not perform perfectly on its tasks, as outlined in more detail in the [MAIRA-2 report](https://arxiv.org/abs/2406.04449). Hence, errors can be present in the generated (grounded) reports. ## How to Get Started with the Model We demonstrate below how to run inference with MAIRA-2 for its three capabilities: findings generation with and without grounding, and phrase grounding. ### Setup To run this sample code, you will need the following packages: ``` pillow protobuf sentencepiece torch transformers ``` Note: You may temporarily need to install transformers from source since MAIRA-2 requires `transformers>=4.46.0.dev0`. Due to an [incompatible commit](https://github.com/huggingface/transformers/commit/0f49deacbff3e57cde45222842c0db6375e4fa43) in transformers main, the current fix is to install a transformers version from or after commit [88d960937c81a32bfb63356a2e8ecf7999619681](https://github.com/huggingface/transformers/commit/88d960937c81a32bfb63356a2e8ecf7999619681) but before commit [0f49deacbff3e57cde45222842c0db6375e4fa43](https://github.com/huggingface/transformers/commit/0f49deacbff3e57cde45222842c0db6375e4fa43). ``` pip install git+https://github.com/huggingface/transformers.git@88d960937c81a32bfb63356a2e8ecf7999619681 ``` First, initialise the model and put it in eval mode. ```python from transformers import AutoModelForCausalLM, AutoProcessor from pathlib import Path import torch model = AutoModelForCausalLM.from_pretrained("microsoft/maira-2", trust_remote_code=True) processor = AutoProcessor.from_pretrained("microsoft/maira-2", trust_remote_code=True) device = torch.device("cuda") model = model.eval() model = model.to(device) ``` We need to get some data to demonstrate the forward pass. For this example, we'll collect an example from the IU X-ray dataset, which has a permissive license. ```python import requests from PIL import Image def get_sample_data() -> dict[str, Image.Image | str]: """ Download chest X-rays from IU-Xray, which we didn't train MAIRA-2 on. License is CC. We modified this function from the Rad-DINO repository on Huggingface. 
""" frontal_image_url = "https://openi.nlm.nih.gov/imgs/512/145/145/CXR145_IM-0290-1001.png" lateral_image_url = "https://openi.nlm.nih.gov/imgs/512/145/145/CXR145_IM-0290-2001.png" def download_and_open(url: str) -> Image.Image: response = requests.get(url, headers={"User-Agent": "MAIRA-2"}, stream=True) return Image.open(response.raw) frontal_image = download_and_open(frontal_image_url) lateral_image = download_and_open(lateral_image_url) sample_data = { "frontal": frontal_image, "lateral": lateral_image, "indication": "Dyspnea.", "comparison": "None.", "technique": "PA and lateral views of the chest.", "phrase": "Pleural effusion." # For the phrase grounding example. This patient has pleural effusion. } return sample_data sample_data = get_sample_data() ``` ### Use-case 1 and 2: Findings generation with or without grounding We can toggle whether MAIRA-2 generates a grounded report based on how we preprocess the inputs, as it uses a different prompt. Let's start without grounding (`get_grounding=False`). While generating, for non-grounded reporting use `max_new_tokens=300`, and for grounded reporting use `max_new_tokens=450` to accommodate additional box and object tokens. ```python processed_inputs = processor.format_and_preprocess_reporting_input( current_frontal=sample_data["frontal"], current_lateral=sample_data["lateral"], prior_frontal=None, # Our example has no prior indication=sample_data["indication"], technique=sample_data["technique"], comparison=sample_data["comparison"], prior_report=None, # Our example has no prior return_tensors="pt", get_grounding=False, # For this example we generate a non-grounded report ) processed_inputs = processed_inputs.to(device) with torch.no_grad(): output_decoding = model.generate( **processed_inputs, max_new_tokens=300, # Set to 450 for grounded reporting use_cache=True, ) prompt_length = processed_inputs["input_ids"].shape[-1] decoded_text = processor.decode(output_decoding[0][prompt_length:], skip_special_tokens=True) decoded_text = decoded_text.lstrip() # Findings generation completions have a single leading space prediction = processor.convert_output_to_plaintext_or_grounded_sequence(decoded_text) print("Parsed prediction:", prediction) ``` We get something that looks like this: > There is a large right pleural effusion with associated right basilar atelectasis. The left lung is clear. No pneumothorax is identified. The cardiomediastinal silhouette and hilar contours are normal. There is no free air under the diaphragm. Surgical clips are noted in the right upper quadrant of the abdomen. If we had set `get_grounding=True`, MAIRA-2 would generate a grounded report. For this example, that looks like this: ```python ('There is a large right pleural effusion.', [(0.055, 0.275, 0.445, 0.665)]), ('The left lung is clear.', None), ('No pneumothorax is identified.', None), ('The cardiomediastinal silhouette is within normal limits.', None), ('The visualized osseous structures are unremarkable.', None) ``` The generated bounding box coordinates are the `(x, y)` coordinates of the top left and bottom right corners of the box, e.g. `(x_topleft, y_topleft, x_bottomright, y_bottomright)`. These are relative to the _cropped_ image (that is, the image that MAIRA-2 ultimately got as input), so be careful while visualising. The processor provides a method `adjust_box_for_original_image_size` to get boxes relative to the original image shape. 
Note that MAIRA-2 generates slightly different reports for grounded and non-grounded reporting scenarios, a side-effect of its grounded reporting training data coming from a different data distribution.

### Use-case 3: Phrase Grounding

Here the input is different as we provide the model with a phrase to ground in the image. Recall (`get_sample_data`) that our phrase here is just "Pleural effusion", which we already know is present in this image.

```python
processed_inputs = processor.format_and_preprocess_phrase_grounding_input(
    frontal_image=sample_data["frontal"],
    phrase=sample_data["phrase"],
    return_tensors="pt",
)

processed_inputs = processed_inputs.to(device)
with torch.no_grad():
    output_decoding = model.generate(
        **processed_inputs,
        max_new_tokens=150,
        use_cache=True,
    )
prompt_length = processed_inputs["input_ids"].shape[-1]
decoded_text = processor.decode(output_decoding[0][prompt_length:], skip_special_tokens=True)
prediction = processor.convert_output_to_plaintext_or_grounded_sequence(decoded_text)

print("Parsed prediction:", prediction)
```

This gives us something like this:

```python
('Pleural effusion.', [(0.025, 0.345, 0.425, 0.575)])
```

Again, as for grounded reporting, remember that the bounding-box coordinates are relative to the cropped image seen by MAIRA-2; use `processor.adjust_box_for_original_image_size` to get boxes adjusted for the original image shape.

## Training details

We did not originally train MAIRA-2 using the exact model class provided here; however, we have checked that its behaviour is the same. We provide this class to facilitate research re-use and inference.

### Training data

MAIRA-2 was trained on a mix of public and private chest X-ray datasets. Each example comprises one or more CXR images and associated report text, with or without grounding (spatial annotations). The model is trained to generate the _findings_ section of the report, with or without grounding.

| Dataset | Country | # examples (ungrounded) | # examples (grounded) |
| ----- | ------ | ------- | ----- |
| [MIMIC-CXR](https://www.nature.com/articles/s41597-019-0322-0) | USA | 55 218 | 595* |
| [PadChest](https://www.sciencedirect.com/science/article/abs/pii/S1361841520301614) | Spain | 52 828 | 3 122 |
| USMix (Private) | USA | 118 031 | 53 613 |

*We use the [MS-CXR](https://physionet.org/content/ms-cxr/) phrase grounding dataset to provide "grounding" examples from MIMIC-CXR.

## Environmental Impact

<!-- Total emissions (in grams of CO2eq) and additional considerations, such as electricity usage, go here. Edit the suggested text below accordingly -->

Carbon emissions can be estimated using the [Machine Learning Impact calculator](https://mlco2.github.io/impact#compute) presented in [Lacoste et al. (2019)](https://arxiv.org/abs/1910.09700).

- **Hardware Type:** NVIDIA A100 GPUs
- **Hours used:** 1432
- **Cloud Provider:** Azure
- **Compute Region:** West US 2
- **Carbon Emitted:** 107.4 CO₂ eq _(ostensibly offset by this provider)_

## Citation

<!-- If there is a paper or blog post introducing the model, the APA and Bibtex information for that should go in this section. -->

**BibTeX:**

```
@article{Bannur2024MAIRA2GR,
  title={MAIRA-2: Grounded Radiology Report Generation},
  author={Shruthi Bannur and Kenza Bouzid and Daniel C. Castro and Anton Schwaighofer and Anja Thieme and Sam Bond-Taylor and Maximilian Ilse and Fernando P\'{e}rez-Garc\'{i}a and Valentina Salvatelli and Harshita Sharma and Felix Meissen and Mercy Prasanna Ranjit and Shaury Srivastav and Julia Gong and Noel C. F. Codella and Fabian Falck and Ozan Oktay and Matthew P. Lungren and Maria T. A. Wetscherek and Javier Alvarez-Valle and Stephanie L. Hyland},
  journal={arXiv},
  year={2024},
  volume={abs/2406.04449},
  url={https://arxiv.org/abs/2406.04449}
}
```

**APA:**

> Bannur*, S., Bouzid*, K., Castro, D. C., Schwaighofer, A., Thieme, A., Bond-Taylor, S., Ilse, M., Pérez-García, F., Salvatelli, V., Sharma, H., Meissen, F., Ranjit, M.P., Srivastav, S., Gong, J., Codella, N.C.F., Falck, F., Oktay, O., Lungren, M.P., Wetscherek, M.T., Alvarez-Valle, J., & Hyland, S. L. (2024). *MAIRA-2: Grounded Radiology Report Generation*. arXiv preprint abs/2406.04449.

## Model Card Contact

- Stephanie Hyland ([`[email protected]`](mailto:[email protected]))
- Shruthi Bannur ([`[email protected]`](mailto:[email protected]))
[ "BEAR" ]
EleutherAI/pythia-6.9b
EleutherAI
text-generation
[ "transformers", "pytorch", "safetensors", "gpt_neox", "text-generation", "causal-lm", "pythia", "en", "dataset:EleutherAI/pile", "arxiv:2304.01373", "arxiv:2101.00027", "arxiv:2201.07311", "license:apache-2.0", "autotrain_compatible", "text-generation-inference", "endpoints_compatible", "region:us" ]
"2023-02-14T04:18:48Z"
2025-03-10T18:30:39+00:00
60,726
50
--- datasets: - EleutherAI/pile language: - en license: apache-2.0 tags: - pytorch - causal-lm - pythia --- The *Pythia Scaling Suite* is a collection of models developed to facilitate interpretability research [(see paper)](https://arxiv.org/pdf/2304.01373.pdf). It contains two sets of eight models of sizes 70M, 160M, 410M, 1B, 1.4B, 2.8B, 6.9B, and 12B. For each size, there are two models: one trained on the Pile, and one trained on the Pile after the dataset has been globally deduplicated. All 8 model sizes are trained on the exact same data, in the exact same order. We also provide 154 intermediate checkpoints per model, hosted on Hugging Face as branches. The Pythia model suite was deliberately designed to promote scientific research on large language models, especially interpretability research. Despite not centering downstream performance as a design goal, we find the models <a href="#evaluations">match or exceed</a> the performance of similar and same-sized models, such as those in the OPT and GPT-Neo suites. <details> <summary style="font-weight:600">Details on previous early release and naming convention.</summary> Previously, we released an early version of the Pythia suite to the public. However, we decided to retrain the model suite to address a few hyperparameter discrepancies. This model card <a href="#changelog">lists the changes</a>; see appendix B in the Pythia paper for further discussion. We found no difference in benchmark performance between the two Pythia versions. The old models are [still available](https://huggingface.co/models?other=pythia_v0), but we suggest the retrained suite if you are just starting to use Pythia.<br> **This is the current release.** Please note that all models in the *Pythia* suite were renamed in January 2023. For clarity, a <a href="#naming-convention-and-parameter-count">table comparing the old and new names</a> is provided in this model card, together with exact parameter counts. </details> <br> # Pythia-6.9B ## Model Details - Developed by: [EleutherAI](http://eleuther.ai) - Model type: Transformer-based Language Model - Language: English - Learn more: [Pythia's GitHub repository](https://github.com/EleutherAI/pythia) for training procedure, config files, and details on how to use. [See paper](https://arxiv.org/pdf/2304.01373.pdf) for more evals and implementation details. - Library: [GPT-NeoX](https://github.com/EleutherAI/gpt-neox) - License: Apache 2.0 - Contact: to ask questions about this model, join the [EleutherAI Discord](https://discord.gg/zBGx3azzUn), and post them in `#release-discussion`. Please read the existing *Pythia* documentation before asking about it in the EleutherAI Discord. For general correspondence: [contact@eleuther. ai](mailto:[email protected]). 
<figure>

| Pythia model | Non-Embedding Params | Layers | Model Dim | Heads | Batch Size | Learning Rate | Equivalent Models |
| -----------: | -------------------: | :----: | :-------: | :---: | :--------: | :-------------------: | :--------------------: |
| 70M | 18,915,328 | 6 | 512 | 8 | 2M | 1.0 x 10<sup>-3</sup> | — |
| 160M | 85,056,000 | 12 | 768 | 12 | 2M | 6.0 x 10<sup>-4</sup> | GPT-Neo 125M, OPT-125M |
| 410M | 302,311,424 | 24 | 1024 | 16 | 2M | 3.0 x 10<sup>-4</sup> | OPT-350M |
| 1.0B | 805,736,448 | 16 | 2048 | 8 | 2M | 3.0 x 10<sup>-4</sup> | — |
| 1.4B | 1,208,602,624 | 24 | 2048 | 16 | 2M | 2.0 x 10<sup>-4</sup> | GPT-Neo 1.3B, OPT-1.3B |
| 2.8B | 2,517,652,480 | 32 | 2560 | 32 | 2M | 1.6 x 10<sup>-4</sup> | GPT-Neo 2.7B, OPT-2.7B |
| 6.9B | 6,444,163,072 | 32 | 4096 | 32 | 2M | 1.2 x 10<sup>-4</sup> | OPT-6.7B |
| 12B | 11,327,027,200 | 36 | 5120 | 40 | 2M | 1.2 x 10<sup>-4</sup> | — |

<figcaption>Engineering details for the <i>Pythia Suite</i>. Deduped and non-deduped models of a given size have the same hyperparameters. “Equivalent” models have <b>exactly</b> the same architecture, and the same number of non-embedding parameters.</figcaption>
</figure>

## Uses and Limitations

### Intended Use

The primary intended use of Pythia is research on the behavior, functionality, and limitations of large language models. This suite is intended to provide a controlled setting for performing scientific experiments. We also provide 154 checkpoints per model: initial `step0`, 10 log-spaced checkpoints `step{1,2,4...512}`, and 143 evenly-spaced checkpoints from `step1000` to `step143000`. These checkpoints are hosted on Hugging Face as branches. Note that branch `143000` corresponds exactly to the model checkpoint on the `main` branch of each model.

You may also further fine-tune and adapt Pythia-6.9B for deployment, as long as your use is in accordance with the Apache 2.0 license. Pythia models work with the Hugging Face [Transformers Library](https://huggingface.co/docs/transformers/index). If you decide to use pre-trained Pythia-6.9B as a basis for your fine-tuned model, please conduct your own risk and bias assessment.

### Out-of-scope use

The Pythia Suite is **not** intended for deployment. It is not in itself a product and cannot be used for human-facing interactions. For example, the model may generate harmful or offensive text. Please evaluate the risks associated with your particular use case.

Pythia models are English-language only, and are not suitable for translation or generating text in other languages.

Pythia-6.9B has not been fine-tuned for downstream contexts in which language models are commonly deployed, such as writing genre prose or commercial chatbots. This means Pythia-6.9B will **not** respond to a given prompt the way a product like ChatGPT does. This is because, unlike this model, ChatGPT was fine-tuned using methods such as Reinforcement Learning from Human Feedback (RLHF) to better “follow” human instructions.

### Limitations and biases

The core functionality of a large language model is to take a string of text and predict the next token. The token used by the model need not produce the most “accurate” text. Never rely on Pythia-6.9B to produce factually accurate output.

This model was trained on [the Pile](https://pile.eleuther.ai/), a dataset known to contain profanity and texts that are lewd or otherwise offensive.
See [Section 6 of the Pile paper](https://arxiv.org/abs/2101.00027) for a discussion of documented biases with regard to gender, religion, and race. Pythia-6.9B may produce socially unacceptable or undesirable text, *even if* the prompt itself does not include anything explicitly offensive.

If you plan on using text generated through, for example, the Hosted Inference API, we recommend having a human curate the outputs of this language model before presenting it to other people. Please inform your audience that the text was generated by Pythia-6.9B.

### Quickstart

Pythia models can be loaded and used via the following code, demonstrated here for the third `pythia-70m-deduped` checkpoint:

```python
from transformers import GPTNeoXForCausalLM, AutoTokenizer

model = GPTNeoXForCausalLM.from_pretrained(
  "EleutherAI/pythia-70m-deduped",
  revision="step3000",
  cache_dir="./pythia-70m-deduped/step3000",
)

tokenizer = AutoTokenizer.from_pretrained(
  "EleutherAI/pythia-70m-deduped",
  revision="step3000",
  cache_dir="./pythia-70m-deduped/step3000",
)

inputs = tokenizer("Hello, I am", return_tensors="pt")
tokens = model.generate(**inputs)
tokenizer.decode(tokens[0])
```

Revision/branch `step143000` corresponds exactly to the model checkpoint on the `main` branch of each model.<br>
For more information on how to use all Pythia models, see [documentation on GitHub](https://github.com/EleutherAI/pythia).

## Training

### Training data

[The Pile](https://pile.eleuther.ai/) is an 825 GiB general-purpose dataset in English. It was created by EleutherAI specifically for training large language models. It contains texts from 22 diverse sources, roughly broken down into five categories: academic writing (e.g. arXiv), internet (e.g. CommonCrawl), prose (e.g. Project Gutenberg), dialogue (e.g. YouTube subtitles), and miscellaneous (e.g. GitHub, Enron Emails). See [the Pile paper](https://arxiv.org/abs/2101.00027) for a breakdown of all data sources, methodology, and a discussion of ethical implications. Consult [the datasheet](https://arxiv.org/abs/2201.07311) for more detailed documentation about the Pile and its component datasets. The Pile can be downloaded from the [official website](https://pile.eleuther.ai/), or from a [community mirror](https://the-eye.eu/public/AI/pile/).<br>
The Pile was **not** deduplicated before being used to train Pythia-6.9B.

### Training procedure

All models were trained on the exact same data, in the exact same order. Each model saw 299,892,736,000 tokens during training, and 143 checkpoints for each model are saved every 2,097,152,000 tokens, spaced evenly throughout training, from `step1000` to `step143000` (which is the same as `main`). In addition, we also provide frequent early checkpoints: `step0` and `step{1,2,4...512}`. This corresponds to training for just under 1 epoch on the Pile for non-deduplicated models, and about 1.5 epochs on the deduplicated Pile.

All *Pythia* models were trained for 143,000 steps at a batch size of 2M (2,097,152 tokens).<br>
See [GitHub](https://github.com/EleutherAI/pythia) for more details on training procedure, including [how to reproduce it](https://github.com/EleutherAI/pythia/blob/main/README.md#reproducing-training).<br>
Pythia uses the same tokenizer as [GPT-NeoX-20B](https://huggingface.co/EleutherAI/gpt-neox-20b).

## Evaluations

All 16 *Pythia* models were evaluated using the [LM Evaluation Harness](https://github.com/EleutherAI/lm-evaluation-harness).
You can access the results by model and step at `results/json/*` in the [GitHub repository](https://github.com/EleutherAI/pythia/tree/main/results/json/).<br>
Expand the sections below to see plots of evaluation results for all Pythia and Pythia-deduped models compared with OPT and BLOOM.

<details>
<summary>LAMBADA – OpenAI</summary>
<img src="/EleutherAI/pythia-12b/resolve/main/eval_plots/lambada_openai_v1.png" style="width:auto"/>
</details>

<details>
<summary>Physical Interaction: Question Answering (PIQA)</summary>
<img src="/EleutherAI/pythia-12b/resolve/main/eval_plots/piqa_v1.png" style="width:auto"/>
</details>

<details>
<summary>WinoGrande</summary>
<img src="/EleutherAI/pythia-12b/resolve/main/eval_plots/winogrande_v1.png" style="width:auto"/>
</details>

<details>
<summary>AI2 Reasoning Challenge—Easy Set</summary>
<img src="/EleutherAI/pythia-12b/resolve/main/eval_plots/arc_easy_v1.png" style="width:auto"/>
</details>

<details>
<summary>SciQ</summary>
<img src="/EleutherAI/pythia-12b/resolve/main/eval_plots/sciq_v1.png" style="width:auto"/>
</details>

## Changelog

This section compares differences between previously released [Pythia v0](https://huggingface.co/models?other=pythia_v0) and the current models. See Appendix B of the Pythia paper for further discussion of these changes and the motivation behind them. We found that retraining Pythia had no impact on benchmark performance.

- All model sizes are now trained with a uniform batch size of 2M tokens. Previously, the models of size 160M, 410M, and 1.4B parameters were trained with batch sizes of 4M tokens.
- We added checkpoints at initialization (step 0) and steps {1,2,4,8,16,32,64,128,256,512} in addition to every 1000 training steps.
- Flash Attention was used in the new retrained suite.
- We remedied a minor inconsistency that existed in the original suite: all models of size 2.8B parameters or smaller had a learning rate (LR) schedule which decayed to a minimum LR of 10% of the starting LR, but the 6.9B and 12B models all used an LR schedule which decayed to a minimum LR of 0. In the redone training runs, we rectified this inconsistency: all models were trained with the LR decaying to a minimum of 0.1× their maximum LR.

### Naming convention and parameter count

*Pythia* models were renamed in January 2023. It is possible that the old naming convention still persists in some documentation by accident. The current naming convention (70M, 160M, etc.) is based on total parameter count.

<figure style="width:32em">

| current Pythia suffix | old suffix | total params | non-embedding params |
| --------------------: | ---------: | -------------: | -------------------: |
| 70M | 19M | 70,426,624 | 18,915,328 |
| 160M | 125M | 162,322,944 | 85,056,000 |
| 410M | 350M | 405,334,016 | 302,311,424 |
| 1B | 800M | 1,011,781,632 | 805,736,448 |
| 1.4B | 1.3B | 1,414,647,808 | 1,208,602,624 |
| 2.8B | 2.7B | 2,775,208,960 | 2,517,652,480 |
| 6.9B | 6.7B | 6,857,302,016 | 6,444,163,072 |
| 12B | 13B | 11,846,072,320 | 11,327,027,200 |

</figure>
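The relationship between the two parameter columns can be checked directly from a loaded checkpoint. The snippet below is an informal sketch rather than official tooling: it assumes that the (untied) input embedding and the output unembedding are the only parameters excluded from the non-embedding count, and it uses the small 70M model so the check runs on modest hardware.

```python
# Sketch: compare a checkpoint's parameter counts against the table above.
# Swap in "EleutherAI/pythia-6.9b" if you have enough memory for the larger model.
from transformers import GPTNeoXForCausalLM

model = GPTNeoXForCausalLM.from_pretrained("EleutherAI/pythia-70m-deduped")

total_params = sum(p.numel() for p in model.parameters())
embedding_params = (
    model.gpt_neox.embed_in.weight.numel()  # input token embedding
    + model.embed_out.weight.numel()        # output unembedding (not tied in Pythia)
)

print(f"total parameters:         {total_params:,}")
print(f"non-embedding parameters: {total_params - embedding_params:,}")
```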
[ "SCIQ" ]